+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.Q8w3Bp39ki --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-release-asan Configuring dependencies for platform tools [2 ymakes processing] [7792/7792 modules configured] [1893/4553 modules rendered] [2 ymakes processing] [7792/7792 modules configured] [4500/4553 modules rendered] [2 ymakes processing] [7792/7792 modules configured] [4553/4553 modules rendered] Configuring dependencies for platform test_tool_tc1-global [0 ymakes processing] [7798/7798 modules configured] [4553/4553 modules rendered] Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution |33.3%| CLEANING SYMRES | 2.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut | 6.3%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.a | 6.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a | 6.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a | 6.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a | 6.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a | 7.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a | 7.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a | 7.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a | 7.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a | 8.0%| PREPARE $(VCS) | 8.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a | 8.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a | 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a | 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a | 8.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a | 8.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a | 8.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a | 8.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a | 8.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a | 8.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |10.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |12.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |14.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |14.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |14.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |14.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |14.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |14.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |14.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |15.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |15.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |15.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |15.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |16.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |16.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |17.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.a |17.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |17.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.a |16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |17.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |18.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |18.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |18.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |19.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |21.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |21.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |22.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |22.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |22.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |22.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |22.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |22.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |22.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |23.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.a |23.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |23.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |24.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |24.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |24.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |24.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |25.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.a |25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |25.3%| PREPARE $(YMAKE_PYTHON3-4256832079) |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |25.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |25.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |26.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.a |28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |28.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |28.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |29.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/responses/py3/libpy3python-responses-py3.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |29.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.a |30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.a |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |31.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |32.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |32.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |32.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |32.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |31.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.a |31.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.a |32.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a 
|32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.a |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |33.3%| PREPARE $(LLD_ROOT-3808007503) |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.a |33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.a |33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |34.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |34.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |34.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |34.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |34.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |34.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |34.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |35.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |35.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |35.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |35.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |35.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |35.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |35.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |35.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |35.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |35.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |35.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |35.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |35.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |35.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |35.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |35.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |35.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |35.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |35.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |35.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |36.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |36.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |36.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |36.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |36.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |36.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |36.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |36.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |36.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |37.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |37.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |37.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |37.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |37.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |37.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |37.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |38.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |38.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |38.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |38.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |38.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |38.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |38.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |39.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |39.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/service_discovery/service_discovery.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |39.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |40.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |40.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |40.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |40.5%| PREPARE $(PYTHON) |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |41.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |41.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |41.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |41.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |41.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |42.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm14/libminikql-invoke_builtins-llvm14.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |43.5%| PREPARE $(CLANG_FORMAT-2212207123) |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |43.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |44.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a 
|46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |48.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |48.6%| PREPARE $(FLAKE8_PY3-715603131) |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a 
|49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |50.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |50.8%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |50.8%| PREPARE $(TEST_TOOL_HOST-sbr:8067063302) |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |50.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ProfileData/libllvm14-lib-ProfileData.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/BinaryFormat/libllvm14-lib-BinaryFormat.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |50.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Remarks/libllvm14-lib-Remarks.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Linker/libllvm14-lib-Linker.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/libllvm14-lib-MC.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |50.7%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan/libclang_rt.asan-x86_64.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |50.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Object/libllvm14-lib-Object.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/AsmParser/libllvm14-lib-AsmParser.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Utils/liblib-Transforms-Utils.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Support/libllvm14-lib-Support.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |50.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Demangle/libllvm14-lib-Demangle.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/libllvm14-lib-ExecutionEngine.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCParser/liblib-MC-MCParser.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IRReader/libllvm14-lib-IRReader.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/libllvm14-lib-Target.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/TextAPI/libllvm14-lib-TextAPI.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/liblib-Target-X86.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |50.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a 
|50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |51.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IR/libllvm14-lib-IR.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |51.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/IPO/liblib-Transforms-IPO.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |51.0%| {BAZEL_DOWNLOAD} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |51.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Analysis/libllvm14-lib-Analysis.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |51.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/yson_pull/libyson_pull.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |51.2%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |51.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |50.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |50.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/libllvm14-lib-CodeGen.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |51.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |51.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |51.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |51.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |51.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |51.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |51.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |50.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_logins.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |51.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |51.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_config.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |51.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |51.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |51.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_ping.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |51.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp
|51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp
|51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp
|51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp
|51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp
|50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp
|50.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut
|50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp
|51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp
|51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp
|51.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp
|50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a
|50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp
|50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp
|50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp
|50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp
|50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp
|50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp
|50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a
|50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a
|50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp
|50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service_impl.cpp
|50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a
|50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a
|50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a
|50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp
|50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a
|50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a
|50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a
|50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a
|50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a
|50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a
|50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a
|50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a
|50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a
|50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a
|50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a
|50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a
|50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a
|50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a
|49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a
|49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/service.cpp
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a
|49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/http_request.cpp
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a
|49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a
|49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a
|49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a
|49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a
|49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a
|49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a
|48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a
|48.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a
|47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.global.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a
|47.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
|47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a
|47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a
|47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a
|47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a
|47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a
|47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a
|47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a
|47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a
|48.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/37007751efeb3aaba42f37a888.yasm
|48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a
|48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp
|48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp
|48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a
|48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_locks.cpp
|48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_helpers.cpp
|48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/follower_edge.cpp
|48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/import_s3.cpp
|47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a
|47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp
|47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp
|47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp
|47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp
|47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp
|47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp
|47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a
|47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp
|47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp
|47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp
|47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a
|47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp
|47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp
|47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a
|47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a
|47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp
|47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp
|47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp
|47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp
|47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp
|47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__data_erasure_manager.cpp
|47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp
|47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp
|47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp
|47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp
|47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp
|47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp
|47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp
|47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp
|47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp
|47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_s3_upload.h_serialized.cpp
|46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp
|46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp
|46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp
|46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp
|46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp
|46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_failpoints.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_table_unit.cpp
|46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record.cpp
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_cdc_serializer.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__init.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/change_exchange.h_serialized.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_read_unit.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_body_serializer.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp
|46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_base.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector.cpp
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_traits.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/backup_restore_traits.h_serialized.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_write_unit.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__data_cleanup.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__readset.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__write.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_lookup_table.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__stats.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_effects.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_loans.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_delete_rows.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp
|46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp
|46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp
|46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp
|46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp
|46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp
|45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp
|45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp
|45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp
|45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard.cpp
|45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_overload.cpp
|45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp
|45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_read_table.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp
|45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp
|45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp
|45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a
|45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp
|45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp
|45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/extstorage_usage_config.cpp
|45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp
|45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp
|45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer.cpp
|45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp
|45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_common.cpp
|45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execution_unit.cpp
|45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit_kind.h_serialized.cpp
|45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_scan.cpp
|45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_table.cpp
|45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_iface.cpp
|45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp
|45.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp
|45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp
|45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp
|45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp
|45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp
|45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp
|45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp
|45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp
|45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp
|45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp
|45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp
|45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp
|45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp
|44.8%| PREPARE $(WITH_JDK17-sbr:7832760150)
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp
|44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp
|44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp
|44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_data_erasure_manager.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp
|44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp
|44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp
|44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp
|44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp
|44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp
|44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp
|44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp
|44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp
|44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp
|44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp
|44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp
|44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp
|44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp
|44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp
|44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp
|44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp
|44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp
|44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp
|44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp
|44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp
|44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp
|44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp
|44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a
|44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp
|44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp
|44.2%| PREPARE $(WITH_JDK-sbr:7832760150)
|44.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan.cpp
|44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp
|44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp
|44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp
|44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp
|44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_scheme_uploader.cpp
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp
|44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp
|44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp
|43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_data_erasure_manager.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp
|43.7%| PREPARE $(CLANG-1922233694)
|43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp
|43.5%| PREPARE $(JDK_DEFAULT-472926544)
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_conflicts.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_validator.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/kmeans_helper.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/move_index_unit.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/operation.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/move_table_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/local_kmeans.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_op_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/probes.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/range_ops.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/upload_stats.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/restore_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/sample_k.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp
|43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/type_serialization.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/scan_common.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/reshuffle_kmeans.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp
|43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_write_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp
|43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp
|43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp
|43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a
|43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp
|43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/volatile_tx.cpp
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a
|43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a
|43.2%| PREPARE $(CLANG14-1922233694)
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a
|43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a
|43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a
|43.1%| PREPARE $(JDK17-472926544)
|43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a
|42.9%| PREPARE $(GDB)
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp
|42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp
|42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp
|43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp
|42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp
|42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp
|42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp
|42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp
|42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp
|42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp
|42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp
|42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp
|42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp
|42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a
|42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp
|42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a
|42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a
|42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a
|42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a
|42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a
|41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a
|41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a
|41.7%|
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |41.6%| PREPARE $(CLANG-2518231432) |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/tools/python3/libcontrib-tools-python3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |41.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |41.1%| PREPARE $(CLANG18-3363451693) |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |41.1%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |41.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.a |40.8%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.a |40.9%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |40.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |40.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |40.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.a |40.6%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm14/libminikql-codegen-llvm14.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm14/libminikql-computation-llvm14.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |40.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/async_io/libproviders-solomon-async_io.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |40.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/01e1cebcd98e239de10ed70b94_raw.auxcpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/1c64c271d5ab98619aec194092_raw.auxcpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/7b4fb5c8ee02eb7794d6f587ca_raw.auxcpp |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |40.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |40.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |40.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/protos/out/libapi-protos-out.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/libessentials-parser-proto_ast.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |40.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |40.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |40.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |40.9%| PREPARE $(CLANG16-1380963495) |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |41.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |41.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |41.1%| RESOURCE $(sbr:4966407557) |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |41.3%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a 
|41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |41.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/61b9a72d28030d2c2784993364_raw.auxcpp |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |41.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/9deb10f11345dc3781f7d9d370_raw.auxcpp |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a 
|41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |41.5%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |40.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm14/libcodec-codegen-llvm14.a |40.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |40.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |40.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |40.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |40.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |40.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |40.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |40.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |40.6%| PREPARE $(BLACK_LINTER-sbr:6648883615) |40.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |40.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |40.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |40.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |40.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |40.9%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |40.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpython-symbols-python.global.a |41.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm14/libcodec-codegen-llvm14.global.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm14/libyt-comp_nodes-llvm14.a |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |41.4%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |41.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm14/libcomp_nodes-dq-llvm14.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |41.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/55bf9af31223ef59f8d48f9afd_raw.auxcpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/5c55a577ace90da680efc170e8_raw.auxcpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/4ef82527323f06a92bc089d35a_raw.auxcpp |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |41.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/JSON.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionCombinatorFactory.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowOutputFormat.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Chunk.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ConcatProcessor.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCheckQuery.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCase.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |42.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |42.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionaryAttributeDeclaration.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/SynchronousReader.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeTuple.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeString.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockOutputStream.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |42.9%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |42.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/BlockStreamProfileInfo.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Block.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnString.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/StringRef.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDatabaseOrNone.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnNullable.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUTImpl.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionary.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunctionWithKeyValueArguments.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFunction.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIdentifier.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnLowCardinality.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Config/AbstractConfigurationComparison.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/OpenSSLHelpers.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/MaskOperations.cpp >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Exception.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnDecimal.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadStatus.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/RemoteHostFilter.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp >> test.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp >> test.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |43.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ColumnGathererStream.cpp >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/Lexer.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBuffer.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecNone.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp >> test_clickbench.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionary.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/ICompressionCodec.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BlockInfo.cpp >> hive_matchers.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDescribeTableQuery.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ExecutionSpeedLimits.cpp |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeArray.cpp |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] 
|43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp >> test_quoting.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |43.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowGrantsQuery.cpp >> test_ttl.py::flake8 [GOOD] |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinalityHelpers.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUnionQueryElement.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp |43.4%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |43.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseDatabaseAndTableName.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatSettingName.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablesInSelectQuery.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |43.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Executors/PollingQueue.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowOutputFormat.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IInputFormat.cpp >> test_sql.py::flake8 [GOOD] >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSettingsProfileElement.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowInputFormat.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |43.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowOutputFormat.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |43.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |43.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |43.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |43.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] >> conftest.py::black [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |43.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |43.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |43.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/OutputStreamToOutputFormat.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnsCommon.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNullable.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IProcessor.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp >> test.py::flake8 [GOOD] |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/RowInputFormatWithDiagnosticInfo.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISink.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnConst.cpp |44.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowGrantsQuery.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnCompressed.cpp |44.1%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/demangle.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnAggregateFunction.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/IAggregateFunction.cpp >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionFactory.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ClickHouseRevision.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IntervalKind.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/DNSResolver.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime64.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/IColumn.cpp >> test_tablet_channel_migration.py::flake8 [GOOD] |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnArray.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/FilterDescription.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnTuple.cpp |44.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Allocator.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/AlignedBuffer.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |44.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMemoryTracker.cpp >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> 
test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |44.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMetrics.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PODArray.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] 
>> test_workload.py::flake8 [GOOD] |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getNumberOfPhysicalCPUCores.cpp >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/thread_local_rng.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hasLinuxCapability.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/setThreadName.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/randomSeed.cpp |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedWriteBuffer.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProfileEvents.cpp |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/LZ4_decompress_faster.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferBase.cpp |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDecimalBase.cpp |44.3%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionFactory.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/ColumnWithTypeAndName.cpp |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeAggregateFunction.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeMap.cpp >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |44.4%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationArray.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationMap.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp >> test_restarts.py::flake8 [GOOD] |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |44.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp |44.6%| [TS] {asan, 
default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/large/flake8 >> test_tpcds.py::flake8 [GOOD] |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |44.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |44.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/a2f62d357b5605fd2b1862392a_raw.auxcpp |44.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |44.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |44.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/d944d7e4ad80f953076f3035b0_raw.auxcpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/0501e3d28d1343b3b35bd9e106_raw.auxcpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/303741dd40722ad6537588ee90_raw.auxcpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/941634496d12a2f77787e2d8b6_raw.auxcpp |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/5a29460817a2b06eb29e851705_raw.auxcpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 
[GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/5f2e0075f4fa0174bcbd3aa364_raw.auxcpp |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/212489e9116ba5d499002387d2_raw.auxcpp |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/7277cf53cc5ced7cbba7185ec3_raw.auxcpp |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/44ecdf896c6139bd6e33a8cb14_raw.auxcpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/07b5e23f28915f8435d6dfaf60_raw.auxcpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/badbb372ca512cfa97cb30fa82_raw.auxcpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/8bc7ae6b6331065edc184a1928_raw.auxcpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |44.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/746459528cbcd4301762245782.yasm |44.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/7179c606fb7373cb8f04d9971a_raw.auxcpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/5b18263e60071752e6742bca52_raw.auxcpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/457f0bfdb7e6ec7f5dfbf7d44f.auxcpp |45.0%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/rename/7df772008138ac9dcc10169715_raw.auxcpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/cd458120312d98d89080725721_raw.auxcpp |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/9d960897eab4a362e914bbe7a2_raw.auxcpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/9a3d79cfe7077e38e05a544cf4_raw.auxcpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/34874f44d5aa7de07e15ef45ce_raw.auxcpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |45.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/limits/7259f5dddbbe8f92a14d578888_raw.auxcpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/33237f76a8fa5295213df19e1b_raw.auxcpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/f365af675e3e745796dd4cfb1f_raw.auxcpp |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |45.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/b1deaf4c2b8c9585c6ea20bca8_raw.auxcpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/5d5d894047225f21850ed454f0_raw.auxcpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/339b48ad89d8961fd8bc3a5041_raw.auxcpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/5c0052a635ff29493b36f22cd1_raw.auxcpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_empty.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |45.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/2515672a66ee31a87e71ca14c9_raw.auxcpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/30ac75d7b44307a70faedd96a1_raw.auxcpp |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |45.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/30668747c21879e6b50aec4869_raw.auxcpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/df691ac52d0b755cb039db39b5_raw.auxcpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/2808056df6bc33cbbe819787cb_raw.auxcpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/d3625dcde81fef5b026617f242_raw.auxcpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/aa64a8e04a29eeddbdaf5d232a_raw.auxcpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/61d36fcabe1ec06135c1c9a2aa_raw.auxcpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/f78a7d259417f6b6175ebcc3a6_raw.auxcpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts 
|45.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |45.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/2fb6caa90443a13fc8ef3ce634_raw.auxcpp |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |45.4%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |45.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/e41242f4dd270cd9f8b0155497_raw.auxcpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/537fc8d4cd6d560a7e2197a2c4_raw.auxcpp |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/b3afb0141d59efbf69af6e8738_raw.auxcpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |45.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |45.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/40776f538b505636d7a7af0890_raw.auxcpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/798e427aed0f5c895bd1cc2861_raw.auxcpp |45.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |45.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |45.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/ut_helpers.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |45.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/dq_solomon_write_actor_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/fa866f8cbf6cf12d0d84b68f44_raw.auxcpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/8ce1f9317555fc834f3bddb942_raw.auxcpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/72eb76778673b6da36f4c621f1_raw.auxcpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/1ca6afedf6d1a0d42afaea7668_raw.auxcpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/2222dfadb0b3f94b14a756c4d2_raw.auxcpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/9011681a8facd9077d743f6913_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/b6f9b5cf8d08b0dda781522f1a_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/90c244ae61f7a9450908ebeeea_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/e760da9700736d9126e67bf076_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/fbdb22a5235dbbf912627d14b5_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/1014eddf47e2a39f853e9f5e5c_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/cec4f9be03444dfd1b0ab0d57e_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/73ecf2dab2f7fcbdc1c8a4e290_raw.auxcpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/264129358bf5084f854a05de7c_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/encryption/640244d4f21f97165461024b10_raw.auxcpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/port_discovery_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/aa07e435da43be62d969eb4c4e_raw.auxcpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp |45.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/054bc7a8b4abb68765c46703c2_raw.auxcpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/68e85283dc842756bf0acd0b5e_raw.auxcpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/9199c76264c87fe3903d81a933_raw.auxcpp |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/ce2037dbc8d416b7633ba9af0d_raw.auxcpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/e4e6817899c5468a0f92a1fd25_raw.auxcpp |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |45.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... 
grpc.pb.h} |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |45.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |45.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/049165e94c82f5ea251a4fc97a_raw.auxcpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/38443b7f7d924737e5cd9afde1_raw.auxcpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |45.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |45.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |45.9%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/0dff0b13f2d02975a4a973a1e8_raw.auxcpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/6ce4bb8481fab4f56437fc9aa9_raw.auxcpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/8a2a5f2b3222da7a489a5aaf73_raw.auxcpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |45.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |46.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |46.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |46.1%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |46.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |46.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/election/public.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |46.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |46.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/0a66f2b08b7083d08cdfe415ec_raw.auxcpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/8359fbd5c2e85c95c8873adedf_raw.auxcpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/ccbca5962974a59f1a899ac7f5_raw.auxcpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/68c91f7e1a318d8b2beb453576_raw.auxcpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... 
grpc.pb.h} |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/044c1f4f4ff40e174b8862254d_raw.auxcpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/88bbaa80cdac20912332514ee0_raw.auxcpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/287d3efd11361128cce3f950d3_raw.auxcpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |46.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... 
grpc.pb.h} |46.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/3240ce8937f3b09a8e2e5f16a2.yasm |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/c394ce038261e16ed3481e308d.auxcpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |46.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/checksum.{pb.h ... grpc.pb.h} |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |46.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... 
grpc.pb.h} |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |46.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |46.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... 
grpc.pb.h} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |46.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/a6cdeb11d516d52aca203998c2_raw.auxcpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/docs/generator/generator |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/ca83be3d0f21405953b2f7dc5a_raw.auxcpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/486e80645bbe8e863e1ae5b9aa_raw.auxcpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |46.9%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.{pb.h ... grpc.pb.h} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |46.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/query_tracker_client.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |47.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/9491890bedc20286ea9b7bfbd4.auxcpp |47.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |47.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |47.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/831059a5eed8c4171c5826ec58.yasm |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |47.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/connector.{pb.h ... grpc.pb.h} |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... 
grpc.pb.h} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/04a4b69a44d49ecc13ef9563bc_raw.auxcpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/3dc319ea0737df79ec660e8dce_raw.auxcpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/17653a66a88a858593aace7533_raw.auxcpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/ca19f20383b4c13d97a4a24063_raw.auxcpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |47.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/b9c5a7cd74c2c9a74f760af767.auxcpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... 
grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... 
grpc.pb.h} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/33018a22f666e26b01eec9a525_raw.auxcpp |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |47.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/7c56ed0639c0a21b31c888cd2a.yasm |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/196f922f568984f10265ee8cbb_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/trace_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/df1b0f4874c7261d5f70ea3e65_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/b7cccb4bd09eb1309d748a6a4b_raw.auxcpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/9ce1bc81f3630bc149770530dc_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/48fa24502b21bfe7635f87d512_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/8ee8a43773821cf2d3ab6aa107_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/ed48c0b43007fe4b25963037e9_raw.auxcpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/da349b14efa6ffa4dc1dca99b6_raw.auxcpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/d27c45ded349314880a7fe1f95_raw.auxcpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |47.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |47.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |47.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |47.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_buffered_dynamic_table_writer.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/acc07dfe79c6bf1f8bd660fe17_raw.auxcpp |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/7b3f4d803cdfd8b892b9248abc_raw.auxcpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/5cd7357deac8794bb38ff2cd95_raw.auxcpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |47.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |47.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |47.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |47.8%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |46.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |46.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/744ae2c1cb26129725b0d56bf6_raw.auxcpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/1f2ee669354f9c546c9a777c3e_raw.auxcpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/12b572cdbd08bfddfdb73f97ab_raw.auxcpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |47.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |47.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |47.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/transfer/ydb-tests-functional-transfer |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... 
grpc.pb.h} |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/1feaa6e58d550e51e997dfc810_raw.auxcpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/fce6d82c6cf7925b36d50d3b8e_raw.auxcpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/fd3d85a8b58f29118bd6c6153d_raw.auxcpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/c422fc8006e44f83446f178271_raw.auxcpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/9a703437e19c1352efba332b14_raw.auxcpp |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/e4ea021d743a4e3a3244ccdb17_raw.auxcpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/8f8a44e3476002507faf1c985e_raw.auxcpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/237bb7f0cf8b097bd861bb6f35_raw.auxcpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/f36fa835e5ee1d156c5a38bfab_raw.auxcpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/f5d2a75ba5a5e8423331fea3fb_raw.auxcpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp 
|48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/7e0b7f1b30aaae8fb759f136c7_raw.auxcpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/ede05c58749e095e0d963b7dcc_raw.auxcpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/c2fd54bcc3f79cc5c4e5ca0a08_raw.auxcpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/af223bd26f37a4e8d327bcbc83_raw.auxcpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/bdc69f8e4ec021d6a58591d0e6_raw.auxcpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... 
grpc.pb.h} |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |48.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |48.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |48.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... 
grpc.pb.h} |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp 
|48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |48.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/1be310ddb8a4007766dca2cc77.yasm |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/b5139bcb976066539a8304ce75.auxcpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |48.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |48.9%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |49.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/logging/serializable_logger.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |49.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |49.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... 
grpc.pb.h} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_replication.pb.{h, cc} |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.{pb.h ... 
grpc.pb.h} |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/bb5e258fcf4873b2d7e0ac6b55_raw.auxcpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/4684d75c4b284df26d615e1791_raw.auxcpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/a8bcf8cb480d9034c80d9d8cca_raw.auxcpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/7bd82ced473dfc2581dbfb3654_raw.auxcpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/02db64c0fe9f0163ac88a95381_raw.auxcpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |49.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/b4be56355f3dca7701955e8d01_raw.auxcpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/ec0e8da6e3e1d00bf728887795_raw.auxcpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/b80cb45619726449158ffb2ccd_raw.auxcpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/cadb9d95ce58aefab2a2a18db0_raw.auxcpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/ada1606d436413e413e1497acf_raw.auxcpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/bcd4c6cc8e0df92e73c369bb72_raw.auxcpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/1d5a4a876bcca59e255f7bd334_raw.auxcpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/1baf00403b27f84ecee945de59_raw.auxcpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/6b65f3b8ba2345854c42f480b8_raw.auxcpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... 
grpc.pb.h} |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.{pb.h ... grpc.pb.h} |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... grpc.pb.h} |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure/ut_data_erasure.cpp |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/83ff84fdc8441806f2f651cf0c_raw.auxcpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |49.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |49.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/2f68521593cd2f6153c5f63953_raw.auxcpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/a3db6ca222c21edbc27d9fce7c_raw.auxcpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/00c38f8eeb3bd5b98c74e340df_raw.auxcpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |49.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/0edd6b85311ffa3399b8298f2d_raw.auxcpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/503faf14d98c87866b7e66a771_raw.auxcpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/bfd994a0e2446dfad4593039e1_raw.auxcpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/a238052cf40be84a539ae7dc21_raw.auxcpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/965b8ef33d96033f6fd909085c_raw.auxcpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/2968c4f0d6679c28157e78cf22_raw.auxcpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/7a6e1e8711dc97a566b8744622_raw.auxcpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/9b17765a3764a8bfc02d590abd_raw.auxcpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/e9fedf36bd368eb1a3f8595d08_raw.auxcpp |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/05cfcc0f16e876daad317194ce_raw.auxcpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/c0328543adf18f4d227ab3989d_raw.auxcpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/0458e8496d2fa1d1581f20273c_raw.auxcpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/55e21c99354b17881ca73184f7_raw.auxcpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/1f9c9bbf4fc29dd3d92d3adf07_raw.auxcpp |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/924155fd026f1d4781115d903f_raw.auxcpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/d410deaeee79925c78cffb9f52_raw.auxcpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/log/tests/f29a4445b9826397d56c55f363_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/e4a2d22cf372b6654f76cb583b_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/5b5c9adf28fe0e7380f3612b6d_raw.auxcpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/658cac42bb2b1331437dc3ba8b_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/a7311b98600c28ad1733ae59f7_raw.auxcpp |50.1%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/147cb69a9d8db55746c3e17f50_raw.auxcpp |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/7de0980d4436ed731794ed4a87_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/55d310341973ef956fc644867c_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/bdcd76304a7707c686bd1bf480_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/07aefe5ba3d9854b08b1b3d3e9_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/d5a418e8e8f138185187e436bc_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/6f89e5e467eb2e3c0ed66785fa_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/411ec1823ee49dc0211177ca6b_raw.auxcpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |50.1%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |50.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.{pb.h ... grpc.pb.h} |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/88bc5ce2bd8b50e782ff916853_raw.auxcpp |50.3%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/a956c93c59d67ebb7d1c17d97b_raw.auxcpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/267c0d0c5b43976a69607a8a24_raw.auxcpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... 
grpc.pb.h} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_view.pb.{h, cc} |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... 
grpc.pb.h} |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/4d9c45e0837b061f27e1e37803_raw.auxcpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/641971b634d114734b6b80a383_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/abfe7571da4d05177d56626236_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/50ffc49d0d6226503e84ce9073_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/30dee96196dd2ba7495f287fed_raw.auxcpp |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/06853d17e90b56883da87c33b9_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/cca99a615fbde567fa627741ab_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/24ae261376693b458fc0314867_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/60251d129d0e55949cac832aaf_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... 
grpc.pb.h} |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/774434e4090285c2b97ab0ec77_raw.auxcpp |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |50.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/0a0e688b8d548b48a79bc75868_raw.auxcpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/87c6bcf63aff4a3e06b666a47e_raw.auxcpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |50.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/bde0ff3a5f40879e1495aad229_raw.auxcpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/bdb90cc6391dc9392c5639cba8_raw.auxcpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/8416fe301a01579908123d1ca2_raw.auxcpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... 
grpc.pb.h} |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/0cbb0b39e8acc3ab5c7fce9b8a_raw.auxcpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |50.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/7cd6f68d06cdb33d309061286f_raw.auxcpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/41bb86090ec6941b06eb06b8ea_raw.auxcpp |50.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... 
grpc.pb.h} |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/039931d3ae0db42a510c844a97_raw.auxcpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/8dcb2a87802399f4ff8db71e7c_raw.auxcpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/77b8abd83551fcb56da0f2040f_raw.auxcpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/3e821c3431c0d7604a3035974e_raw.auxcpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |50.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/ebf11d7da98cadb764024109e6_raw.auxcpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/622222ac86ab251d1cfcde7d45_raw.auxcpp |50.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/ea02b8522102ed8c9574addfe5_raw.auxcpp |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/32eddccc11eb4ac1e853e3d046_raw.auxcpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/048cf21323c8defa3ef7fc507f_raw.auxcpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/9eb43f9f879132386b65f74163_raw.auxcpp |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |50.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |50.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |50.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/1b47ff0dc74cd1bd40d6f3ba0c_raw.auxcpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/23786fed5b2a32f323c463d9f5_raw.auxcpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/87c03a14385ef372838a575a6e_raw.auxcpp |51.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/node_tracker_client/public.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |51.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |51.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |51.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |51.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |51.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |51.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |51.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |51.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |51.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... 
grpc.pb.h} |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/60e0318359c6f321a643ba5530.auxcpp |51.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/85ebe612562ae66926ef97b463.yasm |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/e4d969025966b6ff4581124367_raw.auxcpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/89bb6708cab342f89012141f85_raw.auxcpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/669ab1dbd5102d34fce06469c3_raw.auxcpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/61e2a38eaef415538dc64305c1_raw.auxcpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/7f2670f15e9cf37b4c865b1f08_raw.auxcpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/4592b49398b4e8824c102f9b5e_raw.auxcpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/0ace69876c9a705e3e484ba47d_raw.auxcpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/874d828483b4c9a8c3abd06fd9_raw.auxcpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/0c198e62866eda1be8cd43a6a8_raw.auxcpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |51.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.{pb.h ... grpc.pb.h} |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/2f28738cea14768734ea784696_raw.auxcpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/1b72016b0e80e53a81a614e19e_raw.auxcpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |51.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... 
grpc.pb.h} |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/60587457d460c9947baafa87bb_raw.auxcpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |51.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |51.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |51.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/0d946579fa4d55af3ec6ac5509_raw.auxcpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/058751865929f340b10f526d48_raw.auxcpp |51.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/a478ec3417ae6dee01876710a7_raw.auxcpp |51.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/transfer_writer_ut.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |51.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |51.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |51.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |51.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |51.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |51.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |51.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... 
grpc.pb.h} |50.7%| COMPACTING CACHE 536.2MiB |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |50.8%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |50.8%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |50.8%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |50.8%| [TS] {RESULT} ydb/tests/sql/flake8 |50.8%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |50.8%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |50.8%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |50.8%| [TS] {RESULT} ydb/tests/olap/flake8 |50.8%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |50.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |50.8%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |50.8%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |50.8%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |50.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |50.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |50.8%| [TS] {RESULT} ydb/tests/functional/config/flake8 |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |50.8%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |50.8%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |50.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |50.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |50.8%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |50.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |50.9%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |50.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |50.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |50.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |50.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |50.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |50.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |50.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |50.9%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |50.9%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |50.9%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |50.9%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |50.9%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |51.0%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |51.0%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |51.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/api/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |51.0%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |51.0%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |51.0%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 
|51.0%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |51.0%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |51.0%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |51.0%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |51.0%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |51.0%| [TS] {RESULT} ydb/tests/fq/common/flake8 |51.1%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |51.1%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |51.1%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |51.1%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |51.1%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |51.1%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |51.1%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |51.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |51.1%| [TS] {RESULT} ydb/tests/example/flake8 |51.1%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |51.1%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 |51.1%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |51.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |51.2%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |51.2%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |51.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |51.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |51.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |51.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |51.2%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |51.2%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |51.2%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |51.2%| [TS] {RESULT} ydb/tests/olap/load/flake8 |51.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |51.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |51.2%| [TS] {RESULT} ydb/tests/library/ut/flake8 |51.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |51.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |51.3%| [TS] {RESULT} ydb/tests/sql/large/flake8 |51.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |51.3%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |51.4%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |51.4%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |51.4%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |51.9%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |52.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |52.0%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |57.7%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |57.8%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |57.8%| [AR] {BAZEL_UPLOAD, SKIPPED} 
$(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |63.4%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |63.5%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |63.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |63.6%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |63.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |63.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |63.7%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |63.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |63.6%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |63.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |63.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |63.7%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |63.7%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |63.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |63.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |63.8%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |63.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |63.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |63.8%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |63.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |63.8%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |63.8%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |63.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |63.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |63.9%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |64.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |64.5%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |66.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |66.1%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |67.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |67.0%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |67.2%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |67.2%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |67.2%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |67.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |67.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |67.2%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |67.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |67.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |67.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |67.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |67.7%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |67.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |67.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |67.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |67.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |67.8%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |67.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |67.9%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl 
|67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |68.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |68.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |68.0%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |68.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |68.8%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |68.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |68.8%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |68.9%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |68.9%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |69.1%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |69.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |69.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |69.2%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |69.2%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |69.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |71.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |71.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |71.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |71.6%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |72.1%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |72.1%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |72.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |73.3%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |73.3%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |73.3%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |73.3%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |73.4%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |73.7%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |73.8%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |73.8%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |73.8%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap |73.8%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.9%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |74.0%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |74.0%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |74.1%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |74.1%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |74.2%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/docs/generator/generator |74.2%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |74.3%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |74.4%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |74.4%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |74.4%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |74.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |74.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds 
|74.4%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |74.5%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |74.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |74.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |74.5%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |74.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |74.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |74.5%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |74.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun |74.6%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |74.6%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/service_impl.cpp |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |74.6%| [AR] {RESULT} $(B)/ydb/core/statistics/service/libcore-statistics-service.a 
|74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service_impl.cpp |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |74.6%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |74.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |74.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |74.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |74.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> EntityId::Distinct [GOOD] >> SplitterBasic::LimitExceed [GOOD] >> EntityId::MaxId [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> EntityId::Order >> Cache::Test3 [GOOD] >> Cache::Test4 [GOOD] >> Cache::Test5 |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test2 [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test4 [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |74.7%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |74.7%| [TA] 
$(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |74.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> TKeyValueTest::TestWriteReadPatchRead >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestRewriteThenLastValue >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |74.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> TKeyValueTest::TestConcatWorks >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> TKeyValueTest::TestRenameWorks >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TKeyValueTest::TestObtainLockNewApi >> TKeyValueTest::TestInlineCopyRangeWorks >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> TKeyValueTest::TestCopyRangeWorks >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TKeyValueTest::TestCleanUpDataOnEmptyTablet >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> TKeyValueTest::TestBasicWriteRead >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2025-04-04T16:51:24.447395Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 
Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.449875Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-04T16:51:24.472330Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.472389Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-04T16:51:24.494491Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2025-04-04T16:51:24.494571Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2025-04-04T16:51:24.494623Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-04-04T16:51:24.545722Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.556324Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-04T16:51:24.570582Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.570653Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-04-04T16:51:24.584619Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.584749Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-04-04T16:51:24.584424Z ErrorReason# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-04-04T16:51:24.749498Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.749603Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. 
KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1743785484741 ErrorReason# 2025-04-04T16:51:24.765876Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.765975Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpectedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1743785484765 ErrorReason# 2025-04-04T16:51:24.777390Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.777480Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1743785484777 ErrorReason# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-04-04T16:51:24.798308Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-04-04T16:51:24.800813Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TTransferTests::Create |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |74.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> TTransferTests::Create_Disabled |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |74.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateSequential >> TTransferTests::Create [GOOD] >> TTransferTests::CreateInParallel >> LdapAuthProviderTest::LdapServerIsUnavailable >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> SystemView::AuthGroups_Access >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> SystemView::CollectPreparedQueries >> SystemView::SystemViewFailOps >> SystemView::VSlotsFields >> DbCounters::TabletsSimple >> SystemView::Nodes >> SystemView::QueryStats >> SystemView::AuthGroups >> SystemView::PartitionStatsOneSchemeShard |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |74.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |74.8%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> SystemView::AuthUsers |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites >> BasicUsage::BasicWriteSession >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |74.8%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> BasicUsage::GetAllStartPartitionSessions >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateWithoutCredentials >> BasicUsage::WriteSessionWriteInHandlers |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> 
TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |74.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> BasicUsage::WriteSessionNoAvailableDatabase >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> BasicUsage::RetryDiscoveryWithCancel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongConfig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:51:26.988090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:51:26.988192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:51:26.988248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:51:26.988288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:51:26.988327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:51:26.988363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:51:26.988422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:51:26.988511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:51:26.988789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:51:27.042175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:51:27.042217Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:27.050671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:51:27.050861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:51:27.050982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:51:27.057771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:51:27.057994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:51:27.058433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:51:27.059036Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:51:27.062738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:27.063575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:51:27.063619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:27.063762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:51:27.063805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:51:27.063836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:51:27.063915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.068812Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:51:27.208330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:51:27.208509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.208675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:51:27.208879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:51:27.208929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.210917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:51:27.211018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:51:27.211333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.211366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:51:27.211393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:51:27.211427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:51:27.213098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.213137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:51:27.213161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:51:27.214446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.214480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.214532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:27.214567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.216937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:51:27.218275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:51:27.218412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:51:27.219087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:51:27.219190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:51:27.219229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:27.219450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:51:27.219484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:27.219604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:51:27.219668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:51:27.221127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:51:27.221162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:51:27.221278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:27.221304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2025-04-04T16:51:27.221498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.221530Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:51:27.221608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:51:27.221632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.221667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:51:27.221689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.221712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:51:27.221739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.221773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:51:27.221801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:51:27.221847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:51:27.221871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:51:27.221890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:51:27.223129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:51:27.223201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:51:27.223223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
dy parts: 1/1 2025-04-04T16:51:29.815057Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:51:29.816727Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:51:29.816915Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:51:29.817826Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:51:29.817966Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:51:29.818022Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:29.818335Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:51:29.818407Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:29.818652Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:51:29.818740Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:51:29.821113Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:51:29.821168Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:51:29.821376Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:29.821437Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:51:29.821871Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:29.821927Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:51:29.822052Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:51:29.822098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:29.822145Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:51:29.822192Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-04T16:51:29.822236Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:51:29.822304Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:29.822347Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:51:29.822385Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:51:29.822468Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:51:29.822512Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:51:29.822579Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:51:29.823060Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:51:29.823178Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:51:29.823222Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T16:51:29.823274Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T16:51:29.823322Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:51:29.823423Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T16:51:29.826394Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T16:51:29.826940Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T16:51:29.827717Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Bootstrap 2025-04-04T16:51:29.848231Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Become StateWork (SchemeCache [4:273:2264]) 2025-04-04T16:51:29.848694Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [4:272:2263], Recipient [4:122:2148]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-04T16:51:29.848749Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T16:51:29.851486Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:51:29.851718Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateReplication Propose: opId# 101:0, path# /MyRoot/Transfer 2025-04-04T16:51:29.851807Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose 
status:StatusInvalidParameter, reason: Wrong transfer configuration, at schemeshard: 72057594046678944 2025-04-04T16:51:29.852008Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:51:29.853025Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:51:29.855700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Wrong transfer configuration" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:51:29.855855Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Wrong transfer configuration, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-04-04T16:51:29.855912Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:51:29.856358Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T16:51:29.856588Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T16:51:29.856634Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T16:51:29.856960Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [4:285:2276], Recipient [4:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:51:29.857014Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:51:29.857054Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T16:51:29.857198Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [4:282:2273], Recipient [4:122:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 101 2025-04-04T16:51:29.857233Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T16:51:29.857300Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T16:51:29.857390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:51:29.857432Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:283:2274] 2025-04-04T16:51:29.857610Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [4:285:2276], Recipient [4:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:51:29.857648Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:51:29.857686Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-04T16:51:29.858027Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [4:286:2277], Recipient [4:122:2148]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T16:51:29.858077Z node 4 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T16:51:29.858176Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:51:29.858394Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 215us result status StatusPathDoesNotExist 2025-04-04T16:51:29.858576Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad |74.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad |74.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::CollectScanQueries >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TTransferTests::Alter [GOOD] >> SystemView::SystemViewFailOps [GOOD] >> SystemView::TabletsFields ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:51:26.807026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:51:26.807144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:51:26.807207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:51:26.807248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:51:26.807302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:51:26.807338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:51:26.807406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:51:26.807525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:51:26.807831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:51:26.865008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:51:26.865057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:26.873144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:51:26.873313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:51:26.873449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:51:26.882767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:51:26.883094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:51:26.883721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:51:26.884520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:51:26.889544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:26.890890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:51:26.890951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:26.891167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:51:26.891238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:51:26.891296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:51:26.891431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:51:26.898340Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:51:26.996756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:51:26.996923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:26.997200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:51:26.997365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:51:26.997412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:26.999716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:51:26.999823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:51:26.999976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.000018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:51:27.000046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:51:27.000086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:51:27.001604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.001647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:51:27.001674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:51:27.003011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.003046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.003090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:27.003137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.005751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:51:27.007036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:51:27.007199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:51:27.007865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:51:27.007969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:51:27.008014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:27.008207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:51:27.008267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:51:27.008409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:51:27.008466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:51:27.009895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:51:27.009927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:51:27.010039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:27.010071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:51:27.010351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:51:27.010390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:51:27.010501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:51:27.010543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.010582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:51:27.010611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.010639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:51:27.010671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:51:27.010709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:51:27.010733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:51:27.010786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:51:27.010814Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:51:27.010848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:51:27.012104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:51:27.012191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:51:27.012217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 04T16:51:33.429819Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:51:33.429897Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 2025-04-04T16:51:33.431694Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:51:33.431754Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Ack coordinator stepId#5000003 first txId#103 countTxs#1 2025-04-04T16:51:33.431814Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000003 2025-04-04T16:51:33.431858Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 103:0 2025-04-04T16:51:33.432043Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [5:124:2150], Recipient [5:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T16:51:33.432078Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 103 2025-04-04T16:51:33.432188Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:51:33.432253Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:51:33.432515Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:51:33.432568Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T16:51:33.433075Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:51:33.433130Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T16:51:33.433260Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:51:33.433309Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:51:33.433355Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:51:33.433402Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:51:33.433439Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:51:33.433485Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 
2025-04-04T16:51:33.433532Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:51:33.433577Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T16:51:33.433615Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T16:51:33.433766Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:51:33.433813Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-04T16:51:33.433852Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T16:51:33.443052Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [5:203:2205], Recipient [5:124:2150]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 } 2025-04-04T16:51:33.443121Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-04T16:51:33.443229Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:51:33.443355Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:51:33.443401Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T16:51:33.443455Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T16:51:33.443504Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:51:33.450728Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T16:51:33.450824Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:51:33.466863Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:51:33.467358Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T16:51:33.467414Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T16:51:33.467690Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T16:51:33.467738Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T16:51:33.468146Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [5:435:2390], Recipient [5:124:2150]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:51:33.468203Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 
2025-04-04T16:51:33.468242Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T16:51:33.468416Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [5:396:2351], Recipient [5:124:2150]: NKikimrScheme.TEvNotifyTxCompletion TxId: 103 2025-04-04T16:51:33.468458Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T16:51:33.468526Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T16:51:33.468628Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:51:33.468672Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [5:433:2388] 2025-04-04T16:51:33.468883Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:435:2390], Recipient [5:124:2150]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:51:33.468921Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:51:33.468961Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-04T16:51:33.469353Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [5:436:2391], Recipient [5:124:2150]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T16:51:33.469411Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T16:51:33.469509Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:51:33.469785Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 267us result status StatusSuccess 2025-04-04T16:51:33.470121Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Transfer" PathDescription { Self { Name: "Transfer" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTransfer CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Transfer" Config { SrcConnectionParams { StaticCredentials { User: 
"user" } } ConsistencySettings { Row { } } TransferSpecific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 ControllerId: 72075186233409546 State { Done { FailoverMode: FAILOVER_MODE_FORCE } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-04-04T16:51:33.470961Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [5:437:2392], Recipient [5:124:2150]: {TEvModifySchemeTransaction txid# 104 TabletId# 72057594046678944} 2025-04-04T16:51:33.471015Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T16:51:33.473592Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTransfer AlterReplication { Name: "Transfer" State { StandBy { } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:51:33.473783Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterReplication Propose: opId# 104:0, path# /MyRoot/Transfer, pathId# 2025-04-04T16:51:33.473884Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusInvalidParameter, reason: Cannot switch state, at schemeshard: 72057594046678944 2025-04-04T16:51:33.474092Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:51:33.476794Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusInvalidParameter Reason: "Cannot switch state" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:51:33.476949Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot switch state, operation: ALTER TRANSFER, no path 2025-04-04T16:51:33.477005Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsFields |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |74.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] |74.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:75:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:77:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:81:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! 
new actor is[4:80:2110] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:134:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:75:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:78:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:77:2109] Leader for TabletID 72057594037927937 is [5:80:2110] sender: [5:81:2057] recipient: [5:77:2109] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:80:2110] Leader for TabletID 72057594037927937 is [5:80:2110] sender: [5:134:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:76:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:80:2057] recipient: [6:78:2109] Leader for TabletID 72057594037927937 is [6:81:2110] sender: [6:82:2057] recipient: [6:78:2109] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:81:2110] Leader for TabletID 72057594037927937 is [6:81:2110] sender: [6:135:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |74.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.9%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> TKeyValueTest::TestWriteLongKey [GOOD] >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> SystemView::CollectScanQueries [GOOD] >> SystemView::CollectScriptingQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:75:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:77:2109] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:81:2057] recipient: [4:77:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! 
new actor is[4:80:2110] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:134:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:78:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:80:2057] recipient: [5:79:2109] Leader for TabletID 72057594037927937 is [5:81:2110] sender: [5:82:2057] recipient: [5:79:2109] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:81:2110] Leader for TabletID 72057594037927937 is [5:81:2110] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! 
new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:80:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:83:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:84:2057] recipient: [8:82:2112] Leader for TabletID 72057594037927937 is [8:85:2113] sender: [8:86:2057] recipient: [8:82:2112] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:85:2113] Leader for TabletID 72057594037927937 is [8:85:2113] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError >> KqpScanArrowInChanels::AllTypesColumns >> SystemView::TabletsFields [GOOD] >> SystemView::TabletsShards >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |74.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |74.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |74.9%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> SystemView::CollectScriptingQueries [GOOD] >> SystemView::AuthUsers_Access >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |75.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery >> SystemView::TabletsShards [GOOD] >> SystemView::TabletsFollowers >> BasicUsage::WriteSessionWriteInHandlers [GOOD] |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |75.0%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-04-04T16:51:28.549448Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1743785488549407 2025-04-04T16:51:29.199205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501647988353722:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.199581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:29.249976Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501650616839284:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.250254Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:29.462897Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:51:29.475431Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ba2/r3tmp/tmplDTwpj/pdisk_1.dat 2025-04-04T16:51:29.976263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:29.992063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.992144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.993996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.994066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.996851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:30.001119Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:30.003514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8207, node 1 2025-04-04T16:51:30.263972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000ba2/r3tmp/yandex18m1fg.tmp 2025-04-04T16:51:30.264006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ba2/r3tmp/yandex18m1fg.tmp 2025-04-04T16:51:30.264170Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000ba2/r3tmp/yandex18m1fg.tmp 2025-04-04T16:51:30.264295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:30.345197Z INFO: TTestServer started on Port 5585 GrpcPort 8207 TClient is connected to server localhost:5585 PQClient connected to localhost:8207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:30.737741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2025-04-04T16:51:33.605999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501667796708655:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.606087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501667796708623:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.606179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.622813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T16:51:33.705789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501667796708660:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T16:51:34.102445Z node 2 :TX_PROXY ERROR: Actor# [2:7489501667796708688:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:34.135977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:51:34.230168Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501665168223962:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:34.236943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501647988353722:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.237067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:34.237161Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjIzYTZlYzgtY2Q2NjQ0Zi1lNjEyM2E5Mi0xMGI0ODBmNw==, ActorId: [1:7489501665168223903:2335], ActorState: ExecuteState, TraceId: 01jr0ve13x7kyffwwtb844b8sn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:34.242558Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:34.243801Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501650616839284:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.243862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:34.246458Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501672091675991:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:34.247628Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTY2ZTdiMjMtM2VlZmU1NzEtZWFjM2Y0NzUtZGFkYzMxMDY=, ActorId: [2:7489501667796708620:2308], ActorState: ExecuteState, TraceId: 01jr0ve12sfem0z8jgzbp2ng93, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:34.247972Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:34.435800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:51:34.580777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:8207", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T16:51:35.043035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0ve26a8b9rsyx15b6tadvf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JmNmRlYTEtNzNkODFhNmEtOTgwMmVhZWItMjlmZWMwMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489501673758158985:2967] === CheckClustersList. Ok 2025-04-04T16:51:40.720635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:8207 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T16:51:40.890685Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:8207 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 ... 
oupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler 2025-04-04T16:51:42.664485Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session: try to update token 2025-04-04T16:51:42.664522Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 2025-04-04T16:51:42.664990Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T16:51:42.665197Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T16:51:42.668962Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:51:42.668993Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:51:42.669067Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2025-04-04T16:51:42.670015Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle === AcksHandler has written a message, closing the session 2025-04-04T16:51:42.671969Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:51:42.671999Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:51:42.672047Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-04-04T16:51:42.672220Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-04-04T16:51:42.673080Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-04-04T16:51:42.673470Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1743785502672 2025-04-04T16:51:42.673578Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T16:51:42.673630Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-04-04T16:51:42.677814Z node 2 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [2:7489501702156447645:2412] 2025-04-04T16:51:42.677914Z node 2 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 size 169 2025-04-04T16:51:42.677925Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:51:42.677961Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T16:51:42.677996Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-04T16:51:42.678155Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-04T16:51:42.678427Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-04T16:51:42.679212Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T16:51:42.679374Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 4000000 } max_queue_wait_time { nanos: 4000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-04T16:51:42.679422Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2025-04-04T16:51:42.679449Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-04-04T16:51:42.682220Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-04-04T16:51:42.682367Z :INFO: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-04-04T16:51:42.682398Z :INFO: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session will now close 2025-04-04T16:51:42.682459Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session: aborting 2025-04-04T16:51:42.682837Z :WARNING: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-04T16:51:42.683190Z :DEBUG: [/Root] TraceId [] SessionId [src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0] MessageGroupId [src_id] Write session: destroy 2025-04-04T16:51:42.683852Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0 grpc read done: success: 0 data: 2025-04-04T16:51:42.700128Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489501703822930966:2520] destroyed 2025-04-04T16:51:42.700175Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-04T16:51:42.683875Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0 grpc read failed 2025-04-04T16:51:42.690278Z node 1 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 2 sessionId: src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0 2025-04-04T16:51:42.690305Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|800295b7-3556a84a-f0cb2a76-78b52bef_0 is DEAD 2025-04-04T16:51:42.690642Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T16:51:43.034516Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-04T16:51:43.085373Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:43.147092Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:43.572273Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:43.670433Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:43.847048Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:44.055334Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:44.130587Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710687. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:51:44.130704Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7489501708117898372:2526] TxId: 281474976710687. Ctx: { TraceId: 01jr0veacm2m1k6wa7kpe273ah, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMxZDg2NTUtZDEzYWQ3ZGItZGJhMDk1NmQtNjBlZjcyOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:51:44.130970Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmMxZDg2NTUtZDEzYWQ3ZGItZGJhMDk1NmQtNjBlZjcyOTE=, ActorId: [1:7489501708117898311:2526], ActorState: ExecuteState, TraceId: 01jr0veacm2m1k6wa7kpe273ah, Create QueryResponse for error on request, msg: 2025-04-04T16:51:44.132288Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0veb5tesw0766bd1mzpfmz" } } YdbStatus: UNAVAILABLE ConsumedRu: 533 } 2025-04-04T16:51:44.700321Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:44.978827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:51:44.978852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:45.638889Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501708117898297:2525], TxId: 281474976710686, task: 1, CA Id [1:7489501708117898295:2525]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:51:46.217524Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710689. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:51:46.217621Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7489501716707833066:2537] TxId: 281474976710689. Ctx: { TraceId: 01jr0vecbc1s7sem1vqc89vv63, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI4YTc1NDQtMmJiM2M3MDktNDVjZGE0YTEtNWQ0YmI1NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:51:46.217792Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI4YTc1NDQtMmJiM2M3MDktNDVjZGE0YTEtNWQ0YmI1NDA=, ActorId: [1:7489501716707833034:2537], ActorState: ExecuteState, TraceId: 01jr0vecbc1s7sem1vqc89vv63, Create QueryResponse for error on request, msg: 2025-04-04T16:51:46.218853Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0vecze5j5wf6zbcqncgfk7" } } YdbStatus: UNAVAILABLE ConsumedRu: 423 } >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:75:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:77:2109] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:81:2057] recipient: [4:77:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! 
new actor is[4:80:2110] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:134:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:78:2109] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:81:2110] sender: [5:82:2057] recipient: [5:78:2109] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:81:2110] Leader for TabletID 72057594037927937 is [5:81:2110] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! 
new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:80:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:83:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:84:2057] recipient: [8:82:2112] Leader for TabletID 72057594037927937 is [8:85:2113] sender: [8:86:2057] recipient: [8:82:2112] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:85:2113] Leader for TabletID 72057594037927937 is [8:85:2113] sender: [8:103:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:82:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:85:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:84:2114] Leader for TabletID 72057594037927937 is [10:87:2115] sender: [10:88:2057] recipient: [10:84:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! 
new actor is[10:87:2115] Leader for TabletID 72057594037927937 is [10:87:2115] sender: [10:141:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:83:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:86:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:85:2114] Leader for TabletID 72057594037927937 is [11:88:2115] sender: [11:89:2057] recipient: [11:85:2114] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:88:2115] Leader for TabletID 72057594037927937 is [11:88:2115] sender: [11:142:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:2096]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:84:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:88:2057] recipient: [12:86:2115] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:90:2057] recipient: [12:86:2115] !Reboot 72057594037927937 (actor [12:55:2096]) rebooted! !Reboot 72057594037927937 (actor [12:55:2096]) tablet resolver refreshed! new actor is[12:89:2116] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:109:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:55:2096]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:85:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:88:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:89:2057] recipient: [13:87:2116] Leader for TabletID 72057594037927937 is [13:90:2117] sender: [13:91:2057] recipient: [13:87:2116] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! 
new actor is[13:90:2117] Leader for TabletID 72057594037927937 is [13:90:2117] sender: [13:110:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:88:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:91:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:92:2057] recipient: [14:90:2119] Leader for TabletID 72057594037927937 is [14:93:2120] sender: [14:94:2057] recipient: [14:90:2119] !Reboot 72057594037927937 (actor [14:55:2096]) rebooted! !Reboot 72057594037927937 (actor [14:55:2096]) tablet resolver refreshed! new actor is[14:93:2120] Leader for TabletID 72057594037927937 is [14:93:2120] sender: [14:147:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:88:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:91:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:92:2057] recipient: [15:90:2119] Leader for TabletID 72057594037927937 is [15:93:2120] sender: [15:94:2057] recipient: [15:90:2119] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! 
new actor is[15:93:2120] Leader for TabletID 72057594037927937 is [15:93:2120] sender: [15:147:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> SystemView::QueryStats [GOOD] >> SystemView::QueryStatsFields >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! 
new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! 
new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:75:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:77:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:79:2057] recipient: [10:78:2109] Leader for TabletID 72057594037927937 is [10:80:2110] sender: [10:81:2057] recipient: [10:78:2109] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:80:2110] Leader for TabletID 72057594037927937 is [10:80:2110] sender: [10:134:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:75:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:79:2057] recipient: [11:77:2109] Leader for TabletID 72057594037927937 is [11:80:2110] sender: [11:81:2057] recipient: [11:77:2109] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:80:2110] Leader for TabletID 72057594037927937 is [11:80:2110] sender: [11:134:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:76:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:79:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:80:2057] recipient: [12:78:2109] Leader for TabletID 72057594037927937 is [12:81:2110] sender: [12:82:2057] recipient: [12:78:2109] !Reboot 72057594037927937 (actor [12:55:2096]) rebooted! !Reboot 72057594037927937 (actor [12:55:2096]) tablet resolver refreshed! new actor is[12:81:2110] Leader for TabletID 72057594037927937 is [12:81:2110] sender: [12:135:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:79:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:82:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:83:2057] recipient: [13:81:2112] Leader for TabletID 72057594037927937 is [13:84:2113] sender: [13:85:2057] recipient: [13:81:2112] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! new actor is[13:84:2113] Leader for TabletID 72057594037927937 is [13:84:2113] sender: [13:138:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:79:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:81:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:83:2057] recipient: [14:82:2112] Leader for TabletID 72057594037927937 is [14:84:2113] sender: [14:85:2057] recipient: [14:82:2112] !Reboot 72057594037927937 (actor [14:55:2096]) rebooted! !Reboot 72057594037927937 (actor [14:55:2096]) tablet resolver refreshed! new actor is[14:84:2113] Leader for TabletID 72057594037927937 is [14:84:2113] sender: [14:138:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:80:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:83:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:84:2057] recipient: [15:82:2112] Leader for TabletID 72057594037927937 is [15:85:2113] sender: [15:86:2057] recipient: [15:82:2112] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! new actor is[15:85:2113] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] 2025-04-04T16:51:52.033159Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. 
KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2025-04-04T16:51:52.053571Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-04-04T16:51:52.053635Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2025-04-04T16:51:27.739984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501637961347368:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.740382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c4/r3tmp/tmpGKNlzs/pdisk_1.dat 2025-04-04T16:51:28.227862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.234052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.234161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.244253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27355, node 1 2025-04-04T16:51:28.346824Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.346852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.346863Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.346978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.474232Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.477789Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.477823Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.479744Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28752, port: 28752 2025-04-04T16:51:28.480494Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.532118Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:28.577886Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:28.579039Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:28.579087Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:28.626481Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:28.675819Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:28.676955Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****suwQ (99292324) () has now valid token of ldapuser@ldap 2025-04-04T16:51:32.742695Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501637961347368:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:32.742780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:33.782390Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****suwQ (99292324) 2025-04-04T16:51:33.782717Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28752, port: 28752 2025-04-04T16:51:33.782808Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:33.835501Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:33.835854Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:28752 return no entries 2025-04-04T16:51:33.836089Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****suwQ (99292324) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:28752 return no entries)' 2025-04-04T16:51:38.795684Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****suwQ (99292324) 2025-04-04T16:51:40.101079Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501696921673275:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:40.101918Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c4/r3tmp/tmpicI2FC/pdisk_1.dat 2025-04-04T16:51:40.514578Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:40.517350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:40.517415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:40.522612Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28506, node 2 2025-04-04T16:51:40.734798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:40.734822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:40.734828Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:40.734935Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:41.021409Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:41.025366Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:41.025393Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:41.026035Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21913, port: 21913 2025-04-04T16:51:41.026114Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:41.097305Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:41.097619Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:21913. Server is busy 2025-04-04T16:51:41.097872Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****h18w (77403F9A) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:21913. 
Server is busy)' 2025-04-04T16:51:41.098139Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:41.098152Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:41.099087Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21913, port: 21913 2025-04-04T16:51:41.099166Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:41.151671Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:41.151996Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:21913. Server is busy 2025-04-04T16:51:41.152170Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****h18w (77403F9A) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:21913. Server is busy)' 2025-04-04T16:51:43.099598Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****h18w (77403F9A) 2025-04-04T16:51:43.099890Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:43.099912Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:43.123915Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21913, port: 21913 2025-04-04T16:51:43.123997Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:43.162068Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:43.162502Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:21913. Server is busy 2025-04-04T16:51:43.162930Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****h18w (77403F9A) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:21913. 
Server is busy)' 2025-04-04T16:51:45.102603Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501696921673275:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:45.102687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:46.109935Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****h18w (77403F9A) 2025-04-04T16:51:46.110207Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:46.110226Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:46.110938Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21913, port: 21913 2025-04-04T16:51:46.111002Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:46.262679Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:46.318596Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:46.320793Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:46.320837Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:46.366612Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:46.410636Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:46.411430Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****h18w (77403F9A) () has now valid token of ldapuser@ldap 2025-04-04T16:51:50.118564Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****h18w (77403F9A) 2025-04-04T16:51:50.123371Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21913, port: 21913 2025-04-04T16:51:50.123443Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:50.273410Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:50.331908Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:50.339949Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:50.339990Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:50.410606Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:50.474018Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:50.493824Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****h18w (77403F9A) () has now valid token of ldapuser@ldap |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicUsage::CloseWriteSessionImmediately >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |75.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |75.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |75.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> SystemView::Nodes [GOOD] >> SystemView::ConcurrentScans >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> TFetchRequestTests::HappyWay |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> 
SystemView::AuthUsers [GOOD] >> SystemView::AuthOwners_TableRange |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |75.1%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! 
!Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:84:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:88:2057] recipient: [11:86:2115] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:90:2057] recipient: [11:86:2115] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:89:2116] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:109:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:2096]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:85:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:89:2057] recipient: [12:88:2116] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:91:2057] recipient: [12:88:2116] !Reboot 72057594037927937 (actor [12:55:2096]) rebooted! !Reboot 72057594037927937 (actor [12:55:2096]) tablet resolver refreshed! 
new actor is[12:90:2117] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:110:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:88:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:91:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:92:2057] recipient: [13:90:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:90:2119] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:147:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:88:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:90:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:92:2057] recipient: [14:91:2119] Leader for TabletID 72057594037927937 is [14:93:2120] sender: [14:94:2057] recipient: [14:91:2119] !Reboot 72057594037927937 (actor [14:55:2096]) rebooted! !Reboot 72057594037927937 (actor [14:55:2096]) tablet resolver refreshed! new actor is[14:93:2120] Leader for TabletID 72057594037927937 is [14:93:2120] sender: [14:147:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:89:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:92:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:93:2057] recipient: [15:91:2119] Leader for TabletID 72057594037927937 is [15:94:2120] sender: [15:95:2057] recipient: [15:91:2119] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! 
new actor is[15:94:2120] Leader for TabletID 72057594037927937 is [15:94:2120] sender: [15:148:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] >> SystemView::AuthGroups [GOOD] >> SystemView::AuthGroupMembers >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> SystemView::AuthGroups_Access [GOOD] >> SystemView::AuthGroups_ResultOrder >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TComputeScheduler::TTotalLimits [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2025-04-04T16:51:27.557857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501641883191882:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.558101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001707/r3tmp/tmpVQBzQS/pdisk_1.dat 2025-04-04T16:51:27.856955Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29109, node 1 2025-04-04T16:51:27.895557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:27.895647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:27.899528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:27.947661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:27.947692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:27.947909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T16:51:27.948016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.122111Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.122461Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.122487Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.123957Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:2573, port: 2573 2025-04-04T16:51:28.124042Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:28.128109Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-04-04T16:51:28.128696Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****628A (67FAA881) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-04-04T16:51:28.129015Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.129029Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.129872Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:2573, port: 2573 2025-04-04T16:51:28.129930Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:28.133134Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-04-04T16:51:28.133236Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****628A (67FAA881) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-04-04T16:51:30.918140Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501652198261523:2199];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001707/r3tmp/tmp1wWEOo/pdisk_1.dat 2025-04-04T16:51:30.992255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:31.105148Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:31.120737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.120811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.122194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23141, node 2 2025-04-04T16:51:31.166349Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:31.166372Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:31.166378Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:31.166470Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:31.277120Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:31.282985Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:31.283014Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:31.283693Z 
node 2 :TICKET_PARSER DEBUG: Ticket eyJh****pMbA (33B37953) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2025-04-04T16:51:34.902421Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501670787461415:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.902545Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001707/r3tmp/tmpfDFEWh/pdisk_1.dat 2025-04-04T16:51:35.093063Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:35.105656Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:35.105756Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:35.108316Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6949, node 3 2025-04-04T16:51:35.338788Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:35.338808Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:35.338815Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:35.338928Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:35.630377Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:35.633443Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:35.633461Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:35.634181Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****sjfA (8A04CF90) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2025-04-04T16:51:39.720479Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501689674819530:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:39.721084Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001707/r3tmp/tmp0ydJCx/pdisk_1.dat 2025-04-04T16:51:39.925132Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:39.985340Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:39.985418Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:39.989592Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18877, node 4 2025-04-04T16:51:40.199644Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:40.199664Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:40.199671Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T16:51:40.199778Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:40.410633Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:40.414716Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:40.414745Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:40.416016Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****WfHA (7F5DBE1D) () has now permanent error message 'Could not login via LDAP (Parameter BindDn is empty)' 2025-04-04T16:51:44.507550Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489501714505437612:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:44.507685Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001707/r3tmp/tmpJFN7mu/pdisk_1.dat 2025-04-04T16:51:44.850481Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:44.895724Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:44.895845Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:44.897189Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11999, node 5 2025-04-04T16:51:45.049690Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:45.049724Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:45.049731Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:45.049846Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:45.270486Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:45.276156Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:45.276182Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:45.276914Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****OiiA (5DAAB320) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2025-04-04T16:51:53.010697Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501747998694316:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:53.167074Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001707/r3tmp/tmp2DfcrC/pdisk_1.dat 2025-04-04T16:51:53.382578Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:53.409669Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:53.409752Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:53.415844Z node 6 :HIVE WARN: HIVE#72057594037968897 
Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9704, node 6 2025-04-04T16:51:53.603158Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:53.603181Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:53.603190Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:53.603337Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:53.714411Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:53.716092Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:53.716126Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:53.717021Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:11124, port: 11124 2025-04-04T16:51:53.717313Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:53.830861Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:53.876374Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****eQlA (55C6B3F3) () has now valid token of ldapuser@ldap >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-04T16:51:27.763665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501639729977148:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.763737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ea/r3tmp/tmprZn5RO/pdisk_1.dat 2025-04-04T16:51:28.234597Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.266401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.266488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22027, node 1 2025-04-04T16:51:28.270054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.354754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.354773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.354781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.354877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.738380Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.758317Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.758364Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.759147Z node 1 :LDAP_AUTH_PROVIDER 
DEBUG: init: scheme: ldap, uris: ldap://localhost:8009, port: 8009 2025-04-04T16:51:28.759804Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.810317Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:28.857649Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:28.906413Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****jjDw (9E4F458F) () has now valid token of ldapuser@ldap 2025-04-04T16:51:31.342466Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501656022107886:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:31.342506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ea/r3tmp/tmpNe8WV4/pdisk_1.dat 2025-04-04T16:51:31.479983Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.480071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.480997Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:31.482664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18525, node 2 2025-04-04T16:51:31.556346Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:31.556374Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:31.556381Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:31.556482Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:31.655031Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:31.658415Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:31.658438Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:31.659071Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:65418, port: 65418 2025-04-04T16:51:31.659136Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:31.697418Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:31.742790Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:31.743212Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:31.743276Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), 
attributes: memberOf 2025-04-04T16:51:31.790662Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:31.836214Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:31.838360Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****-9-w (48134689) () has now valid token of ldapuser@ldap 2025-04-04T16:51:36.226997Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501676989684275:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:36.227044Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ea/r3tmp/tmpxRU433/pdisk_1.dat 2025-04-04T16:51:36.593831Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:36.606506Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:36.606897Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:36.611412Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32152, node 3 2025-04-04T16:51:36.887029Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:36.887051Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:36.887062Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:36.887187Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:37.239923Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:37.240212Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:37.240224Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:37.240896Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29223, port: 29223 2025-04-04T16:51:37.240954Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:37.278821Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:37.327988Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****ulmg (776B197E) () has now valid token of ldapuser@ldap 2025-04-04T16:51:41.578271Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501699007706382:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:41.615051Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ea/r3tmp/tmpUAn79I/pdisk_1.dat 
2025-04-04T16:51:41.904778Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:41.921507Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:41.921596Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:41.925108Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22643, node 4 2025-04-04T16:51:42.097067Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:42.097088Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:42.097096Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:42.097252Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:42.350373Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:42.354609Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:42.354642Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:42.355353Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:16620 ldap://localhost:16620 ldap://localhost:11111, port: 16620 2025-04-04T16:51:42.355439Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:42.406417Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:42.453768Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:42.454226Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:42.454293Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:42.501306Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:42.542806Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:42.544458Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****hqfg (6B89A7A3) () has now valid token of ldapuser@ldap test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ea/r3tmp/tmp8tpZcI/pdisk_1.dat 2025-04-04T16:51:51.675045Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:51:51.926213Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:51.948461Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-04T16:51:51.948585Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:51.955825Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31960, node 5 2025-04-04T16:51:52.149973Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:52.149995Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:52.150002Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:52.150110Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:52.401651Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:52.404997Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:52.405026Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:52.405724Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29098, port: 29098 2025-04-04T16:51:52.405787Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:52.535372Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-04T16:51:52.582746Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:52.583276Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:52.583324Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:51:52.630468Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:51:52.675107Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:51:52.675612Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Wq2w (2402DF63) () has now valid token of ldapuser@ldap 2025-04-04T16:51:55.272753Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501761950402486:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:55.272789Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ea/r3tmp/tmpCGl5EX/pdisk_1.dat 2025-04-04T16:51:55.482749Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:55.517529Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:55.517606Z node 6 :HIVE WARN: 
HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:55.519101Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10706, node 6 2025-04-04T16:51:55.577229Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:55.577256Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:55.577263Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:55.577403Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:55.850399Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:55.854917Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:55.854944Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:55.856142Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5024, port: 5024 2025-04-04T16:51:55.856802Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:55.894427Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-04T16:51:55.894539Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:5024. Bad search filter 2025-04-04T16:51:55.894807Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****vYiA (9AE63A5B) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:5024. 
Bad search filter)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] 2025-04-04T16:51:33.512752Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] 2025-04-04T16:51:45.681403Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:448:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:451:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:452:2057] recipient: [4:450:2376] Leader for TabletID 72057594037927937 is [4:453:2377] sender: [4:454:2057] recipient: [4:450:2376] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:453:2377] Leader for TabletID 72057594037927937 is [4:453:2377] sender: [4:507:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:448:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:451:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:452:2057] recipient: [5:450:2376] Leader for TabletID 72057594037927937 is [5:453:2377] sender: [5:454:2057] recipient: [5:450:2376] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:453:2377] Leader for TabletID 72057594037927937 is [5:453:2377] sender: [5:507:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:449:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:452:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:453:2057] recipient: [6:451:2376] Leader for TabletID 72057594037927937 is [6:454:2377] sender: [6:455:2057] recipient: [6:451:2376] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:454:2377] Leader for TabletID 72057594037927937 is [6:454:2377] sender: [6:508:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::TTotalLimits [GOOD] Test command err: 1610 1600 1610 1600 >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTablet [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration >> 
KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> SystemView::QueryStatsFields [GOOD] >> SystemView::QueryStatsAllTables >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437185 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437184 is [1:115:2144] sender: [1:116:2057] recipient: [1:103:2136] Leader for TabletID 9437185 is [1:117:2145] sender: [1:118:2057] recipient: [1:104:2137] Leader for TabletID 9437184 is [1:115:2144] sender: [1:155:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:157:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:160:2057] recipient: [1:100:2135] Leader for TabletID 9437185 is [1:117:2145] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:164:2057] recipient: [1:163:2175] Leader for TabletID 9437185 is [1:165:2176] sender: [1:166:2057] recipient: [1:163:2175] Leader for TabletID 9437185 is [1:165:2176] sender: [1:194:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:115:2144] sender: [1:197:2057] recipient: [1:99:2134] Leader for TabletID 9437184 is [1:115:2144] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:115:2144] sender: [1:201:2057] recipient: [1:199:2199] Leader for TabletID 9437184 is [1:202:2200] sender: [1:203:2057] recipient: [1:199:2199] Leader for TabletID 9437184 is [1:202:2200] sender: [1:231:2057] recipient: [1:14:2061] >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration [GOOD] >> TKeyValueTest::TestCleanUpDataWithMockDisk >> TPartitionTests::ShadowPartitionCountersRestore >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TKeyValueTest::TestRenameWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-04T16:51:27.738184Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501642028656391:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.738311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f6/r3tmp/tmpvVij1q/pdisk_1.dat 2025-04-04T16:51:28.249651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.249755Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.251394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.253327Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9194, node 1 2025-04-04T16:51:28.411835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.411852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.411862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.411941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.568901Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.572795Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.572843Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.574473Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:15271, port: 15271 2025-04-04T16:51:28.574717Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.658631Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:28.706538Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:28.763157Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****JjcA (B8B119A2) () has now valid token of ldapuser@ldap 2025-04-04T16:51:31.464926Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501656120996911:2162];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:31.468783Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f6/r3tmp/tmpWPunHe/pdisk_1.dat 2025-04-04T16:51:31.712949Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:31.754923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.755031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.757083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31760, node 2 2025-04-04T16:51:31.962801Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:31.962822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:31.962828Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:31.962915Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:32.074367Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:32.076479Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, 
request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:32.076518Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:32.077191Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:24319, port: 24319 2025-04-04T16:51:32.077265Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:32.167001Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:32.215352Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:32.215856Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:32.215927Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:32.258704Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:32.309773Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:32.310747Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****kjLA (5713F63B) () has now valid token of ldapuser@ldap 2025-04-04T16:51:36.928829Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501678247018616:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f6/r3tmp/tmpUDc23d/pdisk_1.dat 2025-04-04T16:51:37.030799Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:37.103958Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:37.120352Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:37.120599Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:37.123937Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62871, node 3 2025-04-04T16:51:37.291722Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:37.291742Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:37.291748Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:37.291844Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:37.486382Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:37.497334Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, 
DomainLoginOnly 1 2025-04-04T16:51:37.497361Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:37.498371Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:5484, port: 5484 2025-04-04T16:51:37.498425Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:37.604178Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:37.654769Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Q4pQ (76CE5DB5) () has now valid token of ldapuser@ldap 2025-04-04T16:51:42.231570Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501705483225690:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:42.231762Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f6/r3tmp/tmpo8ENiq/pdisk_1.dat 2025-04-04T16:51:42.412527Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:42.444692Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:42.444776Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:42.446552Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12525, node 4 2025-04-04T16:51:42.582766Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:42.582789Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:42.582800Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:42.582914Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:42.778501Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:42.785218Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:42.785245Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:42.785861Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:26993 ldaps://localhost:26993 ldaps://localhost:11111, port: 26993 2025-04-04T16:51:42.785947Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:42.920799Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:42.974364Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:42.975249Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:42.975293Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:43.022636Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:43.066615Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:43.067562Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****nAig (533746B2) () has now valid token of ldapuser@ldap 2025-04-04T16:51:51.711380Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489501742460254880:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:51.711431Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f6/r3tmp/tmp4LpQ3H/pdisk_1.dat 2025-04-04T16:51:52.089878Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:52.089967Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:52.097948Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:52.116414Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10229, node 5 2025-04-04T16:51:52.443142Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:52.443165Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:52.443172Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:52.443279Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:52.625756Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:52.626061Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:52.626085Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:52.626833Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6289, port: 6289 2025-04-04T16:51:52.626893Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:52.718682Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-04T16:51:52.762567Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:52.763192Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:52.763239Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:51:52.806717Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:51:52.850727Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:51:52.851840Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ldBA (75A75C6D) () has now valid token of ldapuser@ldap 2025-04-04T16:51:58.395856Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501774955478411:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:58.395919Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f6/r3tmp/tmpaBXaT6/pdisk_1.dat 2025-04-04T16:51:59.762952Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:59.816915Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:51:59.967247Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:59.967342Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:59.993412Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25999, node 6 2025-04-04T16:52:00.476446Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:00.476476Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:00.476487Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:00.476662Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:00.663012Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:00.669346Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:00.669379Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:00.671539Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:19446, port: 19446 2025-04-04T16:52:00.671609Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:00.813001Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-04T16:52:00.814102Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:19446. 
Bad search filter 2025-04-04T16:52:00.817014Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****ggwQ (0D6F0B29) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:19446. Bad search filter)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
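The LDAP_AUTH_PROVIDER traces above all follow the same shape: init with a scheme and one or more candidate URIs (node 4 lists three ldaps:// hosts and selects port 26993), bind as the service DN cn=robouser,dc=search,dc=yandex,dc=net, a base search for uid=ldapuser requesting the group attribute (memberOf on nodes 3-4, groupDN on node 5), one matching-rule-in-chain query using OID 1.2.840.113556.1.4.1941, and then a level-by-level tree traversal in which every parent DN found so far is OR-ed into an (|(entryDn=...)(entryDn=...)) filter until a level yields nothing new, which is why the final query contains only cn=people,ou=groups,... . The node 6 trace shows the failure path: the malformed filter &(uid=ldapuser)() is rejected by the server as "Bad search filter" and surfaces as a permanent TICKET_PARSER error. A minimal sketch of the per-level filter construction, matching the filters visible above; the function name is illustrative, not the actual NKikimr implementation:

    #include <string>
    #include <vector>

    // Build one traversal-level filter of the form seen in the traces:
    // (|(entryDn=dn1)(entryDn=dn2)...)
    std::string BuildEntryDnDisjunction(const std::vector<std::string>& parentDns) {
        std::string filter = "(|";
        for (const auto& dn : parentDns) {
            filter += "(entryDn=" + dn + ")";
        }
        filter += ")";
        return filter;
    }

Each search returns the next level's memberOf (or groupDN) values; the traversal terminates once a level produces no unseen DNs.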
Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
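Each reboot cycle in these keyvalue traces reads the same way: a bootstrap handshake ("Leader for TabletID ... sender ... recipient ...", with its IGNORE duplicate), the injection banner "!Reboot ... on event <EventType> !" as above, a fresh handshake for the restarted tablet below, and finally "tablet resolver refreshed!" plus "new actor is[...]" naming the new leader incarnation. Since "tablet resolver refreshed!" appears exactly once per completed cycle, counting it is a cheap way to tally cycles when skimming a long dump; a small stdin filter, written to tolerate this report's flattened layout where several cycles share one physical line:

    #include <iostream>
    #include <string>

    // Tally completed reboot cycles: each cycle ends with the marker
    // "tablet resolver refreshed!", which can occur several times per
    // physical line in a flattened report.
    int main() {
        const std::string marker = "tablet resolver refreshed!";
        std::string line;
        long cycles = 0;
        while (std::getline(std::cin, line)) {
            for (size_t pos = line.find(marker); pos != std::string::npos;
                 pos = line.find(marker, pos + marker.size())) {
                ++cycles;
            }
        }
        std::cout << "completed reboot cycles: " << cycles << "\n";
        return 0;
    }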
Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:75:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:78:2057] recipient: [13:77:2109] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:80:2110] sender: [13:81:2057] recipient: [13:77:2109] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! new actor is[13:80:2110] Leader for TabletID 72057594037927937 is [13:80:2110] sender: [13:134:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:75:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:77:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:79:2057] recipient: [14:78:2109] Leader for TabletID 72057594037927937 is [14:80:2110] sender: [14:81:2057] recipient: [14:78:2109] !Reboot 72057594037927937 (actor [14:55:2096]) rebooted! 
!Reboot 72057594037927937 (actor [14:55:2096]) tablet resolver refreshed! new actor is[14:80:2110] Leader for TabletID 72057594037927937 is [14:80:2110] sender: [14:134:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:76:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:79:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:80:2057] recipient: [15:78:2109] Leader for TabletID 72057594037927937 is [15:81:2110] sender: [15:82:2057] recipient: [15:78:2109] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! new actor is[15:81:2110] Leader for TabletID 72057594037927937 is [15:81:2110] sender: [15:135:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:79:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:82:2057] recipient: [16:81:2112] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:83:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:84:2113] sender: [16:85:2057] recipient: [16:81:2112] !Reboot 72057594037927937 (actor [16:55:2096]) rebooted! !Reboot 72057594037927937 (actor [16:55:2096]) tablet resolver refreshed! new actor is[16:84:2113] Leader for TabletID 72057594037927937 is [16:84:2113] sender: [16:138:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:79:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:82:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:83:2057] recipient: [17:81:2112] Leader for TabletID 72057594037927937 is [17:84:2113] sender: [17:85:2057] recipient: [17:81:2112] !Reboot 72057594037927937 (actor [17:55:2096]) rebooted! !Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed! 
new actor is[17:84:2113] Leader for TabletID 72057594037927937 is [17:84:2113] sender: [17:138:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:80:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:83:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:84:2057] recipient: [18:82:2112] Leader for TabletID 72057594037927937 is [18:85:2113] sender: [18:86:2057] recipient: [18:82:2112] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:85:2113] Leader for TabletID 72057594037927937 is [18:85:2113] sender: [18:103:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:82:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:85:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:86:2057] recipient: [19:84:2114] Leader for TabletID 72057594037927937 is [19:87:2115] sender: [19:88:2057] recipient: [19:84:2114] !Reboot 72057594037927937 (actor [19:55:2096]) rebooted! !Reboot 72057594037927937 (actor [19:55:2096]) tablet resolver refreshed! new actor is[19:87:2115] Leader for TabletID 72057594037927937 is [19:87:2115] sender: [19:141:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:82:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:85:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:86:2057] recipient: [20:84:2114] Leader for TabletID 72057594037927937 is [20:87:2115] sender: [20:88:2057] recipient: [20:84:2114] !Reboot 72057594037927937 (actor [20:55:2096]) rebooted! !Reboot 72057594037927937 (actor [20:55:2096]) tablet resolver refreshed! 
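The harness behind these suites is a restart-injection loop: pick an event type (NKikimr::TEvTabletPipe::TEvServerConnected, NKikimr::TEvKeyValue::TEvRequest, TEvIntermediate, and so on), replay the scenario, kill the tablet leader the first time that event is observed, wait for re-election, and re-verify the read/write invariants against the new generation. A generic sketch of the interception idea; RebootOnEvent and restartTablet are illustrative stand-ins, not the real NKikimr test library:

    #include <functional>
    #include <string>

    // Fault-injection hook: restart the tablet the first time the chosen
    // event type is observed, then let the scenario run to completion.
    struct RebootOnEvent {
        std::string trigger;   // e.g. "NKikimr::TEvKeyValue::TEvIntermediate"
        bool fired = false;
        std::function<void()> restartTablet;

        void OnEvent(const std::string& eventType) {
            if (!fired && eventType == trigger) {
                fired = true;
                restartTablet();  // leader dies; the resolver must locate the new actor
            }
        }
    };

Firing only once is the conservative choice here, since the replayed scenario can emit the trigger event again after the restart.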
new actor is[20:87:2115] Leader for TabletID 72057594037927937 is [20:87:2115] sender: [20:141:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:83:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:86:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:87:2057] recipient: [21:85:2114] Leader for TabletID 72057594037927937 is [21:88:2115] sender: [21:89:2057] recipient: [21:85:2114] !Reboot 72057594037927937 (actor [21:55:2096]) rebooted! !Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed! new actor is[21:88:2115] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061] >> TTabletResolver::NodeProblem >> BootstrapperTest::RestartUnavailableTablet >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! 
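Interleaved with the captured output, the runner prints its own bookkeeping: ">> Suite::Test" lines mark tests being scheduled or resumed, "[GOOD]" is a passing verdict, and "------- [TM] {asan, default-linux-x86_64, release}" opens the error capture of one test module built in the address-sanitizer flavor. The bracketed triples such as [17:84:2113] look like serialized actor identifiers; reading them as node id, local actor id, and an event cookie is an assumption rather than something the log states, but it is consistent with how the sender/recipient pairs advance across reboots. A toy parser under that assumption:

    #include <cstdio>

    // Assumed layout of the bracketed triples: [node:local:cookie].
    struct ParsedActorId {
        unsigned long long node = 0, local = 0, cookie = 0;
    };

    bool ParseActorId(const char* s, ParsedActorId* out) {
        // e.g. "[17:84:2113]" -> {17, 84, 2113}
        return std::sscanf(s, "[%llu:%llu:%llu]",
                           &out->node, &out->local, &out->cookie) == 3;
    }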
new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! 
new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! 
new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:75:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:77:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:79:2057] recipient: [10:78:2109] Leader for TabletID 72057594037927937 is [10:80:2110] sender: [10:81:2057] recipient: [10:78:2109] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:80:2110] Leader for TabletID 72057594037927937 is [10:80:2110] sender: [10:134:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:75:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:79:2057] recipient: [11:77:2109] Leader for TabletID 72057594037927937 is [11:80:2110] sender: [11:81:2057] recipient: [11:77:2109] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! 
new actor is[11:80:2110] Leader for TabletID 72057594037927937 is [11:80:2110] sender: [11:134:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:76:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:79:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:80:2057] recipient: [12:78:2109] Leader for TabletID 72057594037927937 is [12:81:2110] sender: [12:82:2057] recipient: [12:78:2109] !Reboot 72057594037927937 (actor [12:55:2096]) rebooted! !Reboot 72057594037927937 (actor [12:55:2096]) tablet resolver refreshed! new actor is[12:81:2110] Leader for TabletID 72057594037927937 is [12:81:2110] sender: [12:135:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:79:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:82:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:83:2057] recipient: [13:81:2112] Leader for TabletID 72057594037927937 is [13:84:2113] sender: [13:85:2057] recipient: [13:81:2112] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! new actor is[13:84:2113] Leader for TabletID 72057594037927937 is [13:84:2113] sender: [13:138:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:79:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:81:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:83:2057] recipient: [14:82:2112] Leader for TabletID 72057594037927937 is [14:84:2113] sender: [14:85:2057] recipient: [14:82:2112] !Reboot 72057594037927937 (actor [14:55:2096]) rebooted! !Reboot 72057594037927937 (actor [14:55:2096]) tablet resolver refreshed! 
new actor is[14:84:2113] Leader for TabletID 72057594037927937 is [14:84:2113] sender: [14:138:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:80:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:83:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:84:2057] recipient: [15:82:2112] Leader for TabletID 72057594037927937 is [15:85:2113] sender: [15:86:2057] recipient: [15:82:2112] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! new actor is[15:85:2113] Leader for TabletID 72057594037927937 is [15:85:2113] sender: [15:139:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:83:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:86:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:87:2057] recipient: [16:85:2115] Leader for TabletID 72057594037927937 is [16:88:2116] sender: [16:89:2057] recipient: [16:85:2115] !Reboot 72057594037927937 (actor [16:55:2096]) rebooted! !Reboot 72057594037927937 (actor [16:55:2096]) tablet resolver refreshed! new actor is[16:88:2116] Leader for TabletID 72057594037927937 is [16:88:2116] sender: [16:142:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:83:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:86:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:87:2057] recipient: [17:85:2115] Leader for TabletID 72057594037927937 is [17:88:2116] sender: [17:89:2057] recipient: [17:85:2115] !Reboot 72057594037927937 (actor [17:55:2096]) rebooted! !Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed! 
new actor is[17:88:2116] Leader for TabletID 72057594037927937 is [17:88:2116] sender: [17:142:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:84:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:87:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:88:2057] recipient: [18:86:2115] Leader for TabletID 72057594037927937 is [18:89:2116] sender: [18:90:2057] recipient: [18:86:2115] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:89:2116] Leader for TabletID 72057594037927937 is [18:89:2116] sender: [18:143:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:87:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:90:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:91:2057] recipient: [19:89:2118] Leader for TabletID 72057594037927937 is [19:92:2119] sender: [19:93:2057] recipient: [19:89:2118] !Reboot 72057594037927937 (actor [19:55:2096]) rebooted! !Reboot 72057594037927937 (actor [19:55:2096]) tablet resolver refreshed! new actor is[19:92:2119] Leader for TabletID 72057594037927937 is [19:92:2119] sender: [19:146:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:87:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:89:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:91:2057] recipient: [20:90:2118] Leader for TabletID 72057594037927937 is [20:92:2119] sender: [20:93:2057] recipient: [20:90:2118] !Reboot 72057594037927937 (actor [20:55:2096]) rebooted! !Reboot 72057594037927937 (actor [20:55:2096]) tablet resolver refreshed! 
new actor is[20:92:2119] Leader for TabletID 72057594037927937 is [20:92:2119] sender: [20:146:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:88:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:91:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:92:2057] recipient: [21:90:2118] Leader for TabletID 72057594037927937 is [21:93:2119] sender: [21:94:2057] recipient: [21:90:2118] !Reboot 72057594037927937 (actor [21:55:2096]) rebooted! !Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed! new actor is[21:93:2119] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset >> TPartitionTests::TestBatchingWithChangeConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! 
new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! 
new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! 
new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:106:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:107:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:89:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:87:2117] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:91:2057] recipient: [11:87:2117] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! 
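TestRenameWorksNewApi adds two reboot points the earlier suites did not exercise: NKikimr::TEvKeyValue::TEvCollect and NKikimr::TEvKeyValue::TEvCompleteGC. Judging from the event names alone (the traces show the injection, not the handlers), rename involves a two-phase cleanup of superseded blob references, and the test checks that a crash between "collect requested" and "GC completed" still converges after re-election. A sketch of the phase ordering with hypothetical state names:

    // Hypothetical two-phase GC state, inferred from the event names only.
    enum class GcPhase {
        Idle,         // no collection in flight
        CollectSent,  // TEvCollect issued; old references still live on disk
        Completed     // TEvCompleteGC applied; old references safe to drop
    };

    // Invariant the reboot test would enforce: anything short of Completed
    // must be re-driven from persisted state after a restart, never from
    // lost in-memory progress.
    bool NeedsRedrive(GcPhase persisted) {
        return persisted != GcPhase::Completed;
    }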
new actor is[11:90:2118] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:209 ... recipient: [15:77:2109] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! new actor is[15:80:2110] Leader for TabletID 72057594037927937 is [15:80:2110] sender: [15:134:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:75:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:78:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:79:2057] recipient: [16:77:2109] Leader for TabletID 72057594037927937 is [16:80:2110] sender: [16:81:2057] recipient: [16:77:2109] !Reboot 72057594037927937 (actor [16:55:2096]) rebooted! !Reboot 72057594037927937 (actor [16:55:2096]) tablet resolver refreshed! new actor is[16:80:2110] Leader for TabletID 72057594037927937 is [16:80:2110] sender: [16:134:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:79:2057] recipient: [17:78:2109] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:81:2110] sender: [17:82:2057] recipient: [17:78:2109] !Reboot 72057594037927937 (actor [17:55:2096]) rebooted! !Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed! 
new actor is[17:81:2110] Leader for TabletID 72057594037927937 is [17:81:2110] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:79:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:82:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:83:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:84:2113] sender: [18:85:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:84:2113] Leader for TabletID 72057594037927937 is [18:84:2113] sender: [18:138:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:79:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:82:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:83:2057] recipient: [19:81:2112] Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:85:2057] recipient: [19:81:2112] !Reboot 72057594037927937 (actor [19:55:2096]) rebooted! !Reboot 72057594037927937 (actor [19:55:2096]) tablet resolver refreshed! new actor is[19:84:2113] Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:138:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:83:2057] recipient: [20:82:2112] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2113] sender: [20:86:2057] recipient: [20:82:2112] !Reboot 72057594037927937 (actor [20:55:2096]) rebooted! !Reboot 72057594037927937 (actor [20:55:2096]) tablet resolver refreshed! 
new actor is[20:85:2113] Leader for TabletID 72057594037927937 is [20:85:2113] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:55:2096]) rebooted! !Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:106:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:55:2096]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:82:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:85:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:86:2057] recipient: [22:84:2114] Leader for TabletID 72057594037927937 is [22:87:2115] sender: [22:88:2057] recipient: [22:84:2114] !Reboot 72057594037927937 (actor [22:55:2096]) rebooted! !Reboot 72057594037927937 (actor [22:55:2096]) tablet resolver refreshed! new actor is[22:87:2115] Leader for TabletID 72057594037927937 is [22:87:2115] sender: [22:107:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:56:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:73:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:85:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:89:2057] recipient: [23:88:2117] Leader for TabletID 72057594037927937 is [23:90:2118] sender: [23:91:2057] recipient: [23:88:2117] !Reboot 72057594037927937 (actor [23:55:2096]) rebooted! !Reboot 72057594037927937 (actor [23:55:2096]) tablet resolver refreshed! 
new actor is[23:90:2118] Leader for TabletID 72057594037927937 is [23:90:2118] sender: [23:144:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:56:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:73:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:89:2057] recipient: [24:88:2117] Leader for TabletID 72057594037927937 is [24:90:2118] sender: [24:91:2057] recipient: [24:88:2117] !Reboot 72057594037927937 (actor [24:55:2096]) rebooted! !Reboot 72057594037927937 (actor [24:55:2096]) tablet resolver refreshed! new actor is[24:90:2118] Leader for TabletID 72057594037927937 is [24:90:2118] sender: [24:144:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:56:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:73:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:86:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:89:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:90:2057] recipient: [25:88:2117] Leader for TabletID 72057594037927937 is [25:91:2118] sender: [25:92:2057] recipient: [25:88:2117] !Reboot 72057594037927937 (actor [25:55:2096]) rebooted! !Reboot 72057594037927937 (actor [25:55:2096]) tablet resolver refreshed! 
new actor is[25:91:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:56:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:73:2057] recipient: [26:14:2061] >> TTabletResolver::NodeProblem [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession >> BootstrapperTest::KeepExistingTablet |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |75.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2025-04-04T16:51:27.744898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501641488969365:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.744980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e9/r3tmp/tmpwwXSdH/pdisk_1.dat 2025-04-04T16:51:28.204574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.238774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.238870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7251, node 1 2025-04-04T16:51:28.240469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.378676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.378696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.378703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.378806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.741150Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.762424Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.762468Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.764130Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29133, port: 29133 2025-04-04T16:51:28.764245Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:28.848445Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.894918Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, 
filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:28.938697Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:28.939501Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:28.939608Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:28.982721Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:29.028670Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:29.030159Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****F42Q (B5726216) () has now valid token of ldapuser@ldap test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e9/r3tmp/tmpuIJemA/pdisk_1.dat 2025-04-04T16:51:31.984245Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:51:31.992279Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:32.063348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:32.063426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:32.071733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2763, node 2 2025-04-04T16:51:32.247887Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:32.247906Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:32.247912Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:32.248017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:32.465823Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:32.519498Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:32.519539Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:32.526992Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:4386, port: 4386 2025-04-04T16:51:32.527066Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:32.600376Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:32.649781Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:32.695833Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****0eFA (E2235C0C) () has now valid token of ldapuser@ldap 
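
[Editor's note] The two group-membership strategies visible in the LDAP traces above differ only in the search filter: a single matching-rule-in-chain query (OID 1.2.840.113556.1.4.1941), which asks the server itself to expand nested groups, versus the iterative fallback that issues one search per nesting level, OR-ing together the entryDn of every group found so far. A minimal sketch of how such filter strings can be assembled, assuming hypothetical helper names — this is not the actual NKikimr LDAP provider API:

    #include <string>
    #include <vector>

    // Server-side nested expansion: one request, for servers that support
    // the LDAP_MATCHING_RULE_IN_CHAIN matching rule (e.g. Active Directory).
    std::string MakeInChainFilter(const std::string& userDn) {
        return "(member:1.2.840.113556.1.4.1941:=" + userDn + ")";
    }

    // Client-side tree traversal fallback: one request per nesting level,
    // OR-ing the DNs of the groups discovered at the previous level.
    std::string MakeParentGroupsFilter(const std::vector<std::string>& groupDns) {
        std::string filter = "(|";
        for (const auto& dn : groupDns) {
            filter += "(entryDn=" + dn + ")";
        }
        return filter + ")";
    }

Called with the two group DNs from the trace, MakeParentGroupsFilter reproduces the (|(entryDn=cn=managers,...)(entryDn=cn=developers,...)) request; the traversal stops once a level returns no new memberOf values.
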
2025-04-04T16:51:37.487053Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501684486242007:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:37.581576Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e9/r3tmp/tmpSijrrJ/pdisk_1.dat 2025-04-04T16:51:37.993952Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:38.075779Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:38.075855Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:38.082611Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30073, node 3 2025-04-04T16:51:38.596876Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:38.596894Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:38.596901Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:38.597022Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:38.850899Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:38.855088Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:38.855121Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:38.856213Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:13631 ldap://localhost:13631 ldap://localhost:11111, port: 13631 2025-04-04T16:51:38.856282Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:38.977133Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:39.022604Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:39.070498Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:39.074456Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:39.074508Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:39.118602Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:39.166580Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:39.167719Z node 3 :TICKET_PARSER DEBUG: 
Ticket eyJh****RO_Q (2791A637) () has now valid token of ldapuser@ldap 2025-04-04T16:51:44.172767Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501712920400880:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:44.172799Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e9/r3tmp/tmpvLU2Tl/pdisk_1.dat 2025-04-04T16:51:44.465906Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:44.518739Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:44.518820Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:44.523734Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31608, node 4 2025-04-04T16:51:44.633414Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:44.633436Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:44.633444Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:44.633564Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:44.841330Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:44.842278Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:44.842302Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:44.842941Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:9045, port: 9045 2025-04-04T16:51:44.842987Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:44.873928Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:44.921797Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-04T16:51:44.970995Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****5nYA (701876C1) () has now valid token of ldapuser@ldap 2025-04-04T16:51:52.983951Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489501746682418363:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e9/r3tmp/tmp7ElbU9/pdisk_1.dat 2025-04-04T16:51:53.221171Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:53.235601Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:53.241768Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:53.243628Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:53.257527Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12088, node 5 
2025-04-04T16:51:53.436474Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:53.436501Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:53.436509Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:53.436645Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:53.542394Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:53.544854Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:53.544880Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:53.545605Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:16119, port: 16119 2025-04-04T16:51:53.545686Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:53.632057Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:53.674744Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:16119. Invalid credentials 2025-04-04T16:51:53.675208Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****sIhw (F93A9C34) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:16119. Invalid credentials)' 2025-04-04T16:52:00.183643Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501780064256940:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:00.183687Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e9/r3tmp/tmpLno1jg/pdisk_1.dat 2025-04-04T16:52:00.757049Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:00.800367Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:00.800462Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:00.814043Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4759, node 6 2025-04-04T16:52:00.916187Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:00.916208Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:00.916216Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:00.916333Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:01.220609Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:01.226732Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:01.226761Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:01.227761Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2200, port: 2200 
2025-04-04T16:52:01.227896Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:01.269717Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:01.314855Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:2200. Invalid credentials 2025-04-04T16:52:01.315680Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****-PNw (A6D14906) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:2200. Invalid credentials)' >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-04-04T16:52:08.997075Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:08.997359Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:204:2135] CurrentLeaderTablet: [1:205:2136] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:52:08.997423Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-04T16:52:08.997478Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:204:2135] 2025-04-04T16:52:08.997720Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:08.997931Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:210:2139] CurrentLeaderTablet: [1:211:2140] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:52:08.998002Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-04T16:52:08.998054Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:210:2139] 2025-04-04T16:52:08.999432Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:08.999487Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:204:2135] 2025-04-04T16:52:08.999677Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:08.999733Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:210:2139] 2025-04-04T16:52:08.999917Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 
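
[Editor's note] The TABLET_RESOLVER entries that follow illustrate lazy cache invalidation: TEvNodeProblem only records the node's problem epoch, and a cached leader address is discarded when the next TEvForward finds its cache epoch is not newer than the recorded problem epoch ("Delayed invalidation ... by NodeId"), moving the entry from StNormal to StProblemResolve. A compilable sketch of that decision under those assumptions, with illustrative names rather than the real TTabletResolver internals:

    #include <cstdint>
    #include <unordered_map>

    enum class EState { StNormal, StProblemResolve };

    struct TEntry {
        uint32_t LeaderNodeId = 0;
        uint64_t CacheEpoch = 0;   // epoch at which this leader address was cached
        EState State = EState::StNormal;
    };

    struct TResolverCache {
        std::unordered_map<uint64_t, TEntry> ByTabletId;          // tabletId -> entry
        std::unordered_map<uint32_t, uint64_t> NodeProblemEpoch;  // nodeId -> max epoch

        // TEvNodeProblem: just remember the highest epoch reported per node.
        void HandleNodeProblem(uint32_t nodeId, uint64_t problemEpoch) {
            auto& known = NodeProblemEpoch[nodeId];
            if (problemEpoch > known)
                known = problemEpoch;
        }

        // TEvForward: the delayed invalidation happens only when the entry
        // is actually used; entries re-resolved after the problem report
        // (CacheEpoch > problemEpoch) stay valid.
        bool NeedsReResolve(TEntry& entry) const {
            auto it = NodeProblemEpoch.find(entry.LeaderNodeId);
            if (it != NodeProblemEpoch.end() && entry.CacheEpoch <= it->second) {
                entry.State = EState::StProblemResolve;  // re-resolve, then forward
                return true;
            }
            return false;
        }
    };

This matches the epoch sequence in the trace: after the node-2 report with problemEpoch 2, forwards cached at a later epoch still go to node 2 directly, and only the epoch-4 and epoch-5 reports trigger re-resolution to node 3.
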
2025-04-04T16:52:08.999963Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:204:2135] by NodeId 2025-04-04T16:52:09.000021Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.000207Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:220:2093] CurrentLeaderTablet: [2:221:2094] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:52:09.000238Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-04T16:52:09.000277Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:220:2093] 2025-04-04T16:52:09.000495Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:210:2139] by NodeId 2025-04-04T16:52:09.000572Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.000783Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:226:2095] CurrentLeaderTablet: [2:227:2096] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:52:09.000825Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-04T16:52:09.000883Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:226:2095] 2025-04-04T16:52:09.002330Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2025-04-04T16:52:09.002391Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.002434Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:220:2093] 2025-04-04T16:52:09.002683Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.002727Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:226:2095] 2025-04-04T16:52:09.002923Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2025-04-04T16:52:09.002986Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:220:2093] by NodeId 2025-04-04T16:52:09.003037Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.003305Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:238:2093] CurrentLeaderTablet: [3:239:2094] CurrentGeneration: 3 CurrentStep: 0 Locked: false 
LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:52:09.003341Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-04-04T16:52:09.003391Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:238:2093] 2025-04-04T16:52:09.003669Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.003715Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:226:2095] 2025-04-04T16:52:09.003899Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2025-04-04T16:52:09.003949Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.003997Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:238:2093] 2025-04-04T16:52:09.004238Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:226:2095] by NodeId 2025-04-04T16:52:09.004299Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:52:09.004573Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:244:2095] CurrentLeaderTablet: [3:245:2096] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:52:09.004612Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-04-04T16:52:09.004650Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:244:2095] |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TTabletPipeTest::TestInterconnectSession [GOOD] >> BootstrapperTest::UnavailableStateStorage [GOOD] >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect >> SystemView::ConcurrentScans [GOOD] >> SystemView::GroupsFields |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TargetDiscoverer::InvalidCredentials |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |75.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... 
waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-04-04T16:52:10.104331Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2025-04-04T16:52:10.104985Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-04-04T16:52:10.105026Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.148014s 2025-04-04T16:52:10.226291Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... 
waiting for multiple state storage lookup attempts (done) >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes >> SystemView::TopPartitionsFields [GOOD] >> SystemView::TopPartitionsTables >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! 
!Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:75:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:77:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:79:2057] recipient: [10:78:2109] Leader for TabletID 72057594037927937 is [10:80:2110] sender: [10:81:2057] recipient: [10:78:2109] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:80:2110] Leader for TabletID 72057594037927937 is [10:80:2110] sender: [10:134:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:75:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:79:2057] recipient: [11:77:2109] Leader for TabletID 72057594037927937 is [11:80:2110] sender: [11:81:2057] recipient: [11:77:2109] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:80:2110] Leader for TabletID 72057594037927937 is [11:80:2110] sender: [11:134:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:76:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:79:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:80:2057] recipient: [12:78:2109] Leader for TabletID 72057594037927937 is [12:81:2110] sender: [12:82:2057] recipient: [12:78:2109] !Reboot 72057594037927937 (actor [12:55:2096]) rebooted! !Reboot 72057594037927937 (actor [12:55:2096]) tablet resolver refreshed! new actor is[12:81:2110] Leader for TabletID 72057594037927937 is [12:81:2110] sender: [12:135:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:79:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:82:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:83:2057] recipient: [13:81:2112] Leader for TabletID 72057594037927937 is [13:84:2113] sender: [13:85:2057] recipient: [13:81:2112] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! new actor is[13:84:2113] Leader for TabletID 72057594037927937 is [13:84:2113] sender: [13:138:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:79:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:81:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:83:2057] recipient: [14:82:2112] Leader for TabletID 72057594037927937 is [14:84:2113] sender: [14:85:2057] recipient: [14:82:2112] !Reboot 72057594037927937 (actor [14:55:2096]) rebooted! !Reboot 72057594037927937 (actor [14:55:2096]) tablet resolver refreshed! new actor is[14:84:2113] Leader for TabletID 72057594037927937 is [14:84:2113] sender: [14:138:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:80:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:83:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:84:2057] recipient: [15:82:2112] Leader for TabletID 72057594037927937 is [15:85:2113] sender: [15:86:2057] recipient: [15:82:2112] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! new actor is[15:85:2113] Leader for TabletID 72057594037927937 is [15:85:2113] sender: [15:139:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:83:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:86:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:87:2057] recipient: [16:85:2115] Leader for TabletID 72057594037927937 is [16:88:2116] sender: [16:89:2057] recipient: [16:85:2115] !Reboot 72057594037927937 (actor [16:55:2096]) rebooted! !Reboot 72057594037927937 (actor [16:55:2096]) tablet resolver refreshed! new actor is[16:88:2116] Leader for TabletID 72057594037927937 is [16:88:2116] sender: [16:142:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:83:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:85:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:87:2057] recipient: [17:86:2115] Leader for TabletID 72057594037927937 is [17:88:2116] sender: [17:89:2057] recipient: [17:86:2115] !Reboot 72057594037927937 (actor [17:55:2096]) rebooted! !Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed! new actor is[17:88:2116] Leader for TabletID 72057594037927937 is [17:88:2116] sender: [17:142:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:84:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:87:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:88:2057] recipient: [18:86:2115] Leader for TabletID 72057594037927937 is [18:89:2116] sender: [18:90:2057] recipient: [18:86:2115] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:89:2116] Leader for TabletID 72057594037927937 is [18:89:2116] sender: [18:143:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] >> TPipeCacheTest::TestAutoConnect [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: 
[3:73:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:75:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:77:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:81:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:80:2110] Leader for TabletID 72057594037927937 is [4:80:2110] sender: [4:134:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:75:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:78:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:77:2109] Leader for TabletID 72057594037927937 is [5:80:2110] sender: [5:81:2057] recipient: [5:77:2109] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:80:2110] Leader for TabletID 72057594037927937 is [5:80:2110] sender: [5:134:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:76:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:78:2109] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:80:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:81:2110] sender: [6:82:2057] recipient: [6:78:2109] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:81:2110] Leader for TabletID 72057594037927937 is [6:81:2110] sender: [6:135:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:78:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:81:2057] recipient: [7:80:2111] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:83:2112] sender: [7:84:2057] recipient: [7:80:2111] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:83:2112] Leader for TabletID 72057594037927937 is [7:83:2112] sender: [7:137:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:78:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:81:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:80:2111] Leader for TabletID 72057594037927937 is [8:83:2112] sender: [8:84:2057] recipient: [8:80:2111] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:83:2112] Leader for TabletID 72057594037927937 is [8:83:2112] sender: [8:137:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:79:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:81:2111] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:83:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:84:2112] sender: [9:85:2057] recipient: [9:81:2111] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:84:2112] Leader for TabletID 72057594037927937 is [9:84:2112] sender: [9:138:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:82:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:84:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:87:2115] sender: [10:88:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:87:2115] Leader for TabletID 72057594037927937 is [10:87:2115] sender: [10:141:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:82:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:84:2114] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:86:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:87:2115] sender: [11:88:2057] recipient: [11:84:2114] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:87:2115] Leader for TabletID 72057594037927937 is [11:87:2115] sender: [11:141:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:83:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:86:2057] recipient: [12:85:2114] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:88:2115] sender: [12:89:2057] recipient: [12:85:2114] !Reboot 72057594037927937 (actor [12:55:2096]) rebooted! !Reboot 72057594037927937 (actor [12:55:2096]) tablet resolver refreshed! 
new actor is[12:88:2115] Leader for TabletID 72057594037927937 is [12:88:2115] sender: [12:142:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! 
new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... bletID 72057594037927937 is [13:55:2096] sender: [13:86:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:89:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:90:2057] recipient: [13:88:2116] Leader for TabletID 72057594037927937 is [13:91:2117] sender: [13:92:2057] recipient: [13:88:2116] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! 
new actor is[13:91:2117] Leader for TabletID 72057594037927937 is [13:91:2117] sender: [13:145:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:75:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:78:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:79:2057] recipient: [16:77:2109] Leader for TabletID 72057594037927937 is [16:80:2110] sender: [16:81:2057] recipient: [16:77:2109] !Reboot 72057594037927937 (actor [16:55:2096]) rebooted! !Reboot 72057594037927937 (actor [16:55:2096]) tablet resolver refreshed! new actor is[16:80:2110] Leader for TabletID 72057594037927937 is [16:80:2110] sender: [16:134:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:75:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:79:2057] recipient: [17:77:2109] Leader for TabletID 72057594037927937 is [17:80:2110] sender: [17:81:2057] recipient: [17:77:2109] !Reboot 72057594037927937 (actor [17:55:2096]) rebooted! !Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed! 
new actor is[17:80:2110] Leader for TabletID 72057594037927937 is [17:80:2110] sender: [17:134:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:76:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:79:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:80:2057] recipient: [18:78:2109] Leader for TabletID 72057594037927937 is [18:81:2110] sender: [18:82:2057] recipient: [18:78:2109] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:81:2110] Leader for TabletID 72057594037927937 is [18:81:2110] sender: [18:135:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:79:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:82:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:83:2057] recipient: [19:81:2112] Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:85:2057] recipient: [19:81:2112] !Reboot 72057594037927937 (actor [19:55:2096]) rebooted! !Reboot 72057594037927937 (actor [19:55:2096]) tablet resolver refreshed! new actor is[19:84:2113] Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:138:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:79:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:82:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:83:2057] recipient: [20:81:2112] Leader for TabletID 72057594037927937 is [20:84:2113] sender: [20:85:2057] recipient: [20:81:2112] !Reboot 72057594037927937 (actor [20:55:2096]) rebooted! !Reboot 72057594037927937 (actor [20:55:2096]) tablet resolver refreshed! 
new actor is[20:84:2113] Leader for TabletID 72057594037927937 is [20:84:2113] sender: [20:138:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:80:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:82:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:84:2057] recipient: [21:83:2112] Leader for TabletID 72057594037927937 is [21:85:2113] sender: [21:86:2057] recipient: [21:83:2112] !Reboot 72057594037927937 (actor [21:55:2096]) rebooted! !Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed! new actor is[21:85:2113] Leader for TabletID 72057594037927937 is [21:85:2113] sender: [21:103:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:82:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:85:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:86:2057] recipient: [22:84:2114] Leader for TabletID 72057594037927937 is [22:87:2115] sender: [22:88:2057] recipient: [22:84:2114] !Reboot 72057594037927937 (actor [22:55:2096]) rebooted! !Reboot 72057594037927937 (actor [22:55:2096]) tablet resolver refreshed! new actor is[22:87:2115] Leader for TabletID 72057594037927937 is [22:87:2115] sender: [22:141:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:56:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:73:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:82:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:84:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:86:2057] recipient: [23:85:2114] Leader for TabletID 72057594037927937 is [23:87:2115] sender: [23:88:2057] recipient: [23:85:2114] !Reboot 72057594037927937 (actor [23:55:2096]) rebooted! !Reboot 72057594037927937 (actor [23:55:2096]) tablet resolver refreshed! 
new actor is[23:87:2115] Leader for TabletID 72057594037927937 is [23:87:2115] sender: [23:141:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:56:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:73:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:83:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:87:2057] recipient: [24:86:2114] Leader for TabletID 72057594037927937 is [24:88:2115] sender: [24:89:2057] recipient: [24:86:2114] !Reboot 72057594037927937 (actor [24:55:2096]) rebooted! !Reboot 72057594037927937 (actor [24:55:2096]) tablet resolver refreshed! new actor is[24:88:2115] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:56:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:73:2057] recipient: [25:14:2061] |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> BootstrapperTest::DuplicateNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2025-04-04T16:52:16.816964Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T16:52:16.817357Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T16:52:16.831330Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-04T16:52:16.831652Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-04-04T16:52:16.832065Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-04T16:52:16.832088Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-04T16:52:16.844928Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-04T16:52:16.845018Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-04T16:52:16.846346Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-04T16:52:16.846381Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.139961s 2025-04-04T16:52:17.462646Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T16:52:17.463365Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:209:2094] 2025-04-04T16:52:17.463800Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-04T16:52:17.463845Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-04-04T16:51:28.318420Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1743785488318386 2025-04-04T16:51:28.746207Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501644264725788:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.746266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:28.866991Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501643394808110:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.097942Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:51:29.118646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:29.129867Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c54/r3tmp/tmpbZ0vaz/pdisk_1.dat 2025-04-04T16:51:29.416568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.416633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.417865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.417935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.421768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.430672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:29.434560Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:29.437997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16687, node 1 2025-04-04T16:51:29.606288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000c54/r3tmp/yandexLqMUiS.tmp 2025-04-04T16:51:29.606313Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000c54/r3tmp/yandexLqMUiS.tmp 2025-04-04T16:51:29.606439Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000c54/r3tmp/yandexLqMUiS.tmp 2025-04-04T16:51:29.606589Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:29.728116Z INFO: TTestServer started on Port 4540 GrpcPort 16687 TClient is connected to server localhost:4540 PQClient connected to localhost:16687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:30.190205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T16:51:33.268124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501664869644729:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.268220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501664869644762:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.268285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.277190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T16:51:33.370488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501664869644767:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T16:51:33.597266Z node 2 :TX_PROXY ERROR: Actor# [2:7489501664869644795:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:33.633938Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501665739563360:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:33.635839Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTlhNDE3OWYtNDM0MGQzYy02ZTI4Njc2Ni01YjU5Mjg2OQ==, ActorId: [1:7489501665739563322:2337], ActorState: ExecuteState, TraceId: 01jr0ve0tp38dtkthdqz1zx75m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:33.636922Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501664869644802:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:33.638158Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:33.638499Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTRlNDIzMDUtNGFlNTgwNjEtOTgxMGUyZDAtZmNkYzlmOWY=, ActorId: [2:7489501664869644726:2307], ActorState: ExecuteState, TraceId: 01jr0ve0qxad8850fbk9y8w6qb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:33.638870Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:33.653393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:51:33.746740Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501644264725788:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.746808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:33.830350Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501643394808110:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.830389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:33.880821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:51:34.043335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16687", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T16:51:34.412627Z node 1 :KQP_EXECUTER 
ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0ve1p43gzqceyhpmpwvnrn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA1MTgzY2EtZmVlNjUzOWEtZjNmMTJkYWItNWEyOWUyMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489501670034531084:2987] === CheckClustersList. Ok 2025-04-04T16:51:41.095539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:16687 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T16:51:41.445136Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:16687 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 8 ... onfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:19728 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T16:52:08.547158Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:19728 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T16:52:09.053656Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: 
OK } } } === Topic created, have version: 1 2025-04-04T16:52:09.077250Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-04T16:52:09.102673Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-04T16:52:09.104754Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:19728 2025-04-04T16:52:09.274623Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T16:52:09.297104Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T16:52:09.297139Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-04T16:52:09.298137Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T16:52:09.298254Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:37228 2025-04-04T16:52:09.298268Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37228 proto=v1 topic=test-topic durationSec=0 2025-04-04T16:52:09.298276Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T16:52:09.301286Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-04T16:52:09.301391Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T16:52:09.301401Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T16:52:09.301410Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T16:52:09.301426Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T16:52:09.308975Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T16:52:09.695903Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T16:52:09.697051Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489501821767785187:2493] connected; active server actors: 1 2025-04-04T16:52:09.697529Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 
2025-04-04T16:52:09.697562Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T16:52:09.702784Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489501821767785187:2493] disconnected; active server actors: 1 2025-04-04T16:52:09.702822Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489501821767785187:2493] disconnected no session 2025-04-04T16:52:09.824115Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T16:52:09.824140Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T16:52:09.824150Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501821767785129:2493] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-04T16:52:09.824171Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T16:52:09.825789Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-04-04T16:52:09.825846Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7489501821767785214:2493], now have 1 active actors on pipe 2025-04-04T16:52:09.825895Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:52:09.825918Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:52:09.826010Z node 3 :PERSQUEUE INFO: new Cookie src|f8d4b55a-54099e6f-9c5cf732-b44909_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T16:52:09.826110Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T16:52:09.826225Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:52:09.830673Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:52:09.830716Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:52:09.830853Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:52:09.831022Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|f8d4b55a-54099e6f-9c5cf732-b44909_0 2025-04-04T16:52:09.835575Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743785529835 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:52:09.835680Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|f8d4b55a-54099e6f-9c5cf732-b44909_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T16:52:09.838339Z :INFO: [] MessageGroupId [src] SessionId [src|f8d4b55a-54099e6f-9c5cf732-b44909_0] Write session: close. 
Timeout = 0 ms 2025-04-04T16:52:09.838420Z :INFO: [] MessageGroupId [src] SessionId [src|f8d4b55a-54099e6f-9c5cf732-b44909_0] Write session will now close 2025-04-04T16:52:09.838465Z :DEBUG: [] MessageGroupId [src] SessionId [src|f8d4b55a-54099e6f-9c5cf732-b44909_0] Write session: aborting 2025-04-04T16:52:09.838994Z :INFO: [] MessageGroupId [src] SessionId [src|f8d4b55a-54099e6f-9c5cf732-b44909_0] Write session: gracefully shut down, all writes complete 2025-04-04T16:52:09.839027Z :DEBUG: [] MessageGroupId [src] SessionId [src|f8d4b55a-54099e6f-9c5cf732-b44909_0] Write session: destroy 2025-04-04T16:52:09.839860Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|f8d4b55a-54099e6f-9c5cf732-b44909_0 grpc read done: success: 0 data: 2025-04-04T16:52:09.839887Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|f8d4b55a-54099e6f-9c5cf732-b44909_0 grpc read failed 2025-04-04T16:52:09.839924Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|f8d4b55a-54099e6f-9c5cf732-b44909_0 grpc closed 2025-04-04T16:52:09.839955Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|f8d4b55a-54099e6f-9c5cf732-b44909_0 is DEAD 2025-04-04T16:52:09.840489Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T16:52:09.842524Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489501821767785214:2493] destroyed 2025-04-04T16:52:09.842591Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-04-04T16:52:09.911554Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. Description: |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs |75.3%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TraverseDatashard::TraverseTwoTables >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-04-04T16:51:27.649671Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501639897309111:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.650007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001701/r3tmp/tmpTrRbCW/pdisk_1.dat 2025-04-04T16:51:28.100374Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.109595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-04-04T16:51:28.109684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.111526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2426, node 1 2025-04-04T16:51:28.202008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.202048Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.202057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.202168Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.338395Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.340049Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.340068Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.341491Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:19797, port: 19797 2025-04-04T16:51:28.341571Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.438897Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-04T16:51:28.485352Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****8teQ (345FEC3F) () has now valid token of ldapuser@ldap 2025-04-04T16:51:31.367101Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501655115590489:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:31.367234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001701/r3tmp/tmpm5uwKG/pdisk_1.dat 2025-04-04T16:51:31.567769Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27767, node 2 2025-04-04T16:51:31.690175Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:31.690196Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:31.690203Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:31.690236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.690325Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:31.690331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.691701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:31.950504Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:31.954410Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:31.954439Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:31.955151Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: 
ldaps://localhost:21965, port: 21965 2025-04-04T16:51:31.955218Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:32.090841Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:21965. Invalid credentials 2025-04-04T16:51:32.091356Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****8ypQ (B5D12260) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:21965. Invalid credentials)' 2025-04-04T16:51:36.269291Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501677122680315:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:36.269577Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001701/r3tmp/tmpHohq61/pdisk_1.dat 2025-04-04T16:51:36.449441Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:36.479053Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:36.479135Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:36.480666Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22514, node 3 2025-04-04T16:51:36.650740Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:36.650759Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:36.650767Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:36.650874Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:36.742362Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:36.742640Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:36.742653Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:36.743253Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:8016, port: 8016 2025-04-04T16:51:36.743304Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:36.818843Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:8016. Invalid credentials 2025-04-04T16:51:36.819223Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****62vw (288E3D05) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:8016. 
Invalid credentials)' 2025-04-04T16:51:41.571298Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501701218993223:2092];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001701/r3tmp/tmpMQiDBn/pdisk_1.dat 2025-04-04T16:51:41.632251Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:41.695386Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:41.727956Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:41.728038Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:41.732894Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11143, node 4 2025-04-04T16:51:41.896385Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:41.896408Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:41.896415Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:41.896537Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:42.282379Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:42.283565Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:42.283588Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:42.284258Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:16642, port: 16642 2025-04-04T16:51:42.284318Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:42.402663Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:42.403199Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:16642 return no entries 2025-04-04T16:51:42.403752Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****TzaA (016633F5) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:16642 return no entries)' 2025-04-04T16:51:51.583265Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489501741910459081:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001701/r3tmp/tmpK8u0LB/pdisk_1.dat 2025-04-04T16:51:51.746864Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:51.917117Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:51.967039Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:51.967274Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:51.987282Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28599, node 5 2025-04-04T16:51:52.407268Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:52.407288Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:52.407295Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:52.407406Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:52.620988Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:52.621299Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:52.621317Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:52.622025Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:7771, port: 7771 2025-04-04T16:51:52.622089Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:52.750781Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:52.798760Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:52.799382Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:52.799432Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:52.846594Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:52.890548Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:52.892230Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****PE0g 
(A280F39D) () has now valid token of ldapuser@ldap 2025-04-04T16:51:56.486370Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489501741910459081:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:56.486446Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:56.486514Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****PE0g (A280F39D) 2025-04-04T16:51:56.486581Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:7771, port: 7771 2025-04-04T16:51:56.486664Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:56.577579Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:56.630818Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:56.650469Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:56.650541Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:56.733812Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:56.803165Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:56.804076Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****PE0g (A280F39D) () has now valid token of ldapuser@ldap 2025-04-04T16:52:00.957152Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****PE0g (A280F39D) 2025-04-04T16:52:00.957217Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:7771, port: 7771 2025-04-04T16:52:00.957268Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:01.079003Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:01.122577Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:52:01.123061Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:52:01.123098Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:01.172839Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:01.214588Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:01.215971Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****PE0g (A280F39D) () has now valid token of ldapuser@ldap 2025-04-04T16:52:07.411806Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501811179166577:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:07.897911Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001701/r3tmp/tmppnasRI/pdisk_1.dat 2025-04-04T16:52:08.194410Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:08.194563Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:08.194917Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:08.198486Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20094, node 6 2025-04-04T16:52:08.406827Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:08.406849Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:08.406858Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:08.406983Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:08.788114Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:08.795411Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:08.795447Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:08.796337Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:5245, port: 5245 2025-04-04T16:52:08.796411Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:08.903457Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:08.951685Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Kg-Q (A5334CA8) () has now valid token of ldapuser@ldap 2025-04-04T16:52:12.334790Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489501811179166577:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:12.334860Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:13.379148Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Kg-Q (A5334CA8) 2025-04-04T16:52:13.379738Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:5245, port: 5245 2025-04-04T16:52:13.380484Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:13.694645Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:13.751709Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Kg-Q (A5334CA8) 
() has now valid token of ldapuser@ldap 2025-04-04T16:52:17.394372Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Kg-Q (A5334CA8) 2025-04-04T16:52:17.394475Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:5245, port: 5245 2025-04-04T16:52:17.394577Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:17.530783Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:17.576521Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Kg-Q (A5334CA8) () has now valid token of ldapuser@ldap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-04-04T16:51:27.577788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501641532107459:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.577930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d6/r3tmp/tmplzd8LL/pdisk_1.dat 2025-04-04T16:51:27.897429Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11699, node 1 2025-04-04T16:51:27.969865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:27.969970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.039507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.094820Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.094839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.094845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.094950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.254377Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.256533Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.256569Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.257318Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19140, port: 19140 2025-04-04T16:51:28.257984Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.276320Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-04T16:51:28.319337Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****kmsA (52EB34DE) () has now valid token of ldapuser@ldap 2025-04-04T16:51:31.191875Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501657370592247:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d6/r3tmp/tmpe40Rkz/pdisk_1.dat 
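
Both LDAP suites above exercise the same provider flow: a service bind as robouser, a user search requesting memberOf, then — when nested groups are enabled — either a single LDAP_MATCHING_RULE_IN_CHAIN query or the iterative entryDn-based "tree traversal" visible in the transcripts. Below is a minimal Python sketch of that flow using the ldap3 library. The base DN, bind DN, and filter shapes are taken from the log; the credential, port, and loop structure are illustrative assumptions, not YDB's implementation:

```python
from ldap3 import Server, Connection, SUBTREE

BASE = "dc=search,dc=yandex,dc=net"
server = Server("localhost", port=19140, use_ssl=False)  # log shows ldap:// and ldaps:// variants
conn = Connection(server, user=f"cn=robouser,{BASE}",
                  password="robouser-password",  # assumed; never printed in the log
                  auto_bind=True)  # the initial bind; "Invalid credentials" in the negative tests

# User search: the log requests attribute memberOf for filter uid=ldapuser.
conn.search(BASE, "(uid=ldapuser)", search_scope=SUBTREE, attributes=["memberOf"])
if not conn.entries:
    raise RuntimeError("LDAP user ldapuser does not exist")  # the "return no entries" error path
groups = set(conn.entries[0].entry_attributes_as_dict.get("memberOf", []))

# Fast path: one matching-rule-in-chain query (OID 1.2.840.113556.1.4.1941) asks the
# server for the transitive closure, returning only DNs (the "attributes: 1.1" line).
conn.search(BASE, f"(member:1.2.840.113556.1.4.1941:=uid=ldapuser,{BASE})",
            search_scope=SUBTREE)

# Fallback: "Try to get nested groups - tree traversal" — ask each newly seen group
# for its own memberOf until a fixpoint, matching the (|(entryDn=...)...) queries above.
frontier = set(groups)
while frontier:
    flt = "(|" + "".join(f"(entryDn={dn})" for dn in sorted(frontier)) + ")"
    conn.search(BASE, flt, search_scope=SUBTREE, attributes=["memberOf"])
    parents = {dn for e in conn.entries
               for dn in e.entry_attributes_as_dict.get("memberOf", [])}
    frontier = parents - groups
    groups |= parents
```

The refresh lines ("Refreshing ticket eyJh****...") repeat exactly this sequence on a timer, which is why the same bind/search/traversal queries recur every few seconds in the transcripts.
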
2025-04-04T16:51:31.276065Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:31.376272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.376321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.378641Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:31.381527Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28878, node 2 2025-04-04T16:51:31.530800Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:31.530821Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:31.530829Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:31.530924Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:31.722159Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:31.725715Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:31.725745Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:31.726432Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23459, port: 23459 2025-04-04T16:51:31.726530Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:31.787227Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:23459. Invalid credentials 2025-04-04T16:51:31.787514Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****bmjg (C92CEE5C) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:23459. 
Invalid credentials)' 2025-04-04T16:51:36.091737Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501678265135209:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d6/r3tmp/tmpaGBFe9/pdisk_1.dat 2025-04-04T16:51:36.197861Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:36.297840Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:36.342806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:36.342876Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:36.348481Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28352, node 3 2025-04-04T16:51:36.522835Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:36.522857Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:36.522865Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:36.522985Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:37.068910Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:37.073443Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:37.073469Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:37.077148Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:6101, port: 6101 2025-04-04T16:51:37.077226Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:37.125360Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:6101. Invalid credentials 2025-04-04T16:51:37.125590Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****1K6Q (4A3892F3) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:6101. 
Invalid credentials)' 2025-04-04T16:51:40.961601Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501693843059367:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:40.961748Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d6/r3tmp/tmplORSsT/pdisk_1.dat 2025-04-04T16:51:41.134654Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:41.153141Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:41.153224Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:41.155890Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23210, node 4 2025-04-04T16:51:41.268075Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:41.268096Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:41.268104Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:41.268213Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:41.342371Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:41.344337Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:41.344358Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:41.344986Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5770, port: 5770 2025-04-04T16:51:41.345059Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:41.412313Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:41.413791Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:5770 return no entries 2025-04-04T16:51:41.414013Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****KWDw (8AF33801) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:5770 return no entries)' 2025-04-04T16:51:51.887774Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489501743851552599:2117];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d6/r3tmp/tmpbh5ObY/pdisk_1.dat 2025-04-04T16:51:52.065914Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:52.336589Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:52.357136Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:52.357246Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:52.368329Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22739, node 5 2025-04-04T16:51:52.494871Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:52.494898Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:52.494908Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:52.495041Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:52.670757Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:52.673574Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:52.673605Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:52.674280Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3910, port: 3910 2025-04-04T16:51:52.674385Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:52.735174Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:52.779622Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:52.781544Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:52.781622Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:52.826652Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:52.870754Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:52.880333Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****BnWg 
(6682E9E8) () has now valid token of ldapuser@ldap 2025-04-04T16:51:56.900168Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489501743851552599:2117];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:57.227525Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****BnWg (6682E9E8) 2025-04-04T16:51:57.227606Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:57.227943Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3910, port: 3910 2025-04-04T16:51:57.228007Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:57.323389Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:57.374592Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:57.379179Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:57.379217Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:57.431219Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:57.486082Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:57.515026Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****BnWg (6682E9E8) () has now valid token of ldapuser@ldap 2025-04-04T16:52:01.225798Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****BnWg (6682E9E8) 2025-04-04T16:52:01.225965Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:3910, port: 3910 2025-04-04T16:52:01.226038Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:01.299312Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:01.346509Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:52:01.346922Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:52:01.346984Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:01.394760Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:01.446454Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:01.447318Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****BnWg (6682E9E8) () has now valid token of ldapuser@ldap 2025-04-04T16:52:06.807401Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501805832174014:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:07.559669Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d6/r3tmp/tmp5EUnbo/pdisk_1.dat 2025-04-04T16:52:07.918609Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:07.994613Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:07.994703Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:08.001650Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18101, node 6 2025-04-04T16:52:08.184892Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:08.184914Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:08.184922Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:08.185036Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:08.654273Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:08.663071Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:08.663106Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:08.663834Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:14901, port: 14901 2025-04-04T16:52:08.663896Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:08.717262Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:08.759390Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****NJRA (A6D9B23A) () has now valid token of ldapuser@ldap 2025-04-04T16:52:11.750763Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489501805832174014:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:11.750837Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:13.811265Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****NJRA (A6D9B23A) 2025-04-04T16:52:13.811580Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:14901, port: 14901 2025-04-04T16:52:13.812229Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:13.914453Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:13.975534Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****NJRA (A6D9B23A) 
() has now valid token of ldapuser@ldap 2025-04-04T16:52:16.882771Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****NJRA (A6D9B23A) 2025-04-04T16:52:16.883646Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:14901, port: 14901 2025-04-04T16:52:16.883713Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:17.086496Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:17.130910Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****NJRA (A6D9B23A) () has now valid token of ldapuser@ldap >> AnalyzeColumnshard::AnalyzeTwoColumnTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! 
new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7927937 (actor [16:55:2096]) tablet resolver refreshed! new actor is[16:80:2110] Leader for TabletID 72057594037927937 is [16:80:2110] sender: [16:134:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:75:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:78:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:79:2057] recipient: [17:77:2109] Leader for TabletID 72057594037927937 is [17:80:2110] sender: [17:81:2057] recipient: [17:77:2109] !Reboot 72057594037927937 (actor [17:55:2096]) rebooted! !Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed! new actor is[17:80:2110] Leader for TabletID 72057594037927937 is [17:80:2110] sender: [17:134:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:76:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:79:2057] recipient: [18:78:2109] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:80:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:81:2110] sender: [18:82:2057] recipient: [18:78:2109] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:81:2110] Leader for TabletID 72057594037927937 is [18:81:2110] sender: [18:135:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:79:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:82:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:83:2057] recipient: [19:81:2112] Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:85:2057] recipient: [19:81:2112] !Reboot 72057594037927937 (actor [19:55:2096]) rebooted! !Reboot 72057594037927937 (actor [19:55:2096]) tablet resolver refreshed! new actor is[19:84:2113] Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:138:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:79:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:81:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:83:2057] recipient: [20:82:2112] Leader for TabletID 72057594037927937 is [20:84:2113] sender: [20:85:2057] recipient: [20:82:2112] !Reboot 72057594037927937 (actor [20:55:2096]) rebooted! !Reboot 72057594037927937 (actor [20:55:2096]) tablet resolver refreshed! new actor is[20:84:2113] Leader for TabletID 72057594037927937 is [20:84:2113] sender: [20:138:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:80:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:82:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:84:2057] recipient: [21:83:2112] Leader for TabletID 72057594037927937 is [21:85:2113] sender: [21:86:2057] recipient: [21:83:2112] !Reboot 72057594037927937 (actor [21:55:2096]) rebooted! !Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed! new actor is[21:85:2113] Leader for TabletID 72057594037927937 is [21:85:2113] sender: [21:103:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:82:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:85:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:86:2057] recipient: [22:84:2114] Leader for TabletID 72057594037927937 is [22:87:2115] sender: [22:88:2057] recipient: [22:84:2114] !Reboot 72057594037927937 (actor [22:55:2096]) rebooted! !Reboot 72057594037927937 (actor [22:55:2096]) tablet resolver refreshed! new actor is[22:87:2115] Leader for TabletID 72057594037927937 is [22:87:2115] sender: [22:141:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:56:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:73:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:82:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:85:2057] recipient: [23:84:2114] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:87:2115] sender: [23:88:2057] recipient: [23:84:2114] !Reboot 72057594037927937 (actor [23:55:2096]) rebooted! !Reboot 72057594037927937 (actor [23:55:2096]) tablet resolver refreshed! new actor is[23:87:2115] Leader for TabletID 72057594037927937 is [23:87:2115] sender: [23:141:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:56:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:73:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:83:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:86:2057] recipient: [24:85:2114] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:88:2115] sender: [24:89:2057] recipient: [24:85:2114] !Reboot 72057594037927937 (actor [24:55:2096]) rebooted! !Reboot 72057594037927937 (actor [24:55:2096]) tablet resolver refreshed! new actor is[24:88:2115] Leader for TabletID 72057594037927937 is [24:88:2115] sender: [24:142:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:56:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:73:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:86:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:89:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:90:2057] recipient: [25:88:2117] Leader for TabletID 72057594037927937 is [25:91:2118] sender: [25:92:2057] recipient: [25:88:2117] !Reboot 72057594037927937 (actor [25:55:2096]) rebooted! !Reboot 72057594037927937 (actor [25:55:2096]) tablet resolver refreshed! new actor is[25:91:2118] Leader for TabletID 72057594037927937 is [25:91:2118] sender: [25:145:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:56:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:73:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:86:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:89:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:90:2057] recipient: [26:88:2117] Leader for TabletID 72057594037927937 is [26:91:2118] sender: [26:92:2057] recipient: [26:88:2117] !Reboot 72057594037927937 (actor [26:55:2096]) rebooted! !Reboot 72057594037927937 (actor [26:55:2096]) tablet resolver refreshed! new actor is[26:91:2118] Leader for TabletID 72057594037927937 is [26:91:2118] sender: [26:145:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:56:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:73:2057] recipient: [27:14:2061] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 6361, MsgBus: 17391 2025-04-04T16:51:40.059295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501695425168271:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:40.059328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e9a/r3tmp/tmp1Bjhq7/pdisk_1.dat 2025-04-04T16:51:40.779675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:40.779780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:40.782570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:40.815977Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6361, node 1 2025-04-04T16:51:40.902896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:40.902921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:40.902927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:40.903037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17391 TClient is connected to server localhost:17391 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:41.849086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:41.878314Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:51:41.884532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:42.138725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:42.319461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:42.418903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:45.062259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501695425168271:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:45.156598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:45.604040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501716900006521:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:45.604157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:46.432203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:51:46.610876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:51:46.906874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:51:47.088659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:51:47.347362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:51:48.022850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:51:48.734866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501729784908995:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:48.750050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501729784909001:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:48.750169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:48.838718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:51:49.061958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501729784909004:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:51:49.182092Z node 1 :TX_PROXY ERROR: Actor# [1:7489501734079876360:3492] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:51.781575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:51:52.548238Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785512567, txId: 281474976710675] shutting down 864000000000 Trying to start YDB, gRPC: 19445, MsgBus: 11608 2025-04-04T16:51:53.409439Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501753313461020:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:53.409555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e9a/r3tmp/tmp90yIbh/pdisk_1.dat 2025-04-04T16:51:53.615568Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:53.627905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:53.628023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:53.629662Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19445, node 2 2025-04-04T16:51:53.714798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:53.714824Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:53.714832Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:53.714964Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11608 TClient is connected to server localhost:11608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:51:54.234410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:54.243140Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:51:54.261381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:54.351341Z node 2 :FLAT_TX_SCHEMESHARD ... 1474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:00.304859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:52:00.371268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:52:00.435577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:52:00.513742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:52:00.581657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:52:00.772044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501783378234417:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:00.772127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:00.772480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501783378234423:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:00.776569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:52:00.787251Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T16:52:00.787455Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501783378234425:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:52:00.865315Z node 2 :TX_PROXY ERROR: Actor# [2:7489501783378234480:3462] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:52:02.207491Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785522234, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 18130, MsgBus: 18223 2025-04-04T16:52:06.864030Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501806722835250:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:06.864099Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e9a/r3tmp/tmpyX5LX8/pdisk_1.dat 2025-04-04T16:52:07.899702Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:07.904843Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:07.967054Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:07.967139Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:07.968452Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18130, node 3 2025-04-04T16:52:08.198845Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:08.198871Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:08.198880Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:08.199034Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18223 TClient is connected to server localhost:18223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:52:09.057998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:09.066620Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:09.076550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:09.154310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:09.338033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:09.446212Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:11.850496Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489501806722835250:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:12.130590Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:14.108346Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489501841082575393:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:14.108445Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:14.156549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:14.379425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:52:14.499726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:52:14.726056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:52:14.864830Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:52:15.164320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:52:15.797620Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489501845377543262:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:15.797705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:15.806600Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489501845377543267:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:15.812028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:52:15.912153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489501845377543269:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:52:15.999155Z node 3 :TX_PROXY ERROR: Actor# [3:7489501845377543327:3490] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:52:19.433561Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785539405, txId: 281474976710671] shutting down 2025-04-04T16:52:19.705028Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785539734, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-04-04T16:51:28.194215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501644818103712:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.194487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c17/r3tmp/tmpEK8gMC/pdisk_1.dat 2025-04-04T16:51:28.702271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.702364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.715031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.747523Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11906, node 1 2025-04-04T16:51:28.856279Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.856308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.856321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.856436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:51:29.251372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:31.671478Z node 1 :TX_PROXY ERROR: Actor# [1:7489501657703006289:2524] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.sys\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:31.708271Z node 1 :TX_PROXY ERROR: Actor# [1:7489501657703006298:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:31.723591Z node 1 :TX_PROXY ERROR: Actor# [1:7489501657703006307:2538] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:31.735290Z node 1 :TX_PROXY ERROR: Actor# [1:7489501657703006315:2544] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:31.755542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501657703006324:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:31.755618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:32.105283Z node 1 :TX_PROXY ERROR: Actor# [1:7489501661997973639:2557] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Execution, code: 1060
:2:28: Error: Executing DROP TABLE
: Error: Incorrect scheme found while performing Kikimr operation., code: 2003
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:32.128776Z node 1 :TX_PROXY ERROR: [ReadTable [1:7489501661997973650:2340] TxId# 281474976710663] Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats] 2025-04-04T16:51:32.128925Z node 1 :TX_PROXY ERROR: [ReadTable [1:7489501661997973650:2340] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: /Root/.sys/partition_stats
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats], code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats]
: Error: Bulk upsert to table '/Root/.sys/partition_stats' is not supported. Table is a system view 2025-04-04T16:51:32.175479Z node 1 :TX_PROXY ERROR: Actor# [1:7489501661997973656:2566] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.sys\', error: path part \'.sys\' is reserved by the system" severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path part '.sys' is reserved by the system 2025-04-04T16:51:32.190188Z node 1 :TX_PROXY ERROR: Actor# [1:7489501661997973665:2573] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.sys\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:32.201298Z node 1 :TX_PROXY ERROR: Actor# [1:7489501661997973674:2580] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.sys\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:32.225166Z node 1 :TX_PROXY ERROR: Actor# [1:7489501661997973684:2587] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:32.243976Z node 1 :TX_PROXY ERROR: Actor# [1:7489501661997973692:2593] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.sys/partition_stats\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 }
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2025-04-04T16:51:34.074552Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501668852217115:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.074597Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c17/r3tmp/tmpjFq2V9/pdisk_1.dat 2025-04-04T16:51:34.360067Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:34.376110Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:34.376197Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:34.377864Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15551, node 6 2025-04-04T16:51:34.442820Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:34.442841Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:34.442848Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:34.442986Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 ... [9:7489501814513465372:2433], row count: 3, finished: 1 2025-04-04T16:52:08.743541Z node 9 :SYSTEM_VIEWS INFO: Scan finished, actor: [9:7489501814513465372:2433], owner: [9:7489501814513465369:2431], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:08.746635Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785528734, txId: 281474976715673] shutting down 2025-04-04T16:52:08.887802Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr0vf3dg1y1dds9bp0ef7r69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ODU1ZTI3ZjYtZjFlNmFkZjEtZWU3ZjQ5NzctZGNmMzQ4ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:52:08.893428Z node 9 :SYSTEM_VIEWS INFO: Scan started, actor: [9:7489501814513465406:2443], owner: [9:7489501814513465403:2441], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:08.913223Z node 9 :SYSTEM_VIEWS INFO: Scan prepared, actor: [9:7489501814513465406:2443], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:52:08.914876Z node 9 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [9:7489501814513465406:2443], row count: 3, finished: 1 2025-04-04T16:52:08.914911Z node 9 :SYSTEM_VIEWS INFO: Scan finished, actor: [9:7489501814513465406:2443], owner: [9:7489501814513465403:2441], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:08.918134Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785528877, txId: 281474976715675] shutting down 2025-04-04T16:52:09.051491Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0vf3jzcja683k650r95xf1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MTQ3N2NjZDAtYzU1MWM4ZDYtNGM0MjhlYy00YTM0NWIyNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:52:09.054585Z node 9 :SYSTEM_VIEWS INFO: Scan started, actor: [9:7489501818808432735:2452], owner: [9:7489501818808432732:2450], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:09.059165Z node 9 :SYSTEM_VIEWS INFO: Scan prepared, actor: [9:7489501818808432735:2452], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:52:09.060313Z node 9 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [9:7489501818808432735:2452], row count: 4, finished: 1 2025-04-04T16:52:09.060347Z node 9 :SYSTEM_VIEWS INFO: Scan finished, actor: [9:7489501818808432735:2452], owner: [9:7489501818808432732:2450], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:09.064053Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785529050, txId: 281474976715677] shutting down 2025-04-04T16:52:09.243933Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0vf3qk9dng25qvg8x4vq5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NmNmNWFmNDQtZGJmMGI1ZTYtYTBiMmJiM2EtNzJhMTM0NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:52:09.246360Z node 9 :SYSTEM_VIEWS INFO: Scan started, actor: [9:7489501818808432769:2461], owner: [9:7489501818808432765:2459], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:09.247070Z node 9 :SYSTEM_VIEWS INFO: Scan prepared, actor: [9:7489501818808432769:2461], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:52:09.247525Z node 9 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [9:7489501818808432769:2461], row count: 4, finished: 1 2025-04-04T16:52:09.247549Z node 9 :SYSTEM_VIEWS INFO: Scan finished, actor: [9:7489501818808432769:2461], owner: [9:7489501818808432765:2459], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:09.251630Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785529242, txId: 281474976715679] shutting down 2025-04-04T16:52:10.276679Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489501824854151229:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:10.276755Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c17/r3tmp/tmpA8Vdxk/pdisk_1.dat 2025-04-04T16:52:10.807373Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:10.895582Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:10.895678Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:10.904769Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63789, node 10 2025-04-04T16:52:11.281812Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:11.281833Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:11.281844Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:11.282152Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:52:13.861386Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:15.275046Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489501824854151229:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:15.275412Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:18.821157Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T16:52:19.107657Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489501863508857832:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:19.107777Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:19.108051Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489501863508857844:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:19.112659Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:52:19.131701Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489501863508857846:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:52:19.185976Z node 10 :TX_PROXY ERROR: Actor# [10:7489501863508857897:2510] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:52:19.588897Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vfdgvdzz6d7cmznp3g9xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OWNkZDM5ODYtNjEwNDZjNDUtNjY3MzNmNWQtYzY3YTY4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:52:19.594720Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7489501863508857947:2375], owner: [10:7489501863508857945:2374], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:19.595450Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7489501863508857947:2375], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:52:19.596004Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7489501863508857947:2375], row count: 4, finished: 1 2025-04-04T16:52:19.596026Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7489501863508857947:2375], owner: [10:7489501863508857945:2374], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:19.599823Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7489501863508857953:2378], owner: [10:7489501863508857945:2374], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:19.604175Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7489501863508857953:2378], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:52:19.604807Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7489501863508857953:2378], row count: 4, finished: 1 2025-04-04T16:52:19.604852Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7489501863508857953:2378], owner: [10:7489501863508857945:2374], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-04-04T16:52:19.608555Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785539586, txId: 281474976715661] shutting down |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> KqpCost::PointLookup |75.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey >> SystemView::AuthUsers_Access [GOOD] >> SystemView::AuthUsers_ResultOrder >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TargetDiscoverer::InvalidCredentials [GOOD] 
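The KQP_WORKLOAD_SERVICE warnings and the TX_PROXY "path exist, request accepts it" message above are the usual first-use race around the default resource pool: the fetchers report NOT_FOUND, a create is scheduled with a doublecheck retry, and the concurrent create then finds the path already present, which the request accepts. As a sketch, the resulting object can be inspected with the ydb CLI; the path is quoted verbatim from the TX_PROXY message, and the endpoint is an assumption carried over from this log:

    # Sketch only: describe the default workload-manager pool whose
    # creation is traced above; endpoint is an assumption from this log.
    ydb -e grpc://localhost:63789 -d /Root scheme describe \
        '/Root/.metadata/workload_manager/pools/default'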
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:83:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:83:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:84:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:85:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:85:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:85:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:86:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:86:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:53:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:56:2057] recipient: [13:50:2094] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:73:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:75:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:78:2057] recipient: [13:77:2109] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:80:2110] sender: [13:81:2057] recipient: [13:77:2109] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! new actor is[13:80:2110] Leader for TabletID 72057594037927937 is [13:80:2110] sender: [13:134:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:75:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:77:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:79:2057] recipient: [14:78:2109] Leader for TabletID 72057594037927937 is [14:80:2110] sender: [14:81:2057] recipient: [14:78:2109] !Reboot 72057594037927937 (actor [14:55:2096]) rebooted! 
!Reboot 72057594037927937 (actor [14:55:2096]) tablet resolver refreshed! new actor is[14:80:2110] Leader for TabletID 72057594037927937 is [14:80:2110] sender: [14:134:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:76:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:79:2057] recipient: [15:78:2109] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:80:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:81:2110] sender: [15:82:2057] recipient: [15:78:2109] !Reboot 72057594037927937 (actor [15:55:2096]) rebooted! !Reboot 72057594037927937 (actor [15:55:2096]) tablet resolver refreshed! new actor is[15:81:2110] Leader for TabletID 72057594037927937 is [15:81:2110] sender: [15:135:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:79:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:82:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:83:2057] recipient: [16:81:2112] Leader for TabletID 72057594037927937 is [16:84:2113] sender: [16:85:2057] recipient: [16:81:2112] !Reboot 72057594037927937 (actor [16:55:2096]) rebooted! !Reboot 72057594037927937 (actor [16:55:2096]) tablet resolver refreshed! new actor is[16:84:2113] Leader for TabletID 72057594037927937 is [16:84:2113] sender: [16:138:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:79:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:82:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:83:2057] recipient: [17:81:2112] Leader for TabletID 72057594037927937 is [17:84:2113] sender: [17:85:2057] recipient: [17:81:2112] !Reboot 72057594037927937 (actor [17:55:2096]) rebooted! !Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed! 
new actor is[17:84:2113] Leader for TabletID 72057594037927937 is [17:84:2113] sender: [17:138:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:80:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:83:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:84:2057] recipient: [18:82:2112] Leader for TabletID 72057594037927937 is [18:85:2113] sender: [18:86:2057] recipient: [18:82:2112] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:85:2113] Leader for TabletID 72057594037927937 is [18:85:2113] sender: [18:103:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:82:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:85:2057] recipient: [19:84:2114] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:87:2115] sender: [19:88:2057] recipient: [19:84:2114] !Reboot 72057594037927937 (actor [19:55:2096]) rebooted! !Reboot 72057594037927937 (actor [19:55:2096]) tablet resolver refreshed! new actor is[19:87:2115] Leader for TabletID 72057594037927937 is [19:87:2115] sender: [19:141:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:82:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:85:2057] recipient: [20:84:2114] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:87:2115] sender: [20:88:2057] recipient: [20:84:2114] !Reboot 72057594037927937 (actor [20:55:2096]) rebooted! !Reboot 72057594037927937 (actor [20:55:2096]) tablet resolver refreshed! 
new actor is[20:87:2115] Leader for TabletID 72057594037927937 is [20:87:2115] sender: [20:141:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:83:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:87:2057] recipient: [21:86:2114] Leader for TabletID 72057594037927937 is [21:88:2115] sender: [21:89:2057] recipient: [21:86:2114] !Reboot 72057594037927937 (actor [21:55:2096]) rebooted! !Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed! new actor is[21:88:2115] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2025-04-04T16:52:00.882989Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:52:00.883166Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:52:00.925916Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:52:00.927927Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T16:52:00.000000Z 2025-04-04T16:52:00.927975Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\327\335\215\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2025-04-04T16:52:01.758929Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:52:01.758998Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:52:01.771273Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:52:01.774166Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-04T16:52:01.775057Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:52:02.106925Z node 2 :PERSQUEUE INFO: new Cookie owner1|e05e40a2-233ddeb1-9693a357-a992361b_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 2 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 3 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 4 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 5 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 6 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 7 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captu ... ookie: 0 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 3 2025-04-04T16:52:17.530528Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:52:17.530617Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T16:52:17.548313Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:52:17.551104Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T16:52:17.000000Z 2025-04-04T16:52:17.551182Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured 
TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 2 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 2 Got batch complete: 1 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-04-04T16:52:14.061417Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501837108879459:2259];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:14.062012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001768/r3tmp/tmpIF6Huw/pdisk_1.dat 2025-04-04T16:52:17.352980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:17.777242Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:17.781101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:17.781197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:17.788418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:19.036792Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501837108879459:2259];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:19.036845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:6145 TServer::EnableGrpc on GrpcPort 12411, node 1 2025-04-04T16:52:19.567048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:19.567079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:19.567090Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:19.567250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6145 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:20.532430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:20.546536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:20.550580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:20.925261Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } } } 2025-04-04T16:52:20.926370Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-04-04T16:51:27.568428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501639115192430:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.568495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001714/r3tmp/tmp3nxq5w/pdisk_1.dat 2025-04-04T16:51:27.838939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6988, node 1 2025-04-04T16:51:27.932030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:27.932102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:27.984959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.017059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.017079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.017095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.017175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.126653Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.130450Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.130488Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.131772Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9843, port: 9843 2025-04-04T16:51:28.131824Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.214748Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:28.266210Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:28.266971Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:28.267024Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:28.314599Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:28.358595Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:28.361464Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****NOqQ (C0C57EC9) () has now valid token of ldapuser@ldap 2025-04-04T16:51:32.574012Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501639115192430:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:32.574095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:32.590533Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****NOqQ (C0C57EC9) 2025-04-04T16:51:32.590846Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:9843, port: 9843 2025-04-04T16:51:32.590935Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:32.702450Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:32.710462Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:9843 return no entries 2025-04-04T16:51:32.711018Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****NOqQ (C0C57EC9) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:9843 return no entries)' 2025-04-04T16:51:36.602392Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****NOqQ (C0C57EC9) 2025-04-04T16:51:39.404437Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501691114570244:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:39.404878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001714/r3tmp/tmphcyrGr/pdisk_1.dat 2025-04-04T16:51:39.655132Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:39.719371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:39.719453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:39.720515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12660, node 2 2025-04-04T16:51:39.910858Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:39.910880Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:39.910891Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:39.911015Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:40.094207Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 
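The group resolution traced above proceeds in two phases: one matching-rule-in-chain search (OID 1.2.840.113556.1.4.1941) that asks the server to expand nested membership, followed by a manual tree traversal ("Try to get nested groups - tree traversal") that re-queries each level of parent groups with an (|(entryDn=...)...) disjunction until a level yields no unseen parents. A minimal C++ sketch of that loop, assuming this reading of the log; BuildOrFilter, CollectNestedGroups and SearchMemberOf are hypothetical stand-ins, not the actual ydb/core/security/ldap_auth_provider implementation.

#include <string>
#include <unordered_set>
#include <vector>

// Builds the "(|(entryDn=g1)(entryDn=g2)...)" filter seen in the log for one
// level of the nested-group walk.
std::string BuildOrFilter(const std::vector<std::string>& groupDns) {
    std::string filter = "(|";
    for (const auto& dn : groupDns) {
        filter += "(entryDn=" + dn + ")";
    }
    filter += ")";
    return filter;
}

// Sketch of the traversal: start from the groups returned for the user and
// repeatedly resolve each group's own parents until nothing new appears.
// SearchMemberOf stands in for an LDAP search returning parent-group DNs.
std::vector<std::string> CollectNestedGroups(
        std::vector<std::string> frontier,
        std::vector<std::string> (*SearchMemberOf)(const std::string& filter)) {
    std::unordered_set<std::string> seen(frontier.begin(), frontier.end());
    std::vector<std::string> all(frontier.begin(), frontier.end());
    while (!frontier.empty()) {
        auto parents = SearchMemberOf(BuildOrFilter(frontier));
        frontier.clear();
        for (auto& dn : parents) {
            if (seen.insert(dn).second) {  // visit each group only once
                all.push_back(dn);
                frontier.push_back(dn);
            }
        }
    }
    return all;
}

The deduplicating set is what makes the walk terminate on cyclic group graphs, which matches the log reaching cn=people,ou=groups as the final level and then issuing the token.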
2025-04-04T16:51:40.108919Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:40.108951Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:40.109859Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:4024, port: 4024 2025-04-04T16:51:40.109928Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:40.222878Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:40.230312Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:4024. Server is busy 2025-04-04T16:51:40.230909Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Mbeg (4168C3C5) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:4024. Server is busy)' 2025-04-04T16:51:40.231216Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:40.231232Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:40.232222Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:4024, port: 4024 2025-04-04T16:51:40.232291Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:40.346800Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:40.358613Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:4024. Server is busy 2025-04-04T16:51:40.359049Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Mbeg (4168C3C5) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:4024. Server is busy)' 2025-04-04T16:51:42.365015Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Mbeg (4168C3C5) 2025-04-04T16:51:42.366705Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:42.366730Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:42.367721Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:4024, port: 4024 2025-04-04T16:51:42.367789Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:42.471087Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:42.471569Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:4024. Server is busy 2025-04-04T16:51:42.472016Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Mbeg (4168C3C5) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:4024. 
Server is busy)' 2025-04-04T16:51:44.438346Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501691114570244:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:44.438420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:45.370706Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****Mbeg (4168C3C5) 2025-04-04T16:51:45.370976Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:45.370991Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:45.371681Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:4024, port: 4024 2025-04-04T16:51:45.371739Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:45.563851Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:45.618775Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:45.621887Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:45.621946Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:45.670587Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:45.715257Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ... VIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:4024. Can't contact LDAP server 2025-04-04T16:51:50.471087Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****Mbeg (4168C3C5) () has now retryable error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:4024. 
Can't contact LDAP server)' 2025-04-04T16:51:52.052159Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501748569734473:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001714/r3tmp/tmpQXpziT/pdisk_1.dat 2025-04-04T16:51:52.301255Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:52.350431Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:52.350509Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:52.370400Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19537, node 3 2025-04-04T16:51:52.397869Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:52.397912Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:52.434551Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:52.498634Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:52.498660Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:52.498670Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:52.498812Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:52.650402Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:52.654648Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:52.654684Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:52.655425Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29102, port: 29102 2025-04-04T16:51:52.655502Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:52.743887Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:52.790752Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:52.838485Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:52.888015Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****ITOw (B8DAB9F6) () has now valid token of ldapuser@ldap 2025-04-04T16:52:00.242816Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501782794671888:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:00.443797Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001714/r3tmp/tmp5Yhpwb/pdisk_1.dat 2025-04-04T16:52:00.673490Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:00.704033Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-04T16:52:00.704114Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:00.747185Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6519, node 4 2025-04-04T16:52:00.990809Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:00.990834Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:00.990841Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:00.990964Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:01.286380Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:01.292231Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:01.292264Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:01.292989Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:15010, port: 15010 2025-04-04T16:52:01.293066Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:01.333828Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:01.382860Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:01.427025Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****WZDQ (0A327A21) () has now valid token of ldapuser@ldap 2025-04-04T16:52:08.911859Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489501817125845574:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:08.916344Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001714/r3tmp/tmp5gPDVq/pdisk_1.dat 2025-04-04T16:52:09.250879Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:09.270808Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:09.270899Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:09.278673Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26010, node 5 2025-04-04T16:52:09.440909Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:09.440933Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:09.440941Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:09.441083Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:09.712137Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:09.715349Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:09.715385Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 
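The busy-server sequence above shows how the ticket parser separates transient LDAP failures, which it schedules for refresh with a growing delay (roughly 2 s, then 3 s between the Refreshing entries here), from permanent ones such as an unknown user or a bad filter, which become cached error state. A hypothetical classifier consistent with the outcomes in this log; the real provider presumably keys off LDAP result codes rather than message text.

#include <string>

enum class ELdapErrorKind { Retryable, Permanent };

ELdapErrorKind ClassifyLdapError(const std::string& message) {
    // Transient conditions: the same request may succeed once the server
    // recovers, so the ticket stays refreshable.
    if (message.find("Server is busy") != std::string::npos ||
        message.find("Can't contact LDAP server") != std::string::npos) {
        return ELdapErrorKind::Retryable;
    }
    // Everything else seen in this log (unknown user, bad search filter)
    // cannot be fixed by retrying the identical request.
    return ELdapErrorKind::Permanent;
}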
2025-04-04T16:52:09.716200Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19910, port: 19910 2025-04-04T16:52:09.716304Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:09.774902Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:09.822847Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-04-04T16:52:09.870612Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:52:09.871069Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:52:09.871120Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:52:09.914737Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:52:09.962601Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-04-04T16:52:09.963726Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ribw (D8023E8E) () has now valid token of ldapuser@ldap 2025-04-04T16:52:17.446706Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501852983205846:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001714/r3tmp/tmpy9sons/pdisk_1.dat 2025-04-04T16:52:17.570952Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:52:17.704714Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:17.711895Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:17.711986Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:17.715811Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63658, node 6 2025-04-04T16:52:18.178767Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:18.179030Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:18.179038Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:18.179390Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:19.190198Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:19.195025Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:19.195053Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target 
database candidates(1): /Root 2025-04-04T16:52:19.196402Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:29654, port: 29654 2025-04-04T16:52:19.196586Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:19.399497Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:19.446839Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-04-04T16:52:19.446932Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:29654. Bad search filter 2025-04-04T16:52:19.447469Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****9zPQ (7FD71D56) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:29654. Bad search filter)' |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |75.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2025-04-04T16:51:28.315270Z :BasicWriteSession INFO: Random seed for debugging is 1743785488315234 2025-04-04T16:51:28.817570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501645009835493:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.817662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:28.917913Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501642580779702:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.107770Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:51:29.129515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bca/r3tmp/tmp5m8cAi/pdisk_1.dat 2025-04-04T16:51:29.129805Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:51:29.531856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:29.535242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.535331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.546679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
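The LdapFetchGroupsUseInvalidSearchFilterBad failure earlier in this block arises because the provider ends up with the string &(uid=ldapuser)(), which is not a legal RFC 4515 filter: an AND must itself be parenthesized and "()" is an empty component. A hypothetical helper showing the well-formed combination, assuming the extra fragment arrives already parenthesized; this is illustration, not the provider's actual code.

#include <string>

std::string CombineFilters(const std::string& userFilter,
                           const std::string& extraFilter) {
    if (extraFilter.empty()) {
        return userFilter;  // e.g. "uid=ldapuser", as in the passing tests
    }
    // e.g. "(&(uid=ldapuser)(memberOf=cn=staff,dc=example,dc=net))";
    // extraFilter is assumed to be a parenthesized filter component.
    return "(&(" + userFilter + ")" + extraFilter + ")";
}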
2025-04-04T16:51:29.546744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.549240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.582450Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:29.586117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30297, node 1 2025-04-04T16:51:29.894836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000bca/r3tmp/yandexSBs01J.tmp 2025-04-04T16:51:29.894859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000bca/r3tmp/yandexSBs01J.tmp 2025-04-04T16:51:29.894997Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000bca/r3tmp/yandexSBs01J.tmp 2025-04-04T16:51:29.895100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:29.957877Z INFO: TTestServer started on Port 63252 GrpcPort 30297 TClient is connected to server localhost:63252 PQClient connected to localhost:30297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:30.285141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T16:51:33.295015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501666484672842:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.295125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.297183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501666484672869:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:33.300644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-04T16:51:33.393168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501666484672871:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T16:51:33.552605Z node 1 :TX_PROXY ERROR: Actor# [1:7489501666484672956:2688] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:33.592399Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501666484672973:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:33.594191Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGM0NmU4ZTgtNmMzODgwM2MtZWYwMGNmOWItNTgzMzhiOA==, ActorId: [1:7489501666484672836:2337], ActorState: ExecuteState, TraceId: 01jr0ve0qa0qr5mz2m15nez5c3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:33.594608Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501664055616441:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:33.596073Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2Y5OTkyZmQtYTVhYjQzZWQtZjU0MmI1Y2QtMzdmM2RiNGI=, ActorId: [2:7489501664055616391:2307], ActorState: ExecuteState, TraceId: 01jr0ve0vpa5h90xgx4c90jtn6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:33.596369Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:33.596470Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:33.607303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:51:33.809796Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501645009835493:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.809879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:33.817116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:51:33.913751Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501642580779702:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.913826Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:33.991775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:30297", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T16:51:34.369345Z node 1 :KQP_EXECUTER 
ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0ve1m2c4z6y6axmymt9jt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA3ZjZkMmQtM2M4NmVhM2EtOTYwMzM5OWYtNzVhZGY5ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489501670779640670:2993] === CheckClustersList. Ok 2025-04-04T16:51:41.009385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:30297 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:30297 2025-04-04T16:51:41.432774Z node 1 :PERSQUEUE INFO: proxy answer MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 ... Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T16:52:19.849419Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T16:52:19.849428Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T16:52:19.849456Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T16:52:19.851822Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T16:52:20.412753Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T16:52:20.414123Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7489501869803738817:2521] connected; active server actors: 1 2025-04-04T16:52:20.415996Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-04T16:52:20.416021Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T16:52:20.422296Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7489501869803738817:2521] disconnected; active server actors: 1 
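The partition-chooser steps above (Select from the table, RequestPQRB, Update the table, then ReplyResult and Start idle) implement a sticky SourceId-to-partition mapping over /Root/PQ/SourceIdMeta2 using the three YQL queries quoted in the log. A condensed C++ sketch of that control flow; the callbacks are hypothetical stand-ins for the KQP table queries and the read-balancer (PQRB) request, not the actual TPartitionChooser internals.

#include <cstdint>
#include <functional>
#include <optional>
#include <string>

struct TPartitionChooserSketch {
    std::function<std::optional<uint32_t>(const std::string&)> SelectFromTable;
    std::function<uint32_t()> RequestPqrb;
    std::function<void(const std::string&, uint32_t)> UpsertToTable;

    uint32_t Choose(const std::string& sourceId) {
        if (auto stored = SelectFromTable(sourceId)) {
            return *stored;  // sticky: reuse the persisted partition
        }
        uint32_t partition = RequestPqrb();  // balancer picks a partition
        UpsertToTable(sourceId, partition);  // the "Update the table" step
        return partition;
    }
};

Persisting the choice is what keeps a writer's SeqNo deduplication stable across reconnects: the same SourceId keeps landing on the same partition even if the balancer would now prefer another one.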
2025-04-04T16:52:20.422324Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7489501869803738817:2521] disconnected no session 2025-04-04T16:52:20.543097Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T16:52:20.543133Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T16:52:20.543150Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489501865508771474:2521] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-04T16:52:20.543181Z node 1 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T16:52:20.545452Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 2, Generation: 1 2025-04-04T16:52:20.547224Z node 1 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0 2025-04-04T16:52:20.545082Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489501869803738846:2521], now have 1 active actors on pipe 2025-04-04T16:52:20.545706Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:52:20.545742Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:52:20.545835Z node 2 :PERSQUEUE INFO: new Cookie src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T16:52:20.545944Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T16:52:20.545992Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:52:20.546839Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:52:20.546859Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:52:20.546927Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:52:20.548469Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743785540548 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:52:20.548606Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T16:52:20.548818Z :INFO: [] MessageGroupId [src] SessionId [src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0] Write session: close. 
Timeout = 0 ms 2025-04-04T16:52:20.548851Z :INFO: [] MessageGroupId [src] SessionId [src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0] Write session will now close 2025-04-04T16:52:20.548884Z :DEBUG: [] MessageGroupId [src] SessionId [src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0] Write session: aborting 2025-04-04T16:52:20.549250Z :INFO: [] MessageGroupId [src] SessionId [src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0] Write session: gracefully shut down, all writes complete 2025-04-04T16:52:20.549283Z :DEBUG: [] MessageGroupId [src] SessionId [src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0] Write session: destroy 2025-04-04T16:52:20.554632Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0 grpc read done: success: 0 data: 2025-04-04T16:52:20.554659Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0 grpc read failed 2025-04-04T16:52:20.554688Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0 grpc closed 2025-04-04T16:52:20.554717Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a6af8797-65da0ed5-4ecf601c-50c52ee0_0 is DEAD 2025-04-04T16:52:20.555462Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T16:52:20.556334Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489501869803738846:2521] destroyed 2025-04-04T16:52:20.556391Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created 2025-04-04T16:52:21.695678Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-04T16:52:21.730396Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:21.787754Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:21.862529Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:21.938981Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:22.050969Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:22.086427Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710690. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:52:22.086576Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7489501874098706249:2537] TxId: 281474976710690. Ctx: { TraceId: 01jr0vffvafhg209b9p4r5ck01, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdiN2EyMzctNmFmMGY1Ni0zNGMyNjk3Yi0yMjJlMTIzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:52:22.086811Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzdiN2EyMzctNmFmMGY1Ni0zNGMyNjk3Yi0yMjJlMTIzZA==, ActorId: [1:7489501874098706185:2537], ActorState: ExecuteState, TraceId: 01jr0vffvafhg209b9p4r5ck01, Create QueryResponse for error on request, msg: 2025-04-04T16:52:22.087618Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0vfg8naq972pxhj0jz9s7e" } } YdbStatus: UNAVAILABLE ConsumedRu: 278 } 2025-04-04T16:52:22.382354Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:22.806949Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:23.750438Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T16:52:24.018914Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710692. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:52:24.019009Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7489501882688640947:2548] TxId: 281474976710692. Ctx: { TraceId: 01jr0vfhdmd95nnzpn8q4dw58x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQyNGExYjAtNDg4ZDQ2MzUtMjdmMGNhODItZDg3N2M3M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:52:24.019177Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmQyNGExYjAtNDg4ZDQ2MzUtMjdmMGNhODItZDg3N2M3M2M=, ActorId: [1:7489501882688640915:2548], ActorState: ExecuteState, TraceId: 01jr0vfhdmd95nnzpn8q4dw58x, Create QueryResponse for error on request, msg: 2025-04-04T16:52:24.020219Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0vfj42568ggwyz3xhcj0w9" } } YdbStatus: UNAVAILABLE ConsumedRu: 468 } 2025-04-04T16:52:24.394292Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489501874098706222:2545], TxId: 281474976710689, task: 1, CA Id [1:7489501874098706220:2545]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpCost::IndexLookup-useSink >> TKeyValueTest::TestRenameToLongKey [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] >> KqpRm::SingleTask >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> KqpRm::ManyTasks >> KqpRm::ResourceBrokerNotEnoughResources >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! 
new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! 
new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:85:2057] recipient: [7:83:2114] Leader for TabletID 72057594037927937 is [7:86:2115] sender: [7:87:2057] recipient: [7:83:2114] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:86:2115] Leader for TabletID 72057594037927937 is [7:86:2115] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:84:2057] recipient: [8:83:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:86:2115] sender: [8:87:2057] recipient: [8:83:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! 
new actor is[8:86:2115] Leader for TabletID 72057594037927937 is [8:86:2115] sender: [8:140:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:87:2057] recipient: [9:85:2116] Leader for TabletID 72057594037927937 is [9:88:2117] sender: [9:89:2057] recipient: [9:85:2116] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:88:2117] Leader for TabletID 72057594037927937 is [9:88:2117] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:85:2116] Leader for TabletID 72057594037927937 is [10:88:2117] sender: [10:89:2057] recipient: [10:85:2116] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2117] Leader for TabletID 72057594037927937 is [10:88:2117] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:87:2118] Leader for TabletID 72057594037927937 is [11:90:2119] sender: [11:91:2057] recipient: [11:87:2118] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! 
new actor is[11:90:2119] Leader for TabletID 72057594037927937 is [11:90:2119] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Re ... is [18:55:2096] sender: [18:94:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:95:2057] recipient: [18:93:2122] Leader for TabletID 72057594037927937 is [18:96:2123] sender: [18:97:2057] recipient: [18:93:2122] !Reboot 72057594037927937 (actor [18:55:2096]) rebooted! !Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed! new actor is[18:96:2123] Leader for TabletID 72057594037927937 is [18:96:2123] sender: [18:150:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094] Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094] Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:75:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:78:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:79:2057] recipient: [21:77:2109] Leader for TabletID 72057594037927937 is [21:80:2110] sender: [21:81:2057] recipient: [21:77:2109] !Reboot 72057594037927937 (actor [21:55:2096]) rebooted! !Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed! new actor is[21:80:2110] Leader for TabletID 72057594037927937 is [21:80:2110] sender: [21:134:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:55:2096]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:75:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:77:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:79:2057] recipient: [22:78:2109] Leader for TabletID 72057594037927937 is [22:80:2110] sender: [22:81:2057] recipient: [22:78:2109] !Reboot 72057594037927937 (actor [22:55:2096]) rebooted! !Reboot 72057594037927937 (actor [22:55:2096]) tablet resolver refreshed! new actor is[22:80:2110] Leader for TabletID 72057594037927937 is [22:80:2110] sender: [22:134:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:56:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:73:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:76:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:79:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:80:2057] recipient: [23:78:2109] Leader for TabletID 72057594037927937 is [23:81:2110] sender: [23:82:2057] recipient: [23:78:2109] !Reboot 72057594037927937 (actor [23:55:2096]) rebooted! !Reboot 72057594037927937 (actor [23:55:2096]) tablet resolver refreshed! new actor is[23:81:2110] Leader for TabletID 72057594037927937 is [23:81:2110] sender: [23:135:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:56:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:73:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:79:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:82:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:83:2057] recipient: [24:81:2112] Leader for TabletID 72057594037927937 is [24:84:2113] sender: [24:85:2057] recipient: [24:81:2112] !Reboot 72057594037927937 (actor [24:55:2096]) rebooted! !Reboot 72057594037927937 (actor [24:55:2096]) tablet resolver refreshed! new actor is[24:84:2113] Leader for TabletID 72057594037927937 is [24:84:2113] sender: [24:138:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:56:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:73:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:79:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:82:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:83:2057] recipient: [25:81:2112] Leader for TabletID 72057594037927937 is [25:84:2113] sender: [25:85:2057] recipient: [25:81:2112] !Reboot 72057594037927937 (actor [25:55:2096]) rebooted! !Reboot 72057594037927937 (actor [25:55:2096]) tablet resolver refreshed! new actor is[25:84:2113] Leader for TabletID 72057594037927937 is [25:84:2113] sender: [25:138:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:56:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:73:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:80:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:83:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:84:2057] recipient: [26:82:2112] Leader for TabletID 72057594037927937 is [26:85:2113] sender: [26:86:2057] recipient: [26:82:2112] !Reboot 72057594037927937 (actor [26:55:2096]) rebooted! !Reboot 72057594037927937 (actor [26:55:2096]) tablet resolver refreshed! new actor is[26:85:2113] Leader for TabletID 72057594037927937 is [26:85:2113] sender: [26:139:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:56:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:73:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:83:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:86:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:87:2057] recipient: [27:85:2115] Leader for TabletID 72057594037927937 is [27:88:2116] sender: [27:89:2057] recipient: [27:85:2115] !Reboot 72057594037927937 (actor [27:55:2096]) rebooted! !Reboot 72057594037927937 (actor [27:55:2096]) tablet resolver refreshed! new actor is[27:88:2116] Leader for TabletID 72057594037927937 is [27:88:2116] sender: [27:142:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:56:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:73:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:83:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:86:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:87:2057] recipient: [28:85:2115] Leader for TabletID 72057594037927937 is [28:88:2116] sender: [28:89:2057] recipient: [28:85:2115] !Reboot 72057594037927937 (actor [28:55:2096]) rebooted! !Reboot 72057594037927937 (actor [28:55:2096]) tablet resolver refreshed! new actor is[28:88:2116] Leader for TabletID 72057594037927937 is [28:88:2116] sender: [28:142:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:56:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:73:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:84:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:87:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:88:2057] recipient: [29:86:2115] Leader for TabletID 72057594037927937 is [29:89:2116] sender: [29:90:2057] recipient: [29:86:2115] !Reboot 72057594037927937 (actor [29:55:2096]) rebooted! !Reboot 72057594037927937 (actor [29:55:2096]) tablet resolver refreshed! new actor is[29:89:2116] Leader for TabletID 72057594037927937 is [29:89:2116] sender: [29:143:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:56:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:73:2057] recipient: [30:14:2061]
>> KqpRm::Reduce
>> KqpRm::NotEnoughExecutionUnits
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:88:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:88:2116] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... boot 72057594037927937 (actor [22:55:2096]) tablet resolver refreshed! new actor is[22:84:2113] Leader for TabletID 72057594037927937 is [22:84:2113] sender: [22:138:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:56:2057] recipient: [23:50:2094] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:73:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:79:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:81:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:83:2057] recipient: [23:82:2112] Leader for TabletID 72057594037927937 is [23:84:2113] sender: [23:85:2057] recipient: [23:82:2112] !Reboot 72057594037927937 (actor [23:55:2096]) rebooted! !Reboot 72057594037927937 (actor [23:55:2096]) tablet resolver refreshed! new actor is[23:84:2113] Leader for TabletID 72057594037927937 is [23:84:2113] sender: [23:138:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:56:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:73:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:80:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:83:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:84:2057] recipient: [24:82:2112] Leader for TabletID 72057594037927937 is [24:85:2113] sender: [24:86:2057] recipient: [24:82:2112] !Reboot 72057594037927937 (actor [24:55:2096]) rebooted! !Reboot 72057594037927937 (actor [24:55:2096]) tablet resolver refreshed! 
new actor is[24:85:2113] Leader for TabletID 72057594037927937 is [24:85:2113] sender: [24:103:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:56:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:73:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:82:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:85:2057] recipient: [25:84:2114] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:87:2115] sender: [25:88:2057] recipient: [25:84:2114] !Reboot 72057594037927937 (actor [25:55:2096]) rebooted! !Reboot 72057594037927937 (actor [25:55:2096]) tablet resolver refreshed! new actor is[25:87:2115] Leader for TabletID 72057594037927937 is [25:87:2115] sender: [25:141:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:56:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:73:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:82:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:85:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:86:2057] recipient: [26:84:2114] Leader for TabletID 72057594037927937 is [26:87:2115] sender: [26:88:2057] recipient: [26:84:2114] !Reboot 72057594037927937 (actor [26:55:2096]) rebooted! !Reboot 72057594037927937 (actor [26:55:2096]) tablet resolver refreshed! new actor is[26:87:2115] Leader for TabletID 72057594037927937 is [26:87:2115] sender: [26:141:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:56:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:73:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:83:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:86:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:87:2057] recipient: [27:85:2114] Leader for TabletID 72057594037927937 is [27:88:2115] sender: [27:89:2057] recipient: [27:85:2114] !Reboot 72057594037927937 (actor [27:55:2096]) rebooted! !Reboot 72057594037927937 (actor [27:55:2096]) tablet resolver refreshed! 
new actor is[27:88:2115] Leader for TabletID 72057594037927937 is [27:88:2115] sender: [27:106:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:56:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:73:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:85:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:87:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:89:2057] recipient: [28:88:2116] Leader for TabletID 72057594037927937 is [28:90:2117] sender: [28:91:2057] recipient: [28:88:2116] !Reboot 72057594037927937 (actor [28:55:2096]) rebooted! !Reboot 72057594037927937 (actor [28:55:2096]) tablet resolver refreshed! new actor is[28:90:2117] Leader for TabletID 72057594037927937 is [28:90:2117] sender: [28:144:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:56:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:73:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:85:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:88:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:89:2057] recipient: [29:87:2116] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:91:2057] recipient: [29:87:2116] !Reboot 72057594037927937 (actor [29:55:2096]) rebooted! !Reboot 72057594037927937 (actor [29:55:2096]) tablet resolver refreshed! new actor is[29:90:2117] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:144:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:56:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:73:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:86:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:89:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:90:2057] recipient: [30:88:2116] Leader for TabletID 72057594037927937 is [30:91:2117] sender: [30:92:2057] recipient: [30:88:2116] !Reboot 72057594037927937 (actor [30:55:2096]) rebooted! !Reboot 72057594037927937 (actor [30:55:2096]) tablet resolver refreshed! 
new actor is[30:91:2117] Leader for TabletID 72057594037927937 is [30:91:2117] sender: [30:145:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:56:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:73:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:89:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:92:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:93:2057] recipient: [31:91:2119] Leader for TabletID 72057594037927937 is [31:94:2120] sender: [31:95:2057] recipient: [31:91:2119] !Reboot 72057594037927937 (actor [31:55:2096]) rebooted! !Reboot 72057594037927937 (actor [31:55:2096]) tablet resolver refreshed! new actor is[31:94:2120] Leader for TabletID 72057594037927937 is [31:94:2120] sender: [31:148:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:56:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:73:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:89:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:92:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:93:2057] recipient: [32:91:2119] Leader for TabletID 72057594037927937 is [32:94:2120] sender: [32:95:2057] recipient: [32:91:2119] !Reboot 72057594037927937 (actor [32:55:2096]) rebooted! !Reboot 72057594037927937 (actor [32:55:2096]) tablet resolver refreshed! 
new actor is[32:94:2120] Leader for TabletID 72057594037927937 is [32:94:2120] sender: [32:148:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:56:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:73:2057] recipient: [33:14:2061]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD]
Test command err: 2025-04-04T16:51:27.737577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501641819146476:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.737636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d5/r3tmp/tmpdjvBgj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29405, node 1 2025-04-04T16:51:28.113143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.113249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.122430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.126764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.132178Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:28.132197Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:28.174854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.174878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.174892Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.175003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:28.278358Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:28.282722Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:28.282754Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:28.283552Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:16065, port: 16065 2025-04-04T16:51:28.284425Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:28.316888Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:28.359985Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****nh9g (1AAC5F42) () has now valid token of ldapuser@ldap 2025-04-04T16:51:31.334754Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501656063500067:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:31.363587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d5/r3tmp/tmpD3s2jc/pdisk_1.dat 2025-04-04T16:51:31.619288Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:31.623622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.623713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.627693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61540, node 2 2025-04-04T16:51:31.818808Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:31.818835Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:31.818841Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:31.818930Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:32.002409Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:32.006611Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:32.006650Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:32.007404Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:28127, port: 28127 2025-04-04T16:51:32.007473Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:32.063079Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:32.106812Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:32.107313Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:28127 return no entries 2025-04-04T16:51:32.107825Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****6x6A (A12564C0) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:28127 return no entries)' 2025-04-04T16:51:36.590875Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501680107490692:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:36.590918Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d5/r3tmp/tmpLrroFg/pdisk_1.dat 2025-04-04T16:51:36.823482Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:36.855242Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:36.855320Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:36.861011Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24267, node 3 2025-04-04T16:51:37.000294Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:37.000313Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:37.000319Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:37.000434Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:37.169719Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:51:37.173423Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:51:37.173452Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:51:37.174099Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1576, port: 1576 2025-04-04T16:51:37.174152Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:37.213634Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:37.262798Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:37.310939Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:37.311659Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:37.311720Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:37.360648Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:37.406638Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 
2025-04-04T16:51:37.409432Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****nFeQ (A843894A) () has now valid token of ldapuser@ldap 2025-04-04T16:51:41.594398Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489501680107490692:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:41.594476Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:41.659279Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nFeQ (A843894A) 2025-04-04T16:51:41.659402Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1576, port: 1576 2025-04-04T16:51:41.659470Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:41.744092Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:41.786754Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:41.831474Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:41.832716Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:41.832757Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:41.874637Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:41.918797Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:51:41.919863Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****nFeQ (A843894A) () has now valid token of ldapuser@ldap 2025-04-04T16:51:45.670375Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****nFeQ (A843894A) 2025-04-04T16:51:45.670490Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1576, port: 1576 2025-04-04T16:51:45.670568Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:51:45.732325Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:51:45.794652Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:51:45.842680Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:51:45.851386Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:51:45.851431Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entry ... 
_1.dat 2025-04-04T16:52:08.048231Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:08.048327Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:08.055601Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:08.073476Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 11053, node 5 2025-04-04T16:52:08.202784Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:08.326985Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:08.327005Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:08.327013Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:08.327183Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:08.597041Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:08.601496Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:08.601530Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:08.602234Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21086, port: 21086 2025-04-04T16:52:08.602308Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:08.666158Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:08.716956Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:08.759515Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:52:08.761340Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:52:08.761409Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:08.806551Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:08.858513Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:08.859639Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****fi0g (6BAB1F88) () has now valid token of ldapuser@ldap 2025-04-04T16:52:13.085220Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****fi0g (6BAB1F88) 2025-04-04T16:52:13.085561Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21086, port: 21086 2025-04-04T16:52:13.085610Z node 5 
:LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:13.189224Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:13.243350Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:13.249330Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:21086 return no entries 2025-04-04T16:52:13.250404Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****fi0g (6BAB1F88) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:21086 return no entries)' 2025-04-04T16:52:16.102384Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****fi0g (6BAB1F88) 2025-04-04T16:52:20.152030Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489501867993369265:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:20.152084Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d5/r3tmp/tmpApwlID/pdisk_1.dat 2025-04-04T16:52:20.373639Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:20.390589Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:20.390694Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:20.393881Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15774, node 6 2025-04-04T16:52:20.502901Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:20.502925Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:20.502932Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:20.503068Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:20.742660Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T16:52:20.746677Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:20.746709Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:20.747516Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5399, port: 5399 2025-04-04T16:52:20.747605Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:20.834720Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:20.879658Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:20.880136Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5399. 
Server is busy 2025-04-04T16:52:20.880614Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****h-Dg (A87041B0) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5399. Server is busy)' 2025-04-04T16:52:20.881612Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:20.881637Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:20.882621Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5399, port: 5399 2025-04-04T16:52:20.882712Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:20.935617Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:20.982718Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:20.983173Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5399. Server is busy 2025-04-04T16:52:20.983563Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****h-Dg (A87041B0) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5399. Server is busy)' 2025-04-04T16:52:23.195229Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****h-Dg (A87041B0) 2025-04-04T16:52:23.196620Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:23.196639Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:23.204418Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5399, port: 5399 2025-04-04T16:52:23.211096Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:23.351522Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:23.403176Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:23.403689Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5399. Server is busy 2025-04-04T16:52:23.404632Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****h-Dg (A87041B0) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5399. 
Server is busy)' 2025-04-04T16:52:25.154718Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489501867993369265:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:25.344814Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:26.343848Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****h-Dg (A87041B0) 2025-04-04T16:52:26.344077Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T16:52:26.344091Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T16:52:26.345047Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5399, port: 5399 2025-04-04T16:52:26.345272Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-04-04T16:52:26.469779Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T16:52:26.516018Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-04-04T16:52:26.567785Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-04-04T16:52:26.585364Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-04-04T16:52:26.585436Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:26.649569Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:26.738538Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-04-04T16:52:26.767209Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****h-Dg (A87041B0) () has now valid token of ldapuser@ldap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event 
NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
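The LDAP_AUTH_PROVIDER entries above trace a fixed four-step sequence: init (scheme, uris, port), StartTLS, a bind as the service account, then a subtree search for the user requesting the memberOf attribute. A minimal sketch of that same sequence against the OpenLDAP C client follows; the host, bind DN, base DN and filter are copied from the log, while the password, file name and error handling are placeholders, not YDB's actual implementation.

    // g++ -std=c++17 ldap_probe.cpp -lldap -llber   (hypothetical file name)
    #include <ldap.h>
    #include <cstdio>
    #include <cstring>

    int main() {
        LDAP* ld = nullptr;
        if (ldap_initialize(&ld, "ldap://localhost:5399") != LDAP_SUCCESS)   // init: scheme, uris, port
            return 1;

        int version = LDAP_VERSION3;
        ldap_set_option(ld, LDAP_OPT_PROTOCOL_VERSION, &version);

        if (ldap_start_tls_s(ld, nullptr, nullptr) != LDAP_SUCCESS)          // start TLS
            return 1;

        berval cred;
        cred.bv_val = const_cast<char*>("robo-password");                    // placeholder secret
        cred.bv_len = strlen(cred.bv_val);
        if (ldap_sasl_bind_s(ld, "cn=robouser,dc=search,dc=yandex,dc=net",   // bind: bindDn
                             LDAP_SASL_SIMPLE, &cred,
                             nullptr, nullptr, nullptr) != LDAP_SUCCESS)
            return 1;

        char* attrs[] = { const_cast<char*>("memberOf"), nullptr };          // attributes: memberOf
        LDAPMessage* res = nullptr;
        int rc = ldap_search_ext_s(ld, "dc=search,dc=yandex,dc=net",         // search: baseDn,
                                   LDAP_SCOPE_SUBTREE, "(uid=ldapuser)",     // scope, filter
                                   attrs, 0, nullptr, nullptr, nullptr,
                                   LDAP_NO_LIMIT, &res);
        if (rc == LDAP_SUCCESS && ldap_count_entries(ld, res) == 0)
            fprintf(stderr, "LDAP search returned no entries\n");            // maps to the permanent error above

        ldap_msgfree(res);
        ldap_unbind_ext_s(ld, nullptr, nullptr);
        return 0;
    }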
Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:103:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
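In the TICKET_PARSER traces above, "user does not exist" is cached as a permanent error, while "Server is busy" is marked retryable and re-driven by a periodic "Refreshing ticket" pass roughly every three seconds. A small sketch of that classification and refresh loop, with a hypothetical Classify helper and a stubbed LDAP call that is not YDB's real retry policy:

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <thread>

    enum class ErrorKind { Permanent, Retryable };

    // Hypothetical classifier inferred from the log: availability problems are
    // retryable, identity problems are permanent.
    ErrorKind Classify(const std::string& ldapError) {
        if (ldapError.find("Server is busy") != std::string::npos)
            return ErrorKind::Retryable;
        return ErrorKind::Permanent;
    }

    int main() {
        using namespace std::chrono_literals;
        for (int attempt = 1; attempt <= 3; ++attempt) {
            std::string err = (attempt < 3) ? "Server is busy" : "";   // stub: succeeds on 3rd try
            if (err.empty()) { std::cout << "ticket now valid\n"; return 0; }
            if (Classify(err) == ErrorKind::Permanent) {
                std::cout << "permanent error, cached until key update\n";
                return 1;
            }
            std::cout << "retryable error, refreshing ticket\n";
            std::this_thread::sleep_for(3s);   // matches the ~3 s refresh cadence in the timestamps
        }
        return 1;
    }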
Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:85:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:86:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:86:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:106:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
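The nested-group trace above first tries one AD-style matching-rule-in-chain search (member:1.2.840.113556.1.4.1941:=...), then falls back to "tree traversal": each round ORs the entryDn of every group found so far into one filter, asks for their memberOf, and stops once a round adds nothing new. A self-contained sketch of that breadth-first traversal, with the directory stubbed as a map whose contents mirror the group chain visible in the log:

    #include <iostream>
    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    // Stub directory: group DN -> its parent groups (the memberOf attribute).
    std::map<std::string, std::vector<std::string>> memberOf = {
        {"cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
        {"cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
        {"cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
        {"cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
         {"cn=people,ou=groups,dc=search,dc=yandex,dc=net"}},
    };

    int main() {
        std::vector<std::string> frontier = {   // direct groups from the first search
            "cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
            "cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"};
        std::set<std::string> all(frontier.begin(), frontier.end());

        while (!frontier.empty()) {
            // One LDAP round trip per level: (|(entryDn=g1)(entryDn=g2)...) asking for memberOf.
            std::string filter = "(|";
            for (const auto& g : frontier) filter += "(entryDn=" + g + ")";
            filter += ")";
            std::cout << "search filter: " << filter << "\n";

            std::vector<std::string> next;
            for (const auto& g : frontier)
                for (const auto& parent : memberOf[g])
                    if (all.insert(parent).second)       // only newly seen groups go in the next round
                        next.push_back(parent);
            frontier = std::move(next);
        }
        std::cout << "resolved " << all.size() << " groups for ldapuser\n";
        return 0;
    }

Run as-is it prints the same three successively shrinking filters that appear in the trace, ending when cn=people has no parents.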
Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (a ... 594037927937 (actor [23:55:2096]) tablet resolver refreshed! new actor is[23:84:2113] Leader for TabletID 72057594037927937 is [23:84:2113] sender: [23:138:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:56:2057] recipient: [24:49:2094] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:73:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:80:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:83:2057] recipient: [24:82:2112] Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:84:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:85:2113] sender: [24:86:2057] recipient: [24:82:2112] !Reboot 72057594037927937 (actor [24:55:2096]) rebooted! !Reboot 72057594037927937 (actor [24:55:2096]) tablet resolver refreshed! new actor is[24:85:2113] Leader for TabletID 72057594037927937 is [24:85:2113] sender: [24:139:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:56:2057] recipient: [25:49:2094] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:73:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:83:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:87:2057] recipient: [25:85:2115] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:89:2057] recipient: [25:85:2115] !Reboot 72057594037927937 (actor [25:55:2096]) rebooted! !Reboot 72057594037927937 (actor [25:55:2096]) tablet resolver refreshed! 
new actor is[25:88:2116] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:142:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:53:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:56:2057] recipient: [26:51:2094] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:73:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:83:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:85:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:55:2096] sender: [26:87:2057] recipient: [26:86:2115] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:89:2057] recipient: [26:86:2115] !Reboot 72057594037927937 (actor [26:55:2096]) rebooted! !Reboot 72057594037927937 (actor [26:55:2096]) tablet resolver refreshed! new actor is[26:88:2116] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:142:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:53:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:56:2057] recipient: [27:50:2094] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:73:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:88:2057] recipient: [27:86:2115] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:90:2057] recipient: [27:86:2115] !Reboot 72057594037927937 (actor [27:55:2096]) rebooted! !Reboot 72057594037927937 (actor [27:55:2096]) tablet resolver refreshed! new actor is[27:89:2116] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:143:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:56:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:73:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:87:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:90:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:91:2057] recipient: [28:89:2118] Leader for TabletID 72057594037927937 is [28:92:2119] sender: [28:93:2057] recipient: [28:89:2118] !Reboot 72057594037927937 (actor [28:55:2096]) rebooted! !Reboot 72057594037927937 (actor [28:55:2096]) tablet resolver refreshed! 
new actor is[28:92:2119] Leader for TabletID 72057594037927937 is [28:92:2119] sender: [28:146:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:56:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:73:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:87:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:90:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:91:2057] recipient: [29:89:2118] Leader for TabletID 72057594037927937 is [29:92:2119] sender: [29:93:2057] recipient: [29:89:2118] !Reboot 72057594037927937 (actor [29:55:2096]) rebooted! !Reboot 72057594037927937 (actor [29:55:2096]) tablet resolver refreshed! new actor is[29:92:2119] Leader for TabletID 72057594037927937 is [29:92:2119] sender: [29:146:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:56:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:73:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:88:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:91:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:92:2057] recipient: [30:90:2118] Leader for TabletID 72057594037927937 is [30:93:2119] sender: [30:94:2057] recipient: [30:90:2118] !Reboot 72057594037927937 (actor [30:55:2096]) rebooted! !Reboot 72057594037927937 (actor [30:55:2096]) tablet resolver refreshed! new actor is[30:93:2119] Leader for TabletID 72057594037927937 is [30:93:2119] sender: [30:147:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:56:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:73:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:93:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:94:2057] recipient: [31:92:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:92:2120] !Reboot 72057594037927937 (actor [31:55:2096]) rebooted! !Reboot 72057594037927937 (actor [31:55:2096]) tablet resolver refreshed! 
new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:56:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:73:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:55:2096]) rebooted! !Reboot 72057594037927937 (actor [32:55:2096]) tablet resolver refreshed! new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:56:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:73:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:91:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:94:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:95:2057] recipient: [33:93:2120] Leader for TabletID 72057594037927937 is [33:96:2121] sender: [33:97:2057] recipient: [33:93:2120] !Reboot 72057594037927937 (actor [33:55:2096]) rebooted! !Reboot 72057594037927937 (actor [33:55:2096]) tablet resolver refreshed! new actor is[33:96:2121] Leader for TabletID 72057594037927937 is [33:96:2121] sender: [33:150:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:56:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:73:2057] recipient: [34:14:2061] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |75.4%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> KqpRm::NotEnoughMemory >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> KqpRm::SingleTask [GOOD] >> KqpRm::ManyTasks [GOOD] >> SystemView::GroupsFields [GOOD] >> SystemView::Describe >> KqpRm::NotEnoughExecutionUnits [GOOD] >> LocalTableWriter::ApplyInCorrectOrder |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> KqpRm::SnapshotSharingByExchanger ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-04-04T16:52:40.509389Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:52:40.528215Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00168d/r3tmp/tmpqmd24x/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:52:40.531829Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00168d/r3tmp/tmpqmd24x/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00168d/r3tmp/tmpqmd24x/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16748578881663898003 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T16:52:40.806106Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:40.807675Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:40.832511Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098] 2025-04-04T16:52:40.832649Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start 
KqpResourceInfoExchangerActor at [2:462:2100] 2025-04-04T16:52:40.832841Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320] 2025-04-04T16:52:40.832919Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340] 2025-04-04T16:52:40.833028Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:40.833066Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:40.833095Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:40.833114Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:40.833187Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:40.847173Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785560 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:40.847605Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:40.847671Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785560 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:40.847839Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:40.847941Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:40.847971Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:40.848034Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785560 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:40.848131Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:40.848147Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:40.848203Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785560 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:40.848305Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:40.848884Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-04T16:52:40.848950Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:40.849018Z node 
1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:40.849382Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:40.849682Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:40.849811Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:40.849872Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:40.849980Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:40.854314Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:460:2339]) priority=0 resources={0, 100} 2025-04-04T16:52:40.854408Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:40.854464Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:460:2339]) from queue queue_kqp_resource_manager 2025-04-04T16:52:40.854503Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:40.854576Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:460:2339])) 2025-04-04T16:52:40.854774Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-04T16:52:40.854970Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:460:2339]) (release resources {0, 100}) 2025-04-04T16:52:40.855002Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:460:2339])) 2025-04-04T16:52:40.855032Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
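The KqpRm::SingleTask trace above shows the resource broker's ledger in miniature: allocating task kqp-1-2-1 with {0, 100} moves the queue's planned usage 0.000000 -> 0.250000, and releasing it moves it back. A toy version of that bookkeeping follows; the 0.25 figure is consistent with usage being normalized against a per-queue budget (100/400 = 0.25), but the 400 is an inference from this log, not a documented constant.

    #include <cstdio>
    #include <map>
    #include <string>

    class Broker {
        double budget_;
        double used_ = 0;
        std::map<std::string, double> tasks_;
    public:
        explicit Broker(double budget) : budget_(budget) {}
        void Allocate(const std::string& id, double mem) {
            double before = used_ / budget_;
            tasks_[id] = mem;
            used_ += mem;
            std::printf("planned usage %f -> %f (insert task %s)\n",
                        before, used_ / budget_, id.c_str());
        }
        void Finish(const std::string& id) {
            double before = used_ / budget_;
            used_ -= tasks_[id];
            tasks_.erase(id);
            std::printf("planned usage %f -> %f (remove task %s)\n",
                        before, used_ / budget_, id.c_str());
        }
    };

    int main() {
        Broker queue(400);                  // assumed queue budget, see note above
        queue.Allocate("kqp-1-2-1", 100);   // TxId 1, taskId 2: +100 memory
        queue.Finish("kqp-1-2-1");          // release returns the queue to 0.0
        return 0;
    }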
Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
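Every "!Reboot ... on event" block in these KeyValue tests follows one template: run the workload until a chosen event type is observed, reboot the tablet at exactly that point, wait for the tablet resolver to refresh to the new leader actor, then let the workload finish and verify the data survived. The skeleton below sketches that pattern under stated assumptions; the real NKikimr test framework drives actors and a tablet resolver, which are stubbed out here as callbacks.

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    struct RebootScenario {
        std::string triggerEvent;                 // e.g. "NKikimr::TEvKeyValue::TEvIntermediate"
        std::function<void()> runUntilTrigger;    // replay workload until the event fires
        std::function<void()> rebootTablet;       // kill the leader, start a new generation
        std::function<bool()> resolverRefreshed;  // resolver now points at the new actor
        std::function<bool()> finishAndVerify;    // workload completes with correct data
    };

    bool RunOne(const RebootScenario& s) {
        s.runUntilTrigger();
        std::cout << "!Reboot on event " << s.triggerEvent << "\n";
        s.rebootTablet();
        if (!s.resolverRefreshed()) return false;
        return s.finishAndVerify();
    }

    int main() {
        // One scenario per event type, mirroring the sequence in the log.
        std::vector<std::string> events = {
            "NKikimr::TEvTabletPipe::TEvServerConnected",
            "NKikimr::TEvKeyValue::TEvExecuteTransaction",
            "NKikimr::TEvKeyValue::TEvIntermediate",
            "NKikimr::TEvKeyValue::TEvReadRange",
            "NKikimr::TEvKeyValue::TEvNotify",
            "NKikimr::TEvKeyValue::TEvRequest",
        };
        for (const auto& e : events) {
            RebootScenario s{e, []{}, []{}, []{ return true; }, []{ return true; }};
            if (!RunOne(s)) { std::cout << e << " FAILED\n"; return 1; }
        }
        std::cout << "all reboot points survived\n";
        return 0;
    }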
Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:85:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:86:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:86:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... TabletID 72057594037927937 is [13:55:2096] sender: [13:88:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:55:2096] sender: [13:90:2057] recipient: [13:89:2116] Leader for TabletID 72057594037927937 is [13:91:2117] sender: [13:92:2057] recipient: [13:89:2116] !Reboot 72057594037927937 (actor [13:55:2096]) rebooted! !Reboot 72057594037927937 (actor [13:55:2096]) tablet resolver refreshed! new actor is[13:91:2117] Leader for TabletID 72057594037927937 is [13:91:2117] sender: [13:145:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:53:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:56:2057] recipient: [14:50:2094] Leader for TabletID 72057594037927937 is [14:55:2096] sender: [14:73:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:53:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:56:2057] recipient: [15:50:2094] Leader for TabletID 72057594037927937 is [15:55:2096] sender: [15:73:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:53:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:56:2057] recipient: [16:50:2094] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:73:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:75:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:78:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:55:2096] sender: [16:79:2057] recipient: [16:77:2109] Leader for TabletID 72057594037927937 is [16:80:2110] sender: [16:81:2057] recipient: [16:77:2109] !Reboot 72057594037927937 (actor [16:55:2096]) rebooted! !Reboot 72057594037927937 (actor [16:55:2096]) tablet resolver refreshed! 
new actor is[16:80:2110]
Leader for TabletID 72057594037927937 is [16:80:2110] sender: [16:134:2057] recipient: [16:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:53:2057] recipient: [17:50:2094]
Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:56:2057] recipient: [17:50:2094]
Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:73:2057] recipient: [17:14:2061]
!Reboot 72057594037927937 (actor [17:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:75:2057] recipient: [17:36:2083]
Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:78:2057] recipient: [17:14:2061]
Leader for TabletID 72057594037927937 is [17:55:2096] sender: [17:79:2057] recipient: [17:77:2109]
Leader for TabletID 72057594037927937 is [17:80:2110] sender: [17:81:2057] recipient: [17:77:2109]
!Reboot 72057594037927937 (actor [17:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [17:55:2096]) tablet resolver refreshed!
new actor is[17:80:2110]
Leader for TabletID 72057594037927937 is [17:80:2110] sender: [17:134:2057] recipient: [17:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:53:2057] recipient: [18:50:2094]
Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:56:2057] recipient: [18:50:2094]
Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:73:2057] recipient: [18:14:2061]
!Reboot 72057594037927937 (actor [18:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:76:2057] recipient: [18:36:2083]
Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:79:2057] recipient: [18:78:2109]
Leader for TabletID 72057594037927937 is [18:55:2096] sender: [18:80:2057] recipient: [18:14:2061]
Leader for TabletID 72057594037927937 is [18:81:2110] sender: [18:82:2057] recipient: [18:78:2109]
!Reboot 72057594037927937 (actor [18:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [18:55:2096]) tablet resolver refreshed!
new actor is[18:81:2110]
Leader for TabletID 72057594037927937 is [18:81:2110] sender: [18:135:2057] recipient: [18:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:53:2057] recipient: [19:50:2094]
Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:56:2057] recipient: [19:50:2094]
Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:73:2057] recipient: [19:14:2061]
!Reboot 72057594037927937 (actor [19:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:79:2057] recipient: [19:36:2083]
Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:82:2057] recipient: [19:14:2061]
Leader for TabletID 72057594037927937 is [19:55:2096] sender: [19:83:2057] recipient: [19:81:2112]
Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:85:2057] recipient: [19:81:2112]
!Reboot 72057594037927937 (actor [19:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [19:55:2096]) tablet resolver refreshed!
new actor is[19:84:2113]
Leader for TabletID 72057594037927937 is [19:84:2113] sender: [19:138:2057] recipient: [19:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:53:2057] recipient: [20:50:2094]
Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:56:2057] recipient: [20:50:2094]
Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:73:2057] recipient: [20:14:2061]
!Reboot 72057594037927937 (actor [20:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:79:2057] recipient: [20:36:2083]
Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:81:2057] recipient: [20:14:2061]
Leader for TabletID 72057594037927937 is [20:55:2096] sender: [20:83:2057] recipient: [20:82:2112]
Leader for TabletID 72057594037927937 is [20:84:2113] sender: [20:85:2057] recipient: [20:82:2112]
!Reboot 72057594037927937 (actor [20:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [20:55:2096]) tablet resolver refreshed!
new actor is[20:84:2113]
Leader for TabletID 72057594037927937 is [20:84:2113] sender: [20:138:2057] recipient: [20:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:53:2057] recipient: [21:51:2094]
Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:56:2057] recipient: [21:51:2094]
Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:73:2057] recipient: [21:14:2061]
!Reboot 72057594037927937 (actor [21:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:80:2057] recipient: [21:36:2083]
Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:82:2057] recipient: [21:14:2061]
Leader for TabletID 72057594037927937 is [21:55:2096] sender: [21:84:2057] recipient: [21:83:2112]
Leader for TabletID 72057594037927937 is [21:85:2113] sender: [21:86:2057] recipient: [21:83:2112]
!Reboot 72057594037927937 (actor [21:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [21:55:2096]) tablet resolver refreshed!
new actor is[21:85:2113]
Leader for TabletID 72057594037927937 is [21:85:2113] sender: [21:139:2057] recipient: [21:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:53:2057] recipient: [22:50:2094]
Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:56:2057] recipient: [22:50:2094]
Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:73:2057] recipient: [22:14:2061]
!Reboot 72057594037927937 (actor [22:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:83:2057] recipient: [22:36:2083]
Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:86:2057] recipient: [22:14:2061]
Leader for TabletID 72057594037927937 is [22:55:2096] sender: [22:87:2057] recipient: [22:85:2115]
Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115]
!Reboot 72057594037927937 (actor [22:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [22:55:2096]) tablet resolver refreshed!
new actor is[22:88:2116]
Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:53:2057] recipient: [23:50:2094]
Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:56:2057] recipient: [23:50:2094]
Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:73:2057] recipient: [23:14:2061]
!Reboot 72057594037927937 (actor [23:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:83:2057] recipient: [23:36:2083]
Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:86:2057] recipient: [23:14:2061]
Leader for TabletID 72057594037927937 is [23:55:2096] sender: [23:87:2057] recipient: [23:85:2115]
Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115]
!Reboot 72057594037927937 (actor [23:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [23:55:2096]) tablet resolver refreshed!
new actor is[23:88:2116]
Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:53:2057] recipient: [24:49:2094]
Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:56:2057] recipient: [24:49:2094]
Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:73:2057] recipient: [24:14:2061]
!Reboot 72057594037927937 (actor [24:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:84:2057] recipient: [24:36:2083]
Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:87:2057] recipient: [24:14:2061]
Leader for TabletID 72057594037927937 is [24:55:2096] sender: [24:88:2057] recipient: [24:86:2115]
Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115]
!Reboot 72057594037927937 (actor [24:55:2096]) rebooted!
!Reboot 72057594037927937 (actor [24:55:2096]) tablet resolver refreshed!
new actor is[24:89:2116]
Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:143:2057] recipient: [24:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:53:2057] recipient: [25:49:2094]
Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:56:2057] recipient: [25:49:2094]
Leader for TabletID 72057594037927937 is [25:55:2096] sender: [25:73:2057] recipient: [25:14:2061]
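The trace above is a KeyValue tablet reboot test: the harness replays the same scenario once per interceptable event type, kills the tablet leader the first time that event is seen (the "!Reboot ... on event ... !" lines), waits until the tablet resolver sees the new leader generation, and then verifies the scenario still completes. A minimal sketch of that pattern, with hypothetical types that are not YDB's real testlib API:

```cpp
#include <iostream>
#include <string>
#include <vector>

// Schematic "reboot on event" test: restart the tablet the first time a
// target event type is observed, then keep replaying the scenario.
// Names and structure are illustrative only.
struct Tablet {
    int generation = 0;
    void Reboot() { ++generation; }          // stands in for a leader restart
};

void RunScenario(Tablet& t, const std::string& rebootOn,
                 const std::vector<std::string>& events) {
    bool rebooted = false;
    for (const auto& ev : events) {
        if (!rebooted && ev == rebootOn) {
            t.Reboot();                      // kill the leader mid-scenario
            rebooted = true;
            std::cout << "!Reboot on event " << ev
                      << " -> new generation " << t.generation << "\n";
        }
        // ... deliver ev to the tablet here ...
    }
}

int main() {
    const std::vector<std::string> events = {
        "TEvKeyValue::TEvRequest", "TEvKeyValue::TEvIntermediate",
        "TEvTabletPipe::TEvServerConnected"};
    for (const auto& target : events) {      // one pass per event type,
        Tablet t;                            // mirroring the repeated blocks
        RunScenario(t, target, events);      // in the trace above
    }
}
```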
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD]
Test command err:
2025-04-04T16:52:41.071523Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-04-04T16:52:41.072024Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001666/r3tmp/tmpCtuLAN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-04-04T16:52:41.072583Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001666/r3tmp/tmpCtuLAN/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001666/r3tmp/tmpCtuLAN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13534143499211992536 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-04-04T16:52:41.151032Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:41.151286Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:41.274658Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098]
2025-04-04T16:52:41.275842Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100]
2025-04-04T16:52:41.277492Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320]
2025-04-04T16:52:41.277862Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340]
2025-04-04T16:52:41.278654Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:41.279410Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:41.279785Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:41.279802Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:41.280209Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:41.379411Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785561 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:41.382229Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:41.382599Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785561 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:41.384512Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:41.384936Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:41.384976Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:41.385076Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785561 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:41.385204Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:41.385224Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:41.385273Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785561 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:41.385339Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:41.385904Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-04-04T16:52:41.385982Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:41.386063Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:41.387982Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-04-04T16:52:41.388227Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:41.388353Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:41.388422Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-04-04T16:52:41.388557Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-04-04T16:52:41.540451Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.540533Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.540581Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.540629Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.540674Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:460:2339]))
2025-04-04T16:52:41.540845Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.541027Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.541053Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.541079Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.541101Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-2 (2 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.541137Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:460:2339]))
2025-04-04T16:52:41.541166Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.541264Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-3-3 (3 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.541287Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-3-3 (3 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.541311Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.541332Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-3-3 (3 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.541354Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:460:2339]))
2025-04-04T16:52:41.541375Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 3. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.541451Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-4-4 (4 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.541469Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-4-4 (4 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.541505Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.541526Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-4-4 (4 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.541543Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:460:2339]))
2025-04-04T16:52:41.542348Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.542959Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-5-5 (5 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.545311Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-5-5 (5 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.545358Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.545632Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-5-5 (5 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.545660Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:460:2339]))
2025-04-04T16:52:41.545686Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.545771Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-6-6 (6 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.545789Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-6-6 (6 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.545809Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.546043Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-6-6 (6 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.546345Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:460:2339]))
2025-04-04T16:52:41.546373Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 6. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.546690Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-7-7 (7 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.546717Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-7-7 (7 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.546745Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.546769Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-7-7 (7 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.546796Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:460:2339]))
2025-04-04T16:52:41.547049Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.547930Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-8-8 (8 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.548242Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-8-8 (8 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.548269Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.548823Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-8-8 (8 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.548846Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:460:2339]))
2025-04-04T16:52:41.548868Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.549482Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-9-9 (9 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:41.549505Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-9-9 (9 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.549528Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:41.549554Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-9-9 (9 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:41.549580Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:460:2339]))
2025-04-04T16:52:41.549603Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:41.550173Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:460:2339]) (release resources {0, 100})
2025-04-04T16:52:41.550495Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:460:2339]))
2025-04-04T16:52:41.551090Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0.
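The ManyTasks trace above follows a simple submit -> allocate -> finish cycle: tasks kqp-1-1-1 through kqp-1-9-9 each take {0, 100} from queue_kqp_resource_manager, the queue's "planned resource usage" rises by 0.250000 per task, and finishing a task returns its memory and lowers the counter. A toy sketch of that bookkeeping; the 0.25-per-100-bytes scale factor is read off the trace and treated as an assumption, not taken from YDB sources:

```cpp
#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

// Toy model of the broker bookkeeping visible in the trace: an admission
// check against free memory plus a per-queue planned-usage counter.
class ToyResourceBroker {
public:
    explicit ToyResourceBroker(uint64_t totalMemory) : Free_(totalMemory) {}

    bool Allocate(const std::string& task, uint64_t memory) {
        if (memory > Free_) {
            std::printf("Not enough resources to start task %s\n", task.c_str());
            return false;                    // admission check
        }
        Free_ -= memory;
        InFly_[task] = memory;
        Planned_ += memory * kUsagePerByte;  // "Updated planned resource usage"
        std::printf("Allocate {0, %llu} for task %s, planned=%f\n",
                    (unsigned long long)memory, task.c_str(), Planned_);
        return true;
    }

    void Finish(const std::string& task) {   // "Finish task ... release resources"
        auto it = InFly_.find(task);
        if (it == InFly_.end()) return;
        Free_ += it->second;
        Planned_ -= it->second * kUsagePerByte;
        InFly_.erase(it);
    }

private:
    // 100 bytes -> 0.25 planned usage, as observed above (an assumption).
    static constexpr double kUsagePerByte = 0.25 / 100.0;
    uint64_t Free_;
    double Planned_ = 0.0;
    std::map<std::string, uint64_t> InFly_;
};

int main() {
    ToyResourceBroker broker(1000);          // TotalMemory: 1000, as in the log
    for (int i = 1; i <= 9; ++i)             // kqp-1-1-1 ... kqp-1-9-9
        broker.Allocate("kqp-1-" + std::to_string(i) + "-" + std::to_string(i), 100);
    broker.Finish("kqp-1-1-1");              // releases {0, 100}, planned drops
}
```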
>> LocalTableWriter::DecimalKeys
>> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD]
Test command err:
2025-04-04T16:52:42.207259Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-04-04T16:52:42.220228Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001667/r3tmp/tmpNg01AW/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-04-04T16:52:42.225122Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001667/r3tmp/tmpNg01AW/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001667/r3tmp/tmpNg01AW/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9317840617272376803 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-04-04T16:52:42.282433Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:42.282751Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:42.311204Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098]
2025-04-04T16:52:42.311354Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100]
2025-04-04T16:52:42.311532Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320]
2025-04-04T16:52:42.311607Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340]
2025-04-04T16:52:42.311719Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:42.311748Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:42.311774Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:42.311793Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:42.311861Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:42.323165Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785562 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:42.323574Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:42.323660Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785562 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:42.323891Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:42.324040Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:42.324090Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:42.324186Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785562 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:42.324348Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:42.324369Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:42.324418Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785562 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:42.324499Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:42.325238Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-04-04T16:52:42.325336Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:42.325441Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:42.325891Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-04-04T16:52:42.326272Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:42.326424Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:42.326511Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-04-04T16:52:42.326659Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
|75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest
>> KqpRm::SingleSnapshotByExchanger
>> KqpRm::ResourceBrokerNotEnoughResources [GOOD]
>> KqpRm::Reduce [GOOD]
>> LocalTableWriter::WriteTable
>> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD]
|75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless
|75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless
|75.4%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|75.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless
>> TKeyValueTest::TestConcatWorksNewApi [GOOD]
>> TKeyValueTest::TestConcatToLongKey
>> BasicUsage::PreferredDatabaseNoFallback [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD]
Test command err:
2025-04-04T16:52:42.961846Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-04-04T16:52:42.962230Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001674/r3tmp/tmpoBgFJR/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-04-04T16:52:42.962884Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001674/r3tmp/tmpoBgFJR/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001674/r3tmp/tmpoBgFJR/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7846157926297248596 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-04-04T16:52:43.054495Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:43.054817Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:43.101292Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098]
2025-04-04T16:52:43.101471Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100]
2025-04-04T16:52:43.101671Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320]
2025-04-04T16:52:43.101756Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340]
2025-04-04T16:52:43.101876Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:43.101911Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:43.101937Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:43.101964Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:43.102042Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.142290Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100
2025-04-04T16:52:43.142845Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.142970Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:43.143255Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:43.143419Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:43.143465Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.143569Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:43.143761Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:43.143790Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.143863Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100
2025-04-04T16:52:43.143976Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:43.144756Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-04-04T16:52:43.144870Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:43.144990Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:43.145487Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-04-04T16:52:43.145874Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:43.146047Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:43.146146Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-04-04T16:52:43.150543Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-04-04T16:52:43.153497Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:460:2339]) priority=0 resources={0, 1000}
2025-04-04T16:52:43.153586Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:43.153638Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:43.153691Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:43.153739Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:460:2339]))
2025-04-04T16:52:43.153904Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 }
2025-04-04T16:52:43.153981Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:460:2339]) priority=0 resources={0, 100000}
2025-04-04T16:52:43.154022Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:43.154064Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task kqp-1-2-2 (2 by [1:460:2339])
2025-04-04T16:52:43.154100Z node 1 :RESOURCE_BROKER DEBUG: Removing task kqp-1-2-2 (2 by [1:460:2339])
2025-04-04T16:52:43.154193Z node 1 :KQP_RESOURCE_MANAGER NOTICE: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2025-04-04T16:52:43.153388Z }
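The rejection above echoes a per-transaction accounting summary (TxResourcesInfo). A sketch of that rollup, where the field names follow the log text but the update rules are assumed, not taken from YDB sources:

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>

// Per-transaction stats like those printed in the TxResourcesInfo message.
struct TxResourcesInfo {
    uint64_t TotalAllocations = 0;    // "tx total memory allocations"
    uint64_t LargestSuccessful = 0;   // "tx largest successful memory allocation"
    uint64_t LastFailed = 0;          // "tx last failed memory allocation"

    void OnSuccess(uint64_t bytes) {
        TotalAllocations += bytes;
        LargestSuccessful = std::max(LargestSuccessful, bytes);
    }
    void OnFailure(uint64_t bytes) { LastFailed = bytes; }
};

int main() {
    TxResourcesInfo tx;
    tx.OnSuccess(1000);               // kqp-1-2-1: {0, 1000} was granted
    tx.OnFailure(100000);             // kqp-1-2-2: rejected by the broker queue
    std::printf("total=%lluB largest=%lluB lastFailed=%lluB\n",
                (unsigned long long)tx.TotalAllocations,
                (unsigned long long)tx.LargestSuccessful,
                (unsigned long long)tx.LastFailed);
}
```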
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD]
Test command err:
2025-04-04T16:52:43.024882Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-04-04T16:52:43.025483Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001673/r3tmp/tmp9ae9hy/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-04-04T16:52:43.026006Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001673/r3tmp/tmp9ae9hy/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001673/r3tmp/tmp9ae9hy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8959321657539765410 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-04-04T16:52:43.094864Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:43.095192Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-04-04T16:52:43.114339Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098]
2025-04-04T16:52:43.114503Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100]
2025-04-04T16:52:43.114751Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320]
2025-04-04T16:52:43.114839Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340]
2025-04-04T16:52:43.114973Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:43.115014Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:43.115044Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-04-04T16:52:43.115071Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-04-04T16:52:43.115156Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.135108Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:43.135612Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.135724Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:43.135972Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:43.136142Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:43.136202Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.136308Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:43.136470Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1
2025-04-04T16:52:43.136496Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources
2025-04-04T16:52:43.136554Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785563 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100
2025-04-04T16:52:43.136662Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1
2025-04-04T16:52:43.137427Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0
2025-04-04T16:52:43.137532Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:43.137646Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-04-04T16:52:43.138157Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-04-04T16:52:43.138659Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:43.138842Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-04-04T16:52:43.138964Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-04-04T16:52:43.139141Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-04-04T16:52:43.142446Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:460:2339]) priority=0 resources={0, 100}
2025-04-04T16:52:43.142525Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:43.142602Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:460:2339]) from queue queue_kqp_resource_manager
2025-04-04T16:52:43.142642Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:43.142690Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:460:2339]))
2025-04-04T16:52:43.142890Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 }
2025-04-04T16:52:43.143101Z node 1 :RESOURCE_BROKER DEBUG: Update task kqp-1-1-1 (1 by [1:460:2339]) (priority=0 type=kqp_query resources={0, 30} resubmit=0)
2025-04-04T16:52:43.143143Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager
2025-04-04T16:52:43.143186Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:460:2339]))
2025-04-04T16:52:43.143227Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0.
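The Reduce trace above shows an in-place downsize rather than a finish: task kqp-1-1-1 is updated from {0, 100} to {0, 30} (resubmit=0) and the 70-byte difference is released while the task stays in-fly. A sketch of that update path, with semantics inferred from the trace rather than from YDB sources:

```cpp
#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

// Sketch of the "Update task ... resources={0, 30} resubmit=0" path:
// shrink an in-fly allocation and return the difference to the free pool.
class ToyBroker {
public:
    explicit ToyBroker(uint64_t total) : Free_(total) {}

    bool Allocate(const std::string& task, uint64_t mem) {
        if (mem > Free_) return false;
        Free_ -= mem;
        InFly_[task] = mem;
        return true;
    }

    // Returns how much was released (0 if the task is unknown or would grow).
    uint64_t UpdateDown(const std::string& task, uint64_t newMem) {
        auto it = InFly_.find(task);
        if (it == InFly_.end() || newMem >= it->second) return 0;
        uint64_t released = it->second - newMem;
        it->second = newMem;                 // task keeps running at new size
        Free_ += released;
        return released;
    }

private:
    uint64_t Free_;
    std::map<std::string, uint64_t> InFly_;
};

int main() {
    ToyBroker b(1000);
    b.Allocate("kqp-1-1-1", 100);
    uint64_t freed = b.UpdateDown("kqp-1-1-1", 30);
    std::printf("Released resources, Memory: %llu\n", (unsigned long long)freed); // 70
}
```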
>> LocalTableWriter::SupportedTypes
>> TKeyValueTest::TestCopyRangeToLongKey [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD]
Test command err:
2025-04-04T16:51:42.697713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501703135771837:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:51:42.698233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164d/r3tmp/tmpK6GQxn/pdisk_1.dat
2025-04-04T16:51:43.758434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:51:43.949696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:51:43.954560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:51:43.970941Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:51:43.973766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24620, node 1
2025-04-04T16:51:44.270795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:51:44.270819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:51:44.270834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:51:44.270941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18530
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:51:44.847227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:51:47.710518Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501703135771837:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:51:47.710851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:51:58.969599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T16:51:58.969625Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:52:09.589606Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501818427543879:2273];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:52:09.589661Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164d/r3tmp/tmp2L782c/pdisk_1.dat
2025-04-04T16:52:10.027301Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:52:10.060497Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:52:10.060582Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:52:10.062885Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 62665, node 4
2025-04-04T16:52:10.182268Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:52:10.182287Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:52:10.182294Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:52:10.182440Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5140
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:52:10.423860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:52:10.452654Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T16:52:14.593962Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489501818427543879:2273];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:52:14.594031Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:52:25.018615Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T16:52:25.018650Z node 4 :IMPORT WARN: Table profiles were not loaded
>> LocalTableWriter::WaitTxIds
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD]
Test command err:
2025-04-04T16:51:28.513770Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1743785488513740
2025-04-04T16:51:29.003965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501646196601847:2213];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:51:29.004112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c3e/r3tmp/tmp7jE5Bt/pdisk_1.dat
2025-04-04T16:51:29.323184Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-04T16:51:29.333803Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-04T16:51:29.380764Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:51:29.678744Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:51:29.690144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:51:29.690236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:51:29.696119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:51:29.696210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:51:29.704389Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-04T16:51:29.704517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:51:29.705442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23525, node 1
2025-04-04T16:51:29.955608Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000c3e/r3tmp/yandexPZiSy6.tmp
2025-04-04T16:51:29.955632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000c3e/r3tmp/yandexPZiSy6.tmp
2025-04-04T16:51:29.955781Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000c3e/r3tmp/yandexPZiSy6.tmp
2025-04-04T16:51:29.955890Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T16:51:30.065158Z INFO: TTestServer started on Port 4910 GrpcPort 23525
TClient is connected to server localhost:4910
PQClient connected to localhost:23525
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:51:30.492821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
waiting...
2025-04-04T16:51:33.579337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501665291186831:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.580415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501665291186842:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.580497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.587853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-04-04T16:51:33.591818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501667671439216:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.591933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.610467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501667671439233:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.610561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501667671439235:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.610973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:33.638861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501665291186845:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-04-04T16:51:33.781584Z node 2 :TX_PROXY ERROR: Actor# [2:7489501665291186873:2131] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T16:51:33.781371Z node 1 :TX_PROXY ERROR: Actor# [1:7489501667671439262:2670] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 }
2025-04-04T16:51:33.808386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T16:51:33.818775Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501667671439320:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:33.820545Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODNlY2NjYy1jZTg0YjBmOS01MTJhNjY4Mi1hMzY0N2U0ZA==, ActorId: [1:7489501667671439213:2337], ActorState: ExecuteState, TraceId: 01jr0ve11gdcwq2snwbeeqxrty, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:33.820456Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501665291186888:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:33.821769Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjEyNzI3ODQtMmViZmVjOWEtMzliZjNmMjItNjk5ZjVmODA=, ActorId: [2:7489501665291186829:2309], ActorState: ExecuteState, TraceId: 01jr0ve123bqaktf1f1y4knqps, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:33.823443Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:33.830666Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:33.992768Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501646196601847:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.992853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:33.997082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:51:34.195608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:23525", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T16:51:34.525330Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { Trace ... 
d: describe result for acl check 2025-04-04T16:52:08.774487Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T16:52:08.774496Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T16:52:08.774505Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T16:52:08.774520Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T16:52:08.776911Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T16:52:08.986885Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T16:52:08.987912Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-04T16:52:08.987948Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T16:52:08.987325Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489501817979107284:2502] connected; active server actors: 1 2025-04-04T16:52:08.989390Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489501817979107284:2502] disconnected; active server actors: 1 2025-04-04T16:52:08.989416Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489501817979107284:2502] disconnected no session 2025-04-04T16:52:09.143345Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T16:52:09.143398Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T16:52:09.143416Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489501817979107225:2502] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-04T16:52:09.143454Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T16:52:09.144528Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-04-04T16:52:09.144582Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7489501822274074600:2502], now have 1 active actors on pipe 2025-04-04T16:52:09.144856Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:52:09.144884Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:52:09.144971Z node 3 :PERSQUEUE INFO: new Cookie src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T16:52:09.145083Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T16:52:09.145142Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:52:09.145388Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:52:09.145402Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:52:09.145466Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:52:09.145573Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0 2025-04-04T16:52:09.146471Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743785529146 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:52:09.146606Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T16:52:09.150363Z :INFO: [] MessageGroupId [src] SessionId [src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0] Write session: close. 
Timeout = 0 ms 2025-04-04T16:52:09.150440Z :INFO: [] MessageGroupId [src] SessionId [src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0] Write session will now close 2025-04-04T16:52:09.150496Z :DEBUG: [] MessageGroupId [src] SessionId [src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0] Write session: aborting 2025-04-04T16:52:09.151074Z :INFO: [] MessageGroupId [src] SessionId [src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0] Write session: gracefully shut down, all writes complete 2025-04-04T16:52:09.151133Z :DEBUG: [] MessageGroupId [src] SessionId [src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0] Write session: destroy 2025-04-04T16:52:09.152640Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0 grpc closed 2025-04-04T16:52:09.152689Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|56b6fce3-5a85f22f-18ec52c8-9a2b1d9b_0 is DEAD 2025-04-04T16:52:09.153472Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T16:52:09.153609Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489501822274074600:2502] destroyed 2025-04-04T16:52:09.153633Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2025-04-04T16:52:09.256639Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:11.263975Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:13.267793Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === In the next federation discovery response dc2 will be available Test retry state: get retry delay 2025-04-04T16:52:15.278758Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:17.283811Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:19.285754Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:21.286609Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:23.288187Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:25.296857Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:27.310694Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:29.314855Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:31.322612Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:33.327075Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:35.331784Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-04-04T16:52:37.336230Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-04-04T16:52:38.008378Z node 4 
:PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-04T16:52:38.008736Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-04-04T16:52:38.014833Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-04T16:52:38.018608Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-04-04T16:52:38.019910Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 Test retry state: get retry delay 2025-04-04T16:52:39.410306Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === Waiting for repair >>> Ready to answer: ok === Closing the session 2025-04-04T16:52:41.435804Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:7210" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:7210" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:7210" location: "dc3" status: AVAILABLE weight: 500 } ] } 2025-04-04T16:52:41.821463Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-04-04T16:52:41.950549Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-04-04T16:52:42.010709Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-04-04T16:52:42.010744Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-04-04T16:52:42.011191Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-04-04T16:52:42.018367Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-04-04T16:52:42.018706Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy >> SystemView::AuthGroups_ResultOrder [GOOD] >> SystemView::AuthGroups_TableRange >> KqpRm::NotEnoughMemory [GOOD] >> KqpRm::DisonnectNodes >> LocalTableWriter::DataAlongWithHeartbeat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! 
new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:81:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:82:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:82:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:88:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [27:55:2096] sender: [27:88:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:91:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:55:2096] sender: [27:92:2057] recipient: [27:90:2118] Leader for TabletID 72057594037927937 is [27:93:2119] sender: [27:94:2057] recipient: [27:90:2118] !Reboot 72057594037927937 (actor [27:55:2096]) rebooted! !Reboot 72057594037927937 (actor [27:55:2096]) tablet resolver refreshed! 
new actor is[27:93:2119] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:53:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:56:2057] recipient: [28:50:2094] Leader for TabletID 72057594037927937 is [28:55:2096] sender: [28:73:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:53:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:56:2057] recipient: [29:50:2094] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:73:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:56:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:73:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:75:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:78:2057] recipient: [30:77:2109] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:79:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:80:2110] sender: [30:81:2057] recipient: [30:77:2109] !Reboot 72057594037927937 (actor [30:55:2096]) rebooted! !Reboot 72057594037927937 (actor [30:55:2096]) tablet resolver refreshed! new actor is[30:80:2110] Leader for TabletID 72057594037927937 is [30:80:2110] sender: [30:134:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:56:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:73:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:55:2096]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:75:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:78:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:79:2057] recipient: [31:77:2109] Leader for TabletID 72057594037927937 is [31:80:2110] sender: [31:81:2057] recipient: [31:77:2109] !Reboot 72057594037927937 (actor [31:55:2096]) rebooted! !Reboot 72057594037927937 (actor [31:55:2096]) tablet resolver refreshed! 
new actor is[31:80:2110] Leader for TabletID 72057594037927937 is [31:80:2110] sender: [31:134:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:56:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:73:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:76:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:79:2057] recipient: [32:78:2109] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:80:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:81:2110] sender: [32:82:2057] recipient: [32:78:2109] !Reboot 72057594037927937 (actor [32:55:2096]) rebooted! !Reboot 72057594037927937 (actor [32:55:2096]) tablet resolver refreshed! new actor is[32:81:2110] Leader for TabletID 72057594037927937 is [32:81:2110] sender: [32:135:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:56:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:73:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:79:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:82:2057] recipient: [33:81:2112] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:83:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:84:2113] sender: [33:85:2057] recipient: [33:81:2112] !Reboot 72057594037927937 (actor [33:55:2096]) rebooted! !Reboot 72057594037927937 (actor [33:55:2096]) tablet resolver refreshed! new actor is[33:84:2113] Leader for TabletID 72057594037927937 is [33:84:2113] sender: [33:138:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:56:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:73:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:79:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:82:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:83:2057] recipient: [34:81:2112] Leader for TabletID 72057594037927937 is [34:84:2113] sender: [34:85:2057] recipient: [34:81:2112] !Reboot 72057594037927937 (actor [34:55:2096]) rebooted! !Reboot 72057594037927937 (actor [34:55:2096]) tablet resolver refreshed! 
new actor is[34:84:2113] Leader for TabletID 72057594037927937 is [34:84:2113] sender: [34:138:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:56:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:73:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:80:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:83:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:84:2057] recipient: [35:82:2112] Leader for TabletID 72057594037927937 is [35:85:2113] sender: [35:86:2057] recipient: [35:82:2112] !Reboot 72057594037927937 (actor [35:55:2096]) rebooted! !Reboot 72057594037927937 (actor [35:55:2096]) tablet resolver refreshed! new actor is[35:85:2113] Leader for TabletID 72057594037927937 is [35:85:2113] sender: [35:139:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:56:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:73:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:83:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:85:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:87:2057] recipient: [36:86:2115] Leader for TabletID 72057594037927937 is [36:88:2116] sender: [36:89:2057] recipient: [36:86:2115] !Reboot 72057594037927937 (actor [36:55:2096]) rebooted! !Reboot 72057594037927937 (actor [36:55:2096]) tablet resolver refreshed! new actor is[36:88:2116] Leader for TabletID 72057594037927937 is [36:88:2116] sender: [36:142:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:56:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:73:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:83:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:86:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:87:2057] recipient: [37:85:2115] Leader for TabletID 72057594037927937 is [37:88:2116] sender: [37:89:2057] recipient: [37:85:2115] !Reboot 72057594037927937 (actor [37:55:2096]) rebooted! !Reboot 72057594037927937 (actor [37:55:2096]) tablet resolver refreshed! 
new actor is[37:88:2116] Leader for TabletID 72057594037927937 is [37:88:2116] sender: [37:142:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:56:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:73:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:84:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:86:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:88:2057] recipient: [38:87:2115] Leader for TabletID 72057594037927937 is [38:89:2116] sender: [38:90:2057] recipient: [38:87:2115] !Reboot 72057594037927937 (actor [38:55:2096]) rebooted! !Reboot 72057594037927937 (actor [38:55:2096]) tablet resolver refreshed! new actor is[38:89:2116] Leader for TabletID 72057594037927937 is [38:89:2116] sender: [38:143:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:56:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:73:2057] recipient: [39:14:2061] |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] >> LocalTableWriter::ConsistentWrite >> KqpRm::NodesMembershipByExchanger ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-04-04T16:52:47.401337Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:52:47.401892Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00165e/r3tmp/tmpIWl1A5/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:52:47.406359Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00165e/r3tmp/tmpIWl1A5/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00165e/r3tmp/tmpIWl1A5/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3784192989989151526 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T16:52:47.463240Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:47.463526Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:47.491330Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098] 2025-04-04T16:52:47.491504Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100] 2025-04-04T16:52:47.491696Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320] 2025-04-04T16:52:47.491786Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340] 2025-04-04T16:52:47.491894Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:47.491927Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:47.491952Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:47.491971Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-04T16:52:47.492051Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.509277Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.509757Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.509856Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.510115Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:47.510367Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:47.510421Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.510512Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.510706Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:47.510732Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.510791Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.510902Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:47.511682Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-04T16:52:47.511790Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:47.511898Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:47.512421Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:47.512777Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:47.512939Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:47.513045Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:47.513191Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 >> KqpCost::ScanScriptingRangeFullScan-SourceRead ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 27792, MsgBus: 2383 2025-04-04T16:52:22.518141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501877542733672:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:22.518860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001602/r3tmp/tmpvmlC7j/pdisk_1.dat 2025-04-04T16:52:23.868822Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:23.872320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:23.948391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:23.948481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:24.062531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27792, node 1 2025-04-04T16:52:26.348512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:26.348531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:26.348537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:26.349548Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:27.519042Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501877542733672:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:27.519380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:2383 TClient is connected to server localhost:2383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:36.924284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:52:37.183996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:37.227663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:37.671695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:38.815958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:52:38.816313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:38.894940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:39.629694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:42.436613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501963442081372:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:42.436708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:42.689492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:42.733411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:52:42.774571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:52:42.825816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:52:42.865787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:52:42.938089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:52:43.031957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501967737049195:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:43.032107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:43.032497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501967737049201:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:43.036160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:52:43.046998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501967737049203:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:52:43.116695Z node 1 :TX_PROXY ERROR: Actor# [1:7489501967737049259:3502] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::BadTopicName |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |75.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |75.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> SystemView::AuthGroupMembers [GOOD] >> SystemView::AuthGroupMembers_Access |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup >> KqpRm::SnapshotSharingByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-04-04T16:52:47.822941Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:52:47.823466Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001653/r3tmp/tmpWzbhvN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:52:47.823993Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001653/r3tmp/tmpWzbhvN/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001653/r3tmp/tmpWzbhvN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10566042339760616281 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T16:52:47.985444Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:47.985745Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:48.035374Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098] 2025-04-04T16:52:48.035570Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100] 2025-04-04T16:52:48.035769Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320] 2025-04-04T16:52:48.035852Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340] 2025-04-04T16:52:48.035966Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:48.036000Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:48.036026Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:48.036050Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-04T16:52:48.036127Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:48.052537Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:48.052978Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:48.053050Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:48.053268Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:48.053382Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:48.053428Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:48.053503Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:48.053638Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:48.053658Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:48.053705Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:48.053784Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:48.062880Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-04T16:52:48.063028Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:48.063154Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:48.063699Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:48.064074Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:48.064243Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:48.064347Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:48.064527Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:48.070582Z node 1 :RESOURCE_BROKER DEBUG: Submitted 
new kqp_query task kqp-1-2-1 (1 by [1:460:2339]) priority=0 resources={0, 100} 2025-04-04T16:52:48.070684Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:48.070741Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:460:2339]) from queue queue_kqp_resource_manager 2025-04-04T16:52:48.070781Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:48.070826Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:460:2339])) 2025-04-04T16:52:48.071028Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-04T16:52:48.071116Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:460:2339]) priority=0 resources={0, 100} 2025-04-04T16:52:48.071155Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:48.071203Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:460:2339]) from queue queue_kqp_resource_manager 2025-04-04T16:52:48.071237Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:48.071285Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:460:2339])) 2025-04-04T16:52:48.071366Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-04T16:52:48.071590Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:48.071728Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785567 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-04T16:52:48.072016Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:49.259758Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:49.259893Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:460:2339]) (release resources {0, 100}) 2025-04-04T16:52:49.259970Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300200 (remove task kqp-1-2-1 (1 by [1:460:2339])) 2025-04-04T16:52:49.260015Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100400 2025-04-04T16:52:49.260063Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-04T16:52:49.260117Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:460:2339]) (release resources {0, 100}) 2025-04-04T16:52:49.260156Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300200 to 0.100400 (remove task kqp-2-1-2 (2 by [1:460:2339])) 2025-04-04T16:52:49.260194Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-04T16:52:49.260387Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.260543Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785568 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:49.260907Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:49.578807Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> KqpCost::ScanQueryRangeFullScan+SourceRead |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink >> KqpRm::DisonnectNodes [GOOD] |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |75.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-04-04T16:52:47.674857Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:52:47.675387Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001655/r3tmp/tmpKTxCIX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:52:47.675895Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001655/r3tmp/tmpKTxCIX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001655/r3tmp/tmpKTxCIX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 438313082404825430 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T16:52:47.792208Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:47.792496Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:47.828068Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098] 2025-04-04T16:52:47.828192Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100] 2025-04-04T16:52:47.828336Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320] 2025-04-04T16:52:47.828405Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340] 2025-04-04T16:52:47.828485Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:47.828566Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:47.828592Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-04-04T16:52:47.828610Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:47.828657Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.884964Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.885251Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.885314Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.885588Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:47.885683Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:47.885719Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.885783Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.885924Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:47.885942Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:47.885992Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785567 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:47.886068Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:47.886813Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-04T16:52:47.886895Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:47.886992Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:47.887343Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:47.887660Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:47.887750Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:47.887822Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:47.887926Z node 2 
:KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:49.150123Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:49.150212Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:49.150354Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:460:2339]) priority=0 resources={0, 100} 2025-04-04T16:52:49.150390Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.150435Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:460:2339]) from queue queue_kqp_resource_manager 2025-04-04T16:52:49.150488Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.150536Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:460:2339])) 2025-04-04T16:52:49.150750Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-04T16:52:49.150833Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:460:2339]) priority=0 resources={0, 100} 2025-04-04T16:52:49.150871Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.150911Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:460:2339]) from queue queue_kqp_resource_manager 2025-04-04T16:52:49.150961Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:460:2339]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.150998Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:460:2339])) 2025-04-04T16:52:49.151057Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-04T16:52:49.151112Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.151220Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785568 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-04T16:52:49.151434Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:49.439463Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:49.439618Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [2:461:2099]) priority=0 resources={0, 100} 2025-04-04T16:52:49.439669Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [2:461:2099]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.439754Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:461:2099]) from queue queue_kqp_resource_manager 2025-04-04T16:52:49.439820Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [2:461:2099]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.439882Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:461:2099])) 2025-04-04T16:52:49.439998Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-04T16:52:49.440117Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-2-2 (2 by [2:461:2099]) priority=0 resources={0, 100} 2025-04-04T16:52:49.440171Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-2-2 (2 by [2:461:2099]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.440220Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:461:2099]) from queue queue_kqp_resource_manager 2025-04-04T16:52:49.440259Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-2-2 (2 by [2:461:2099]) to queue queue_kqp_resource_manager 2025-04-04T16:52:49.440297Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:461:2099])) 2025-04-04T16:52:49.440384Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-04-04T16:52:49.440492Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.440640Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785569 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-04-04T16:52:49.440964Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:49.750873Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:49.751022Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:460:2339]) (release resources {0, 100}) 2025-04-04T16:52:49.751105Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:460:2339])) 2025-04-04T16:52:49.751149Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-04-04T16:52:49.751197Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-04T16:52:49.751257Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:460:2339]) (release resources {0, 100}) 2025-04-04T16:52:49.751456Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:460:2339])) 2025-04-04T16:52:49.751504Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-04T16:52:49.751579Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.751743Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785570 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:49.752144Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:50.037435Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:50.037576Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [2:461:2099]) (release resources {0, 100}) 2025-04-04T16:52:50.037645Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:461:2099])) 2025-04-04T16:52:50.037683Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-04-04T16:52:50.037735Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-04T16:52:50.037781Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-2-2-2 (2 by [2:461:2099]) (release resources {0, 100}) 2025-04-04T16:52:50.037820Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:461:2099])) 2025-04-04T16:52:50.037861Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-04-04T16:52:50.037937Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:50.038089Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785571 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:50.038478Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:50.303243Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> LocalTableWriter::ApplyInCorrectOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-04-04T16:52:49.215219Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:52:49.215782Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001649/r3tmp/tmpp6gZvg/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:52:49.221701Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001649/r3tmp/tmpp6gZvg/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001649/r3tmp/tmpp6gZvg/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16481312121256193907 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T16:52:49.296187Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:49.296526Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:49.317411Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098] 2025-04-04T16:52:49.317561Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start 
KqpResourceInfoExchangerActor at [2:462:2100] 2025-04-04T16:52:49.317745Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320] 2025-04-04T16:52:49.317812Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340] 2025-04-04T16:52:49.317929Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:49.317961Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:49.317989Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:49.318008Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:49.318083Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.336213Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785569 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:49.336720Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.336810Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785569 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:49.337117Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:49.337256Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:49.337293Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.337403Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785569 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:49.337553Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:49.337574Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:49.337626Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785569 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:49.337726Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:49.338656Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-04T16:52:49.338769Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:49.338897Z node 
1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:49.339382Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:49.339731Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:49.339899Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:49.340004Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:49.340165Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:50.440993Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:50.441111Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:50.441684Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-04T16:52:50.441762Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-04T16:52:50.442675Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-04T16:52:50.442950Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:77:2072] ServerId# [1:351:2270] TabletId# 72057594037932033 PipeClientId# [2:77:2072] 2025-04-04T16:52:50.443099Z node 2 :TX_PROXY WARN: actor# [2:143:2086] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-04T16:52:50.443300Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:50.443439Z node 2 :KQP_RESOURCE_MANAGER INFO: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:50.443481Z node 2 :KQP_RESOURCE_MANAGER INFO: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:465:2102], reason: tenant updated 2025-04-04T16:52:50.443866Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:50.445850Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:50.446012Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:50.802892Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> KqpCost::IndexLookupAndTake-useSink |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> LocalTableWriter::WriteTable [GOOD] |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |75.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> LocalTableWriter::DecimalKeys [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> LocalTableWriter::SupportedTypes [GOOD] >> TSchemeShardServerLess::Fake [GOOD] 
>> KqpRm::NodesMembershipByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2025-04-04T16:52:42.959884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501962725601525:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:42.960344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0010a0/r3tmp/tmpuIfGPc/pdisk_1.dat 2025-04-04T16:52:43.612143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:43.619706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:43.662917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:44.378279Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:44.453214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:3432 TServer::EnableGrpc on GrpcPort 6506, node 1 2025-04-04T16:52:47.455675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:47.455789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:47.455797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:47.456098Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:47.942781Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501962725601525:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:47.943044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:52:49.496122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:49.519646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:49.525332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785569645 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-04-04T16:52:49.691487Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handshake: worker# [1:7489501992790373105:2308] 2025-04-04T16:52:49.691742Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:52:49.692027Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:52:49.692069Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Send handshake: worker# [1:7489501992790373105:2308] 2025-04-04T16:52:49.694530Z node 1 
:REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:49.717465Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-04T16:52:49.717620Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-04T16:52:49.717744Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501992790373202:2369] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:52:49.717800Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:49.717860Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501992790373202:2369] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-04T16:52:49.726683Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501992790373202:2369] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:49.726777Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:49.726843Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-04T16:52:49.727280Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 2 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:49.727751Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-04-04T16:52:49.727845Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-04-04T16:52:49.727959Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501992790373202:2369] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: 
Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-04T16:52:49.735184Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501992790373202:2369] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:49.735246Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:49.735317Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501992790373199:2369] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } >> KqpCost::AAARangeFullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-04-04T16:52:45.707375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501973859206785:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:45.707411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00103c/r3tmp/tmptMlXrK/pdisk_1.dat 2025-04-04T16:52:47.465408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:48.554603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:48.555895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:48.556525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:48.568804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:48.728685Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18578 TServer::EnableGrpc on GrpcPort 30033, node 1 2025-04-04T16:52:49.573930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:49.573948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:49.573955Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:49.574065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18578 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:50.065541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:50.083230Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:50.087480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785570191 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-04T16:52:50.202351Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handshake: worker# [1:7489501995334043888:2297] 2025-04-04T16:52:50.202653Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:52:50.202875Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:52:50.202904Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Send handshake: worker# [1:7489501995334043888:2297] 2025-04-04T16:52:50.203419Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 36b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.203642Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-04-04T16:52:50.203995Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501995334043984:2358] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:52:50.204030Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.204128Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501995334043984:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 
36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-04T16:52:50.206636Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501995334043984:2358] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:50.206698Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.206743Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995334043981:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2025-04-04T16:52:43.379440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501965188446241:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:43.379481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001052/r3tmp/tmprT0rC1/pdisk_1.dat 2025-04-04T16:52:46.733280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:46.733862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:46.761971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:47.020014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:47.035261Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24653 TServer::EnableGrpc on GrpcPort 65404, node 1 2025-04-04T16:52:48.410415Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501965188446241:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:48.410513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:49.135357Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:49.135380Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:49.135386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:49.135481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24653 WaitRootIsUp 'Root'... 
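
For anyone triaging writer performance from records like those above: the gap between the writer handling TEvWorker::TEvData (16:52:50.203419Z) and the STATUS_OK ack from the datashard (16:52:50.206636Z) is about 3.2 ms for the three 36-byte records. A minimal standard-C++ sketch of that arithmetic (the helper and its names are illustrative, not YDB code; timestamps are copied from the log):

    #include <cstdio>
    #include <string>

    // Parse "HH:MM:SS.ffffff" into microseconds since midnight (same-day assumption).
    long long ToMicros(const std::string& t) {
        int h = 0, m = 0, s = 0;
        long long us = 0;
        std::sscanf(t.c_str(), "%d:%d:%d.%lld", &h, &m, &s, &us);
        return (h * 3600LL + m * 60LL + s) * 1000000LL + us;
    }

    int main() {
        const long long sent = ToMicros("16:52:50.203419"); // TEvData handled
        const long long ack  = ToMicros("16:52:50.206636"); // STATUS_OK received
        std::printf("apply latency: %lld us\n", ack - sent); // prints 3217
        return 0;
    }
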
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:49.810195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:49.824221Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:49.827630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785569946 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 I... 
(TRUNCATED) 2025-04-04T16:52:49.989205Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handshake: worker# [1:7489501990958250404:2299] 2025-04-04T16:52:49.989469Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:52:49.989661Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:52:49.989691Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Send handshake: worker# [1:7489501990958250404:2299] 2025-04-04T16:52:49.989963Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 57b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:49.990151Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2025-04-04T16:52:49.990312Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501990958250499:2359] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:52:49.990346Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:49.990414Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501990958250499:2359] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified 
Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-04-04T16:52:49.995243Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501990958250499:2359] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:49.995321Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:49.995389Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501990958250496:2359] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-04-04T16:52:47.655286Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501984149761672:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:47.667061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00107d/r3tmp/tmpQqaNMR/pdisk_1.dat 2025-04-04T16:52:48.782662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:49.440055Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:49.451676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:49.451790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:49.463377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12632 TServer::EnableGrpc on GrpcPort 13179, node 1 2025-04-04T16:52:49.850775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:49.850804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:49.850811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:49.850954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12632 WaitRootIsUp 'Root'... 
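
The DecimalKeys block above exercises columns declared as Decimal(1,0) and Decimal(35,10) — both TypeId 4865, distinguished only by the TypeInfo precision/scale pair. As a generic illustration of what the scale component means for stored values (plain fixed-point arithmetic, emphatically not YDB's decimal implementation; __int128 is a GCC/Clang extension):

    #include <cstdio>

    int main() {
        // Decimal(35,10): up to 35 significant digits, 10 of them after the point.
        // Model values as integers scaled by 10^10.
        const long long kScale = 10000000000LL; // 10^10
        __int128 a = (__int128)12345 * kScale + 5000000000LL; // 12345.5
        __int128 b = (__int128)2 * kScale;                    // 2.0
        __int128 sum = a + b;                                 // 12347.5
        std::printf("%lld.%010lld\n",
                    (long long)(sum / kScale), (long long)(sum % kScale));
        return 0;
    }
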
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:50.232286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:50.251033Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:50.277483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785570394 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ui... 
(TRUNCATED) 2025-04-04T16:52:50.399112Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handshake: worker# [1:7489501997034664017:2296] 2025-04-04T16:52:50.399397Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:52:50.399645Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:52:50.399678Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Send handshake: worker# [1:7489501997034664017:2296] 2025-04-04T16:52:50.400585Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 5 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 7 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 44b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 9 Data: 66b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 11 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 12 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 13 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 14 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 15 Data: 58b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 16 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 17 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 18 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 19 Data: 76b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 20 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 21 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 22 Data: 61b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 
23 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 24 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 25 Data: 46b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 26 Data: 47b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 27 Data: 50b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 28 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 29 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 30 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 31 Data: 64b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.401604Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-04-04T16:52:50.402044Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997034664113:2357] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:52:50.402131Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.402593Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997034664113:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 
TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-04-04T16:52:50.428835Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997034664113:2357] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:50.428902Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.428959Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997034664110:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } >> TKeyValueTest::TestGetStatusWorks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-04-04T16:52:49.941979Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:52:49.947127Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001644/r3tmp/tmpdG1zhz/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:52:49.954138Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001644/r3tmp/tmpdG1zhz/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001644/r3tmp/tmpdG1zhz/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18306478629697543030 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T16:52:50.021540Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:50.021845Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T16:52:50.051131Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:461:2099] with ResourceBroker at [2:432:2098] 2025-04-04T16:52:50.051310Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:462:2100] 2025-04-04T16:52:50.051501Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:460:2339] with ResourceBroker at [1:431:2320] 2025-04-04T16:52:50.051573Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:463:2340] 2025-04-04T16:52:50.051676Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:50.051708Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-04-04T16:52:50.051738Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-04-04T16:52:50.051761Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
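
One plausible reading of the BS_PDISK block above: the config dump shows SectorMap# true, i.e. this test PDisk is backed by an in-memory sector map rather than a real file, which would be consistent with an open failure reported as "unknown reason, errno# 0" — nothing at the OS level ever produced an error code. That is an inference from the config dump, not a confirmed diagnosis. For contrast, a plain open of a genuinely missing path does set errno (ENOENT), as this standalone check shows (the path here is hypothetical):

    #include <cerrno>
    #include <cstdio>
    #include <cstring>

    int main() {
        // Hypothetical path; the real one in the log lives under a temp build root.
        const char* path = "/nonexistent/r3tmp/pdisk_1.dat";
        errno = 0;
        FILE* f = std::fopen(path, "r+b");
        if (!f) {
            // Expect errno=2 (No such file or directory), unlike the log's errno# 0.
            std::printf("open failed: errno=%d (%s)\n", errno, std::strerror(errno));
            return 1;
        }
        std::fclose(f);
        return 0;
    }
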
2025-04-04T16:52:50.051842Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:50.082574Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785570 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:50.083075Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:50.083172Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785570 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:50.083468Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:50.083604Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:50.083650Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:50.083768Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743785570 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:50.083954Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-04-04T16:52:50.083985Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-04-04T16:52:50.084042Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785570 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-04-04T16:52:50.084143Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-04-04T16:52:50.084931Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-04-04T16:52:50.085040Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:50.085200Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:50.085625Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:50.086002Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:50.086150Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-04-04T16:52:50.089465Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T16:52:50.089782Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T16:52:51.247792Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
Schedule Snapshot request 2025-04-04T16:52:51.247908Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T16:52:51.248633Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-04-04T16:52:51.563954Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> KqpCost::IndexLookup-useSink [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] |75.6%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> LocalTableWriter::WaitTxIds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! 
new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:88:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:88:2116] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... or TabletID 72057594037927937 is [29:55:2096] sender: [29:88:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:55:2096] sender: [29:89:2057] recipient: [29:87:2116] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:91:2057] recipient: [29:87:2116] !Reboot 72057594037927937 (actor [29:55:2096]) rebooted! !Reboot 72057594037927937 (actor [29:55:2096]) tablet resolver refreshed! 
new actor is[29:90:2117] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:144:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:56:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:73:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:86:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:89:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:90:2057] recipient: [30:88:2116] Leader for TabletID 72057594037927937 is [30:91:2117] sender: [30:92:2057] recipient: [30:88:2116] !Reboot 72057594037927937 (actor [30:55:2096]) rebooted! !Reboot 72057594037927937 (actor [30:55:2096]) tablet resolver refreshed! new actor is[30:91:2117] Leader for TabletID 72057594037927937 is [30:91:2117] sender: [30:145:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:56:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:73:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:89:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:92:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:93:2057] recipient: [31:91:2119] Leader for TabletID 72057594037927937 is [31:94:2120] sender: [31:95:2057] recipient: [31:91:2119] !Reboot 72057594037927937 (actor [31:55:2096]) rebooted! !Reboot 72057594037927937 (actor [31:55:2096]) tablet resolver refreshed! new actor is[31:94:2120] Leader for TabletID 72057594037927937 is [31:94:2120] sender: [31:148:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:56:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:73:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:89:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:92:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:93:2057] recipient: [32:91:2119] Leader for TabletID 72057594037927937 is [32:94:2120] sender: [32:95:2057] recipient: [32:91:2119] !Reboot 72057594037927937 (actor [32:55:2096]) rebooted! !Reboot 72057594037927937 (actor [32:55:2096]) tablet resolver refreshed! 
new actor is[32:94:2120] Leader for TabletID 72057594037927937 is [32:94:2120] sender: [32:148:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:56:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:73:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:56:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:73:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:56:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:73:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:75:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:78:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:79:2057] recipient: [35:77:2109] Leader for TabletID 72057594037927937 is [35:80:2110] sender: [35:81:2057] recipient: [35:77:2109] !Reboot 72057594037927937 (actor [35:55:2096]) rebooted! !Reboot 72057594037927937 (actor [35:55:2096]) tablet resolver refreshed! new actor is[35:80:2110] Leader for TabletID 72057594037927937 is [35:80:2110] sender: [35:134:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:56:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:73:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:75:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:78:2057] recipient: [36:77:2109] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:80:2110] sender: [36:81:2057] recipient: [36:77:2109] !Reboot 72057594037927937 (actor [36:55:2096]) rebooted! !Reboot 72057594037927937 (actor [36:55:2096]) tablet resolver refreshed! 
new actor is[36:80:2110] Leader for TabletID 72057594037927937 is [36:80:2110] sender: [36:134:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:56:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:73:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:76:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:79:2057] recipient: [37:78:2109] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:80:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:81:2110] sender: [37:82:2057] recipient: [37:78:2109] !Reboot 72057594037927937 (actor [37:55:2096]) rebooted! !Reboot 72057594037927937 (actor [37:55:2096]) tablet resolver refreshed! new actor is[37:81:2110] Leader for TabletID 72057594037927937 is [37:81:2110] sender: [37:135:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:56:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:73:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:78:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:81:2057] recipient: [38:80:2111] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:82:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:83:2112] sender: [38:84:2057] recipient: [38:80:2111] !Reboot 72057594037927937 (actor [38:55:2096]) rebooted! !Reboot 72057594037927937 (actor [38:55:2096]) tablet resolver refreshed! new actor is[38:83:2112] Leader for TabletID 72057594037927937 is [38:83:2112] sender: [38:137:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:56:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:73:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:78:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:81:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:82:2057] recipient: [39:80:2111] Leader for TabletID 72057594037927937 is [39:83:2112] sender: [39:84:2057] recipient: [39:80:2111] !Reboot 72057594037927937 (actor [39:55:2096]) rebooted! !Reboot 72057594037927937 (actor [39:55:2096]) tablet resolver refreshed! 
new actor is[39:83:2112] Leader for TabletID 72057594037927937 is [39:83:2112] sender: [39:137:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:53:2057] recipient: [40:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:53:2057] recipient: [40:50:2094] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:56:2057] recipient: [40:50:2094] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:73:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:79:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:82:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:83:2057] recipient: [40:81:2111] Leader for TabletID 72057594037927937 is [40:84:2112] sender: [40:85:2057] recipient: [40:81:2111] !Reboot 72057594037927937 (actor [40:55:2096]) rebooted! !Reboot 72057594037927937 (actor [40:55:2096]) tablet resolver refreshed! new actor is[40:84:2112] Leader for TabletID 72057594037927937 is [40:84:2112] sender: [40:138:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:53:2057] recipient: [41:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:53:2057] recipient: [41:49:2094] Leader for TabletID 72057594037927937 is [41:55:2096] sender: [41:56:2057] recipient: [41:49:2094] Leader for TabletID 72057594037927937 is [41:55:2096] sender: [41:73:2057] recipient: [41:14:2061] >> SystemView::AuthOwners_TableRange [GOOD] >> SystemView::AuthPermissions >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::PartitionStatsTtlFields >> TSchemeShardServerLess::StorageBilling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 17878, MsgBus: 28124 2025-04-04T16:52:34.642541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501929608687955:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:34.642742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015db/r3tmp/tmpPGeeN7/pdisk_1.dat 2025-04-04T16:52:37.599921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:38.812551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:39.871795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:39.872435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:40.034819Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501929608687955:2073];send_to=[0:7307199536658146131:7762515]; 
2025-04-04T16:52:40.034870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:40.207435Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:40.210710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:40.574954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17878, node 1 2025-04-04T16:52:41.152915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:41.152936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:41.153411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:41.153816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28124 TClient is connected to server localhost:28124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:42.879253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:42.894932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:42.906900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:43.106118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:43.348968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:43.459495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:52:48.324040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501989738231879:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:48.324159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:49.017534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.111508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.220830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.294607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.338979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.408231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.502758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501994033199705:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:49.502975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:49.503335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501994033199711:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:49.516207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:52:49.531965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501994033199713:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:52:49.632400Z node 1 :TX_PROXY ERROR: Actor# [1:7489501994033199774:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:52:50.637032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-04-04T16:52:47.775370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501984653166988:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:47.775705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0010a1/r3tmp/tmpe5Jg24/pdisk_1.dat 2025-04-04T16:52:48.687378Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:48.760581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:48.760689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:48.765878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6189 TServer::EnableGrpc on GrpcPort 20396, node 1 2025-04-04T16:52:49.314934Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:49.314955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:49.314962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:49.315070Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:52:49.925269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:49.944291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785570093 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-04-04T16:52:50.164648Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handshake: worker# [1:7489501997538069454:2359] 2025-04-04T16:52:50.164982Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:52:50.165210Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:52:50.165246Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Send handshake: worker# [1:7489501997538069454:2359] 2025-04-04T16:52:50.169411Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle 
NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.174578Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-04T16:52:50.174748Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-04T16:52:50.174921Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997538069457:2358] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:52:50.174979Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.175064Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997538069457:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-04T16:52:50.177468Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997538069457:2358] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:50.177528Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.177596Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-04T16:52:51.170422Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-04-04T16:52:51.170603Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-04-04T16:52:51.170729Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997538069457:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-04T16:52:51.181709Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501997538069457:2358] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:51.181772Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 
72075186224037888 } 2025-04-04T16:52:51.181820Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501997538069453:2358] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:88:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 29:77:2109] !Reboot 72057594037927937 (actor [29:55:2096]) rebooted! !Reboot 72057594037927937 (actor [29:55:2096]) tablet resolver refreshed! new actor is[29:80:2110] Leader for TabletID 72057594037927937 is [29:80:2110] sender: [29:134:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:53:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:56:2057] recipient: [30:50:2094] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:73:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:76:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:79:2057] recipient: [30:78:2109] Leader for TabletID 72057594037927937 is [30:55:2096] sender: [30:80:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:81:2110] sender: [30:82:2057] recipient: [30:78:2109] !Reboot 72057594037927937 (actor [30:55:2096]) rebooted! !Reboot 72057594037927937 (actor [30:55:2096]) tablet resolver refreshed! 
new actor is[30:81:2110] Leader for TabletID 72057594037927937 is [30:81:2110] sender: [30:135:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:53:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:56:2057] recipient: [31:50:2094] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:73:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:79:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:81:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:55:2096] sender: [31:83:2057] recipient: [31:82:2112] Leader for TabletID 72057594037927937 is [31:84:2113] sender: [31:85:2057] recipient: [31:82:2112] !Reboot 72057594037927937 (actor [31:55:2096]) rebooted! !Reboot 72057594037927937 (actor [31:55:2096]) tablet resolver refreshed! new actor is[31:84:2113] Leader for TabletID 72057594037927937 is [31:84:2113] sender: [31:138:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:53:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:56:2057] recipient: [32:50:2094] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:73:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:79:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:82:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:55:2096] sender: [32:83:2057] recipient: [32:81:2112] Leader for TabletID 72057594037927937 is [32:84:2113] sender: [32:85:2057] recipient: [32:81:2112] !Reboot 72057594037927937 (actor [32:55:2096]) rebooted! !Reboot 72057594037927937 (actor [32:55:2096]) tablet resolver refreshed! new actor is[32:84:2113] Leader for TabletID 72057594037927937 is [32:84:2113] sender: [32:138:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:53:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:56:2057] recipient: [33:50:2094] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:73:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:80:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:82:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:84:2057] recipient: [33:83:2112] Leader for TabletID 72057594037927937 is [33:85:2113] sender: [33:86:2057] recipient: [33:83:2112] !Reboot 72057594037927937 (actor [33:55:2096]) rebooted! !Reboot 72057594037927937 (actor [33:55:2096]) tablet resolver refreshed! 
new actor is[33:85:2113] Leader for TabletID 72057594037927937 is [33:85:2113] sender: [33:139:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:56:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:73:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:83:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:86:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:87:2057] recipient: [34:85:2115] Leader for TabletID 72057594037927937 is [34:88:2116] sender: [34:89:2057] recipient: [34:85:2115] !Reboot 72057594037927937 (actor [34:55:2096]) rebooted! !Reboot 72057594037927937 (actor [34:55:2096]) tablet resolver refreshed! new actor is[34:88:2116] Leader for TabletID 72057594037927937 is [34:88:2116] sender: [34:142:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:56:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:73:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:83:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:86:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:87:2057] recipient: [35:85:2115] Leader for TabletID 72057594037927937 is [35:88:2116] sender: [35:89:2057] recipient: [35:85:2115] !Reboot 72057594037927937 (actor [35:55:2096]) rebooted! !Reboot 72057594037927937 (actor [35:55:2096]) tablet resolver refreshed! new actor is[35:88:2116] Leader for TabletID 72057594037927937 is [35:88:2116] sender: [35:142:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:56:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:73:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:84:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:87:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:88:2057] recipient: [36:86:2115] Leader for TabletID 72057594037927937 is [36:89:2116] sender: [36:90:2057] recipient: [36:86:2115] !Reboot 72057594037927937 (actor [36:55:2096]) rebooted! !Reboot 72057594037927937 (actor [36:55:2096]) tablet resolver refreshed! 
new actor is[36:89:2116] Leader for TabletID 72057594037927937 is [36:89:2116] sender: [36:143:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:56:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:73:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:87:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:90:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:91:2057] recipient: [37:89:2118] Leader for TabletID 72057594037927937 is [37:92:2119] sender: [37:93:2057] recipient: [37:89:2118] !Reboot 72057594037927937 (actor [37:55:2096]) rebooted! !Reboot 72057594037927937 (actor [37:55:2096]) tablet resolver refreshed! new actor is[37:92:2119] Leader for TabletID 72057594037927937 is [37:92:2119] sender: [37:146:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:56:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:73:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:87:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:90:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:91:2057] recipient: [38:89:2118] Leader for TabletID 72057594037927937 is [38:92:2119] sender: [38:93:2057] recipient: [38:89:2118] !Reboot 72057594037927937 (actor [38:55:2096]) rebooted! !Reboot 72057594037927937 (actor [38:55:2096]) tablet resolver refreshed! new actor is[38:92:2119] Leader for TabletID 72057594037927937 is [38:92:2119] sender: [38:146:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:56:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:73:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:88:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:90:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:92:2057] recipient: [39:91:2118] Leader for TabletID 72057594037927937 is [39:93:2119] sender: [39:94:2057] recipient: [39:91:2118] !Reboot 72057594037927937 (actor [39:55:2096]) rebooted! !Reboot 72057594037927937 (actor [39:55:2096]) tablet resolver refreshed! 
new actor is[39:93:2119] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:53:2057] recipient: [40:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:53:2057] recipient: [40:50:2094] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:56:2057] recipient: [40:50:2094] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:73:2057] recipient: [40:14:2061] |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> LocalTableWriter::ConsistentWrite [GOOD] >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2025-04-04T16:52:26.294087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:26.297551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:26.297970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019e6/r3tmp/tmpbu0fxA/pdisk_1.dat 2025-04-04T16:52:29.616543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:29.739411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:52:29.743011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:29.744791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T16:52:29.785248Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-04T16:52:29.785309Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:408:2403] Proxy marker# C1 2025-04-04T16:52:29.885705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:29.904760Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-04T16:52:30.045903Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:334:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-04T16:52:30.046060Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-04T16:52:30.046221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:30.046307Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T16:52:30.046359Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.046442Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T16:52:30.046603Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.047208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-04T16:52:30.047790Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-04T16:52:30.048097Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-04T16:52:30.048545Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-04T16:52:30.048927Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-04T16:52:30.050185Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-04T16:52:30.074697Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-04T16:52:30.075004Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:334:2374]) 2025-04-04T16:52:30.075351Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-04T16:52:30.076631Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-04T16:52:30.076814Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Execute 2025-04-04T16:52:30.076890Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.077025Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Complete 2025-04-04T16:52:30.077336Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443339776 } 2025-04-04T16:52:30.077430Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-04T16:52:30.077497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:30.077752Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-04T16:52:30.110515Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T16:52:30.110870Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.111934Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-04T16:52:30.111973Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-04T16:52:30.112001Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-04T16:52:30.112322Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-04T16:52:30.123347Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-04T16:52:30.130689Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-04T16:52:30.216034Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-04T16:52:30.217136Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-04T16:52:30.219160Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-04T16:52:30.220329Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-04T16:52:30.221440Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:408:2403] Proxy 2025-04-04T16:52:30.316463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-04T16:52:30.370057Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-04T16:52:30.371241Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-04T16:52:30.371551Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-04T16:52:30.372118Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-04T16:52:30.397894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:30.418739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-04T16:52:30.449311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-04T16:52:30.483437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:30.484923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:52:30.484991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:30.485844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-04T16:52:30.510168Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-04T16:52:30.775928Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T16:52:30.776588Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-04T16:52:30.787488Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-04T16:52:30.787863Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-04T16:52:30.789033Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-04T16:52:30.803332Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-04T16:52:30.809253Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-04T16:52:30.830955Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-04T16:52:30.850744Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-04T16:52:30.852588Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-04T16:52:30.853469Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-04T16:52:30.854025Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-04T16:52:30.876572Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-04T16:52:30.876982Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... geResult::Execute(72075186224037888 OK) 2025-04-04T16:52:54.154824Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-04-04T16:52:54.155116Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T16:52:54.155196Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-04T16:52:54.155247Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-04T16:52:54.155295Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-04-04T16:52:54.155720Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-04-04T16:52:54.171123Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:52:54.172229Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2025-04-04T16:52:54.172286Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 step# 3500 Status# 16 SEND to# [2:417:2411] Proxy marker# C1 2025-04-04T16:52:54.183561Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-04-04T16:52:54.275517Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715666 has been planned 2025-04-04T16:52:54.276158Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2025-04-04T16:52:54.276195Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2025-04-04T16:52:54.277272Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-04-04T16:52:54.278328Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715666 marker# C2 2025-04-04T16:52:54.279088Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 stepId# 3500 Status# 17 
SEND EvProposeTransactionStatus to# [2:417:2411] Proxy 2025-04-04T16:52:54.281420Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:52:54.294228Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715666 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T16:52:54.294572Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:52:54.295318Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:52:54.295934Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:52:54.296241Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-04T16:52:54.297357Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-04T16:52:54.298168Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:52:54.299424Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:52:54.299810Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-04T16:52:54.303299Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:52:54.305674Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-04T16:52:54.305740Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:52:54.305933Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-04T16:52:54.306014Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-04T16:52:54.306048Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-04T16:52:54.306074Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2025-04-04T16:52:54.306109Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 acknowledged 2025-04-04T16:52:54.307080Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:52:54.307144Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:52:54.307216Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2025-04-04T16:52:54.307325Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:52:54.307518Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2025-04-04T16:52:54.309845Z node 2 :TX_DATASHARD 
DEBUG: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2025-04-04T16:52:54.309898Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T16:52:54.310757Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-04-04T16:52:54.310866Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 2, subscribers: 1 2025-04-04T16:52:54.311495Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2025-04-04T16:52:54.311928Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:52:54.312768Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.312 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-04T16:52:54.312870Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.312 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-04T16:52:54.312932Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.312 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-04T16:52:54.312976Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.312 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-04T16:52:54.313020Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 TRACE ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-04T16:52:54.313080Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-04T16:52:54.313140Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-04T16:52:54.313183Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-04T16:52:54.313229Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 INFO ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) 
[core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-04T16:52:54.313351Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 NOTE ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [common provider] yql_provider_gateway.cpp:21:
<main>: Info: Execution, code: 1060 2025-04-04T16:52:54.313398Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 NOTE ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [common provider] yql_provider_gateway.cpp:21:
<main>:1:12: Info: Executing DROP TABLE 2025-04-04T16:52:54.313432Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=NTE3NjlhNzItNTRiZTM0Ni02OTVmMGE3MC05YWE0Mzg4Nw== 2025-04-04 16:52:54.313 NOTE ydb-core-tx-datashard-ut_minstep(pid=130786, tid=0x00007FE29D6B0CC0) [common provider] yql_provider_gateway.cpp:21:
<main>: Info: Success, code: 4 2025-04-04T16:52:54.333140Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T16:52:54.334173Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-04T16:52:54.339877Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:52:54.343629Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-04T16:52:54.344936Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-04-04T16:52:54.345288Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-04-04T16:52:54.345989Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-04-04T16:52:54.347539Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-04-04T16:52:54.348241Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:52:53.450782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:52:53.450886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:53.450931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:52:53.450980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:52:53.451018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:52:53.451046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:52:53.451128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:53.451200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:52:53.451548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Execute 2025-04-04T16:52:53.534102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:52:53.534158Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:53.545570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:52:53.545842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:52:53.546007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:52:53.558743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:52:53.559039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:52:53.559655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:53.560516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:52:53.565564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:53.566832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:53.566917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:53.567098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:52:53.567138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:53.567187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:52:53.567306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.573764Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:52:53.701900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:53.702145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.702391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:52:53.702628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:52:53.702685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.709731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:53.709880Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:52:53.710091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.710152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:52:53.710192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:52:53.710226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:52:53.717612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.717817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:52:53.717966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:52:53.733067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.733128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.733180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:53.733225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:52:53.738030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:52:53.740720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:52:53.740897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:52:53.741881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:53.742008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:53.742079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:53.743663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:52:53.743730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:53.743890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-04T16:52:53.743975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:52:53.746117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:53.746157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:53.746401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:53.746445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:52:53.746759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:53.746803Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:52:53.746891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:53.746926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:53.746982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:53.747008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:53.747048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:52:53.747090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:53.747121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:53.747151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:52:53.747226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:53.747266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:52:53.747297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:52:53.748987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:53.749095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:53.749136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
06:0, at schemeshard: 72057594046678944 2025-04-04T16:52:55.935684Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-04T16:52:55.935770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T16:52:55.935797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:55.935827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T16:52:55.935851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:55.935879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-04T16:52:55.935929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:55.935959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T16:52:55.935981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-04T16:52:55.936075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T16:52:55.937703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T16:52:55.937892Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 2025-04-04T16:52:55.938057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-04T16:52:55.938285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T16:52:55.939406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2025-04-04T16:52:55.944573Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2025-04-04T16:52:55.945706Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 2025-04-04T16:52:55.945843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-04T16:52:55.946062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409548 2025-04-04T16:52:55.947200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186234409547 2025-04-04T16:52:55.947649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T16:52:55.947776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T16:52:55.948926Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:52:55.948978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:52:55.949097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T16:52:55.949581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:52:55.949627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:52:55.949691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:55.951752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T16:52:55.951792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2025-04-04T16:52:55.951847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-04T16:52:55.951860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-04-04T16:52:55.951908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T16:52:55.951941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-04-04T16:52:55.953655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T16:52:55.953755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T16:52:55.954022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T16:52:55.954058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T16:52:55.954505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T16:52:55.954614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T16:52:55.954651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:926:2788] TestWaitNotification: OK eventTxId 106 2025-04-04T16:52:55.955253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:55.955431Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 200us result status StatusPathDoesNotExist 2025-04-04T16:52:55.955574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:52:55.955957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:55.956100Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 162us result status StatusPathDoesNotExist 2025-04-04T16:52:55.956185Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:52:55.956567Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:55.956708Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 144us result status StatusSuccess 2025-04-04T16:52:55.957033Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2025-04-04T16:52:55.957660Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2025-04-04T16:52:55.957722Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2025-04-04T16:52:55.957760Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2025-04-04T16:52:55.957794Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 >> KqpErrors::ProposeError >> KqpErrors::ResolveTableError >> SystemView::Describe [GOOD] >> SystemView::DescribeSystemFolder >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-04-04T16:52:48.442936Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501987087042125:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:48.577118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00108c/r3tmp/tmp3hZFh5/pdisk_1.dat 2025-04-04T16:52:49.353386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:49.355063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:49.355148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:49.360125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22731 TServer::EnableGrpc on GrpcPort 25323, node 1 2025-04-04T16:52:50.070879Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:50.070896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:50.070902Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:50.071013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22731 WaitRootIsUp 'Root'... 
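The WaitRootIsUp 'Root'... line above (answered by the TClient::Ls request/response and the "WaitRootIsUp 'Root' success." line that follow) is a poll-until-ready loop around the testlib scheme client's Ls call. A minimal, self-contained C++ sketch of that pattern, assuming a hypothetical LsProbe callback in place of the real TClient; the timeout, poll step, and printed strings are illustrative only, not the actual testlib implementation:

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <string>
    #include <thread>

    // Hypothetical stand-in for the testlib scheme client: returns true once
    // an Ls-style describe of the given path succeeds.
    using LsProbe = std::function<bool(const std::string&)>;

    // Poll the probe until it succeeds or the deadline expires, mirroring the
    // "WaitRootIsUp 'Root'..." / "WaitRootIsUp 'Root' success." log lines.
    bool WaitRootIsUp(const LsProbe& ls, const std::string& path,
                      std::chrono::milliseconds timeout,
                      std::chrono::milliseconds step = std::chrono::milliseconds(50)) {
        const auto deadline = std::chrono::steady_clock::now() + timeout;
        std::cout << "WaitRootIsUp '" << path << "'...\n";
        while (std::chrono::steady_clock::now() < deadline) {
            if (ls(path)) {
                std::cout << "WaitRootIsUp '" << path << "' success.\n";
                return true;
            }
            std::this_thread::sleep_for(step);  // back off before the next Ls request
        }
        return false;  // root never came up within the timeout
    }

    int main() {
        int attempts = 0;
        // Fake probe that "comes up" on the third Ls request.
        LsProbe probe = [&](const std::string&) { return ++attempts >= 3; };
        return WaitRootIsUp(probe, "Root", std::chrono::seconds(5)) ? 0 : 1;
    }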
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:50.632131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:50.649812Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:50.656984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785570814 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-04T16:52:50.832039Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handshake: worker# [1:7489501995676977292:2358] 2025-04-04T16:52:50.832317Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:52:50.832594Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:52:50.832638Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Send handshake: worker# [1:7489501995676977292:2358] 2025-04-04T16:52:50.833282Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.860855Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-04T16:52:50.861112Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-04-04T16:52:50.861292Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501995676977295:2357] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:52:50.861340Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.861455Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501995676977295:2357] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-04T16:52:50.879116Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501995676977295:2357] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:50.879226Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.879282Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501995676977291:2357] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } >> KqpErrors::ProposeResultLost_RwTx+UseSink >> KqpNewEngine::Select1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-04-04T16:52:48.892767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501985863067379:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:48.893185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ff7/r3tmp/tmphcVOwA/pdisk_1.dat 2025-04-04T16:52:49.633510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:49.633661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:49.639435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:49.774343Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:29666 TServer::EnableGrpc on GrpcPort 26782, node 1 2025-04-04T16:52:50.185029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:50.185052Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:50.185076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:50.185178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29666 WaitRootIsUp 'Root'... 
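The LocalTableWriter records above all follow one lifecycle: TEvData delivers records by Offset, the writer asks for their bodies with TEvRequestRecords, forwards them to the partition writer as TEvRecords, and only after TEvApplyReplicationChangesResult reports STATUS_OK does it drop them with TEvRemoveRecords { Records [1,2,3] }. A minimal, self-contained C++ sketch of that ack-gated bookkeeping, using a toy RecordQueue in place of the real actor protocol (the names and structure are assumptions for illustration, not YDB's API):

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <vector>

    // Each pending change record is keyed by its monotonically increasing
    // order, matching the "Order: N BodySize: ..." pairs in the log above.
    struct PendingRecord {
        uint64_t order;
        uint64_t bodySize;
    };

    // Toy ack-gated queue: records stay pending from the request step until
    // the partition writer acknowledges them, at which point removal fires.
    class RecordQueue {
    public:
        void Enqueue(uint64_t order, uint64_t bodySize) {
            Pending_[order] = PendingRecord{order, bodySize};
        }

        // Called on a STATUS_OK acknowledgment: everything up to and
        // including maxAckedOrder may be removed from the queue.
        std::vector<uint64_t> Ack(uint64_t maxAckedOrder) {
            std::vector<uint64_t> removed;
            for (auto it = Pending_.begin();
                 it != Pending_.end() && it->first <= maxAckedOrder;
                 it = Pending_.erase(it)) {
                removed.push_back(it->first);
            }
            return removed;
        }

    private:
        std::map<uint64_t, PendingRecord> Pending_;  // ordered by record order
    };

    int main() {
        RecordQueue queue;
        for (uint64_t order : {1, 2, 3}) {
            queue.Enqueue(order, 48);  // BodySize: 48, as in the log
        }
        std::cout << "Records [";
        bool first = true;
        for (uint64_t order : queue.Ack(3)) {
            std::cout << (first ? "" : ",") << order;
            first = false;
        }
        std::cout << "]\n";  // prints "Records [1,2,3]"
        return 0;
    }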
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:50.677580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:50.705365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785570814 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-04-04T16:52:50.929047Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handshake: worker# [1:7489501994453002445:2294] 2025-04-04T16:52:50.929334Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:52:50.929594Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:52:50.929649Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Send handshake: worker# [1:7489501994453002445:2294] 2025-04-04T16:52:50.930354Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 48b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.936316Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-04-04T16:52:50.936496Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-04-04T16:52:50.936681Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501994453002542:2355] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:52:50.936716Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.936829Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501994453002542:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-04-04T16:52:50.939135Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501994453002542:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:50.939211Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.939264Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-04-04T16:52:50.942690Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 4 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.943191Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 5 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 7 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.943671Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2025-04-04T16:52:50.943766Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-04-04T16:52:50.943900Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501994453002542:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-04T16:52:50.945558Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501994453002542:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:50.945613Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.945646Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-04-04T16:52:50.947396Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 9 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T16:52:50.950527Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-04-04T16:52:50.950684Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501994453002542:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-04-04T16:52:50.958850Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7489501994453002542:2355] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T16:52:50.958940Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T16:52:50.958983Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2025-04-04T16:52:50.962574Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7489501994453002539:2355] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 11 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:52:57.107319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:52:57.107433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:57.107492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:52:57.107547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:52:57.107594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:52:57.107628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:52:57.107702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:57.107783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:52:57.108156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:52:57.307833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:52:57.307899Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:57.333642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:52:57.333932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:52:57.334127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:52:57.353182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:52:57.353532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:52:57.354262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.355250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:52:57.361042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.362372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:57.362454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.362699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:52:57.362754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:57.362815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:52:57.362966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.369569Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:52:57.491556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:57.491799Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.492027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:52:57.492262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:52:57.492319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.495552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.495677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:52:57.495875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.495950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:52:57.495987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:52:57.496024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:52:57.501756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.501849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:52:57.501888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:52:57.509551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.509626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.509694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.509751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.513857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:52:57.524790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:52:57.525004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:52:57.526079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.526266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:57.526416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.526733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:52:57.526790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.526972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:52:57.527064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:52:57.532196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:57.532262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:57.532458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.532503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:52:57.532886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.532940Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:52:57.533043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:57.533080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.533166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:57.533210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.533265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:52:57.533315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.533356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:57.533392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:52:57.533491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:57.533536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:52:57.533573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:52:57.535577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:57.535729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:57.535775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 06:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.436506Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-04T16:52:58.436651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T16:52:58.436696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:58.436756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T16:52:58.436807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:58.436851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-04T16:52:58.436897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:58.436936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T16:52:58.436969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-04T16:52:58.437116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T16:52:58.437607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T16:52:58.439233Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409549 2025-04-04T16:52:58.439575Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409551 2025-04-04T16:52:58.439764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-04T16:52:58.440093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T16:52:58.440929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409549 2025-04-04T16:52:58.444819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-04T16:52:58.445072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:52:58.446033Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409550 Forgetting tablet 72075186234409551 Forgetting tablet 72075186234409550 2025-04-04T16:52:58.448414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 
72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T16:52:58.448616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T16:52:58.450851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T16:52:58.451406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:52:58.451459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:52:58.451581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T16:52:58.451902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:52:58.451946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:52:58.452008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:58.455296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T16:52:58.455354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549 2025-04-04T16:52:58.455818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-04T16:52:58.455851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2025-04-04T16:52:58.455930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T16:52:58.455965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2025-04-04T16:52:58.457540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T16:52:58.457658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T16:52:58.457958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T16:52:58.458012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T16:52:58.458540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T16:52:58.458674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T16:52:58.458731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:931:2794] TestWaitNotification: OK eventTxId 106 2025-04-04T16:52:58.459506Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:58.459703Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 230us result status StatusPathDoesNotExist 2025-04-04T16:52:58.459892Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:52:58.460527Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:58.460685Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 167us result status StatusPathDoesNotExist 2025-04-04T16:52:58.460811Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:52:58.461289Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:58.461474Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 155us result status StatusSuccess 2025-04-04T16:52:58.461855Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } 
ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2025-04-04T16:52:58.462704Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2025-04-04T16:52:58.462787Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2025-04-04T16:52:58.462839Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2025-04-04T16:52:58.462878Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552
|75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut
|75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:52:57.234778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:52:57.234886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:57.234935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:52:57.234981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:52:57.235021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:52:57.235049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:52:57.235105Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:57.235186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:52:57.235515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:52:57.374364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:52:57.374418Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:57.399808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:52:57.400107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:52:57.400273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:52:57.443754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:52:57.444089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:52:57.444828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.450441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:52:57.483463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.484912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:57.484996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.485212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:52:57.485261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:57.485315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:52:57.485455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.503156Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:52:57.716711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:57.716947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.717175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:52:57.717388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-04-04T16:52:57.717446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.720639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.720783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:52:57.720986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.721044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:52:57.721084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:52:57.721129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:52:57.726179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.726226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:52:57.726267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:52:57.729404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.729444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.729487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.729531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.732702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:52:57.734987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:52:57.735134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:52:57.735987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.736112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:57.736156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.736408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:52:57.736453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.736590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:52:57.736662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:52:57.743688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:57.743748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:57.743983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.744029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:52:57.744355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.744399Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:52:57.744497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:57.744532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.744595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:57.744635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.744683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:52:57.744728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.744767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:57.744798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:52:57.744863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:57.744899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:52:57.744933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:52:57.746906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:57.747029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:57.747079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
15 2025-04-04T16:52:58.234580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:52:58.234686Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:617:2545], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:52:58.234773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409549 2025-04-04T16:52:58.234804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409549, txId: 0, path id: [OwnerId: 72075186234409549, LocalPathId: 1] 2025-04-04T16:52:58.234933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409549 2025-04-04T16:52:58.234990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:711:2613], at schemeshard: 72075186234409549, txId: 0, path id: 1 2025-04-04T16:52:58.235867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0 2025-04-04T16:52:58.236511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2025-04-04T16:52:58.236648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-04-04T16:52:58.236972Z node 1 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2025-04-04T16:52:58.237199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-04-04T16:52:58.237246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-04T16:52:58.237378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-04-04T16:52:58.237430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-04-04T16:52:58.237471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240 2025-04-04T16:52:58.238003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T16:52:58.238060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:52:58.240304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.240442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.240485Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-04T16:52:58.240595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T16:52:58.240639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:58.240681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T16:52:58.240710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:58.240742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-04T16:52:58.240811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T16:52:58.240861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T16:52:58.240895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-04T16:52:58.240970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T16:52:58.243127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T16:52:58.243176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T16:52:58.243729Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T16:52:58.243832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T16:52:58.243884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:851:2733] TestWaitNotification: OK eventTxId 106 2025-04-04T16:52:58.244523Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:58.244701Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 207us result status StatusSuccess 2025-04-04T16:52:58.245035Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 
TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:58.246020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-04-04T16:52:58.246202Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 159us result status StatusSuccess 2025-04-04T16:52:58.246634Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2025-04-04T16:52:58.247241Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:52:58.247416Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 175us result status StatusSuccess 2025-04-04T16:52:58.247718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:58.248347Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3
|75.7%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|75.7%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut
>> Cdc::DocApi[PqRunner]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:52:56.525448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:52:56.531607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:56.532252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:52:56.532568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:52:56.532985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:52:56.533587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:52:56.534275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:56.534915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:52:56.541410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:52:57.080357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:52:57.080512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:57.100910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:52:57.101175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:52:57.101334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:52:57.110188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:52:57.110496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:52:57.111213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.112120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:52:57.117161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.118553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:57.118647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.118859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:52:57.118902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:57.118957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:52:57.119085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.125720Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:52:57.359234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:57.359466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.359670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:52:57.359883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:52:57.359936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.363166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.363386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:52:57.363586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.363669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:52:57.363766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:52:57.363801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:52:57.365762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.365824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:52:57.365875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:52:57.367728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.367775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.367820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.367866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.371705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:52:57.375430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:52:57.375606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:52:57.376605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:57.376736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:57.376784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.377051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:52:57.377103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:57.377247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-04T16:52:57.377339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:52:57.379459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:57.379506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:57.379658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:57.379713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:52:57.380033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:57.380081Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:52:57.380179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:57.380232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.380298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:57.380334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.380368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:52:57.380414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:57.380450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:57.380481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:52:57.380554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:57.380586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:52:57.380610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:52:57.382550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:57.382679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:57.382733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-04T16:52:58.139776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-04T16:52:58.139977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:52:58.148290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-04T16:52:58.148435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-04T16:52:58.148766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:58.148912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:58.148974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-04T16:52:58.149245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-04T16:52:58.149306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-04T16:52:58.149419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T16:52:58.149526Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:613:2542], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-04T16:52:58.155832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:58.155943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T16:52:58.156157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:58.156192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at 
schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-04T16:52:58.156660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.156778Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-04T16:52:58.156866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-04T16:52:58.159134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:52:58.159410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:52:58.159648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T16:52:58.159985Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-04T16:52:58.160182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-04T16:52:58.160501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T16:52:58.170105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.170167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T16:52:58.170296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T16:52:58.170356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:52:58.170402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T16:52:58.170431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:52:58.170465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-04T16:52:58.170510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:52:58.170553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T16:52:58.170597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T16:52:58.170740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T16:52:58.171165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T16:52:58.176791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T16:52:58.177024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T16:52:58.178699Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 
72057594046678944 2025-04-04T16:52:58.178820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T16:52:58.178901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:768:2650] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-04T16:52:58.184800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:58.185463Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-04-04T16:52:58.185678Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-04-04T16:52:58.186197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-04T16:52:58.186957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-04-04T16:52:58.198847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:58.199534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-04-04T16:52:58.204399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:58.204553Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-04-04T16:52:58.204589Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-04-04T16:52:58.204734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject 
Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-04T16:52:58.204786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-04-04T16:52:58.208844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:58.209002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 >> KqpNewEngine::LocksSingleShard |75.7%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> KqpNewEngine::ReadAfterWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:52:58.013299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:52:58.013430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:58.013485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:52:58.013540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:52:58.013583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:52:58.013627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:52:58.013699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:58.013770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:52:58.014142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:52:58.105773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:52:58.105828Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:58.118921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:52:58.119193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:52:58.119354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:52:58.130761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:52:58.131069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:52:58.131777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:58.132600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:52:58.138104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:58.139541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:58.139615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:58.139833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:52:58.139892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:58.139957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:52:58.140106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.147403Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:52:58.301822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:58.302058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.302281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:52:58.302491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:52:58.302542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.319293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:58.319478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:52:58.319693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.319769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:52:58.319821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:52:58.319858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:52:58.325576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.325651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:52:58.325696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:52:58.336942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.337137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.337237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:58.337393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.360259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:52:58.363746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:52:58.364013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:52:58.371211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:58.371629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:58.371805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:58.375095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:52:58.375263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:58.376237Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:52:58.376967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:52:58.387995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:58.388143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:58.388990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:58.389167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:52:58.395376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.395505Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:52:58.396013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:58.396049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.396809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:58.397017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.397167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:52:58.397431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.397613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:58.397745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:52:58.402352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:58.402536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:52:58.402688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:52:58.409613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:58.410333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:58.410493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409550 2025-04-04T16:53:00.533796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-04-04T16:53:00.534617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409550 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-04-04T16:53:00.547978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409551, partId: 0 2025-04-04T16:53:00.548094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409551 2025-04-04T16:53:00.548128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-04-04T16:53:00.548160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409551 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2025-04-04T16:53:00.548188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2025-04-04T16:53:00.575866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:53:00.667486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:53:00.673605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:53:00.675128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:53:00.675476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:53:00.675840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-04-04T16:53:00.676795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-04-04T16:53:00.677570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:53:00.734018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-04T16:53:00.739396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-04T16:53:00.747326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:53:00.748119Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:53:00.748860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-04T16:53:00.751052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-04T16:53:00.751678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-04-04T16:53:00.752679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T16:53:00.753611Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:613:2542], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-04-04T16:53:00.762989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:53:00.766229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T16:53:00.774898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:53:00.774948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-04T16:53:00.776943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:53:00.777335Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-04-04T16:53:00.777373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-04-04T16:53:00.790017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:53:00.791046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:53:00.791402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T16:53:00.792074Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-04T16:53:00.792751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-04T16:53:00.793503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T16:53:00.833581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:53:00.833926Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T16:53:00.835877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T16:53:00.836985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:53:00.840756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T16:53:00.841486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:53:00.841832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-04T16:53:00.841879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:53:00.841919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T16:53:00.842324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T16:53:00.843623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T16:53:00.851960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T16:53:00.863792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T16:53:00.865014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T16:53:00.871246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T16:53:00.871890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T16:53:00.872238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:768:2650] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-04T16:53:00.888767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:53:00.891663Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-04-04T16:53:00.892017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-04-04T16:53:00.893315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, 
explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-04-04T16:53:00.893906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-04-04T16:53:00.910530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:53:00.911493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService >> KqpNewEngine::PkRangeSelect1 >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> TKeyValueTest::TestConcatToLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 2187, MsgBus: 21133 2025-04-04T16:52:49.879506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501990734763057:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:49.879571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015cf/r3tmp/tmpMxaIXR/pdisk_1.dat 2025-04-04T16:52:50.355261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:50.387019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:50.387123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:50.391622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2187, node 1 2025-04-04T16:52:50.462023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:50.462059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:50.462067Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:50.462222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21133 TClient is connected to server localhost:21133 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:51.182357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:51.245752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:51.527727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:51.709210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:51.969122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:55.053641Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501990734763057:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:55.053721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:57.651357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502025094503020:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:57.651503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:57.752388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:57.800981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:52:57.852198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:52:57.922541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:52:57.986712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:52:58.079219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:52:58.152098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502029389470837:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:58.152171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:58.152495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502029389470842:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:58.156402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:52:58.175591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502029389470844:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:52:58.242147Z node 1 :TX_PROXY ERROR: Actor# [1:7489502029389470901:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:04.425241Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785582952, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! 
new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:88:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:91:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:92:2057] recipient: [9:90:2120] Leader for TabletID 72057594037927937 is [9:93:2121] sender: [9:94:2057] recipient: [9:90:2120] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:93:2121] Leader for TabletID 72057594037927937 is [9:93:2121] sender: [9:147:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:88:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:91:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:92:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:93:2121] sender: [10:94:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:93:2121] Leader for TabletID 72057594037927937 is [10:93:2121] sender: [10:147:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:90:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:93:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:94:2057] recipient: [11:92:2122] Leader for TabletID 72057594037927937 is [11:95:2123] sender: [11:96:2057] recipient: [11:92:2122] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:95:2123] Leader for TabletID 72057594037927937 is [11:95:2123] sender: [11:149:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:1 ... etID 72057594037927937 is [35:98:2124] sender: [35:152:2057] recipient: [35:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:55:2057] recipient: [36:52:2096] Leader for TabletID 72057594037927937 is [36:57:2098] sender: [36:58:2057] recipient: [36:52:2096] Leader for TabletID 72057594037927937 is [36:57:2098] sender: [36:75:2057] recipient: [36:17:2064] !Reboot 72057594037927937 (actor [36:57:2098]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [36:57:2098] sender: [36:94:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:57:2098] sender: [36:97:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [36:57:2098] sender: [36:98:2057] recipient: [36:96:2123] Leader for TabletID 72057594037927937 is [36:99:2124] sender: [36:100:2057] recipient: [36:96:2123] !Reboot 72057594037927937 (actor [36:57:2098]) rebooted! !Reboot 72057594037927937 (actor [36:57:2098]) tablet resolver refreshed! new actor is[36:99:2124] Leader for TabletID 72057594037927937 is [36:99:2124] sender: [36:153:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:53:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:55:2057] recipient: [37:53:2096] Leader for TabletID 72057594037927937 is [37:57:2098] sender: [37:58:2057] recipient: [37:53:2096] Leader for TabletID 72057594037927937 is [37:57:2098] sender: [37:75:2057] recipient: [37:17:2064] !Reboot 72057594037927937 (actor [37:57:2098]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:57:2098] sender: [37:97:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:57:2098] sender: [37:100:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [37:57:2098] sender: [37:101:2057] recipient: [37:99:2126] Leader for TabletID 72057594037927937 is [37:102:2127] sender: [37:103:2057] recipient: [37:99:2126] !Reboot 72057594037927937 (actor [37:57:2098]) rebooted! !Reboot 72057594037927937 (actor [37:57:2098]) tablet resolver refreshed! new actor is[37:102:2127] Leader for TabletID 72057594037927937 is [37:102:2127] sender: [37:156:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:53:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:55:2057] recipient: [38:53:2096] Leader for TabletID 72057594037927937 is [38:57:2098] sender: [38:58:2057] recipient: [38:53:2096] Leader for TabletID 72057594037927937 is [38:57:2098] sender: [38:75:2057] recipient: [38:17:2064] !Reboot 72057594037927937 (actor [38:57:2098]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [38:57:2098] sender: [38:97:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:57:2098] sender: [38:100:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [38:57:2098] sender: [38:101:2057] recipient: [38:99:2126] Leader for TabletID 72057594037927937 is [38:102:2127] sender: [38:103:2057] recipient: [38:99:2126] !Reboot 72057594037927937 (actor [38:57:2098]) rebooted! !Reboot 72057594037927937 (actor [38:57:2098]) tablet resolver refreshed! new actor is[38:102:2127] Leader for TabletID 72057594037927937 is [38:102:2127] sender: [38:156:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:52:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:55:2057] recipient: [39:52:2096] Leader for TabletID 72057594037927937 is [39:57:2098] sender: [39:58:2057] recipient: [39:52:2096] Leader for TabletID 72057594037927937 is [39:57:2098] sender: [39:75:2057] recipient: [39:17:2064] !Reboot 72057594037927937 (actor [39:57:2098]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [39:57:2098] sender: [39:98:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:57:2098] sender: [39:101:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [39:57:2098] sender: [39:102:2057] recipient: [39:100:2126] Leader for TabletID 72057594037927937 is [39:103:2127] sender: [39:104:2057] recipient: [39:100:2126] !Reboot 72057594037927937 (actor [39:57:2098]) rebooted! !Reboot 72057594037927937 (actor [39:57:2098]) tablet resolver refreshed! new actor is[39:103:2127] Leader for TabletID 72057594037927937 is [39:103:2127] sender: [39:157:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:53:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:55:2057] recipient: [40:53:2096] Leader for TabletID 72057594037927937 is [40:57:2098] sender: [40:58:2057] recipient: [40:53:2096] Leader for TabletID 72057594037927937 is [40:57:2098] sender: [40:75:2057] recipient: [40:17:2064] !Reboot 72057594037927937 (actor [40:57:2098]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [40:57:2098] sender: [40:99:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:57:2098] sender: [40:102:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [40:57:2098] sender: [40:103:2057] recipient: [40:101:2127] Leader for TabletID 72057594037927937 is [40:104:2128] sender: [40:105:2057] recipient: [40:101:2127] !Reboot 72057594037927937 (actor [40:57:2098]) rebooted! !Reboot 72057594037927937 (actor [40:57:2098]) tablet resolver refreshed! new actor is[40:104:2128] Leader for TabletID 72057594037927937 is [40:104:2128] sender: [40:124:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:55:2057] recipient: [41:51:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:55:2057] recipient: [41:51:2096] Leader for TabletID 72057594037927937 is [41:57:2098] sender: [41:58:2057] recipient: [41:51:2096] Leader for TabletID 72057594037927937 is [41:57:2098] sender: [41:75:2057] recipient: [41:17:2064] !Reboot 72057594037927937 (actor [41:57:2098]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [41:57:2098] sender: [41:100:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:57:2098] sender: [41:103:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [41:57:2098] sender: [41:104:2057] recipient: [41:102:2128] Leader for TabletID 72057594037927937 is [41:105:2129] sender: [41:106:2057] recipient: [41:102:2128] !Reboot 72057594037927937 (actor [41:57:2098]) rebooted! !Reboot 72057594037927937 (actor [41:57:2098]) tablet resolver refreshed! new actor is[41:105:2129] Leader for TabletID 72057594037927937 is [41:105:2129] sender: [41:125:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:55:2057] recipient: [42:52:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:55:2057] recipient: [42:52:2096] Leader for TabletID 72057594037927937 is [42:57:2098] sender: [42:58:2057] recipient: [42:52:2096] Leader for TabletID 72057594037927937 is [42:57:2098] sender: [42:75:2057] recipient: [42:17:2064] !Reboot 72057594037927937 (actor [42:57:2098]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [42:57:2098] sender: [42:103:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:57:2098] sender: [42:106:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [42:57:2098] sender: [42:107:2057] recipient: [42:105:2131] Leader for TabletID 72057594037927937 is [42:108:2132] sender: [42:109:2057] recipient: [42:105:2131] !Reboot 72057594037927937 (actor [42:57:2098]) rebooted! !Reboot 72057594037927937 (actor [42:57:2098]) tablet resolver refreshed! new actor is[42:108:2132] Leader for TabletID 72057594037927937 is [42:108:2132] sender: [42:162:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:55:2057] recipient: [43:52:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:55:2057] recipient: [43:52:2096] Leader for TabletID 72057594037927937 is [43:57:2098] sender: [43:58:2057] recipient: [43:52:2096] Leader for TabletID 72057594037927937 is [43:57:2098] sender: [43:75:2057] recipient: [43:17:2064] !Reboot 72057594037927937 (actor [43:57:2098]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [43:57:2098] sender: [43:103:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:57:2098] sender: [43:105:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [43:57:2098] sender: [43:107:2057] recipient: [43:106:2131] Leader for TabletID 72057594037927937 is [43:108:2132] sender: [43:109:2057] recipient: [43:106:2131] !Reboot 72057594037927937 (actor [43:57:2098]) rebooted! !Reboot 72057594037927937 (actor [43:57:2098]) tablet resolver refreshed! new actor is[43:108:2132] Leader for TabletID 72057594037927937 is [43:108:2132] sender: [43:162:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:55:2057] recipient: [44:52:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:55:2057] recipient: [44:52:2096] Leader for TabletID 72057594037927937 is [44:57:2098] sender: [44:58:2057] recipient: [44:52:2096] Leader for TabletID 72057594037927937 is [44:57:2098] sender: [44:75:2057] recipient: [44:17:2064] !Reboot 72057594037927937 (actor [44:57:2098]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [44:57:2098] sender: [44:103:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:57:2098] sender: [44:106:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [44:57:2098] sender: [44:107:2057] recipient: [44:105:2131] Leader for TabletID 72057594037927937 is [44:108:2132] sender: [44:109:2057] recipient: [44:105:2131] !Reboot 72057594037927937 (actor [44:57:2098]) rebooted! !Reboot 72057594037927937 (actor [44:57:2098]) tablet resolver refreshed! new actor is[44:108:2132] Leader for TabletID 72057594037927937 is [44:108:2132] sender: [44:162:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:55:2057] recipient: [45:52:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:55:2057] recipient: [45:52:2096] Leader for TabletID 72057594037927937 is [45:57:2098] sender: [45:58:2057] recipient: [45:52:2096] Leader for TabletID 72057594037927937 is [45:57:2098] sender: [45:75:2057] recipient: [45:17:2064] !Reboot 72057594037927937 (actor [45:57:2098]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! 
Leader for TabletID 72057594037927937 is [45:57:2098] sender: [45:108:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:57:2098] sender: [45:111:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [45:57:2098] sender: [45:112:2057] recipient: [45:110:2135] Leader for TabletID 72057594037927937 is [45:113:2136] sender: [45:114:2057] recipient: [45:110:2135] !Reboot 72057594037927937 (actor [45:57:2098]) rebooted! !Reboot 72057594037927937 (actor [45:57:2098]) tablet resolver refreshed! new actor is[45:113:2136] Leader for TabletID 72057594037927937 is [45:113:2136] sender: [45:167:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:55:2057] recipient: [46:52:2096] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:55:2057] recipient: [46:52:2096] Leader for TabletID 72057594037927937 is [46:57:2098] sender: [46:58:2057] recipient: [46:52:2096] Leader for TabletID 72057594037927937 is [46:57:2098] sender: [46:75:2057] recipient: [46:17:2064] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2025-04-04T16:52:26.890395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:26.891047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:26.891123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019de/r3tmp/tmpQYLwoc/pdisk_1.dat 2025-04-04T16:52:29.515007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:29.582021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:52:29.610115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:29.639229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T16:52:29.684712Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-04-04T16:52:29.685334Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:408:2403] Proxy marker# C1 2025-04-04T16:52:29.856602Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:29.908392Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-04-04T16:52:30.011531Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:334:2374] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-04T16:52:30.011688Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-04-04T16:52:30.011838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:30.011894Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T16:52:30.011942Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.011978Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T16:52:30.012010Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.012097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-04T16:52:30.012403Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-04T16:52:30.012439Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-04T16:52:30.012472Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-04T16:52:30.012512Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-04T16:52:30.012640Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-04-04T16:52:30.025040Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-04-04T16:52:30.025562Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:334:2374]) 2025-04-04T16:52:30.026302Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-04T16:52:30.029076Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-04-04T16:52:30.037020Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Execute 2025-04-04T16:52:30.037469Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.037747Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:334:2374])::Complete 2025-04-04T16:52:30.038536Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443339776 } 2025-04-04T16:52:30.038976Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-04-04T16:52:30.039185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:30.039789Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-04-04T16:52:30.040882Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T16:52:30.041109Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T16:52:30.041947Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-04T16:52:30.042188Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-04T16:52:30.042222Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-04-04T16:52:30.042447Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-04T16:52:30.054877Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-04-04T16:52:30.054953Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-04T16:52:30.132288Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-04-04T16:52:30.133066Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-04-04T16:52:30.135459Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-04-04T16:52:30.138109Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-04-04T16:52:30.139190Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:408:2403] Proxy 2025-04-04T16:52:30.159867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-04-04T16:52:30.185430Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-04T16:52:30.190727Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-04T16:52:30.190814Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-04-04T16:52:30.191432Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-04-04T16:52:30.206567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:30.207334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-04T16:52:30.220537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-04T16:52:30.310715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:30.349157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:52:30.349712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:30.370055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-04-04T16:52:30.442787Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-04-04T16:52:30.528397Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T16:52:30.529085Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-04T16:52:30.531525Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-04-04T16:52:30.532556Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-04-04T16:52:30.533487Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-04T16:52:30.606293Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-04-04T16:52:30.622268Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-04T16:52:30.624210Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-04-04T16:52:30.644169Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-04T16:52:30.645657Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-04-04T16:52:30.646771Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-04-04T16:52:30.647471Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-04-04T16:52:30.650173Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-04-04T16:52:30.650519Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004273376}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... 04-04T16:53:04.283449Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715667 ssId 72057594046644480 seqNo 2:4 2025-04-04T16:53:04.283841Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715667 at tablet 72075186224037889 2025-04-04T16:53:04.285308Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:53:04.286075Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:53:04.286186Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:53:04.306658Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:53:04.307052Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-04T16:53:04.319050Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:53:04.320122Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:53:04.331280Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 HANDLE EvProposeTransaction marker# C0 2025-04-04T16:53:04.332262Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 step# 33000 Status# 16 SEND to# [2:417:2411] Proxy marker# C1 2025-04-04T16:53:04.409215Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715667 has been planned 2025-04-04T16:53:04.409314Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 2025-04-04T16:53:04.409351Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 2025-04-04T16:53:04.409570Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2025-04-04T16:53:04.409993Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715667 marker# C2 2025-04-04T16:53:04.410084Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 stepId# 33000 Status# 17 SEND 
EvProposeTransactionStatus to# [2:417:2411] Proxy 2025-04-04T16:53:04.410738Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715667 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715667 AckTo { RawX1: 0 RawX2: 0 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T16:53:04.410788Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:53:04.410950Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:53:04.411364Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:53:04.411417Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:53:04.411473Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715667] in PlanQueue unit at 72075186224037889 2025-04-04T16:53:04.411670Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715667 keys extracted: 0 2025-04-04T16:53:04.411794Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:53:04.412014Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:53:04.412088Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-04-04T16:53:04.412461Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:53:04.431489Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2025-04-04T16:53:04.431916Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:53:04.432880Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-04T16:53:04.433957Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-04-04T16:53:04.458396Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-04-04T16:53:04.458485Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 acknowledged 2025-04-04T16:53:04.458558Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 acknowledged 2025-04-04T16:53:04.459184Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:53:04.459273Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715667] from 72075186224037889 at tablet 72075186224037889 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:53:04.459349Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715667 state PreOffline TxInFly 0 2025-04-04T16:53:04.459481Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:53:04.460662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715667, done: 0, blocked: 1 2025-04-04T16:53:04.487976Z node 2 :TX_DATASHARD 
DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037889 state PreOffline 2025-04-04T16:53:04.488080Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T16:53:04.488684Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-04-04T16:53:04.488801Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715667, publications: 2, subscribers: 1 2025-04-04T16:53:04.489946Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715667, subscribers: 1 2025-04-04T16:53:04.490168Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:53:04.491689Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.491 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-04-04T16:53:04.491813Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.491 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-04-04T16:53:04.491893Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.491 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:59: Begin, root #43 2025-04-04T16:53:04.491958Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.491 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-04-04T16:53:04.492019Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.491 TRACE ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-04-04T16:53:04.492096Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.492 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-04-04T16:53:04.492175Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.492 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-04-04T16:53:04.492433Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.492 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-04-04T16:53:04.492495Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.492 INFO ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) 
[core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-04-04T16:53:04.492643Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.492 NOTE ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2025-04-04T16:53:04.492702Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.492 NOTE ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2025-04-04T16:53:04.492755Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MTJkOGM4YjQtZTQwYmE3YzQtMmIxMjQ1MTAtNDQ0NGU2ZGQ= 2025-04-04 16:53:04.492 NOTE ydb-core-tx-datashard-ut_minstep(pid=130925, tid=0x00007F002F06FCC0) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-04-04T16:53:04.512547Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T16:53:04.512789Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-04T16:53:04.514746Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:53:04.515538Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-04T16:53:04.516149Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-04-04T16:53:04.516384Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-04-04T16:53:04.516948Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-04-04T16:53:04.518089Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-04-04T16:53:04.535955Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 24355, MsgBus: 24092 2025-04-04T16:52:51.425354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502002780548587:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:51.433055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015c0/r3tmp/tmpfK0F4Z/pdisk_1.dat 2025-04-04T16:52:51.872624Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:51.895802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:51.895917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:51.904585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24355, node 1 2025-04-04T16:52:52.062531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:52.062552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:52.062573Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:52.062690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24092 TClient is connected to server localhost:24092 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:52.794027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:52.808245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:52.821889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:52.979461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:53.164102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:53.253870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:56.422829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502002780548587:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:56.422895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:58.155772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502032845321320:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:58.157641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:58.678924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:58.748583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:52:58.798362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:52:58.893088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:52:58.984644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:52:59.041781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:52:59.102454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502037140289150:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:59.102532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:59.102985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502037140289155:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:59.107259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:52:59.119895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502037140289157:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:52:59.175815Z node 1 :TX_PROXY ERROR: Actor# [1:7489502037140289210:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:05.494079Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: kqp_proxy, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785585 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 0 DataCenterId: "1" } 2025-04-04T16:53:05.494157Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:53:06.009420Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-04T16:53:06.871630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:06.872329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:08.132166Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: kqp_proxy, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743785588 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2025-04-04T16:53:08.155455Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7489502075794995358:2526] 2025-04-04T16:53:08.155481Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7489502062910093401:2526] 2025-04-04T16:53:08.155587Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:53:08.195902Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1743785588223:281474976710671 created 2025-04-04T16:53:08.199845Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-04T16:53:08.199918Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-04T16:53:08.199930Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-04T16:53:08.206297Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-04-04T16:53:08.207039Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. 
Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T16:53:08.207078Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-04-04T16:53:08.207472Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. Shard resolve complete, resolved shards: 1 2025-04-04T16:53:08.2074 ... NFO: TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7489502075794995375:2535] 2025-04-04T16:53:08.378789Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7489502075794995376:2536], 2025-04-04T16:53:08.379519Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7489502062910093401:2526], seqNo: 1, nRows: 1 2025-04-04T16:53:08.379608Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7489502075794995376:2536], task: 2, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 45958 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 21464 FinishTimeMs: 1743785588378 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 15851 BuildCpuTimeUs: 5613 HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743785588277 } MaxMemoryUsage: 1048576 } 2025-04-04T16:53:08.379628Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [1:7489502075794995376:2536], 2025-04-04T16:53:08.382673Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T16:53:08.385642Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502075794995376:2536], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr0vgtxq3nh6p81yw3zezvbs. SessionId : ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 65538 2025-04-04T16:53:08.385652Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502075794995376:2536], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr0vgtxq3nh6p81yw3zezvbs. SessionId : ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. wakeup with tag 2 2025-04-04T16:53:08.398621Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-04-04T16:53:10.131271Z, after 1.732739s 2025-04-04T16:53:08.419115Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7489502075794995381:2536] 2025-04-04T16:53:08.419172Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502075794995376:2536], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr0vgtxq3nh6p81yw3zezvbs. SessionId : ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:53:08.419493Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:53:08.419788Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-04-04T16:53:08.420166Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502075794995376:2536], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr0vgtxq3nh6p81yw3zezvbs. SessionId : ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:53:08.420527Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-04-04T16:53:08.420875Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:53:08.421000Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502075794995376:2536], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr0vgtxq3nh6p81yw3zezvbs. SessionId : ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [1:7489502075794995370:2526] TaskId: 2 Stats: CpuTimeUs: 63137 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 21470 FinishTimeMs: 1743785588419 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 15857 BuildCpuTimeUs: 5613 HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743785588277 } MaxMemoryUsage: 1048576 2025-04-04T16:53:08.421683Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T16:53:08.422528Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7489502075794995376:2536], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 63137 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 21470 FinishTimeMs: 1743785588419 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 15857 BuildCpuTimeUs: 5613 HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743785588277 } MaxMemoryUsage: 1048576 } 2025-04-04T16:53:08.422578Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7489502075794995376:2536] 2025-04-04T16:53:08.460799Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 159849 DurationUs: 222770 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } ExecuterCpuTimeUs: 22443 StartTimeMs: 1743785588199 FinishTimeMs: 1743785588422 Stages { StageId: 1 StageGuid: "c7482a23-6da4e2a3-c5eb9d39-c62e6f8" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'1)) (lambda \'($2 $3 $4 $5) (AsStruct \'(\'\"Amount\" $2) \'(\'\"Comment\" $3) \'(\'\"Group\" $4) \'(\'\"Name\" $5)))))))\n)\n" ComputeActors { CpuTimeUs: 63137 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 21470 FinishTimeMs: 1743785588419 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 15857 BuildCpuTimeUs: 5613 HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743785588277 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743785588341 } Stages { StageGuid: "f8f716e1-dca12ae5-3c4a5d8d-63867623" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($5) (block \'(\n (let $6 (Member $5 \'\"Amount\"))\n (return $6 (Member $5 \'\"Comment\") (Member $5 \'\"Group\") (Member $5 \'\"Name\") (Coalesce (< $6 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $3 (WideFilter (ExpandMap (ToFlow $1) $2) (lambda \'($7 $8 $9 $10 $11) $11) (Uint64 \'1)))\n (let $4 (lambda \'($12 $13 $14 $15 $16) $12 $13 $14 $15))\n (return (FromFlow (WideMap $3 $4)))\n))))\n)\n" BaseTimeMs: 1743785588341 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":6,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":5,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit-Filter\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"1\",\"Name\":\"Limit\"},{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Filter\",\"Predicate\":\"item.Amount \\u003C 5000\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TableFullScan\",\"Operators\":[{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"ReadRanges\":[\"Group (-∞, +∞)\",\"Name (-∞, +∞)\"],\"ReadRangesPointPrefixLen\":\"0\",\"Scan\":\"Parallel\",\"Table\":\"Test\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"Test\"]}],\"StageGuid\":\"f8f716e1-dca12ae5-3c4a5d8d-63867623\",\"Stats\":{\"BaseTimeMs\":1743785588341,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"Group (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"c7482a23-6da4e2a3-c5eb9d39-c62e6f8\",\"Stats\":{\"BaseTimeMs\":1743785588341,\"ComputeNodes\":[{\"CpuTimeUs\":63137,\"Tasks\":[{\"ComputeTimeUs\":15857,\"FinishTimeMs\":1743785588419,\"Host\":\"ghrun-mup5cka3jq\",\"InputBytes\":19,\"InputRows\":1,\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"ResultBytes\":19,\"ResultRows\":1,\"TaskId\":2}]}],\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1583 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\016\010\241\355\003\020\235\304\004\030\276\261\010 \002" } } 2025-04-04T16:53:08.460839Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 
281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T16:53:08.461158Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489502075794995370:2526] TxId: 281474976710672. Ctx: { TraceId: 01jr0vgtxq3nh6p81yw3zezvbs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwZmQwM2QtMTAzNzM0NDAtMTM3ZjgzOWEtMmIyZDE0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.137406s ReadRows: 1 ReadBytes: 20 ru: 91 rate limiter was not found force flag: 1 2025-04-04T16:53:08.480983Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785588223, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! 
!Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! 
new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:88:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [33:55:2096] sender: [33:91:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:94:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:55:2096] sender: [33:95:2057] recipient: [33:93:2120] Leader for TabletID 72057594037927937 is [33:96:2121] sender: [33:97:2057] recipient: [33:93:2120] !Reboot 72057594037927937 (actor [33:55:2096]) rebooted! !Reboot 72057594037927937 (actor [33:55:2096]) tablet resolver refreshed! 
new actor is[33:96:2121] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:53:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:56:2057] recipient: [34:49:2094] Leader for TabletID 72057594037927937 is [34:55:2096] sender: [34:73:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:53:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:56:2057] recipient: [35:50:2094] Leader for TabletID 72057594037927937 is [35:55:2096] sender: [35:73:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:53:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:56:2057] recipient: [36:50:2094] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:73:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:75:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:78:2057] recipient: [36:77:2109] Leader for TabletID 72057594037927937 is [36:55:2096] sender: [36:79:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:80:2110] sender: [36:81:2057] recipient: [36:77:2109] !Reboot 72057594037927937 (actor [36:55:2096]) rebooted! !Reboot 72057594037927937 (actor [36:55:2096]) tablet resolver refreshed! new actor is[36:80:2110] Leader for TabletID 72057594037927937 is [36:80:2110] sender: [36:134:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:53:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:56:2057] recipient: [37:49:2094] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:73:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:55:2096]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:75:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:77:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:55:2096] sender: [37:79:2057] recipient: [37:78:2109] Leader for TabletID 72057594037927937 is [37:80:2110] sender: [37:81:2057] recipient: [37:78:2109] !Reboot 72057594037927937 (actor [37:55:2096]) rebooted! !Reboot 72057594037927937 (actor [37:55:2096]) tablet resolver refreshed! 
new actor is[37:80:2110] Leader for TabletID 72057594037927937 is [37:80:2110] sender: [37:134:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:53:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:56:2057] recipient: [38:51:2094] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:73:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:76:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:78:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:55:2096] sender: [38:80:2057] recipient: [38:79:2109] Leader for TabletID 72057594037927937 is [38:81:2110] sender: [38:82:2057] recipient: [38:79:2109] !Reboot 72057594037927937 (actor [38:55:2096]) rebooted! !Reboot 72057594037927937 (actor [38:55:2096]) tablet resolver refreshed! new actor is[38:81:2110] Leader for TabletID 72057594037927937 is [38:81:2110] sender: [38:135:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:53:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:56:2057] recipient: [39:50:2094] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:73:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:79:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:82:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:55:2096] sender: [39:83:2057] recipient: [39:81:2112] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:85:2057] recipient: [39:81:2112] !Reboot 72057594037927937 (actor [39:55:2096]) rebooted! !Reboot 72057594037927937 (actor [39:55:2096]) tablet resolver refreshed! new actor is[39:84:2113] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:138:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:53:2057] recipient: [40:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:53:2057] recipient: [40:50:2094] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:56:2057] recipient: [40:50:2094] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:73:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:79:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:82:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:55:2096] sender: [40:83:2057] recipient: [40:81:2112] Leader for TabletID 72057594037927937 is [40:84:2113] sender: [40:85:2057] recipient: [40:81:2112] !Reboot 72057594037927937 (actor [40:55:2096]) rebooted! !Reboot 72057594037927937 (actor [40:55:2096]) tablet resolver refreshed! 
new actor is[40:84:2113] Leader for TabletID 72057594037927937 is [40:84:2113] sender: [40:138:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:53:2057] recipient: [41:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:53:2057] recipient: [41:49:2094] Leader for TabletID 72057594037927937 is [41:55:2096] sender: [41:56:2057] recipient: [41:49:2094] Leader for TabletID 72057594037927937 is [41:55:2096] sender: [41:73:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:55:2096] sender: [41:80:2057] recipient: [41:36:2083] Leader for TabletID 72057594037927937 is [41:55:2096] sender: [41:83:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:55:2096] sender: [41:84:2057] recipient: [41:82:2112] Leader for TabletID 72057594037927937 is [41:85:2113] sender: [41:86:2057] recipient: [41:82:2112] !Reboot 72057594037927937 (actor [41:55:2096]) rebooted! !Reboot 72057594037927937 (actor [41:55:2096]) tablet resolver refreshed! new actor is[41:85:2113] Leader for TabletID 72057594037927937 is [41:85:2113] sender: [41:139:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:53:2057] recipient: [42:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:53:2057] recipient: [42:50:2094] Leader for TabletID 72057594037927937 is [42:55:2096] sender: [42:56:2057] recipient: [42:50:2094] Leader for TabletID 72057594037927937 is [42:55:2096] sender: [42:73:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:55:2096] sender: [42:83:2057] recipient: [42:36:2083] Leader for TabletID 72057594037927937 is [42:55:2096] sender: [42:86:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:55:2096] sender: [42:87:2057] recipient: [42:85:2115] Leader for TabletID 72057594037927937 is [42:88:2116] sender: [42:89:2057] recipient: [42:85:2115] !Reboot 72057594037927937 (actor [42:55:2096]) rebooted! !Reboot 72057594037927937 (actor [42:55:2096]) tablet resolver refreshed! new actor is[42:88:2116] Leader for TabletID 72057594037927937 is [42:88:2116] sender: [42:142:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:53:2057] recipient: [43:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:53:2057] recipient: [43:50:2094] Leader for TabletID 72057594037927937 is [43:55:2096] sender: [43:56:2057] recipient: [43:50:2094] Leader for TabletID 72057594037927937 is [43:55:2096] sender: [43:73:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:55:2096] sender: [43:83:2057] recipient: [43:36:2083] Leader for TabletID 72057594037927937 is [43:55:2096] sender: [43:86:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:55:2096] sender: [43:87:2057] recipient: [43:85:2115] Leader for TabletID 72057594037927937 is [43:88:2116] sender: [43:89:2057] recipient: [43:85:2115] !Reboot 72057594037927937 (actor [43:55:2096]) rebooted! !Reboot 72057594037927937 (actor [43:55:2096]) tablet resolver refreshed! 
new actor is[43:88:2116] Leader for TabletID 72057594037927937 is [43:88:2116] sender: [43:142:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:53:2057] recipient: [44:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:53:2057] recipient: [44:50:2094] Leader for TabletID 72057594037927937 is [44:55:2096] sender: [44:56:2057] recipient: [44:50:2094] Leader for TabletID 72057594037927937 is [44:55:2096] sender: [44:73:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:55:2096] sender: [44:84:2057] recipient: [44:36:2083] Leader for TabletID 72057594037927937 is [44:55:2096] sender: [44:87:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:55:2096] sender: [44:88:2057] recipient: [44:86:2115] Leader for TabletID 72057594037927937 is [44:89:2116] sender: [44:90:2057] recipient: [44:86:2115] !Reboot 72057594037927937 (actor [44:55:2096]) rebooted! !Reboot 72057594037927937 (actor [44:55:2096]) tablet resolver refreshed! new actor is[44:89:2116] Leader for TabletID 72057594037927937 is [44:89:2116] sender: [44:143:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:53:2057] recipient: [45:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:53:2057] recipient: [45:50:2094] Leader for TabletID 72057594037927937 is [45:55:2096] sender: [45:56:2057] recipient: [45:50:2094] Leader for TabletID 72057594037927937 is [45:55:2096] sender: [45:73:2057] recipient: [45:14:2061] >> LabeledDbCounters::OneTablet [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters >> KqpNewEngine::PureExpr >> TraverseDatashard::TraverseTwoTables [GOOD] >> KqpRanges::IsNotNullInJsonValue |75.8%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRanges::NullInKey >> KqpCost::IndexLookupAndTake-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2025-04-04T16:52:42.810113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:42.810810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:42.811084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c5/r3tmp/tmpoPR0Yi/pdisk_1.dat 2025-04-04T16:52:45.414583Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30633, node 1 2025-04-04T16:52:47.059917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:47.059971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:47.060017Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:47.060540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:47.063257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:47.166683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:47.166878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:47.193201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26593 2025-04-04T16:52:48.100687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:53.908676Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T16:52:54.397831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:54.404419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:54.492990Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:54.505934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:55.187648Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.188130Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.188552Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.188653Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.188844Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.188938Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.188997Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.189069Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.189124Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:55.620648Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:55.620750Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:55.668671Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:56.774175Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:57.045423Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:52:57.045552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:52:57.089691Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:52:57.090136Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:52:57.090384Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T16:52:57.090444Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:52:57.090499Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T16:52:57.090551Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T16:52:57.090628Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T16:52:57.090677Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:52:57.091751Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:52:57.137867Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:57.138002Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:57.144477Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2603] 2025-04-04T16:52:57.148379Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1898:2613] 2025-04-04T16:52:57.148582Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1898:2613], schemeshard id = 72075186224037897 2025-04-04T16:52:57.156464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T16:52:57.188514Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T16:52:57.188593Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T16:52:57.188675Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T16:52:57.288420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T16:52:57.296523Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T16:52:57.296698Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T16:52:57.487225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T16:52:57.667607Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T16:52:57.781688Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:52:58.852274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2243:3077], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:58.852415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:58.870401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T16:53:00.150517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2548:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:00.150672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:00.151836Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2553:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:53:00.151995Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T16:53:00.152059Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2555:3133] 2025-04-04T16:53:00.152123Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2555:3133] 2025-04-04T16:53:00.152599Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2556:2993] 2025-04-04T16:53:00.152819Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2555:3133], server id = [2:2556:2993], tablet id = 72075186224037894, status = OK 2025-04-04T16:53:00.153004Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2556:2993], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T16:53:00.153060Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T16:53:00.153218Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T16:53:00.153272Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2553:3131], StatRequests.size() = 1 2025-04-04T16:53:00.212217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2560:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:00.212312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:00.213939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2565:3142], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:00.234144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T16:53:00.783620Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T16:53:00.783697Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T16:53:00.890978Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2555:3133], schemeshard count = 1 2025-04-04T16:53:01.609860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2567:3144], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T16:53:02.000579Z node 1 :TX_PROXY ERROR: Actor# [1:2686:3218] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:02.228253Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2709:3234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:53:02.228420Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:53:02.228459Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2709:3234], StatRequests.size() = 1 2025-04-04T16:53:03.217951Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vgngcd0qacy0mhn1zxkcv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM5ZTQyMWEtNjU1OWJiN2ItMjdmNWIwYzgtMzg1NjBlMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:53:04.338958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897 2025-04-04T16:53:07.821870Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3058:3301]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:53:07.833028Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T16:53:07.833409Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3058:3301], StatRequests.size() = 1 2025-04-04T16:53:08.157002Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3067:3310]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:53:08.157165Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-04T16:53:08.157200Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3067:3310], StatRequests.size() = 1 2025-04-04T16:53:08.733343Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vgwvab642kr3wfnvkbpch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JhZmQ5M2MtMTQ0YTg1YzEtMjEzNWZiMDAtMWQ5MTY1MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:53:09.371664Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3113:3263]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T16:53:09.462872Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T16:53:09.463263Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T16:53:09.464000Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T16:53:09.464131Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T16:53:09.464190Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T16:53:09.591254Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-04T16:53:09.593412Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-04T16:53:09.595800Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3138:3276]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T16:53:09.611961Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:53:09.612725Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T16:53:09.613924Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:53:09.613986Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T16:53:09.614044Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T16:53:09.633344Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-04T16:53:09.635806Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |75.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::OlapPointLookup [GOOD] |75.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.8%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> KqpSqlIn::TableSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27098, MsgBus: 22916 2025-04-04T16:52:52.211657Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502006549358423:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:52.211714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015b5/r3tmp/tmpo0bPTd/pdisk_1.dat 2025-04-04T16:52:52.881174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:52.881266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:52.883417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:52.885431Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27098, node 1 2025-04-04T16:52:52.961156Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:52.961178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:52.961185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:52.961293Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22916 TClient is connected to server localhost:22916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:53.658534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:53.686779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:53.883070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:52:54.089975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:54.450732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:57.214493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502006549358423:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:57.214580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:59.116353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502036614131079:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:59.116468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:59.862634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:00.104180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:00.461538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:00.586719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:01.097362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:01.695022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:03.981088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502053794000854:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:03.981165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:03.986418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502053794000860:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:04.048351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:04.133260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502053794000863:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:04.250881Z node 1 :TX_PROXY ERROR: Actor# [1:7489502058088968234:3518] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:07.902352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:07.902382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:10.557857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpMergeCn::TopSortByDesc_Double_Limit3 >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::Replace |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> KqpNewEngine::StreamLookupWithView >> KqpNewEngine::ContainerRegistryCombiner >> KqpRanges::IsNull >> KqpSort::TopSortParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 16196, MsgBus: 26111 2025-04-04T16:52:51.045169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501997981565929:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:51.045543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015c9/r3tmp/tmpZiltJq/pdisk_1.dat 2025-04-04T16:52:52.602856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:53.797909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:53.809578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:53.814814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:53.824984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:53.949981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16196, node 1 2025-04-04T16:52:54.900220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:54.900240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:54.900249Z node 1 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-04T16:52:54.900685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:56.024375Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501997981565929:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:56.024743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:26111 TClient is connected to server localhost:26111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:57.568656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:57.625778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:57.662056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:58.155548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:58.770362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:59.066029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:08.902994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:08.903026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:10.362891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502075290978999:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:10.405431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:10.492420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:10.668083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:10.758625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:10.838830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:10.938653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:11.113704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:11.642692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502088175881470:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:11.642796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:11.644277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502088175881475:2510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:11.698209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:11.780300Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:53:11.782459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502088175881477:2511], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:11.868921Z node 1 :TX_PROXY ERROR: Actor# [1:7489502088175881540:3549] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:14.294217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:53:14.511392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7489502101060783848:2550];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:53:14.511392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:53:14.511590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:53:14.511870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:53:14.511986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:53:14.512083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:53:14.512140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7489502101060783848:2550];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:53:14.512194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:53:14.512323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:53:14.512346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7489502101060783848:2550];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:53:14.512493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7489502101060783848:2550];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_ ... 
;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:53:14.721581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:53:14.721658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:53:14.721681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:53:14.721728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:53:14.721746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:53:14.721788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:53:14.721815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:53:14.730839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:53:14.730914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:53:14.731069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:53:14.731099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:53:14.731201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:53:14.731225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:53:14.731373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:53:14.731410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:53:14.731516Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:53:14.731536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:53:14.737877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:53:14.737938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:53:14.738025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:53:14.738048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:53:14.738186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:53:14.738216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:53:14.738460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:53:14.738504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:53:14.738554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:53:14.738589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:53:14.738620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:53:14.738640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:53:14.739110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:53:14.739149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:53:14.739313Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:53:14.739337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:53:14.739456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:53:14.739495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:53:14.739649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:53:14.739672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:53:14.739763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:53:14.739791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:53:14.805747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.806608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.811740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.811779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.817781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.823926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.824813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.829471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.831426Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.837130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T16:53:14.976902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T16:53:14.977292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T16:53:14.977907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;self_id=[1:7489502101060783865:2556];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037928;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037923;receive=72075186224037927; 2025-04-04T16:53:14.978300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2 >> KqpNewEngine::BatchUpload >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> KqpCost::AAARangeFullScan [GOOD] >> KqpSort::ReverseFirstKeyOptimized >> SystemView::AuthUsers_ResultOrder [GOOD] >> SystemView::AuthUsers_TableRange >> KqpNotNullColumns::ReplaceNotNull >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |75.8%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> SystemView::TopPartitionsTables [GOOD] >> SystemView::TopPartitionsRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 11864, MsgBus: 10301 2025-04-04T16:52:53.887724Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502008600422856:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:53.906027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015bb/r3tmp/tmpVZ1uEM/pdisk_1.dat 2025-04-04T16:52:55.138672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:56.166089Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:56.273943Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:56.276828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:56.277398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:56.280995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11864, node 1 2025-04-04T16:52:57.230809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:57.230832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:57.230839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:57.230960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10301 TClient is connected to server localhost:10301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:58.245657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:58.270716Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:58.292660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:58.429263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:58.651444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:58.764412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:52:58.909973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502008600422856:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:58.912943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:11.007262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:11.007293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:13.315559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502085909835874:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:13.520998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:13.778737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:13.913155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:13.985077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:14.195225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:14.332611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:14.534882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:14.678409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502098794738365:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:14.678532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:14.679226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502098794738370:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:14.683682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:14.709439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502098794738372:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:14.816246Z node 1 :TX_PROXY ERROR: Actor# [1:7489502098794738429:3540] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PONOS {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Group (-∞, +∞)","Name (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":1}],"E-Rows":"0","Predicate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[9,19]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":7452,"Max":7452,"Min":7452,"History":[9,7452]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[9,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"IngressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Test","ReadRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ReadBytes":{"Count":1,"Sum":20,"Max":20,"Min":20}}],"BaseTimeMs":1743785599443,"CpuTimeUs":{"Count":1,"Sum":15893,"Max":15893,"Min":15893,"History":[8,15893]},"OutputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[9,192]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[9,192]},"WaitTimeUs":{"Count":1,"Sum":7416,"Max":7416,"Min":7416,"History":[9,7416]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"Merge","SortColumns":["Group (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[9,19]}},"Name":"RESULT" ... icate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":1,"A-SelfCpu":15.893,"A-Cpu":15.893,"A-Size":19,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":1,"A-SelfCpu":0.49,"A-Cpu":16.383,"A-Size":19,"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} query_phases { duration_us: 85823 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 69529 affected_shards: 1 } compilation { duration_us: 1139427 cpu_time_us: 869466 } process_cpu_time_us: 892 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node 
Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[9,19]}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":7452,\"Max\":7452,\"Min\":7452,\"History\":[9,7452]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[9,1048576]},\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1743785599443,\"CpuTimeUs\":{\"Count\":1,\"Sum\":15893,\"Max\":15893,\"Min\":15893,\"History\":[8,15893]},\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[9,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[9,192]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":7416,\"Max\":7416,\"Min\":7416,\"History\":[9,7416]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node 
Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[9,19]}},\"Name\":\"RESULT\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":7894,\"Max\":7894,\"Min\":7894,\"History\":[9,7894]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8}}}],\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[9,1048576]},\"InputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"ResultRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"StageDurationUs\":1000,\"BaseTimeMs\":1743785599443,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":490,\"Max\":490,\"Min\":490,\"History\":[9,490]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[9,19]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[9,19]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":7901,\"Max\":7901,\"Min\":7901,\"History\":[9,7901]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":1139427,\"CpuTimeUs\":869466},\"ProcessCpuTimeUs\":892,\"TotalDurationUs\":1260347,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":624},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node 
Type\":\"Filter\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":15.893,\"A-Cpu\":15.893,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":0.49,\"A-Cpu\":16.383,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'559) \'(\'\"_id\" \'\"ce5a144f-b12f44a1-fdb30066-f9987019\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'572) \'(\'\"_id\" \'\"ed88966-3221bcf8-f8123012-f009b392\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1260347 total_cpu_time_us: 939887 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":9},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743785599\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"61efac06-aa52825a-f3bb67ad-779ec2dc\",\"version\":\"1.0\"}" >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> KqpRanges::UpdateWhereInNoFullScan+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 61257, MsgBus: 5692 2025-04-04T16:52:51.586720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502001084402968:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:51.588336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015bd/r3tmp/tmpwaUFfP/pdisk_1.dat 2025-04-04T16:52:52.495598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:52.496082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:52.570924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:53.037475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 61257, node 1 
2025-04-04T16:52:53.386526Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:53.658688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:53.658853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:53.658864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:53.659393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:56.595532Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502001084402968:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:56.596220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:5692 TClient is connected to server localhost:5692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:57.525895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:57.542272Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:52:57.554964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:58.138516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:59.043443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:59.198002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:53:08.283044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:08.283070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:10.531584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502082688783477:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:10.531741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:11.776657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:12.060135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:12.226134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:12.371493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:12.572626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:12.680218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:12.820939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502091278718627:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:12.821029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:12.821288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502091278718632:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:12.829738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:12.892934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502091278718634:2504], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:12.978530Z node 1 :TX_PROXY ERROR: Actor# [1:7489502091278718695:3536] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:14.791521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:78:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:79:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:79:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:88:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... :55:2096]) rebooted! !Reboot 72057594037927937 (actor [44:55:2096]) tablet resolver refreshed! 
new actor is[44:100:2125] Leader for TabletID 72057594037927937 is [44:100:2125] sender: [44:154:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:53:2057] recipient: [45:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:53:2057] recipient: [45:50:2094] Leader for TabletID 72057594037927937 is [45:55:2096] sender: [45:56:2057] recipient: [45:50:2094] Leader for TabletID 72057594037927937 is [45:55:2096] sender: [45:73:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [45:55:2096] sender: [45:96:2057] recipient: [45:36:2083] Leader for TabletID 72057594037927937 is [45:55:2096] sender: [45:99:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:55:2096] sender: [45:100:2057] recipient: [45:98:2124] Leader for TabletID 72057594037927937 is [45:101:2125] sender: [45:102:2057] recipient: [45:98:2124] !Reboot 72057594037927937 (actor [45:55:2096]) rebooted! !Reboot 72057594037927937 (actor [45:55:2096]) tablet resolver refreshed! new actor is[45:101:2125] Leader for TabletID 72057594037927937 is [45:101:2125] sender: [45:119:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:53:2057] recipient: [46:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:53:2057] recipient: [46:50:2094] Leader for TabletID 72057594037927937 is [46:55:2096] sender: [46:56:2057] recipient: [46:50:2094] Leader for TabletID 72057594037927937 is [46:55:2096] sender: [46:73:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [46:55:2096] sender: [46:98:2057] recipient: [46:36:2083] Leader for TabletID 72057594037927937 is [46:55:2096] sender: [46:101:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:55:2096] sender: [46:102:2057] recipient: [46:100:2126] Leader for TabletID 72057594037927937 is [46:103:2127] sender: [46:104:2057] recipient: [46:100:2126] !Reboot 72057594037927937 (actor [46:55:2096]) rebooted! !Reboot 72057594037927937 (actor [46:55:2096]) tablet resolver refreshed! new actor is[46:103:2127] Leader for TabletID 72057594037927937 is [46:103:2127] sender: [46:157:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:53:2057] recipient: [47:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:53:2057] recipient: [47:50:2094] Leader for TabletID 72057594037927937 is [47:55:2096] sender: [47:56:2057] recipient: [47:50:2094] Leader for TabletID 72057594037927937 is [47:55:2096] sender: [47:73:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [47:55:2096] sender: [47:98:2057] recipient: [47:36:2083] Leader for TabletID 72057594037927937 is [47:55:2096] sender: [47:101:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:55:2096] sender: [47:102:2057] recipient: [47:100:2126] Leader for TabletID 72057594037927937 is [47:103:2127] sender: [47:104:2057] recipient: [47:100:2126] !Reboot 72057594037927937 (actor [47:55:2096]) rebooted! !Reboot 72057594037927937 (actor [47:55:2096]) tablet resolver refreshed! 
new actor is[47:103:2127] Leader for TabletID 72057594037927937 is [47:103:2127] sender: [47:157:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:53:2057] recipient: [48:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:53:2057] recipient: [48:50:2094] Leader for TabletID 72057594037927937 is [48:55:2096] sender: [48:56:2057] recipient: [48:50:2094] Leader for TabletID 72057594037927937 is [48:55:2096] sender: [48:73:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [48:55:2096] sender: [48:99:2057] recipient: [48:36:2083] Leader for TabletID 72057594037927937 is [48:55:2096] sender: [48:102:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:55:2096] sender: [48:103:2057] recipient: [48:101:2126] Leader for TabletID 72057594037927937 is [48:104:2127] sender: [48:105:2057] recipient: [48:101:2126] !Reboot 72057594037927937 (actor [48:55:2096]) rebooted! !Reboot 72057594037927937 (actor [48:55:2096]) tablet resolver refreshed! new actor is[48:104:2127] Leader for TabletID 72057594037927937 is [48:104:2127] sender: [48:122:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:53:2057] recipient: [49:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:53:2057] recipient: [49:50:2094] Leader for TabletID 72057594037927937 is [49:55:2096] sender: [49:56:2057] recipient: [49:50:2094] Leader for TabletID 72057594037927937 is [49:55:2096] sender: [49:73:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [49:55:2096] sender: [49:101:2057] recipient: [49:36:2083] Leader for TabletID 72057594037927937 is [49:55:2096] sender: [49:104:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:55:2096] sender: [49:105:2057] recipient: [49:103:2128] Leader for TabletID 72057594037927937 is [49:106:2129] sender: [49:107:2057] recipient: [49:103:2128] !Reboot 72057594037927937 (actor [49:55:2096]) rebooted! !Reboot 72057594037927937 (actor [49:55:2096]) tablet resolver refreshed! new actor is[49:106:2129] Leader for TabletID 72057594037927937 is [49:106:2129] sender: [49:160:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:53:2057] recipient: [50:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:53:2057] recipient: [50:51:2094] Leader for TabletID 72057594037927937 is [50:55:2096] sender: [50:56:2057] recipient: [50:51:2094] Leader for TabletID 72057594037927937 is [50:55:2096] sender: [50:73:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [50:55:2096] sender: [50:101:2057] recipient: [50:36:2083] Leader for TabletID 72057594037927937 is [50:55:2096] sender: [50:104:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:55:2096] sender: [50:105:2057] recipient: [50:103:2128] Leader for TabletID 72057594037927937 is [50:106:2129] sender: [50:107:2057] recipient: [50:103:2128] !Reboot 72057594037927937 (actor [50:55:2096]) rebooted! !Reboot 72057594037927937 (actor [50:55:2096]) tablet resolver refreshed! 
new actor is[50:106:2129] Leader for TabletID 72057594037927937 is [50:106:2129] sender: [50:160:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:53:2057] recipient: [51:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:53:2057] recipient: [51:50:2094] Leader for TabletID 72057594037927937 is [51:55:2096] sender: [51:56:2057] recipient: [51:50:2094] Leader for TabletID 72057594037927937 is [51:55:2096] sender: [51:73:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [51:55:2096] sender: [51:102:2057] recipient: [51:36:2083] Leader for TabletID 72057594037927937 is [51:55:2096] sender: [51:105:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:55:2096] sender: [51:106:2057] recipient: [51:104:2128] Leader for TabletID 72057594037927937 is [51:107:2129] sender: [51:108:2057] recipient: [51:104:2128] !Reboot 72057594037927937 (actor [51:55:2096]) rebooted! !Reboot 72057594037927937 (actor [51:55:2096]) tablet resolver refreshed! new actor is[51:107:2129] Leader for TabletID 72057594037927937 is [51:107:2129] sender: [51:125:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:53:2057] recipient: [52:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:53:2057] recipient: [52:50:2094] Leader for TabletID 72057594037927937 is [52:55:2096] sender: [52:56:2057] recipient: [52:50:2094] Leader for TabletID 72057594037927937 is [52:55:2096] sender: [52:73:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [52:55:2096] sender: [52:104:2057] recipient: [52:36:2083] Leader for TabletID 72057594037927937 is [52:55:2096] sender: [52:107:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:55:2096] sender: [52:108:2057] recipient: [52:106:2130] Leader for TabletID 72057594037927937 is [52:109:2131] sender: [52:110:2057] recipient: [52:106:2130] !Reboot 72057594037927937 (actor [52:55:2096]) rebooted! !Reboot 72057594037927937 (actor [52:55:2096]) tablet resolver refreshed! new actor is[52:109:2131] Leader for TabletID 72057594037927937 is [52:109:2131] sender: [52:163:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:53:2057] recipient: [53:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:53:2057] recipient: [53:49:2094] Leader for TabletID 72057594037927937 is [53:55:2096] sender: [53:56:2057] recipient: [53:49:2094] Leader for TabletID 72057594037927937 is [53:55:2096] sender: [53:73:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:55:2096]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [53:55:2096] sender: [53:104:2057] recipient: [53:36:2083] Leader for TabletID 72057594037927937 is [53:55:2096] sender: [53:107:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:55:2096] sender: [53:108:2057] recipient: [53:106:2130] Leader for TabletID 72057594037927937 is [53:109:2131] sender: [53:110:2057] recipient: [53:106:2130] !Reboot 72057594037927937 (actor [53:55:2096]) rebooted! !Reboot 72057594037927937 (actor [53:55:2096]) tablet resolver refreshed! 
new actor is[53:109:2131] Leader for TabletID 72057594037927937 is [53:109:2131] sender: [53:163:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:53:2057] recipient: [54:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:53:2057] recipient: [54:50:2094] Leader for TabletID 72057594037927937 is [54:55:2096] sender: [54:56:2057] recipient: [54:50:2094] Leader for TabletID 72057594037927937 is [54:55:2096] sender: [54:73:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [54:55:2096] sender: [54:105:2057] recipient: [54:36:2083] Leader for TabletID 72057594037927937 is [54:55:2096] sender: [54:108:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:55:2096] sender: [54:109:2057] recipient: [54:107:2130] Leader for TabletID 72057594037927937 is [54:110:2131] sender: [54:111:2057] recipient: [54:107:2130] !Reboot 72057594037927937 (actor [54:55:2096]) rebooted! !Reboot 72057594037927937 (actor [54:55:2096]) tablet resolver refreshed! new actor is[54:110:2131] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:53:2057] recipient: [55:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:53:2057] recipient: [55:50:2094] Leader for TabletID 72057594037927937 is [55:55:2096] sender: [55:56:2057] recipient: [55:50:2094] Leader for TabletID 72057594037927937 is [55:55:2096] sender: [55:73:2057] recipient: [55:14:2061] >> KqpNewEngine::PureExpr [GOOD] >> KqpNewEngine::PureTxMixedWithDeferred >> TFlatTest::SplitEmptyToMany >> TFlatTest::SelectRangeBytesLimit >> TFlatTest::Init |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |75.8%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |75.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::SelfJoin >> TFlatTest::PathSorting >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> TLocksTest::Range_IncorrectDot1 >> SystemView::AuthGroups_TableRange [GOOD] >> SystemView::AuthOwners >> TFlatTest::ShardUnfreezeNonFrozen >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::LocksMultiShardOk >> KqpNewEngine::ReadAfterWrite [GOOD] >> 
KqpNewEngine::ReadRangeWithParams >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> KqpSqlIn::TableSource [GOOD] >> KqpSqlIn::SimpleKey_Negated >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 >> KqpNewEngine::ContainerRegistryCombiner [GOOD] >> KqpNewEngine::DeferredEffects >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg >> TSchemeShardServerLess::StorageBilling [GOOD] >> KqpRanges::NullInKey [GOOD] >> KqpRanges::NullInKeySuffix >> KqpErrors::ResolveTableError [GOOD] >> KqpNewEngine::PureTxMixedWithDeferred [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral >> KqpNewEngine::BatchUpload [GOOD] >> KqpNewEngine::Aggregate >> KqpSort::ReverseFirstKeyOptimized [GOOD] >> KqpSort::ReverseLimitOptimized >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpRanges::DuplicateKeyPredicateLiteral >> SystemView::AuthGroupMembers_Access [GOOD] >> SystemView::AuthGroupMembers_ResultOrder >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:52:57.894750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:52:57.894869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:57.894950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:52:57.894990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:52:57.895033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:52:57.895060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:52:57.895118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:52:57.895190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:52:57.895520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-04T16:52:58.133139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:52:58.133188Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:58.159544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:52:58.159861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:52:58.160383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:52:58.188458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:52:58.189418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:52:58.195419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:58.198517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:52:58.208713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:58.210117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:58.210194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:58.210398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:52:58.210440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:58.210495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:52:58.210843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.239454Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:52:58.510446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:52:58.511633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.512600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:52:58.513645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:52:58.513865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.519483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:58.519752Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:52:58.520006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.520085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:52:58.520125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:52:58.520161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:52:58.522579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.522648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:52:58.522691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:52:58.525076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.525139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.525196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:58.525244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.529761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:52:58.532908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:52:58.533158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:52:58.534370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:52:58.534546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:52:58.534625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:58.534927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:52:58.534991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:52:58.535160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-04T16:52:58.535277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:52:58.537881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:52:58.537937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:52:58.538159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:52:58.538209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:52:58.538607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:52:58.538659Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:52:58.538763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:58.538805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.538891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:52:58.538930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.538969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:52:58.539014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:52:58.539071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:52:58.539107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:52:58.539181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:52:58.539222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:52:58.539257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:52:58.541349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:58.541474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:52:58.541517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T16:53:55.264663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-04T16:53:55.265790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72075186233409549 2025-04-04T16:53:55.267426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-04T16:53:55.267596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-04T16:53:55.268756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-04T16:53:55.270078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-04T16:53:55.279586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-04T16:53:55.295922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-04-04T16:53:55.296276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-04-04T16:53:55.297580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 2] 2025-04-04T16:53:55.300055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-04-04T16:53:55.300723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:658:2570], at schemeshard: 72075186233409549, txId: 107, path id: 1 2025-04-04T16:53:55.301095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:658:2570], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-04-04T16:53:55.320497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-04T16:53:55.320932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-04-04T16:53:55.321054Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-04T16:53:55.321095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-04-04T16:53:55.321137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-04T16:53:55.351246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-04T16:53:55.352878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-04-04T16:53:55.353278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72075186233409549, txId: 107 2025-04-04T16:53:55.353637Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-04-04T16:53:55.353976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-04-04T16:53:55.367126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-04T16:53:55.367511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-04-04T16:53:55.367541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-04-04T16:53:55.367573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-04-04T16:53:55.367603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-04-04T16:53:55.367671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-04T16:53:55.384145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-04-04T16:53:55.384223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-04-04T16:53:55.384556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-04T16:53:55.384695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-04T16:53:55.384731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T16:53:55.384774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-04T16:53:55.384806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T16:53:55.384841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-04T16:53:55.384920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:808:2689] message: TxId: 107 2025-04-04T16:53:55.384961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T16:53:55.385006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-04T16:53:55.385045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-04T16:53:55.385122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-04-04T16:53:55.390535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-04-04T16:53:55.392359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72075186233409549, cookie: 107 2025-04-04T16:53:55.408160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-04T16:53:55.408552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2202:4048] TestWaitNotification: OK eventTxId 107 2025-04-04T16:53:55.515862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 780 RawX2: 4294969965 } TabletId: 72075186233409552 State: 4 2025-04-04T16:53:55.516318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-04-04T16:53:55.551955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-04-04T16:53:55.552519Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2025-04-04T16:53:55.559227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-04-04T16:53:55.559562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-04-04T16:53:55.561925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-04-04T16:53:55.564620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-04-04T16:53:55.565444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-04-04T16:53:55.580508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2025-04-04T16:53:55.581635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-04-04T16:53:55.585934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-04-04T16:53:55.855901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-04T16:53:55.856010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-04T16:53:55.856533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-04T16:53:55.857034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-04T16:53:55.857069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-04T16:53:55.857887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-04T16:53:55.857939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-04T16:53:55.857970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T16:53:55.857999Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T16:53:55.952770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:53:55.955660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.029000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2025-04-04T16:53:55.979830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2025-04-04T16:53:55.980583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2025-04-04T16:53:39.792170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:39.805455Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016df/r3tmp/tmp7w2IAg/pdisk_1.dat 2025-04-04T16:53:44.093711Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:45.305176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:53:45.771888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:45.772021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:45.777163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:45.777257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:45.866197Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:53:45.907053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:45.909595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:46.481876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:54.109574Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-04-04T16:53:54.110041Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-04T16:53:54.120482Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T16:53:54.120894Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-04T16:53:54.121940Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-04T16:53:54.129764Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-04T16:53:54.175471Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 300.000000s, cancelAfter: (empty maybe) 2025-04-04T16:53:54.175561Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-04T16:53:54.175616Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T16:53:54.175666Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-04T16:53:54.175717Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-04T16:53:54.176290Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-04T16:53:54.176529Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Bootstrap done, become ReadyState 2025-04-04T16:53:54.176794Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 2, stages: 1 2025-04-04T16:53:54.176879Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2025-04-04T16:53:54.176986Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T16:53:54.177218Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2025-04-04T16:53:54.177371Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T16:53:54.177501Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-04-04T16:53:54.178016Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] will be executed on 1 shards. 2025-04-04T16:53:54.180072Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-04-04T16:53:54.185000Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. 
Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-04T16:53:54.185342Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 1, snapshot: {0, 0} 2025-04-04T16:53:54.245957Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1558 RawX2: 4294970244 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\010\000\000\000\000\006\002\002\004\004\006\006" Rows: 3 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\001\000\000\000" KeyPoints: "\001\000\004\000\000\000\002\000\000\000" KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==" } RequestContext { key: "TraceId" value: "01jr0vj7vk5p3ykvpbbn3tnkje" } EnableSpilling: false DisableMetering: true 2025-04-04T16:53:54.246163Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2025-04-04T16:53:54.246265Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-04T16:53:54.246367Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T16:53:54.246408Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2025-04-04T16:53:54.246453Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-04-04T16:53:54.246822Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-04T16:53:54.246864Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-04T16:53:54.374852Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-04T16:53:54.376071Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-04-04T16:53:54.376434Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1558:2948] TxId: 281474976715658. 
Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2025-04-04T16:53:54.377116Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2948] TxId: 281474976715658. Ctx: { TraceId: 01jr0vj7vk5p3ykvpbbn3tnkje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2YmI5MWItZWU1MzJkNmItZGFhZWI3ZDUtNDhlNmZlMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T16:53:54.650163Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1573:2967], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2025-04-04T16:53:54.653840Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRkZGRkZC1lMTNiY2Y3Ny1kMThhMjBjZi03ZDE0MDg3YQ==, ActorId: [1:1571:2965], ActorState: ExecuteState, TraceId: 01jr0vjamb7qcbv5c5xvm9j59v, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: |75.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSort::TopSortParameter [GOOD] >> KqpSort::TopSortExpr >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNotNullSecondComponent |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |75.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |75.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |75.9%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::ScalarFunctions |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |75.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> KqpNewEngine::StreamLookupWithView [GOOD] >> KqpNewEngine::Truncated >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TLocksFatTest::RangeSetBreak >> TObjectStorageListingTest::CornerCases >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> SystemView::AuthPermissions [GOOD] |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> SystemView::AuthPermissions_Access |75.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ReadDifferentColumns >> KqpRanges::UpdateWhereInNoFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInNoFullScan-UseSink >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup >> KqpNewEngine::LocksMultiShardOk [GOOD] >> KqpNewEngine::LocksNoMutations |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |75.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-04-04T16:53:33.905507Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502182758389135:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:33.908645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eac/r3tmp/tmpEqYAem/pdisk_1.dat 2025-04-04T16:53:34.675665Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:34.751332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:34.752328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:34.826978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24140 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:53:37.314712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:53:38.258536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:53:39.276656Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502182758389135:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:39.334307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:00.835255Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502298199008406:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:00.835306Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eac/r3tmp/tmpOKps4J/pdisk_1.dat 2025-04-04T16:54:01.103119Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:01.134410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:01.134520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:01.204905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22605 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:01.555739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:01.574411Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:01.602703Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:01.614392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
>> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::OnlineRO_Consistent >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TFlatTest::PartBloomFilter [GOOD] >> KqpErrors::ProposeErrorEvWrite [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral [GOOD] >> KqpNewEngine::PrunePartitionsByExpr |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |75.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> KqpNewEngine::DeferredEffects [GOOD] >> KqpNewEngine::Delete+UseSink >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |75.9%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber >> KqpNewEngine::Aggregate [GOOD] >> KqpNewEngine::AggregateTuple |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |75.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-04-04T16:53:51.041690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502259583217800:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:51.041737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea9/r3tmp/tmpCwHQEG/pdisk_1.dat 2025-04-04T16:53:56.051776Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502259583217800:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:56.052589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:59.286333Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502272468120025:2292];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:59.287049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:59.914366Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:00.014365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:00.151768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.151889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.159318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:00.231567Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61474 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:00.961029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.985261Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:00.992250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:01.244576Z node 1 :TX_PROXY ERROR: Actor# [1:7489502302532891480:2377] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-04-04T16:54:05.410666Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502318096846264:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea9/r3tmp/tmpG2lgm9/pdisk_1.dat 2025-04-04T16:54:05.539373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:05.678860Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:05.697036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:05.697115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:05.699903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8736 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:06.011773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:06.026664Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:06.037046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:06.103145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... Error 128: Table is frozen. Only unfreeze alter is allowed 2025-04-04T16:54:06.132665Z node 2 :TX_PROXY ERROR: Actor# [2:7489502322391814106:2394] txid# 281474976710660, issues: { message: "Table is frozen. 
Only unfreeze alter is allowed" severity: 1 } 2025-04-04T16:54:06.136404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:06.157200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: 2025-04-04T16:53:18.050040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:53:18.050734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:18.051046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:53:18.052901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:53:18.053121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:18.053325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a7/r3tmp/tmpsjTtsr/pdisk_1.dat 2025-04-04T16:53:18.495910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:19.276449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:53:19.630796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:19.630918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:19.685521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:19.685614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:19.730566Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:53:19.734094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:19.737317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:20.221025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:22.885574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1597:2962], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:22.885669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1606:2967], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:22.885745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:22.913134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:53:24.031418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1611:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:53:24.486775Z node 1 :TX_PROXY ERROR: Actor# [1:1765:3061] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:27.023133Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-04-04T16:53:27.023522Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-04-04T16:53:27.024294Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T16:53:27.024769Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-04T16:53:27.025857Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-04T16:53:27.118773Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-04-04T16:53:27.244993Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.427019s, cancelAfter: (empty maybe) 2025-04-04T16:53:27.246415Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2025-04-04T16:53:27.246780Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T16:53:27.247518Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-04-04T16:53:27.248588Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-04T16:53:27.291155Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2025-04-04T16:53:27.293525Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-04T16:53:27.309831Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1791:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-04T16:53:27.320616Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1791:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-04T16:53:27.321940Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T16:53:27.323313Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-04T16:53:27.325102Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T16:53:27.325887Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1791:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-04-04T16:53:27.330295Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1791:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2025-04-04T16:53:27.331234Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1791:2960] TxId: 281474976715660. 
Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-04-04T16:53:27.373597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vhbt3fxf25w4p4qwew3vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRmOTUzMTctOGVjYzYwMmQtNzgzZTU3MzYtZmI2MDBmNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:53:27.3 ... =3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-04T16:54:08.678078Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [3:2048:3201] 2025-04-04T16:54:08.678137Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:2048:3201], channels: 0 2025-04-04T16:54:08.678223Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2048:3201], 2025-04-04T16:54:08.678303Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2048:3201], 2025-04-04T16:54:08.678349Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-04T16:54:08.679328Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2048:3201], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-04T16:54:08.679412Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. 
Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2048:3201], 2025-04-04T16:54:08.679477Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2048:3201], 2025-04-04T16:54:08.680477Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2048:3201], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 694 Tasks { TaskId: 1 CpuTimeUs: 94 FinishTimeMs: 1743785648680 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 76 HostName: "ghrun-mup5cka3jq" NodeId: 3 CreateTimeMs: 1743785648678 } MaxMemoryUsage: 1048576 } 2025-04-04T16:54:08.682820Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:2048:3201] 2025-04-04T16:54:08.683004Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[3:2044:3201] 2025-04-04T16:54:08.685062Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000694s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T16:54:08.724978Z node 3 :KQP_COMPUTE WARN: SelfId: [3:2051:3201], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:2035:3201]Got OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2051:3201]. Ignored this error. 2025-04-04T16:54:08.725132Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2044:3201], SessionActorId: [3:2035:3201], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 . sessionActorId=[3:2035:3201]. isRollback=0 2025-04-04T16:54:08.725494Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, ActorId: [3:2035:3201], ActorState: ExecuteState, TraceId: 01jr0vjrefew3z3e6179a32ex2, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [3:2045:3201] from: [3:2044:3201] 2025-04-04T16:54:08.725680Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got EvAbortExecution, status: OVERLOADED, message: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-04-04T16:54:08.725771Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-04-04T16:54:08.725850Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-04T16:54:08.726129Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: OVERLOADED Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2006 severity: 1 } Result { Stats { CpuTimeUs: 694 Stages { StageGuid: "717b612f-3fdcf56f-ea0eba40-8f4b4916" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'5)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 694 Tasks { TaskId: 1 CpuTimeUs: 94 FinishTimeMs: 1743785648680 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 76 HostName: "ghrun-mup5cka3jq" NodeId: 3 CreateTimeMs: 1743785648678 } MaxMemoryUsage: 1048576 } } } } , to ActorId: [3:2035:3201] 2025-04-04T16:54:08.726200Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-04-04T16:54:08.731913Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 11998 DurationUs: 1743785647155134 ExecuterCpuTimeUs: 11304 StartTimeMs: 1575 FinishTimeMs: 1743785648730 Stages { StageGuid: "717b612f-3fdcf56f-ea0eba40-8f4b4916" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'5)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 694 Tasks { TaskId: 1 CpuTimeUs: 94 FinishTimeMs: 1743785648680 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 18 BuildCpuTimeUs: 76 HostName: "ghrun-mup5cka3jq" NodeId: 3 CreateTimeMs: 1743785648678 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743785648680 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Sink\",\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"ConstantExpr-Sink\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{key: 5,value: 5}]\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"Upsert\",\"Path\":\"\\/Root\\/table-1\",\"SinkType\":\"KqpTableSink\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"717b612f-3fdcf56f-ea0eba40-8f4b4916\",\"Stats\":{\"BaseTimeMs\":1743785648680,\"ComputeNodes\":[{\"CpuTimeUs\":694,\"Tasks\":[{\"ComputeTimeUs\":18,\"EgressBytes\":10,\"EgressRows\":1,\"FinishTimeMs\":1743785648680,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":3,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 704 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\266\005\020\266\005\030\266\005 \001" } } 2025-04-04T16:54:08.732056Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T16:54:08.732146Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2045:3201] TxId: 281474976715672. Ctx: { TraceId: 01jr0vjrefew3z3e6179a32ex2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState 2025-04-04T16:54:08.732497Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2NlNjU2NGQtNGM4NjUxZDEtNDk1Y2ZmZmItNjg5ODdlY2Y=, ActorId: [3:2035:3201], ActorState: ExecuteState, TraceId: 01jr0vjrefew3z3e6179a32ex2, Create QueryResponse for error on request, msg: >> KqpSort::TopSortExpr [GOOD] >> KqpSort::TopSortExprPk >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2025-04-04T16:53:44.263174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502227586982423:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:44.264086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eaa/r3tmp/tmpKNYOgy/pdisk_1.dat 2025-04-04T16:53:49.137824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502227586982423:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:49.137899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:53.049590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:53.120620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:56.186912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:56.188057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:56.240367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:57.593369Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 1.082831s 2025-04-04T16:53:57.593843Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 1.084127s 2025-04-04T16:53:59.904466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:59.904496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:59.909708Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62009 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:00.401905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.437076Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.459346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T16:54:00.517263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.540036Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743785640478 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1743785640597 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... 
(TRUNCATED) 2025-04-04T16:54:05.457992Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502319331050404:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eaa/r3tmp/tmpLGltqq/pdisk_1.dat 2025-04-04T16:54:05.505360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:05.707154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:05.707231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:05.708979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:05.714607Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5457 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:06.087607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:06.097577Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:06.111060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:06.745321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 waiting... 
>> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> KqpRanges::NullInKeySuffix [GOOD] >> KqpRanges::NullInPredicate >> SystemView::DescribeSystemFolder [GOOD] >> SystemView::DescribeAccessDenied >> TLocksTest::BrokenSameKeyLock >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::TupleParameter >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpRanges::IsNullInValue >> KqpNewEngine::Truncated [GOOD] >> KqpNewEngine::StaleRO_Immediate ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-04-04T16:51:30.154330Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1743785490154292 2025-04-04T16:51:30.579498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501650905105572:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:30.586659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:30.642873Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501653293199783:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:30.643628Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:30.931309Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:51:30.955676Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c04/r3tmp/tmp3DwvPc/pdisk_1.dat 2025-04-04T16:51:31.269810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.269965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.272760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:31.272825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:31.275758Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:31.275871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:31.277349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:31.289163Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62965, node 1 2025-04-04T16:51:31.340908Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:31.340932Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:31.558852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000c04/r3tmp/yandex52pGg9.tmp 2025-04-04T16:51:31.566797Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/i5tq/000c04/r3tmp/yandex52pGg9.tmp 2025-04-04T16:51:31.566975Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000c04/r3tmp/yandex52pGg9.tmp 2025-04-04T16:51:31.567106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:31.690390Z INFO: TTestServer started on Port 15712 GrpcPort 62965 TClient is connected to server localhost:15712 PQClient connected to localhost:62965 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:32.276903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T16:51:35.586350Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501650905105572:2246];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:35.586429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:35.646618Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501653293199783:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:35.646693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:36.274142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501676674910198:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:36.274228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:36.274819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501676674910210:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:36.279992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:51:36.316762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501676674910212:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:51:36.899677Z node 1 :TX_PROXY ERROR: Actor# [1:7489501676674910305:2690] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:36.959200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:51:36.975075Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501679063003857:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:36.976599Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjM4ZDA2NWUtMjg0ZDNjZTUtZmVhZjZjZTctZmQ3OTFmYzU=, ActorId: [2:7489501679063003812:2307], ActorState: ExecuteState, TraceId: 01jr0ve3wcbf5gq0kn4s2n3mv7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:36.978671Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501676674910322:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:36.979896Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzQ1NzQxOTUtNGJmOGVlYTgtNmRhYjEyZGYtMmU4NDk5NGU=, ActorId: [1:7489501676674910196:2338], ActorState: ExecuteState, TraceId: 01jr0ve3pg9765q4823c35d6e2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:37.000440Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:37.002477Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:37.193150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:51:37.524255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:62965", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T16:51:39.074227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0ve5nt5sgfy73n2r20ajyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk5YWMzYmUtMjFjMjU3MzQtZDVlYjlkN2EtM2FjMmJlMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:39.086041Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 665 } === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489501689559812669:3027] === CheckClustersLis ... ersisted=0 Status=SUCCESS 2025-04-04T16:53:50.519536Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489502252543644993:3209] (SourceId=src_id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T16:53:50.519552Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489502252543644993:3209] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-04-04T16:53:50.519577Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T16:53:50.542654Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-04-04T16:53:50.776830Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7489502252543645036:3209], now have 1 active actors on pipe 2025-04-04T16:53:50.776889Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:53:50.776906Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:53:50.776994Z node 4 :PERSQUEUE INFO: new Cookie src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src_id 2025-04-04T16:53:50.777060Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T16:53:50.777096Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:53:50.780041Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:53:50.780070Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:53:50.780134Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:53:50.782961Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0 2025-04-04T16:53:50.790973Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T16:53:50.791012Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743785630790 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:53:50.791101Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session established. 
Init response: last_seq_no: 2 session_id: "src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2025-04-04T16:53:51.801012Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489502252543644993:3209] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2025-04-04T16:53:51.978316Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489502252543644993:3209] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2025-04-04T16:53:51.978342Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489502252543644993:3209] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-04-04T16:54:08.866384Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-04-04T16:54:08.866442Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2025-04-04T16:54:08.867653Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-04T16:54:08.870948Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2025-04-04T16:54:08.871077Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 2025-04-04T16:54:10.670608Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent >>> Ready to answer: ok 2025-04-04T16:54:10.671751Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session: try to update token 2025-04-04T16:54:10.671802Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-04-04T16:54:10.674339Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T16:54:10.674645Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T16:54:10.676067Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:54:10.676106Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:54:10.676190Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-04T16:54:10.676606Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-04T16:54:10.677091Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:54:10.677124Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:54:10.677176Z node 4 :PERSQUEUE 
DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-04-04T16:54:10.677361Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-04-04T16:54:10.678289Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-04-04T16:54:10.678820Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1743785650677 2025-04-04T16:54:10.678972Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T16:54:10.679039Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2025-04-04T16:54:10.691742Z node 4 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [4:7489501817293448884:2396] 2025-04-04T16:54:10.691872Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:54:10.691921Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T16:54:10.691973Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-04T16:54:10.692194Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] topic 'rt3.dc1--test-topicCounters. CacheSize 480 CachedBlobs 3 2025-04-04T16:54:10.692226Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T16:54:10.693181Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-04-04T16:54:10.692691Z node 4 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 size 160 2025-04-04T16:54:10.698424Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T16:54:10.698663Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 13000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-04T16:54:10.698696Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
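The ack sequence above (ReadyToAcceptEvent, a single Write, then an ack carrying the assigned seq_no and stored offset) is the standard event loop of a topic write session. Below is a minimal sketch of that loop, assuming the NYdb::NTopic C++ SDK surface these tests are built on (TTopicClient, CreateWriteSession, TWriteSessionEvent); header paths and signatures vary between SDK versions, so treat it as an illustration rather than the test's actual code.

// Write-session event loop: wait for a continuation token, write once, and
// let the server assign SeqNo/Offset, as in "acks { seq_no: 3 written { offset: 2 } }".
// Assumption: NYdb::NTopic C++ SDK; names and headers may differ by SDK version.
#include <ydb-cpp-sdk/client/topic/client.h>
#include <variant>

void WriteOneMessage(NYdb::TDriver& driver) {
    NYdb::NTopic::TTopicClient client(driver);

    auto session = client.CreateWriteSession(
        NYdb::NTopic::TWriteSessionSettings()
            .Path("test-topic")
            .MessageGroupId("src_id"));  // appears server-side as SourceId '\0src_id'

    // Block until the server hands out a continuation token (ReadyToAcceptEvent).
    auto event = *session->GetEvent(/* block = */ true);
    if (auto* ready =
            std::get_if<NYdb::NTopic::TWriteSessionEvent::TReadyToAcceptEvent>(&event)) {
        session->Write(std::move(ready->ContinuationToken),
                       NYdb::NTopic::TWriteMessage("payload"));
    }

    // A subsequent TAcksEvent reports the assigned seq_no and stored offset.
    session->Close(TDuration::Seconds(10));
}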
2025-04-04T16:54:10.698730Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session: acknoledged message 1 2025-04-04T16:54:10.705473Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0 grpc read done: success: 0 data: 2025-04-04T16:54:10.705497Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0 grpc read failed 2025-04-04T16:54:10.705525Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0 grpc closed 2025-04-04T16:54:10.705543Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0 is DEAD 2025-04-04T16:54:10.706057Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T16:54:10.706780Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489502252543645036:3209] destroyed 2025-04-04T16:54:10.706839Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T16:54:10.714911Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-04-04T16:54:10.715025Z :ERROR: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-04-04T16:54:10.715076Z :ERROR: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-04-04T16:54:10.715111Z :INFO: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session will now close 2025-04-04T16:54:10.715202Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session: aborting 2025-04-04T16:54:10.749916Z :DEBUG: [/Root] TraceId [] SessionId [src_id|a3c7ea0a-2dd25454-59a23999-f9c45140_0] MessageGroupId [src_id] Write session: destroy >> KqpReturning::ReturningWorksIndexedInsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert-QueryService >> DataShardWrite::UpsertPrepared+Volatile >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> KikimrIcGateway::TestLoadExternalTable |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |76.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |76.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TLocksTest::NoLocksSet >> KqpNewEngine::ReadDifferentColumns [GOOD] >> KqpNewEngine::ReadDifferentColumnsPk >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksNoMutationsSharded >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup >> BasicUsage::RecreateObserver [GOOD] >> TObjectStorageListingTest::Decimal [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> KqpRanges::UpdateWhereInNoFullScan-UseSink [GOOD] >> KqpRanges::UpdateWhereInWithNull >> TFetchRequestTests::CheckAccess [GOOD] >> PQCountersSimple::PartitionWriteQuota >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: 2025-04-04T16:53:38.055408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:53:38.058176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:38.061740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:53:38.080113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:53:38.080300Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:38.080501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00169d/r3tmp/tmp5hTkWu/pdisk_1.dat 2025-04-04T16:53:41.927015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:43.141998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:53:43.592072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:43.592215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:43.604529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:43.604674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:43.657965Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:53:43.679287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:43.681803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:44.264145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.566054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1599:2962], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.575297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1609:2967], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.575743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.665428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:53:49.947551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1613:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:53:50.681592Z node 1 :TX_PROXY ERROR: Actor# [1:1767:3057] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:53.131845Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-04-04T16:53:53.132242Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-04T16:53:53.132358Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-04-04T16:53:53.132562Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T16:53:53.132852Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-04-04T16:53:53.133387Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T16:53:53.133548Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-04-04T16:53:53.133708Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create compute task: 1 2025-04-04T16:53:53.133851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. 
Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:53:53.133912Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T16:53:53.143103Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T16:53:53.144140Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-04T16:53:53.144507Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1796:2960] 2025-04-04T16:53:53.144866Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1796:2960], channels: 0 2025-04-04T16:53:53.145513Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:1796:2960], 2025-04-04T16:53:53.154993Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1796:2960], 2025-04-04T16:53:53.155149Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-04T16:53:53.336159Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:1796:2960], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-04T16:53:53.337064Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1793:2960] TxId: 281474976715660. Ctx: { TraceId: 01jr0vj3wwd2qhpz0x3mdnhavf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiYWYzZGMtNTFhZjc3OWYtOGNkM2U0NjEtNDIxZWVkZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:1796:2960], 2025-04-04T16:53:53.337311Z node 1 :KQP_EXECUTER DEBUG: ActorId ... state from compute actor: [4:1845:2481], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 795 Tasks { TaskId: 1 CpuTimeUs: 527 ComputeCpuTimeUs: 10 BuildCpuTimeUs: 517 HostName: "ghrun-mup5cka3jq" NodeId: 4 CreateTimeMs: 1743785656659 } MaxMemoryUsage: 1048576 } 2025-04-04T16:54:16.671206Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1845:2481], CA [3:1843:3096], 2025-04-04T16:54:16.671284Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1845:2481], CA [3:1843:3096], 2025-04-04T16:54:16.688823Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1845:2481], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 975 Tasks { TaskId: 1 CpuTimeUs: 647 OutputRows: 3 OutputBytes: 12 IngressRows: 3 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 517 HostName: "ghrun-mup5cka3jq" NodeId: 4 StartTimeMs: 1743785656687 CreateTimeMs: 1743785656659 } MaxMemoryUsage: 1048576 } 2025-04-04T16:54:16.688937Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1845:2481], CA [3:1843:3096], 2025-04-04T16:54:16.688985Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1845:2481], CA [3:1843:3096], 2025-04-04T16:54:16.689442Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1844:3096], finished: 0 2025-04-04T16:54:16.689552Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 1, to: [3:1844:3096] 2025-04-04T16:54:16.706014Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1844:3096], finished: 1 2025-04-04T16:54:16.706094Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 2, to: [3:1844:3096] 2025-04-04T16:54:16.711603Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1843:3096], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 5654 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 809 FinishTimeMs: 1743785656706 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 219 BuildCpuTimeUs: 590 HostName: "ghrun-mup5cka3jq" NodeId: 3 CreateTimeMs: 1743785656653 } MaxMemoryUsage: 1048576 } 2025-04-04T16:54:16.711755Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1843:3096] 2025-04-04T16:54:16.711861Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1845:2481], 2025-04-04T16:54:16.711912Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:1845:2481], 2025-04-04T16:54:16.712422Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1845:2481], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2001 DurationUs: 23000 Tasks { TaskId: 1 CpuTimeUs: 721 FinishTimeMs: 1743785656710 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 204 BuildCpuTimeUs: 517 WaitInputTimeUs: 17585 HostName: "ghrun-mup5cka3jq" NodeId: 4 StartTimeMs: 1743785656687 CreateTimeMs: 1743785656659 } MaxMemoryUsage: 1048576 } 2025-04-04T16:54:16.712509Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [4:1845:2481] 2025-04-04T16:54:16.718566Z node 3 :KQP_EXECUTER INFO: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 13208 DurationUs: 1743785655195147 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } ExecuterCpuTimeUs: 5553 StartTimeMs: 1517 FinishTimeMs: 1743785656712 Stages { StageId: 1 StageGuid: "6d58b811-23bf2a93-4b90d777-4bb801f4" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" ComputeActors { CpuTimeUs: 5654 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 809 FinishTimeMs: 1743785656706 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 219 BuildCpuTimeUs: 590 HostName: "ghrun-mup5cka3jq" NodeId: 3 CreateTimeMs: 1743785656653 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743785656687 } Stages { StageGuid: "9b942d87-d9d252be-735286ad-1eb1e442" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743785656687 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":6,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":5,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node 
Type\":\"TableFullScan\",\"Operators\":[{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRanges\":[\"key (-∞, +∞)\"],\"ReadRangesPointPrefixLen\":\"0\",\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"9b942d87-d9d252be-735286ad-1eb1e442\",\"Stats\":{\"BaseTimeMs\":1743785656687,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"6d58b811-23bf2a93-4b90d777-4bb801f4\",\"Stats\":{\"BaseTimeMs\":1743785656687,\"ComputeNodes\":[{\"CpuTimeUs\":5654,\"Tasks\":[{\"ComputeTimeUs\":219,\"FinishTimeMs\":1743785656706,\"Host\":\"ghrun-mup5cka3jq\",\"InputBytes\":12,\"InputRows\":3,\"NodeId\":3,\"OutputBytes\":12,\"OutputRows\":3,\"ResultBytes\":12,\"ResultRows\":3,\"TaskId\":2}]}],\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1435 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\004\022\013\010\321\017\020\226,\030\347; \002" } } 2025-04-04T16:54:16.718754Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T16:54:16.718831Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-04T16:54:16.718907Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1835:3096] TxId: 281474976715663. Ctx: { TraceId: 01jr0vk041623zgngpbz18amks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGUwZTQ0YS1lNDA3MjQ4MS01YWRmNmUwZS1mZGMyMmU2OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.007655s ReadRows: 3 ReadBytes: 24 ru: 5 rate limiter was not found force flag: 1 { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |76.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> KqpSort::ReverseEightShardOptimized [GOOD] >> KqpSort::PassLimit |76.0%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-04-04T16:51:31.016905Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1743785491016870 2025-04-04T16:51:31.444772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501657238487376:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:31.444814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:31.512201Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501656871700798:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:31.512241Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:31.912945Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c2f/r3tmp/tmpcxgkO4/pdisk_1.dat 2025-04-04T16:51:31.983393Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:51:32.535861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:51:32.582494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:51:32.621825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:32.621912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:32.627287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:32.627348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:32.637930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:32.640286Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:32.643625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:32.671088Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3793, node 1 2025-04-04T16:51:33.007603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000c2f/r3tmp/yandexiYUCeW.tmp 2025-04-04T16:51:33.007623Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000c2f/r3tmp/yandexiYUCeW.tmp 2025-04-04T16:51:33.007742Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000c2f/r3tmp/yandexiYUCeW.tmp 2025-04-04T16:51:33.007832Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:33.116183Z INFO: TTestServer started on Port 30730 GrpcPort 3793 TClient is connected to server localhost:30730 PQClient connected to localhost:3793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:33.827298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T16:51:36.447296Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501657238487376:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:36.447352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:36.522442Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501656871700798:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:36.522514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:37.279832Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501682641504909:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.280252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501682641504890:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.280340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.280475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501683008292178:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.280546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.282540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501683008292206:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.294492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501683008292237:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.294557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:37.344672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:51:37.424291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501683008292208:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:51:37.754531Z node 2 :TX_PROXY ERROR: Actor# [2:7489501682641504920:2125] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:51:37.756227Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-04T16:51:37.756601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501682641504919:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:51:37.758357Z node 1 :TX_PROXY ERROR: Actor# [1:7489501683008292294:2693] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:37.777902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:51:37.827348Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501683008292330:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:37.828792Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2IwOWI5ZDctNWVlNDZkZWUtYTg2OWIwNzEtOWNmMjMwMjY=, ActorId: [1:7489501683008292176:2338], ActorState: ExecuteState, TraceId: 01jr0ve4m50jk5xbg4rj11y6wb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:37.836077Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:37.871694Z node 2 :TX_PROXY ERROR: Actor# [2:7489501682641504983:2153] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:37.933304Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501682641505001:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' becaus ... eClient { RawX1: 7489502362944797990 RawX2: 4503612512274989 } Path: "/Root/PQ/rt3.dc1--test-topic" } 2025-04-04T16:54:15.660218Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 INITING TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-04T16:54:15.660725Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037892 Generation: 1, pipe: [3:7489502362944797992:2614] 2025-04-04T16:54:15.660779Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7489502362944797992:2614], now have 1 active actors on pipe 2025-04-04T16:54:15.660974Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T16:54:15.661005Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T16:54:15.661049Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Created session shared/user_3_1_18386583589817687447_v1 on pipe: [3:7489502362944797992:2614] 2025-04-04T16:54:15.661117Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_3_1_18386583589817687447_v1:1 with generation 1 2025-04-04T16:54:15.661268Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user session is set to 0 (startOffset 0) session shared/user_3_1_18386583589817687447_v1 2025-04-04T16:54:15.661440Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T16:54:15.670461Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:54:15.670544Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-04-04T16:54:15.670793Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1743785651328 CreateTimestampMS: 1743785651328 SizeLag: 0 WriteTimestampEstimateMS: 1743785655574 } Cookie: 18446744073709551615 } 2025-04-04T16:54:15.670836Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 INIT DONE TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-04T16:54:15.670916Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 sending to client partition status >>> Got event: StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc1 Database path: /Root Database id: account-dc1 CommittedOffset: 0 EndOffset: 0 } 2025-04-04T16:54:15.678062Z :INFO: [/Root] [/Root] [76f63974-6faf0545-14582d1d-b78845cb] Closing read session. 
Close timeout: 0.000000s 2025-04-04T16:54:15.678132Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-04T16:54:15.678189Z :INFO: [/Root] [/Root] [76f63974-6faf0545-14582d1d-b78845cb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 108 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:54:15.678319Z :NOTICE: [/Root] [/Root] [76f63974-6faf0545-14582d1d-b78845cb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T16:54:15.678366Z :DEBUG: [/Root] [/Root] [76f63974-6faf0545-14582d1d-b78845cb] [] Abort session to cluster 2025-04-04T16:54:15.678807Z :INFO: [/Root] [/Root] [fc14ede0-8bd4ed1a-11743034-7743ddc8] Closing read session. Close timeout: 0.000000s 2025-04-04T16:54:15.678848Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T16:54:15.678881Z :INFO: [/Root] [/Root] [fc14ede0-8bd4ed1a-11743034-7743ddc8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 107 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:54:15.678937Z :NOTICE: [/Root] [/Root] [fc14ede0-8bd4ed1a-11743034-7743ddc8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T16:54:15.678962Z :DEBUG: [/Root] [/Root] [fc14ede0-8bd4ed1a-11743034-7743ddc8] [] Abort session to cluster 2025-04-04T16:54:15.679163Z :INFO: [/Root] [/Root] [9116c595-b7132347-1229e171-3e1cc499] Closing read session. Close timeout: 0.000000s 2025-04-04T16:54:15.679198Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T16:54:15.679225Z :INFO: [/Root] [/Root] [9116c595-b7132347-1229e171-3e1cc499] Counters: { Errors: 0 CurrentSessionLifetimeMs: 106 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:54:15.679269Z :NOTICE: [/Root] [/Root] [9116c595-b7132347-1229e171-3e1cc499] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T16:54:15.679294Z :DEBUG: [/Root] [/Root] [9116c595-b7132347-1229e171-3e1cc499] [] Abort session to cluster 2025-04-04T16:54:15.679491Z :INFO: [/Root] [/Root] [9116c595-b7132347-1229e171-3e1cc499] Closing read session. Close timeout: 0.000000s 2025-04-04T16:54:15.679533Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T16:54:15.679584Z :INFO: [/Root] [/Root] [9116c595-b7132347-1229e171-3e1cc499] Counters: { Errors: 0 CurrentSessionLifetimeMs: 106 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:54:15.679662Z :NOTICE: [/Root] [/Root] [9116c595-b7132347-1229e171-3e1cc499] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T16:54:15.679771Z :INFO: [/Root] [/Root] [fc14ede0-8bd4ed1a-11743034-7743ddc8] Closing read session. Close timeout: 0.000000s 2025-04-04T16:54:15.679795Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T16:54:15.679818Z :INFO: [/Root] [/Root] [fc14ede0-8bd4ed1a-11743034-7743ddc8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 108 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:54:15.679855Z :NOTICE: [/Root] [/Root] [fc14ede0-8bd4ed1a-11743034-7743ddc8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T16:54:15.679893Z :INFO: [/Root] [/Root] [76f63974-6faf0545-14582d1d-b78845cb] Closing read session. Close timeout: 0.000000s 2025-04-04T16:54:15.679923Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-04-04T16:54:15.679948Z :INFO: [/Root] [/Root] [76f63974-6faf0545-14582d1d-b78845cb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 110 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:54:15.679984Z :NOTICE: [/Root] [/Root] [76f63974-6faf0545-14582d1d-b78845cb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T16:54:15.690475Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 grpc read done: success# 0, data# { } 2025-04-04T16:54:15.690531Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 grpc read failed 2025-04-04T16:54:15.690579Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 grpc closed 2025-04-04T16:54:15.690655Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_18386583589817687447_v1 is DEAD 2025-04-04T16:54:15.691696Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_1668820020072277380_v1 grpc read done: success# 0, data# { } 2025-04-04T16:54:15.691711Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_1668820020072277380_v1 grpc read failed 2025-04-04T16:54:15.691730Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_1668820020072277380_v1 grpc closed 2025-04-04T16:54:15.691746Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_1668820020072277380_v1 is DEAD 2025-04-04T16:54:15.692261Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_15370730309371212456_v1 grpc read done: success# 0, data# { } 2025-04-04T16:54:15.692272Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_15370730309371212456_v1 grpc read failed 2025-04-04T16:54:15.692290Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_15370730309371212456_v1 grpc closed 2025-04-04T16:54:15.692305Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_15370730309371212456_v1 is DEAD 2025-04-04T16:54:15.693806Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489502362944797990:2605] disconnected; active server actors: 1 2025-04-04T16:54:15.693864Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489502362944797990:2605] client user disconnected session shared/user_3_1_18386583589817687447_v1 2025-04-04T16:54:15.693960Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-04T16:54:15.694018Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489502362944797989:2607] disconnected; active server actors: 1 2025-04-04T16:54:15.694035Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489502362944797989:2607] client user disconnected session shared/user_3_3_1668820020072277380_v1 2025-04-04T16:54:15.694061Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489502362944797988:2606] disconnected; active server actors: 1 2025-04-04T16:54:15.694077Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489502362944797988:2606] client user disconnected session shared/user_3_2_15370730309371212456_v1 2025-04-04T16:54:15.698431Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_18386583589817687447_v1 2025-04-04T16:54:15.698516Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe 
[3:7489502362944797992:2614] destroyed 2025-04-04T16:54:15.698626Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_18386583589817687447_v1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2025-04-04T16:54:06.472677Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502323597275973:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:06.472707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea7/r3tmp/tmpaZnsfZ/pdisk_1.dat 2025-04-04T16:54:07.151369Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:07.175823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:07.175924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:07.177582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24551, node 1 2025-04-04T16:54:07.399050Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:07.399071Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:07.399081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:07.399187Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8425 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:08.476196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:08.527048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:08.704566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:13.731360Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502353755217216:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:13.732032Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea7/r3tmp/tmp1rmsmI/pdisk_1.dat 2025-04-04T16:54:13.949487Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:13.985330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:13.985406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:13.987437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17430, node 2 2025-04-04T16:54:14.170796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:14.170819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:14.170825Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:14.170922Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10092 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:14.639784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:14.654653Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:14.676534Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:14.689423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::LargeDatashardReply [GOOD] >> KqpNewEngine::Delete+UseSink [GOOD] >> KqpNewEngine::Delete-UseSink >> ColumnBuildTest::CancelBuild >> SystemView::AuthUsers_TableRange [GOOD] |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |76.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |76.1%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-04-04T16:53:34.155907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502184492015672:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:34.156364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ead/r3tmp/tmp4HK3Po/pdisk_1.dat 2025-04-04T16:53:36.691168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:36.788640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:36.788762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:36.790156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:36.991330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:53:36.991470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:53:36.991797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-04-04T16:53:37.005530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:53:37.012004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:53:37.026608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-04T16:53:37.026953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-04-04T16:53:37.026999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T16:53:37.027010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:37.027154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T16:53:37.029018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-04T16:53:37.029500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 
2025-04-04T16:53:37.029519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T16:53:37.029545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:53:37.029565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T16:53:37.029580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:53:37.029636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-04T16:53:37.794722Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessTabletBalancer MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2025-04-04T16:53:37.794775Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle BalancerOut 2025-04-04T16:53:39.134974Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502184492015672:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:39.135029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:5971 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T16:53:40.979531Z node 1 :TX_PROXY DEBUG: actor# [1:7489502184492015758:2096] Handle TEvNavigate describe path dc-1 2025-04-04T16:53:40.979585Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502210261819874:2278] HANDLE EvNavigateScheme dc-1 2025-04-04T16:53:40.979902Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502210261819874:2278] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T16:53:41.494211Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502210261819874:2278] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-04T16:53:41.801607Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502210261819874:2278] Handle TEvDescribeSchemeResult Forward to# [1:7489502210261819873:2277] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: 
SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:53:43.202801Z node 1 :TX_PROXY DEBUG: actor# [1:7489502184492015758:2096] Handle TEvProposeTransaction 2025-04-04T16:53:43.202834Z node 1 :TX_PROXY DEBUG: actor# [1:7489502184492015758:2096] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T16:53:43.202910Z node 1 :TX_PROXY DEBUG: actor# [1:7489502184492015758:2096] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489502223146721780:2291] 2025-04-04T16:53:44.749439Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-04-04T16:53:44.749485Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:53:44.749841Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:53:44.795205Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:53:44.795710Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T16:53:44.796076Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T16:53:44.796348Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T16:53:44.826211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T16:53:44.826462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:44.826907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, 
LocalPathId: 1] was 0 2025-04-04T16:53:44.846104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:53:44.855115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:44.869177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T16:53:44.886427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 2025-04-04T16:53:44.895437Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T16:53:44.895492Z node 1 :TX_PROXY DEBUG: Actor# [1:7489502223146721780:2291] txid# 281474976710657 SEND to# [1:7489502223146721779:2290] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-04-04T16:53:44.911306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:44.911720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T16:53:44.911735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T16:53:44.920702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-04T16:53:44.925681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:44.925703Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at sch ... 
474976710674:0 2025-04-04T16:53:54.503946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-04-04T16:53:54.505284Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037895)::Complete SideEffects {} 2025-04-04T16:53:54.526468Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037892)::Complete SideEffects {} 2025-04-04T16:53:54.527415Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037894)::Complete SideEffects {} 2025-04-04T16:53:54.527426Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037889)::Complete SideEffects {} 2025-04-04T16:53:54.527433Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037890)::Complete SideEffects {} 2025-04-04T16:53:54.527440Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037898)::Complete SideEffects {} 2025-04-04T16:53:54.527447Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037893)::Complete SideEffects {} 2025-04-04T16:53:54.528174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710674 2025-04-04T16:53:54.528214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710674 2025-04-04T16:53:54.528229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710674 2025-04-04T16:53:54.528765Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T16:53:54.528810Z node 1 :TX_DATASHARD INFO: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-04-04T16:53:54.554344Z node 1 :TX_DATASHARD INFO: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:53:54.564237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489502261801428109 RawX2: 4503603922340143 } TabletId: 72075186224037899 State: 4 2025-04-04T16:53:54.564314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-04-04T16:53:54.594879Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-04-04T16:53:54.595396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-04-04T16:53:54.595744Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 12 TabletID: 72075186224037899 2025-04-04T16:53:54.595761Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-04-04T16:53:54.595836Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-04-04T16:53:54.595896Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 
2025-04-04T16:53:54.595947Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-04-04T16:53:54.596646Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7489502184492015804:2116] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7489502193081950561:2198] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-04-04T16:53:54.596734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-04-04T16:53:54.596898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-04-04T16:53:54.597015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T16:53:54.597025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-04-04T16:53:54.597053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T16:53:54.597241Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-04-04T16:53:54.597264Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-04-04T16:53:54.597286Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-04-04T16:53:54.597618Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-04-04T16:53:54.597766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:12 2025-04-04T16:53:54.597779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-04-04T16:53:54.597809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T16:53:54.598059Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-04-04T16:53:54.598081Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037899, clientId# [1:7489502266096395580:2859], serverId# [1:7489502266096395581:2860], sessionId# [0:0:0] 2025-04-04T16:53:54.598514Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-04-04T16:53:54.598530Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-04T16:53:54.598643Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-04-04T16:53:54.598856Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037899 2025-04-04T16:53:54.598912Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037899 
2025-04-04T16:54:00.923368Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502297504662596:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ead/r3tmp/tmpvecXu6/pdisk_1.dat 2025-04-04T16:54:01.053369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:01.212334Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:01.230013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:01.230100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:01.232852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29970 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:01.600588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:01.631906Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:01.649365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:05.560105Z node 2 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037888, for tableId 3 don't have enough keys: RowCount: 164 DataSize: 16803148 IndexSize: 4137 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T16:54:05.869385Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502297504662596:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:05.869534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:16.186036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:54:16.186066Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:16.706000Z node 2 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037888, for tableId 3 don't have enough keys: RowCount: 492 DataSize: 50409124 IndexSize: 12411 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 4 2025-04-04T16:54:21.634914Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-04T16:54:21.645511Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976711360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-04T16:54:21.648263Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976711360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-04-04T16:54:21.674464Z node 2 :TX_PROXY ERROR: Actor# [2:7489502383404014836:5956] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::UnionAllPure >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Decimal_Limit5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthUsers_TableRange [GOOD] Test command err: 2025-04-04T16:51:27.847436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501638531326628:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:27.854583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bf3/r3tmp/tmpPTAaZb/pdisk_1.dat 2025-04-04T16:51:28.304842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.311815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.312033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.317132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23731, node 1 2025-04-04T16:51:28.418991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.419010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.428240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.428363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:28.824975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:51:28.860464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:31.177233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501655711196419:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:31.177256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501655711196425:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:31.177344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:31.180750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:51:31.197221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501655711196433:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:51:31.293345Z node 1 :TX_PROXY ERROR: Actor# [1:7489501655711196484:2394] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:31.631396Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0vdyq66c9fe60axvdxtsqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTYyYTMyZmYtNDY5NzJhOTItODA4MjRjZDgtOTljZTY0MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:31.918447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0vdzec4h3gqrars8s4stfd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGM0YjM2YzctMWRlODhlYjEtZDk4Y2QxMWQtZTJlOWRmOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:32.066732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0vdzf3b9xs153vrna1eneb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc5NWI0NmMtYmU1YzYxMTEtYTQ4YjI1MDgtMzQxNDczNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:32.069564Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489501660006163874:2366], owner: [1:7489501660006163871:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-04T16:51:32.072243Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489501660006163874:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:51:32.072764Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489501660006163874:2366], row count: 2, finished: 1 2025-04-04T16:51:32.072794Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489501660006163874:2366], owner: [1:7489501660006163871:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-04-04T16:51:32.123364Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785492063, txId: 281474976710663] shutting down 2025-04-04T16:51:32.920142Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501661421985141:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:32.920196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bf3/r3tmp/tmpDjI7K8/pdisk_1.dat 2025-04-04T16:51:33.174471Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:33.175995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:33.176077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:33.178575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23596, node 2 
2025-04-04T16:51:33.318951Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:33.318971Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:33.318979Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:33.319096Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25863 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:33.611550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:33.627632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:36.803923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501678601855031:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:36.803983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:36.804457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501678601855043:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:36.808274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:51:36.822809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501678601855045:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:51:36.897268Z node 2 :TX_PROXY ERROR: Actor# [2:7489501678601855096:2392] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:37.102710Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0ve4728gxge92y3w077xb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y ... \":\"argon2id\"}" } 2025-04-04T16:54:17.740289Z node 14 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [14:7489502370223528417:2462], row count: 2, finished: 1 2025-04-04T16:54:17.740319Z node 14 :SYSTEM_VIEWS INFO: Scan finished, actor: [14:7489502370223528417:2462], owner: [14:7489502370223528414:2460], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-04T16:54:17.745885Z node 14 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785657726, txId: 281474976715682] shutting down 2025-04-04T16:54:17.746491Z node 14 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [14:7489502185539932316:2159], database# , query hash# 8862277434384952876, cpu time# 180496 2025-04-04T16:54:18.226614Z node 16 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded top size# 0, time# 2025-04-04T16:54:18.226336Z 2025-04-04T16:54:18.304124Z node 14 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr0vk1dh67zrq1y6y8f4z7g1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YjZlMDU4YTEtMzQxNjM4ZjYtN2M3NWU4MGItYzU3YTVhNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:54:18.307997Z node 14 :SYSTEM_VIEWS INFO: Scan started, actor: [14:7489502374518495758:2472], owner: [14:7489502374518495754:2470], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-04T16:54:18.311238Z node 14 :SYSTEM_VIEWS INFO: Scan prepared, actor: [14:7489502374518495758:2472], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:54:18.311305Z node 14 :SYSTEM_VIEWS TRACE: Sending list users request 2025-04-04T16:54:18.311514Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Execute at schemeshard: 72057594046644480 2025-04-04T16:54:18.311786Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Complete, result: Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655384312 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"6hFeSCOF9JrLoibXuG/Ivrbm9fWPvIw475Y92uLZOBQ=\",\"salt\":\"9AbvKRG3HXcDTKKgW21dmQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655306611 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"ZE+J04CkHhQAfrFBtdSmEO0S92hIwusxg95IrrPk7FI=\",\"salt\":\"CcXMWsnlczVpqeU5TEn1MQ==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655260266 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"1JNGfpodzF9GAYMDLWesdDqekeEGsS6v7kI6JK5tuDY=\",\"salt\":\"IzHVxVTVK2WiFkEe+NTXyg==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655204913 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"WpA2GpIuBql/krVd1GXfSOZaUsyQTVdt7ieWySjyh+s=\",\"salt\":\"JR4RchhYL5UE+LIy2RqVvg==\",\"type\":\"argon2id\"}" }, at schemeshard: 72057594046644480 2025-04-04T16:54:18.312174Z node 14 :SYSTEM_VIEWS TRACE: Got list users response Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655384312 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"6hFeSCOF9JrLoibXuG/Ivrbm9fWPvIw475Y92uLZOBQ=\",\"salt\":\"9AbvKRG3HXcDTKKgW21dmQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655306611 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"ZE+J04CkHhQAfrFBtdSmEO0S92hIwusxg95IrrPk7FI=\",\"salt\":\"CcXMWsnlczVpqeU5TEn1MQ==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655260266 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"1JNGfpodzF9GAYMDLWesdDqekeEGsS6v7kI6JK5tuDY=\",\"salt\":\"IzHVxVTVK2WiFkEe+NTXyg==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655204913 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"WpA2GpIuBql/krVd1GXfSOZaUsyQTVdt7ieWySjyh+s=\",\"salt\":\"JR4RchhYL5UE+LIy2RqVvg==\",\"type\":\"argon2id\"}" } 2025-04-04T16:54:18.312223Z node 14 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [14:7489502374518495758:2472], row count: 2, finished: 1 2025-04-04T16:54:18.312260Z node 14 :SYSTEM_VIEWS INFO: Scan finished, actor: [14:7489502374518495758:2472], owner: [14:7489502374518495754:2470], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-04T16:54:18.315726Z node 14 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [14:7489502185539932316:2159], database# , query hash# 13069672625607331218, cpu time# 502349 2025-04-04T16:54:18.316401Z node 14 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: 
[step: 1743785658298, txId: 281474976715684] shutting down 2025-04-04T16:54:18.714868Z node 17 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded top size# 0, time# 2025-04-04T16:54:18.714386Z 2025-04-04T16:54:18.807646Z node 14 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jr0vk1z4bdatvqj5ggf4sz7t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YTMxN2E0NTgtMjQ4NTQ1MjYtZDZiYTk4YWEtZGI5YWUxNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:54:18.811663Z node 14 :SYSTEM_VIEWS INFO: Scan started, actor: [14:7489502374518495796:2483], owner: [14:7489502374518495793:2481], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-04T16:54:18.816360Z node 14 :SYSTEM_VIEWS INFO: Scan prepared, actor: [14:7489502374518495796:2483], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:54:18.816409Z node 14 :SYSTEM_VIEWS TRACE: Sending list users request 2025-04-04T16:54:18.816689Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Execute at schemeshard: 72057594046644480 2025-04-04T16:54:18.816915Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: TTxListUsers Complete, result: Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655384312 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"6hFeSCOF9JrLoibXuG/Ivrbm9fWPvIw475Y92uLZOBQ=\",\"salt\":\"9AbvKRG3HXcDTKKgW21dmQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655306611 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"ZE+J04CkHhQAfrFBtdSmEO0S92hIwusxg95IrrPk7FI=\",\"salt\":\"CcXMWsnlczVpqeU5TEn1MQ==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655260266 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"1JNGfpodzF9GAYMDLWesdDqekeEGsS6v7kI6JK5tuDY=\",\"salt\":\"IzHVxVTVK2WiFkEe+NTXyg==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655204913 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"WpA2GpIuBql/krVd1GXfSOZaUsyQTVdt7ieWySjyh+s=\",\"salt\":\"JR4RchhYL5UE+LIy2RqVvg==\",\"type\":\"argon2id\"}" }, at schemeshard: 72057594046644480 2025-04-04T16:54:18.817316Z node 14 :SYSTEM_VIEWS TRACE: Got list users response Users { Name: "user4" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655384312 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"6hFeSCOF9JrLoibXuG/Ivrbm9fWPvIw475Y92uLZOBQ=\",\"salt\":\"9AbvKRG3HXcDTKKgW21dmQ==\",\"type\":\"argon2id\"}" } Users { Name: "user3" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655306611 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"ZE+J04CkHhQAfrFBtdSmEO0S92hIwusxg95IrrPk7FI=\",\"salt\":\"CcXMWsnlczVpqeU5TEn1MQ==\",\"type\":\"argon2id\"}" } Users { Name: "user2" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655260266 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"1JNGfpodzF9GAYMDLWesdDqekeEGsS6v7kI6JK5tuDY=\",\"salt\":\"IzHVxVTVK2WiFkEe+NTXyg==\",\"type\":\"argon2id\"}" } Users { Name: "user1" IsEnabled: true IsLockedOut: false CreatedAt: 1743785655204913 FailedAttemptCount: 0 PasswordHash: "{\"hash\":\"WpA2GpIuBql/krVd1GXfSOZaUsyQTVdt7ieWySjyh+s=\",\"salt\":\"JR4RchhYL5UE+LIy2RqVvg==\",\"type\":\"argon2id\"}" } 2025-04-04T16:54:18.817381Z node 14 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [14:7489502374518495796:2483], row 
count: 1, finished: 1 2025-04-04T16:54:18.817408Z node 14 :SYSTEM_VIEWS INFO: Scan finished, actor: [14:7489502374518495796:2483], owner: [14:7489502374518495793:2481], scan id: 0, table id: [72057594046644480:1:0:auth_users] 2025-04-04T16:54:18.822322Z node 14 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785658805, txId: 281474976715686] shutting down 2025-04-04T16:54:18.822867Z node 14 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [14:7489502185539932316:2159], database# , query hash# 11995873958551672460, cpu time# 458365 2025-04-04T16:54:18.851905Z node 16 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:54:18.856120Z node 17 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:54:18.856381Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 17 2025-04-04T16:54:18.857164Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:18.857320Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 16 2025-04-04T16:54:18.857873Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:18.869813Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-04T16:54:18.883400Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:18.883671Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 18 2025-04-04T16:54:18.884277Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:18.892411Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[18:7489502237628356853:2108], Type=268959746 2025-04-04T16:54:18.892472Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[18:7489502237628356853:2108], Type=268959746 2025-04-04T16:54:18.897357Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[18:7489502237628356853:2108], Type=268959746 2025-04-04T16:54:18.897418Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[18:7489502237628356853:2108], Type=268959746 2025-04-04T16:54:18.897448Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[18:7489502237628356853:2108], Type=268959746 2025-04-04T16:54:18.897474Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[18:7489502237628356853:2108], Type=268959746 2025-04-04T16:54:18.897500Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7489502298080361699:2106], Type=268959746 2025-04-04T16:54:18.897524Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7489502298080361699:2106], Type=268959746 2025-04-04T16:54:18.897550Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7489502298080361699:2106], Type=268959746 2025-04-04T16:54:18.897577Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7489502298080361699:2106], Type=268959746 2025-04-04T16:54:18.897606Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7489502298080361699:2106], Type=268959746 
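
Aside: the scans traced above are reads of the `auth_users` system view (table id [72057594046644480:1:0:auth_users]); its rows carry the fields visible in the TTxListUsers results (Name, IsEnabled, IsLockedOut, CreatedAt, FailedAttemptCount, PasswordHash). A minimal way to trigger the same scan by hand — a sketch only; the endpoint and port are illustrative, and older CLI builds expose the same query via `ydb yql` instead of `ydb sql`:

    ydb -e grpc://localhost:2136 -d /Root sql -s \
        'SELECT * FROM `.sys/auth_users`;'

Note that PasswordHash is a JSON document ({"hash": ..., "salt": ..., "type": "argon2id"}) rather than a plaintext password, which is why these values appear directly in the test log.
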
2025-04-04T16:54:18.897642Z node 14 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7489502298080361699:2106], Type=268959746
>> KqpRanges::NullInPredicate [GOOD]
>> KqpRanges::NullInPredicateRow
>> SystemView::AuthOwners [GOOD]
>> SystemView::AuthOwners_Access
|76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut
|76.1%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut
>> PQCountersSimple::SupportivePartitionCountersPersist [GOOD]
>> KqpRanges::IsNullInValue [GOOD]
>> BsControllerConfig::AddDriveSerial
|76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
|76.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
>> KqpRanges::IsNullInJsonValue
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
>> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD]
>> SystemView::TopPartitionsRanges [GOOD]
>> SystemView::TopPartitionsFollowers
>> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable
>> SystemView::AuthGroupMembers_ResultOrder [GOOD]
>> SystemView::AuthGroupMembers_TableRange
>> BsControllerConfig::PDiskCreate
>> TLocksTest::Range_IncorrectDot1 [GOOD]
>> TLocksTest::Range_IncorrectDot2
>> KqpNewEngine::ReadDifferentColumnsPk [GOOD]
>> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput
>> KqpNewEngine::LocksNoMutationsSharded [GOOD]
>> KqpNewEngine::MultiEffects
|76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup
|76.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD]
Test command err: 2025-04-04T16:51:55.543937Z :HappyWay INFO: Random seed for debugging is 1743785515543904 2025-04-04T16:52:00.487356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501780021326513:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:00.487445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:52:01.230632Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:52:01.265190Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001576/r3tmp/tmp1csOQj/pdisk_1.dat 2025-04-04T16:52:01.387903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:01.589481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:01.830673Z node 1 :HIVE
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:01.830776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:01.831628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:01.831706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:01.836533Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:01.836672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:01.839240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:01.882611Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7810, node 1 2025-04-04T16:52:01.937441Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:52:01.937474Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:52:02.091079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001576/r3tmp/yandexihatFR.tmp 2025-04-04T16:52:02.091104Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001576/r3tmp/yandexihatFR.tmp 2025-04-04T16:52:02.091304Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001576/r3tmp/yandexihatFR.tmp 2025-04-04T16:52:02.091399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:02.154465Z INFO: TTestServer started on Port 8299 GrpcPort 7810 2025-04-04T16:52:05.490896Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501780021326513:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:05.491145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8299 PQClient connected to localhost:7810 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:07.937198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2025-04-04T16:52:15.354376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501845969489888:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:15.354494Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:15.398697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501845969489901:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:15.839866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T16:52:16.367735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501845969489904:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T16:52:16.604670Z node 2 :TX_PROXY ERROR: Actor# [2:7489501850264457232:2142] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:52:16.875042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:52:16.875064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:17.160703Z node 1 :KQP_PROXY ERROR: TraceId: "01jr0vf2wpabjj3wgwtyshbg41", Request deadline has expired for 3.947429s seconds 2025-04-04T16:52:18.181517Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501853035771774:2362], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:52:18.184130Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501850264457247:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:52:18.195121Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjBhNDhiNDEtYTZhNTA3ZjEtNzA0OGY2NTktMzZhM2VlZmY=, ActorId: [1:7489501848740804401:2353], ActorState: ExecuteState, TraceId: 01jr0vfbm85k85ff3sndm63edv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:52:18.208670Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDJmZmViMWItNTQzZjZiZDYtZDRhZTlmYzktOGJlNGVlZWQ=, ActorId: [2:7489501845969489886:2317], ActorState: ExecuteState, TraceId: 01jr0vf9q92s7xt0vw36tstcp4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:52:18.221967Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:52:18.459064Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:52:18.795139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:52:19.118702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:19.323456Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501863149359234:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:52:19.324675Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWI2ZjBiMGEtNDY0ZjM5NDgtNmJiOWJkM2MtODQ2YTAwODQ=, ActorId: [2:7489501863149359227:2340], ActorState: ExecuteState, TraceId: 01jr0vfdnm5sj5r133ewfesxfe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:52:19.325045Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:52:19.391179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 7 ... erval=5ms: 0 Interval=999999ms: 0 **** **** **** **** 2025-04-04T16:54:24.864776Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:24.864923Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:24.898257Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:24.900074Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:197:2212] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T16:54:24.901352Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:205:2218] 2025-04-04T16:54:24.903618Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [8:205:2218] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T16:54:24.905755Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [8:206:2219] 2025-04-04T16:54:24.907131Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' 
partition 1 generation 2 [8:206:2219] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:24.921367Z node 8 :PERSQUEUE INFO: new Cookie default|e66ecb39-eb2c8f47-46a9cb1f-9c200b39_0 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:24.932740Z node 8 :PERSQUEUE INFO: new Cookie default|adef1017-70c3eaf1-59591ce-a22251c7_1 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:24.986161Z node 8 :PERSQUEUE INFO: new Cookie default|50c6d631-d2a985b5-7c6a3e59-40996716_2 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:24.999799Z node 8 :PERSQUEUE INFO: new Cookie default|4fcf91cd-f2424aad-a4859ba6-2942f821_3 generated for partition 0 topic 'topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T16:54:26.197203Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:26.197359Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:26.264503Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:26.265748Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [9:197:2212] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:54:26.273747Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [9:206:2219] 2025-04-04T16:54:26.282719Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [9:206:2219] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:26.308735Z node 9 :PERSQUEUE INFO: new Cookie default|f89f9da1-27541587-93ff0945-31513f37_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2232] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:26.331219Z node 9 :PERSQUEUE INFO: new Cookie 
default|57981964-6b443-3dc2ca06-69eea659_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2232] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:26.728843Z node 9 :PERSQUEUE INFO: new Cookie default|cf6d739e-f20c4c9e-5e5b7ca-18063733_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured kesus quota request event from [9:222:2232] Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:27.057151Z node 9 :PERSQUEUE INFO: new Cookie default|dd503706-eedbd825-5d1d4a61-1ed80209_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2232] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvRequest, cmd write size: 1 Captured 
TEvRequest, cmd write size: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:27.428942Z node 9 :PERSQUEUE INFO: new Cookie default|7b3bd430-6800e83e-204606e9-113f4b67_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2232] Captured TEvRequest, cmd write size: 3 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:27.751410Z node 9 :PERSQUEUE INFO: new Cookie default|aad514eb-49ab0f2e-64d0832-9d8d1084_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:222:2232] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >> ColumnBuildTest::CancelBuild [GOOD] >> KqpSqlIn::TupleParameter [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD] >> KqpNotNullColumns::OptionalParametersDataQuery >> KqpSqlIn::TupleLiteral ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:54:24.759943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:54:24.760055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:24.760118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:54:24.760158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:54:24.760210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:54:24.760244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:54:24.760302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:24.760361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:54:24.760674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:54:24.847523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:54:24.847582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:24.859143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:54:24.859420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:54:24.859579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:54:24.869639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:54:24.869943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:54:24.870563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:24.871386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:54:24.876315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:24.877530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:24.877599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:24.877809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:54:24.877863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:24.877903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:54:24.878033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:54:24.884636Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:54:25.055708Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:54:25.055951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:25.056180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:54:25.056407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:54:25.056457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:25.063224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:25.063373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:54:25.063608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:25.063666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:54:25.063698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:54:25.063731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:54:25.079729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:25.079807Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:54:25.079841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:54:25.081932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:25.081995Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:25.082039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:25.082084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:54:25.085708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:54:25.092611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:54:25.092816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:54:25.093802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:25.093950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:25.094005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:25.094358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:54:25.094440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:25.094640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:54:25.094744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:54:25.104728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:25.104795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:25.104963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:25.105009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:54:25.105305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:25.105350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:54:25.105444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:25.105474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:25.105547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:25.105593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:25.105632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:54:25.105690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:25.105727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:54:25.105762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:54:25.105854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-04-04T16:54:25.105894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:54:25.105928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:54:25.107989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:25.108120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:25.108155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:54:30.357832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-04T16:54:30.357959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-04-04T16:54:30.358114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-04T16:54:30.358151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-04T16:54:30.358186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-04-04T16:54:30.358499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:30.358591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:30.358655Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-04-04T16:54:30.358690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2025-04-04T16:54:30.360706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-04T16:54:30.360757Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-04-04T16:54:30.360839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-04T16:54:30.360864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-04T16:54:30.360893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-04T16:54:30.360916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-04T16:54:30.360956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-04-04T16:54:30.361015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:122:2148] message: TxId: 281474976710761 2025-04-04T16:54:30.361055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-04T16:54:30.361082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-04T16:54:30.361105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-04T16:54:30.361201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-04T16:54:30.368101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-04T16:54:30.368195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-04T16:54:30.368262Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-04-04T16:54:30.368344Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1172:3025], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:54:30.375140Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T16:54:30.375243Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1172:3025], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read 
rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:54:30.375298Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-04T16:54:30.377215Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T16:54:30.377306Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1172:3025], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:54:30.377339Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-04T16:54:30.377455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:54:30.377503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1196:3049] TestWaitNotification: OK eventTxId 102 2025-04-04T16:54:30.379723Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-04T16:54:30.379980Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } 2025-04-04T16:54:30.392348Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:54:30.392594Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 307us result status StatusSuccess 2025-04-04T16:54:30.393029Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { 
Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpReturning::ReturningWorksIndexedInsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace+QueryService >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive >> BsControllerConfig::ReassignGroupDisk >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> BsControllerConfig::PDiskCreate [GOOD] >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate >> KqpSort::PassLimit [GOOD] >> KqpSort::OffsetPk >> KqpNewEngine::AsyncIndexUpdate [GOOD] >> KqpNewEngine::AutoChooseIndex >> KqpNewEngine::Delete-UseSink [GOOD] >> KqpNewEngine::DecimalColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-04-04T16:54:29.871182Z node 1 :BS_CONTROLLER DEBUG: 
{BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:29.880218Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:29.880614Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:29.886683Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:29.887394Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:29.887991Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:29.888026Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:29.888290Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:29.910993Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:29.911193Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:29.911432Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:29.911544Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:29.911712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:29.911787Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:242:2066] recipient: [1:20:2067] 2025-04-04T16:54:29.926955Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:29.927117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:29.938883Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:29.939036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:29.939111Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:29.939194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:29.939325Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:29.939376Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:29.939415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:29.939480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:29.950852Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:29.950986Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:29.962857Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:29.963034Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:29.964285Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:29.964346Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:29.964554Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:29.964609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:29.999362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-04-04T16:54:30.000038Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-04T16:54:30.000095Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-04T16:54:30.000135Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-04T16:54:30.000163Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-04T16:54:30.000202Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-04T16:54:30.000228Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-04T16:54:30.000262Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-04T16:54:30.000293Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-04T16:54:30.000327Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-04T16:54:30.000355Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-04T16:54:30.000379Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-04T16:54:30.000406Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-04T16:54:30.000454Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-04T16:54:30.000485Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-04T16:54:30.000508Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-04T16:54:30.000536Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-04T16:54:30.000564Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-04T16:54:30.000594Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-04T16:54:30.000630Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-04T16:54:30.000661Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-04T16:54:30.000706Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-04-04T16:54:30.000750Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-04-04T16:54:30.000799Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-04T16:54:30.000839Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-04-04T16:54:30.000868Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-04-04T16:54:30.000897Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-04T16:54:30.000926Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-04-04T16:54:30.000954Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-04-04T16:54:30.000984Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-04-04T16:54:30.001026Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:203:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] 
sender: [11:214:2066] recipient: [11:203:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:203:2076] 2025-04-04T16:54:31.892773Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:31.899327Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:31.899611Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:31.901257Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:31.901719Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:31.907549Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:31.907607Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:31.907847Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:31.937975Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:31.938118Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:31.944675Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:31.945012Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:31.945232Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:31.945330Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:242:2066] recipient: [11:20:2067] 2025-04-04T16:54:31.958920Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:31.959106Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:31.970836Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:31.971023Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:31.971120Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:31.971196Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:31.971306Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue 
Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:31.971380Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:31.971446Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:31.971511Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:31.982976Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:31.983109Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:31.994835Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:31.994973Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:31.996331Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:31.996390Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:31.996607Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:31.996660Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:31.997506Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2025-04-04T16:54:31.997994Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-04-04T16:54:31.998033Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-04-04T16:54:31.998057Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1002 Path# /dev/disk3 2025-04-04T16:54:31.998090Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk1 2025-04-04T16:54:31.998123Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1001 
Path# /dev/disk2 2025-04-04T16:54:31.998148Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1002 Path# /dev/disk3 2025-04-04T16:54:31.998183Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk1 2025-04-04T16:54:31.998211Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1001 Path# /dev/disk2 2025-04-04T16:54:32.014890Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1002 Path# /dev/disk3 2025-04-04T16:54:32.014969Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk1 2025-04-04T16:54:32.014995Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2025-04-04T16:54:32.015017Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1002 Path# /dev/disk3 2025-04-04T16:54:32.015051Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk1 2025-04-04T16:54:32.015074Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2025-04-04T16:54:32.015176Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1002 Path# /dev/disk3 2025-04-04T16:54:32.015218Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2025-04-04T16:54:32.015244Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1001 Path# /dev/disk2 2025-04-04T16:54:32.015281Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1002 Path# /dev/disk3 2025-04-04T16:54:32.015310Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk1 2025-04-04T16:54:32.015335Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2025-04-04T16:54:32.015368Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1002 Path# /dev/disk3 2025-04-04T16:54:32.015399Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk1 2025-04-04T16:54:32.015435Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2025-04-04T16:54:32.015465Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2025-04-04T16:54:32.015488Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk1 2025-04-04T16:54:32.015540Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1001 Path# /dev/disk2 2025-04-04T16:54:32.015566Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2025-04-04T16:54:32.015588Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk1 2025-04-04T16:54:32.015610Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2025-04-04T16:54:32.015637Z node 
11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1002 Path# /dev/disk3 >> KqpNewEngine::ScalarFunctions [GOOD] >> TLocksFatTest::RangeSetNotBreak [GOOD] >> KqpNewEngine::ScalarMultiUsage >> BsControllerConfig::Basic >> SystemView::AuthPermissions_Access [GOOD] >> SystemView::AuthPermissions_ResultOrder >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNewEngine::Nondeterministic |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |76.1%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> KqpNewEngine::PruneWritePartitions+UseSink [GOOD] >> KqpNewEngine::PruneWritePartitions-UseSink >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortResults >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-04-04T16:54:06.354185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502321022628989:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:06.355096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea8/r3tmp/tmpfHv6Wm/pdisk_1.dat 2025-04-04T16:54:07.016213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:07.025387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:07.025500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:07.036832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32174 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:07.565795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:07.593346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:07.610456Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:07.615954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:54:08.025322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:54:08.165364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:11.356508Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502321022628989:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:11.357110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:20.353740Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502382462037380:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:20.399785Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea8/r3tmp/tmpIwftrY/pdisk_1.dat 2025-04-04T16:54:20.692461Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:20.719236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:20.719343Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:20.727664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10379 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T16:54:21.180659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:21.197552Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:21.221724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:21.446410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:21.573077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:25.345894Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502382462037380:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:25.345958Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin >> SystemView::DescribeAccessDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-04-04T16:54:29.068632Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:29.085532Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:29.085913Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:29.087666Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:29.088266Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:29.088848Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:29.088880Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:29.089113Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:29.120706Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:29.120879Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:29.121080Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 
2025-04-04T16:54:29.121186Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:29.140021Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:29.140268Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-04-04T16:54:29.152321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:29.152495Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:29.164348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:29.164486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:29.164576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:29.164666Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:29.164775Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:29.164829Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:29.164865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:29.164929Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:29.175664Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:29.175779Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:29.194573Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:29.194789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:29.195985Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:29.196048Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 
2025-04-04T16:54:29.196278Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:29.196340Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:29.217801Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-04T16:54:29.219289Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-04T16:54:29.220012Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:203:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:203:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:203:2076] 2025-04-04T16:54:30.865839Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:30.866779Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:30.867025Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:30.868337Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:30.868741Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:30.869343Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:30.869378Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:30.869604Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:30.882215Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:30.894813Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:30.894977Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:30.895112Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:30.895236Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:30.895321Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-04-04T16:54:30.912926Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:30.913099Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue 
Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:30.927753Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:30.927921Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:30.928009Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:30.928098Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:30.928232Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:30.928332Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:30.928371Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:30.928423Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:30.944214Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:30.944344Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:30.957306Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:30.957449Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:30.958757Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:30.958815Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:30.959042Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:30.959091Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:30.959746Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-04-04T16:54:30.960894Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# ... 
ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-04T16:54:33.300462Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-04T16:54:33.301076Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-04T16:54:33.317749Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-04T16:54:33.318877Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-04T16:54:33.319934Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-04T16:54:33.320724Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-04T16:54:33.321375Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-04T16:54:33.322027Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-04T16:54:33.331238Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-04T16:54:33.332177Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-04-04T16:54:33.332856Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-04T16:54:33.333661Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-04T16:54:33.351695Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-04T16:54:33.352854Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:197:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:197:2076] Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:217:2066] recipient: [31:197:2076] 2025-04-04T16:54:35.673921Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:35.675396Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:35.675661Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:35.677128Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-04-04T16:54:35.677381Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:35.678014Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:35.678045Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:35.678336Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:35.688102Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:35.688252Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:35.688377Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:35.688499Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:35.688599Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:35.688671Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:239:2066] recipient: [31:20:2067] 2025-04-04T16:54:35.702907Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:35.703089Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:35.714860Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:35.715011Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:35.715108Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:35.715218Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:35.715371Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:35.715431Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:35.715466Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:35.715515Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:35.726825Z node 31 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:35.727008Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:35.737820Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:35.737979Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:35.739377Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:35.739447Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:35.739671Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:35.739722Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:35.740408Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-04-04T16:54:35.741572Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-04-04T16:54:35.742232Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-04-04T16:54:35.747485Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-04-04T16:54:35.748105Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-04-04T16:54:35.748719Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-04-04T16:54:35.749424Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-04-04T16:54:35.750100Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-04-04T16:54:35.759459Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-04-04T16:54:35.760321Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-04-04T16:54:35.761135Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-04-04T16:54:35.761928Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-04-04T16:54:35.764713Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest 
Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-04-04T16:54:35.765537Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-04-04T16:54:35.766328Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-04-04T16:54:35.767124Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-04-04T16:54:35.767858Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-04-04T16:54:35.768597Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-04-04T16:54:35.769310Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-04-04T16:54:35.770117Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> BsControllerConfig::ReassignGroupDisk [GOOD] |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |76.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |76.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:265:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:265:2078] Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:281:2068] recipient: [1:265:2078] 2025-04-04T16:54:33.572586Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:33.577465Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:33.577802Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:33.779873Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:33.780371Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:33.781000Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:33.781031Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle 
TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:33.781245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:33.790482Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:33.790581Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:33.790722Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:33.790822Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:33.790933Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:33.790997Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:306:2068] recipient: [1:22:2069] 2025-04-04T16:54:33.802809Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:33.802961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:33.813587Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:33.813703Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:33.813768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:33.813814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:33.813885Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:33.813920Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:33.813956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:33.814003Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:33.825172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:33.825321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:33.836461Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:33.836607Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:33.837715Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:33.837794Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:33.838002Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:33.838051Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:33.858811Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T16:54:33.859406Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk 2025-04-04T16:54:33.859453Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-04-04T16:54:33.859489Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-04-04T16:54:33.859509Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2025-04-04T16:54:33.859525Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-04-04T16:54:33.859670Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-04-04T16:54:33.859690Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-04-04T16:54:33.859706Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-04-04T16:54:33.859718Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-04-04T16:54:33.859731Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-04-04T16:54:33.859755Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2025-04-04T16:54:33.859780Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new 
pdisk PDiskId# 12:1000 Path# /dev/disk 2025-04-04T16:54:34.038758Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.200120s 2025-04-04T16:54:34.038895Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.200283s Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-04T16:54:34.053321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:265:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:265:2078] Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:281:2068] recipient: [13:265:2078] 2025-04-04T16:54:36.488720Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:36.489574Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:36.489829Z node 13 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:36.500177Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:36.500424Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:36.501127Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:36.501177Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:36.501406Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:36.548557Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:36.548713Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:36.548824Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:36.548958Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:36.549086Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:36.549163Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:306:2068] recipient: [13:22:2069] 2025-04-04T16:54:36.571450Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:36.571590Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:36.582395Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:36.582541Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:36.582643Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:36.582714Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:36.582825Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:36.582884Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:36.582919Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:36.582973Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:36.594871Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:36.595000Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:36.612416Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:36.612547Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:36.613918Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:36.613983Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:36.614200Z node 13 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:36.614265Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:36.615183Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } 
HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T16:54:36.615675Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk 2025-04-04T16:54:36.615713Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-04-04T16:54:36.615743Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-04-04T16:54:36.615766Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-04-04T16:54:36.615789Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2025-04-04T16:54:36.615813Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-04-04T16:54:36.615867Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-04-04T16:54:36.615909Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-04-04T16:54:36.615939Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-04-04T16:54:36.615960Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-04-04T16:54:36.615981Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-04-04T16:54:36.616002Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 24:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-04-04T16:54:36.668834Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::NoFullScanAtScanQuery >> KqpRanges::IsNullInJsonValue [GOOD] >> KqpRanges::IsNotNullInValue |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |76.2%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::DescribeAccessDenied [GOOD] Test command err: 2025-04-04T16:51:28.232328Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501646149509426:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.232380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c32/r3tmp/tmpANFxsN/pdisk_1.dat 2025-04-04T16:51:28.833658Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.840644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.840749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.846040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65314, node 1 2025-04-04T16:51:28.933518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.933549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.933558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.933668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:29.320544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.348795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.373935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.374011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.383079Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-04T16:51:29.388744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:51:29.410572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.410639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.415486Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T16:51:29.443788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.444831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.731881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.853857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.863294Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501649259751364:2287];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.863345Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:29.886672Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501646634903919:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.894063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:29.914360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.922443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2025-04-04T16:51:30.012461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:30.012526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:30.027081Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T16:51:30.031098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:30.065255Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:30.076749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:33.246456Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501646149509426:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.246554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:34.774474Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489501649259751364:2287];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.774611Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:34.786428Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501646634903919:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.786517Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:37.592852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:51:41.136055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501701984085835:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:41.136193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:41.137753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501701984085847:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:41.239606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-04T16:51:41.614365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501701984085849:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-04T16:51:41.727288Z node 1 :TX_PROXY ERROR: Actor# [1:7489501701984085935:3126] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:42.498993Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0ve8av3pcgqhz1pmzreq2d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQyYTkyZjYtNGU2NDJhY2YtZjMwNGJmMTQtOTNlNzM1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:42.704310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:51:43.826632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:51:43.826665Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:45.399803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0vebmq3v0k250kgcj3kk29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQyYTkyZjYtNGU2NDJhY2YtZjMwNGJmMTQtOTNlNzM1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:46.593096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T16:51:50.520355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0veh3fehpja7xmh0fbbyd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQyYTkyZjYtNGU2NDJhY2YtZjMwNGJmMTQtOTNlNzM1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:52.479765Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jr0vek217tgyx0crm6f0kmy2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM2NjE5OGQtN2M2YTI4YmUtYTVmM2E3YmQtMzNlMTA2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:52.481942Z node 1 :SYSTEM_VIEWS INFO: Scan started, acto ... 7.686922Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:17.694819Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:17.718810Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-04T16:54:17.817439Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:17.817545Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:17.843972Z node 26 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 29 Cookie 29 2025-04-04T16:54:17.858974Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:17.863094Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:17.863256Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:17.876411Z node 26 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 30 Cookie 30 2025-04-04T16:54:17.879455Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:18.308700Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:18.384626Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:18.384739Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:18.393222Z node 26 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 28 Cookie 28 2025-04-04T16:54:18.395564Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:18.410159Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7489502373029181378:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:18.410397Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:18.570180Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:54:18.662315Z node 27 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[27:7489502373627005144:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:18.672197Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 
2025-04-04T16:54:19.076154Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:19.076272Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:19.091682Z node 26 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 27 Cookie 27 2025-04-04T16:54:19.097420Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:20.258479Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7489502362290144510:2244];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:20.258576Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:22.662408Z node 30 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[30:7489502371155671113:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:22.662527Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:22.686427Z node 29 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[29:7489502371676839887:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:22.686517Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:23.418478Z node 27 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[27:7489502373627005144:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:23.418611Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:23.430396Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7489502373029181378:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:23.435676Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:30.560204Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:30.856427Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7489502426714655324:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:30.856567Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:30.857117Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7489502426714655336:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:30.864048Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-04T16:54:30.922784Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [26:7489502426714655338:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-04T16:54:31.014589Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:54:31.014634Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:31.017061Z node 26 :TX_PROXY ERROR: Actor# [26:7489502426714655421:3119] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:31.318309Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0vke629p5r25zvf4y60hf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=NzI2N2U4MjQtYjM5ODRlYy02NWQ2YzgxZi1iZmNjMTM5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:54:31.369668Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:32.092032Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0vkf2a5vv1kskrk2c0cg7g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=NzI2N2U4MjQtYjM5ODRlYy02NWQ2YzgxZi1iZmNjMTM5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:54:32.131498Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T16:54:32.592067Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0vkfq63r4ebzesh502g4ab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=NzI2N2U4MjQtYjM5ODRlYy02NWQ2YzgxZi1iZmNjMTM5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:54:32.961698Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root
<main>: Error: Access denied 2025-04-04T16:54:32.974883Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1
<main>: Error: Access denied 2025-04-04T16:54:32.986499Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys
<main>: Error: Access denied 2025-04-04T16:54:32.999122Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys
<main>: Error: Access denied 2025-04-04T16:54:33.062879Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys/partition_stats
<main>: Error: Access denied 2025-04-04T16:54:33.085736Z node 26 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys/partition_stats
<main>: Error: Access denied 2025-04-04T16:54:33.131730Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 30 2025-04-04T16:54:33.132323Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:33.168422Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 27 2025-04-04T16:54:33.168890Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:33.169066Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 28 2025-04-04T16:54:33.169480Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:33.187087Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 29 2025-04-04T16:54:33.287279Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:54:33.342474Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[29:7489502371676839968:2104], Type=268959746 >> TOlap::CreateTableWithNullableKeysNotAllowed |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |76.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |76.2%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-04T16:51:33.752090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501664086336811:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.752158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:34.003801Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000faa/r3tmp/tmpVENOPE/pdisk_1.dat 2025-04-04T16:51:34.334296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:34.334401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:34.336522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0))
VolatileState: Connecting -> Connected 2025-04-04T16:51:34.342597Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30961, node 1 2025-04-04T16:51:34.541384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000faa/r3tmp/yandexvtbWvQ.tmp 2025-04-04T16:51:34.541406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000faa/r3tmp/yandexvtbWvQ.tmp 2025-04-04T16:51:34.541572Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000faa/r3tmp/yandexvtbWvQ.tmp 2025-04-04T16:51:34.541689Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:34.626787Z INFO: TTestServer started on Port 3021 GrpcPort 30961 TClient is connected to server localhost:3021 PQClient connected to localhost:30961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:35.108035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:35.153346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:35.194759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:51:35.438491Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:38.855176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501685561173874:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.855698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.862029Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501664086336811:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:38.862319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501685561173905:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.865402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:38.894781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T16:51:38.946604Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-04-04T16:51:38.954574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501685561173907:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T16:51:39.179709Z node 1 :TX_PROXY ERROR: Actor# [1:7489501689856141267:2455] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:39.212315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:51:39.255851Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501689856141275:2351], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:39.257438Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjMyMTRiZDMtZDg0MjQyYTYtYTNkZWQ1MmEtYzE4NDBjZWQ=, ActorId: [1:7489501685561173872:2338], ActorState: ExecuteState, TraceId: 01jr0ve60x6kvw0yvvdx319mcx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:39.266415Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:39.297474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:51:39.374478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489501689856141557:2637] === CheckClustersList.
Ok 2025-04-04T16:51:44.998606Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T16:51:45.069086Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T16:51:45.070282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489501715625945588:2783], Recipient [1:7489501668381304284:2181]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:51:45.070319Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:51:45.070339Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T16:51:45.070384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489501715625945584:2780], Recipient [1:7489501668381304284:2181]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-04T16:51:45.070398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T16:51:45.177583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T16:51:45.178029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T16:51:45.178329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T16:51:45.178370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T16:51:45.178402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-0 ... 
94Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7489502455671172304:3606], Recipient [5:7489502451376204859:2826]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.084621Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.084634Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.084648Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_5_1_4001773348473190305_v1 2025-04-04T16:54:38.084669Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [5:7489502455671172298:2851] destroyed 2025-04-04T16:54:38.084712Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7489502447081237410:3449], Recipient [5:7489502404131563195:2464]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.084725Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.084739Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.084752Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_4001773348473190305_v1 2025-04-04T16:54:38.084774Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7489502447081237409:2794] destroyed 2025-04-04T16:54:38.084820Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_4001773348473190305_v1 2025-04-04T16:54:38.084840Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_4001773348473190305_v1 2025-04-04T16:54:38.084856Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_4001773348473190305_v1 2025-04-04T16:54:38.090402Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|467e86de-24fe159d-eaf54b08-6b0851cc_0 grpc read done: success: 0 data: 2025-04-04T16:54:38.090452Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|467e86de-24fe159d-eaf54b08-6b0851cc_0 grpc read failed 2025-04-04T16:54:38.090498Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|467e86de-24fe159d-eaf54b08-6b0851cc_0 grpc closed 2025-04-04T16:54:38.090515Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|467e86de-24fe159d-eaf54b08-6b0851cc_0 is DEAD 2025-04-04T16:54:38.091454Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7489502447081237428:2798], Recipient [5:7489502447081237430:2798]: NActors::TEvents::TEvPoison 2025-04-04T16:54:38.091529Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T16:54:38.092045Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7489502447081237461:3469], Recipient [5:7489502404131563195:2464]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.092076Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.092097Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:54:38.092139Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7489502447081237459:2798] destroyed 2025-04-04T16:54:38.092218Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 
(NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7489502404131563195:2464], Partition 0, Sender [5:7489502404131563195:2464], Recipient [5:7489502404131563258:2468], Cookie: 0 2025-04-04T16:54:38.092280Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7489502404131563195:2464], Recipient [5:7489502404131563258:2468]: NKikimr::TEvPQ::TEvPipeDisconnected 2025-04-04T16:54:38.092319Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2025-04-04T16:54:38.092353Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T16:54:38.092402Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-04-04T16:54:38.092446Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:38.092535Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:38.092562Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:38.092589Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:38.158480Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502451376204859:2826], Partition 1, Sender [0:0:0], Recipient [5:7489502451376204964:2842], Cookie: 0 2025-04-04T16:54:38.158581Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502451376204964:2842]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.158634Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.158682Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:38.158765Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:38.158794Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:38.158826Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:38.158900Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502404131563195:2464], Partition 0, Sender [0:0:0], Recipient [5:7489502404131563258:2468], Cookie: 0 2025-04-04T16:54:38.158942Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502404131563258:2468]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.158957Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.158983Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:38.159013Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:38.159028Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:38.159048Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:38.159091Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502451376204857:2825], Partition 2, Sender [0:0:0], Recipient [5:7489502451376204962:2840], Cookie: 0 2025-04-04T16:54:38.159129Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502451376204962:2840]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.159143Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.159167Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:38.159197Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:38.159211Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:38.159229Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:38.266625Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502404131563195:2464], Partition 0, Sender [0:0:0], Recipient [5:7489502404131563258:2468], Cookie: 0 2025-04-04T16:54:38.266725Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502404131563258:2468]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.266753Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.266800Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:38.266885Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:38.266916Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:38.266950Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:38.267018Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502451376204857:2825], Partition 2, Sender [0:0:0], Recipient [5:7489502451376204962:2840], Cookie: 0 2025-04-04T16:54:38.267059Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502451376204962:2840]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.267075Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.267100Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:38.267134Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:38.267152Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:38.267172Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:38.267354Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502451376204859:2826], Partition 1, Sender [0:0:0], Recipient [5:7489502451376204964:2842], Cookie: 0 2025-04-04T16:54:38.267391Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502451376204964:2842]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.267404Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:38.267428Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:38.267459Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:38.267475Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:38.267491Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |76.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpNewEngine::MultiEffects [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> KqpNewEngine::ScalarMultiUsage [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut >> TContinuousBackupTests::TakeIncrementalBackup >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery >> KqpSort::OffsetPk [GOOD] >> KqpSort::OffsetTopSort >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex >> KqpNewEngine::DecimalColumn [GOOD] >> KqpNewEngine::DecimalColumn35 >> TContinuousBackupTests::Basic >> VectorIndexBuildTest::BaseCase >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> TableCreation::SimpleTableCreation |76.3%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks >> KqpProxy::PassErrroViaSessionActor >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLimit >> TPQTestSlow::TestOnDiskStoredSourceIds >> KqpReturning::ReturningWorksIndexedReplace+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNewEngine::OrderedScalarContext ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:54:45.626649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:54:45.626843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:45.626926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:54:45.626977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:54:45.627031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:54:45.627071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:54:45.627143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:45.627244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:54:45.654444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:54:46.115009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:54:46.115082Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:46.190261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:54:46.191037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:54:46.215475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:54:46.280261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:54:46.280704Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Clear TempDirsState with owners number: 0 2025-04-04T16:54:46.358753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:46.399431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:54:46.456516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:46.644657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:46.644798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:46.645210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:54:46.645312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:46.645396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:54:46.645575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.671430Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:54:46.930493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:54:46.946043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.962818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:54:46.982472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:54:46.982682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.995544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.034446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:54:47.046413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.046571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:54:47.046631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:54:47.046699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-04T16:54:47.055356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.071091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:54:47.071199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:54:47.083891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.083969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.084030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:47.102372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.108155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:54:47.119469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:54:47.119759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:54:47.147213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.147465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:47.147530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:47.169972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:54:47.170147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:47.194060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:54:47.194260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:54:47.203698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:47.203768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:47.203989Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:47.204039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:54:47.242679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.242778Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:54:47.242954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:47.243018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.243077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:47.243137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.243205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:54:47.243257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.243319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:54:47.243362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:54:47.243454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:54:47.243503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:54:47.243540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:54:47.245912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:47.246098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:47.246148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T16:54:47.246195Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T16:54:47.254386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:54:47.254659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T16:54:47.275575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T16:54:47.322761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T16:54:47.324991Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T16:54:47.396653Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T16:54:47.417260Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "MyStore" ColumnShardCount: 1 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "key1" Type: "Uint32" } Columns { Name: "data" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "key1" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:54:47.417700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /MyRoot/MyStore, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.454525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Nullable key column 'key1', at schemeshard: 72057594046678944 2025-04-04T16:54:47.523212Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:54:47.539197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Nullable key column \'key1\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:47.539415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Nullable key column 'key1', operation: CREATE COLUMN STORE, path: /MyRoot/MyStore 2025-04-04T16:54:47.540106Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T16:54:47.540344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T16:54:47.540388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T16:54:47.540878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T16:54:47.540996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:54:47.541043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 101 >> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::ValidatePredicates >> KqpNewEngine::PruneWritePartitions-UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:54:45.767891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:54:45.767987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:45.768039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:54:45.768084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:54:45.768647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:54:45.768687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:54:45.768752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:45.768823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:54:45.769167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:54:45.855713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:54:45.855785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:45.880084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:54:45.880397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:54:45.880576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:54:45.900807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:54:45.901215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:54:45.902028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:45.903381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:54:45.920792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:45.922353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:45.922452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:45.922726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:54:45.922790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:45.922834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:54:45.922990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:54:45.947653Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:54:46.207815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: 
"pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:54:46.208154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.208425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:54:46.208708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:54:46.208781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.219546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:46.219755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:54:46.220012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.220088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:54:46.220133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:54:46.220171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:54:46.230634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.230738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:54:46.230788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:54:46.239006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.239089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.239149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:46.239209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:54:46.247561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:54:46.255261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:54:46.255572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:54:46.256690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:46.256883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:46.256944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:46.257261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:54:46.257324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:46.257561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:54:46.257652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:54:46.264439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:46.264518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:46.264719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:46.264761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:54:46.265145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:46.265202Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:54:46.265311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:46.265351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:46.265413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:46.265466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:46.265504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:54:46.265551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:46.265608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:54:46.265651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:54:46.265731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:54:46.265773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:54:46.265810Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:54:46.269151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:46.269298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:46.269342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T16:54:47.695783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-04-04T16:54:47.718999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.719780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.720166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.720216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-04-04T16:54:47.720301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-04T16:54:47.720334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-04T16:54:47.720391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-04-04T16:54:47.720436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-04T16:54:47.720472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-04-04T16:54:47.720559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 103 2025-04-04T16:54:47.720614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-04-04T16:54:47.720659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T16:54:47.720708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T16:54:47.720783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:54:47.720828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-04T16:54:47.720849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-04T16:54:47.720913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:54:47.720937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-04T16:54:47.720960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-04T16:54:47.720999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T16:54:47.721023Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-04T16:54:47.721061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-04T16:54:47.721121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T16:54:47.730099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:54:47.730210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:718:2622] TestWaitNotification: OK eventTxId 103 2025-04-04T16:54:47.731005Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:54:47.731346Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 334us result status StatusSuccess 2025-04-04T16:54:47.731840Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-04-04T16:54:47.732500Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:54:47.732738Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 232us result status StatusSuccess 2025-04-04T16:54:47.738595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:47.740244Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:54:47.740565Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 298us result status StatusSuccess 2025-04-04T16:54:47.741019Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSqlIn::TupleLiteral [GOOD] >> KqpSqlIn::TupleSelect >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> TContinuousBackupTests::Basic [GOOD] >> SystemView::AuthOwners_Access [GOOD] >> SystemView::AuthOwners_ResultOrder >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:54:47.492546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:54:47.492665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:47.492713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-04-04T16:54:47.492756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:54:47.492808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:54:47.492840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:54:47.492889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:47.492959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:54:47.493291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:54:47.668720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:54:47.668789Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:47.733214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:54:47.733486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:54:47.733655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:54:47.779965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:54:47.780301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:54:47.780961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.784205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:54:47.806056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:47.807582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:47.807663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:47.807909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:54:47.807961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:47.808001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:54:47.808145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.819025Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:54:48.096862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:54:48.097132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:48.097364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:54:48.097631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:54:48.097687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:48.103454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:48.103634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:54:48.103851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:48.103915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:54:48.103953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:54:48.103993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:54:48.115569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:48.115648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:54:48.115687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:54:48.123447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:48.123549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:48.123604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:48.123654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:54:48.127867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:54:48.134585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:54:48.134844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:54:48.135881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-04T16:54:48.136089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:48.136140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:48.136453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:54:48.136525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:48.136717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:54:48.136827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:54:48.147721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:48.147796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:48.147991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:48.148028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:54:48.148368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:48.148416Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:54:48.148516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:48.148560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:48.148604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:48.148645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:48.148681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:54:48.148716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:48.148751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:54:48.148781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:54:48.148874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:54:48.148923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:54:48.148953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:54:48.170978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:48.171164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:48.171204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 04, ready parts: 2/3, is published: true 2025-04-04T16:54:49.650895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:54:49.650996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:54:49.651025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T16:54:49.651054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-04T16:54:49.659953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T16:54:49.660073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T16:54:49.660161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T16:54:49.660396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T16:54:49.660470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T16:54:49.660520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T16:54:49.660544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T16:54:49.688698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3784 } } 2025-04-04T16:54:49.688763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-04T16:54:49.688885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3784 } } 2025-04-04T16:54:49.688987Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3784 } } FAKE_COORDINATOR: Erasing txId 
104 2025-04-04T16:54:49.690086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-04T16:54:49.690140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-04T16:54:49.690314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-04T16:54:49.690376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T16:54:49.690472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-04T16:54:49.690534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:49.690584Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:54:49.690638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T16:54:49.690679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-04T16:54:49.695339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:54:49.697053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:54:49.697428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:54:49.697475Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T16:54:49.697581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-04T16:54:49.697619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T16:54:49.697680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-04T16:54:49.697714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T16:54:49.697747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-04T16:54:49.697828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 104 2025-04-04T16:54:49.697892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T16:54:49.697951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T16:54:49.697990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T16:54:49.698102Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:54:49.698140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-04T16:54:49.698160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-04T16:54:49.698183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T16:54:49.698205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-04T16:54:49.698221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-04T16:54:49.698295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T16:54:49.698953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:54:49.699003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T16:54:49.699069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T16:54:49.699116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:54:49.699143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:54:49.705873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T16:54:49.705966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:729:2644] 2025-04-04T16:54:49.706077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-04T16:54:49.706756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:54:49.707016Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 246us result status StatusPathDoesNotExist 2025-04-04T16:54:49.707695Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:54:49.708273Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:54:49.708485Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 211us result status StatusPathDoesNotExist 2025-04-04T16:54:49.708640Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |76.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan-UseSink >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> YdbTableSplit::SplitByLoadWithReads >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |76.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> ColumnShardTiers::DSConfigsStub |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |76.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> SystemView::AuthGroupMembers_TableRange [GOOD] >> SystemView::AuthEffectivePermissions >> YdbTableSplit::SplitByLoadWithDeletes >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable >> SystemView::TopPartitionsFollowers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0010be/r3tmp/tmpNEC8eN/pdisk_1.dat 2025-04-04T16:51:38.526499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501688623302321:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:38.559667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:38.856726Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:51:39.175161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:39.175250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:39.175635Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:39.191205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7840, node 1 2025-04-04T16:51:39.414723Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0010be/r3tmp/yandexpAIQPV.tmp 2025-04-04T16:51:39.414753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0010be/r3tmp/yandexpAIQPV.tmp 2025-04-04T16:51:39.422545Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0010be/r3tmp/yandexpAIQPV.tmp 2025-04-04T16:51:39.422647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:51:39.486045Z INFO: TTestServer started on Port 63330 GrpcPort 7840 TClient is connected to server localhost:63330 PQClient connected to localhost:7840 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:39.927649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:51:39.952344Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:51:39.965878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:51:40.236904Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:42.815256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501705803172115:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:42.815410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:42.816045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501705803172142:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:42.820519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T16:51:42.870539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501705803172144:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T16:51:43.191794Z node 1 :TX_PROXY ERROR: Actor# [1:7489501705803172208:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:43.261009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:51:43.275294Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501710098139512:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:51:43.276965Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2JmNDUzOTYtNmE2OGM5ZDktYWExYWYyM2EtNjQyYmY1M2Q=, ActorId: [1:7489501705803172111:2337], ActorState: ExecuteState, TraceId: 01jr0vea10fscr46a8vr800sq3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:51:43.279052Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:51:43.335995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:51:43.420703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:51:43.493606Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501688623302321:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:43.503902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489501710098139799:2632] === CheckClustersList. 
Ok 2025-04-04T16:51:49.626603Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T16:51:49.756774Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T16:51:49.757947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489501735867943843:2786], Recipient [1:7489501688623302543:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:51:49.757978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:51:49.757995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T16:51:49.758032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489501735867943839:2783], Recipient [1:7489501688623302543:2193]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-04T16:51:49.758045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T16:51:49.832814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T16:51:49.833233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T16:51:49.833532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T16:51:49.833570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T16:51:49.834813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-04T16:51:49.834890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] w ... : 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.274948Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.274975Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:51.278744Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502454280809493:2860], Partition 2, Sender [0:0:0], Recipient [5:7489502454280809574:2867], Cookie: 0 2025-04-04T16:54:51.278806Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502454280809574:2867]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.278827Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.278865Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.278933Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.278952Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.278977Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:51.290393Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502454280809497:2861], Partition 1, Sender [0:0:0], Recipient [5:7489502454280809587:2874], Cookie: 0 2025-04-04T16:54:51.290476Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502454280809587:2874]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.290498Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.290539Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.290633Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.290656Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.290683Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T16:54:51.290743Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502492935515967:3060], Partition 3, Sender [0:0:0], Recipient [5:7489502492935516063:3070], Cookie: 0 2025-04-04T16:54:51.290777Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502492935516063:3070]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.290792Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.290814Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.290845Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.290858Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.290874Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:51.290923Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502398446233083:2469], Partition 0, Sender [0:0:0], Recipient [5:7489502398446233144:2473], Cookie: 0 2025-04-04T16:54:51.290952Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502398446233144:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.290963Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.290984Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.291009Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.291026Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.291040Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:51.372809Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502492935515966:3059], Partition 4, Sender [0:0:0], Recipient [5:7489502492935516061:3068], Cookie: 0 2025-04-04T16:54:51.372889Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502492935516061:3068]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.372910Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.372947Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.373014Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.373034Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.373060Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T16:54:51.378328Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7489502398446233083:2469], Partition 0, Sender [5:7489502398446233147:2475], Recipient [5:7489502398446233144:2473], Cookie: 0 2025-04-04T16:54:51.378417Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7489502398446233147:2475], Recipient [5:7489502398446233144:2473]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T16:54:51.378452Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T16:54:51.379093Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502454280809493:2860], Partition 2, Sender [0:0:0], Recipient [5:7489502454280809574:2867], Cookie: 0 2025-04-04T16:54:51.379138Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502454280809574:2867]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.379157Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.379196Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.379281Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.379314Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.379341Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:51.391614Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502398446233083:2469], Partition 0, Sender [0:0:0], Recipient [5:7489502398446233144:2473], Cookie: 0 2025-04-04T16:54:51.391693Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502398446233144:2473]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.391733Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.391782Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.391859Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.391883Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.391923Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T16:54:51.391995Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502454280809497:2861], Partition 1, Sender [0:0:0], Recipient [5:7489502454280809587:2874], Cookie: 0 2025-04-04T16:54:51.392031Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502454280809587:2874]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.392054Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.392077Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.392113Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.392127Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.392144Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T16:54:51.392236Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489502492935515967:3060], Partition 3, Sender [0:0:0], Recipient [5:7489502492935516063:3070], Cookie: 0 2025-04-04T16:54:51.392266Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489502492935516063:3070]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.392278Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T16:54:51.392310Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T16:54:51.392341Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T16:54:51.392356Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T16:54:51.392371Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T16:54:51.392538Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [5:7489502398446233081:2468], Recipient [5:7489502338316689780:2137]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2025-04-04T16:54:51.392565Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> KqpSort::TopSortResults [GOOD] >> KqpSort::TopParameterFilter >> BsControllerConfig::DeleteStoragePool [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata >> KqpSort::OffsetTopSort [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable [GOOD] >> KqpNewEngine::LookupColumns >> TLocksTest::MultipleLocks [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-04-04T16:54:35.524353Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:35.533922Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:35.535181Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:35.537185Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:35.537798Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:35.538535Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:35.538574Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:35.538849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:35.553545Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:35.553700Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:35.553857Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:35.553955Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:35.554069Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:35.554184Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-04-04T16:54:35.569066Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:35.569217Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:35.582890Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:35.583029Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:35.583110Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:35.583182Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:35.583306Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:35.583394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:35.583440Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:35.583509Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:35.594518Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:35.594693Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:35.605592Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:35.605759Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:35.607058Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:35.607124Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:35.607368Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:35.607421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:35.630874Z 
node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:203:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:203:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:203:2076] 2025-04-04T16:54:37.680829Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:37.681760Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:37.681991Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T16:54:37.683371Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:37.683829Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T16:54:37.684402Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:37.684435Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T16:54:37.684653Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T16:54:37.694482Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T16:54:37.694639Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T16:54:37.694742Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T16:54:37.694837Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:37.694954Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T16:54:37.695038Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-04-04T16:54:37.706275Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T16:54:37.706430Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:37.719507Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T16:54:37.719655Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:37.719751Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T16:54:37.719843Z node 11 :BS_CONTROLLER 
DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:37.719952Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T16:54:37.720001Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:37.720040Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T16:54:37.720094Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:37.731463Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T16:54:37.731626Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:37.742509Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T16:54:37.742682Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T16:54:37.743971Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T16:54:37.744046Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T16:54:37.744259Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T16:54:37.744306Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T16:54:37.744917Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2926:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2926:2116] Leader for TabletID 72057594037932033 is [21:3016:2118] sender: [21:3017:2106] recipient: [21:2926:2116] 2025-04-04T16:54:40.071196Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:40.072189Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:40.072445Z n ... 
71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 96:1002 Path# /dev/disk3
2025-04-04T16:54:49.052312Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1000 Path# /dev/disk1
2025-04-04T16:54:49.052335Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1001 Path# /dev/disk2
2025-04-04T16:54:49.052360Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3
2025-04-04T16:54:49.052382Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1000 Path# /dev/disk1
2025-04-04T16:54:49.052405Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1001 Path# /dev/disk2
2025-04-04T16:54:49.052430Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1002 Path# /dev/disk3
2025-04-04T16:54:49.052453Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1000 Path# /dev/disk1
2025-04-04T16:54:49.052475Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1001 Path# /dev/disk2
2025-04-04T16:54:49.052499Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1002 Path# /dev/disk3
2025-04-04T16:54:49.052521Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1000 Path# /dev/disk1
2025-04-04T16:54:49.052544Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1001 Path# /dev/disk2
2025-04-04T16:54:49.052567Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1002 Path# /dev/disk3
2025-04-04T16:54:49.052595Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1000 Path# /dev/disk1
2025-04-04T16:54:49.052619Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1001 Path# /dev/disk2
2025-04-04T16:54:49.052646Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1002 Path# /dev/disk3
2025-04-04T16:54:49.052671Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1000 Path# /dev/disk1
2025-04-04T16:54:49.052696Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1001 Path# /dev/disk2
2025-04-04T16:54:49.052718Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1002 Path# /dev/disk3
2025-04-04T16:54:49.052742Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1000 Path# /dev/disk1
2025-04-04T16:54:49.052766Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1001 Path# /dev/disk2
2025-04-04T16:54:49.052789Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1002 Path# /dev/disk3
2025-04-04T16:54:49.052811Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1000 Path# /dev/disk1
2025-04-04T16:54:49.052833Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1001 Path# /dev/disk2
2025-04-04T16:54:49.052853Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3
2025-04-04T16:54:49.052877Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1000 Path# /dev/disk1
2025-04-04T16:54:49.052897Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1001 Path# /dev/disk2
2025-04-04T16:54:49.052920Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1002 Path# /dev/disk3
2025-04-04T16:54:49.052943Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1000 Path# /dev/disk1
2025-04-04T16:54:49.052965Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1001 Path# /dev/disk2
2025-04-04T16:54:49.052987Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1002 Path# /dev/disk3
2025-04-04T16:54:49.053009Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1000 Path# /dev/disk1
2025-04-04T16:54:49.053032Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1001 Path# /dev/disk2
2025-04-04T16:54:49.053059Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3
2025-04-04T16:54:49.053087Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1000 Path# /dev/disk1
2025-04-04T16:54:49.053111Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1001 Path# /dev/disk2
2025-04-04T16:54:49.053137Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3
2025-04-04T16:54:49.053161Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1000 Path# /dev/disk1
2025-04-04T16:54:49.053182Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1001 Path# /dev/disk2
2025-04-04T16:54:49.053207Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3
2025-04-04T16:54:49.053231Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1000 Path# /dev/disk1
2025-04-04T16:54:49.053253Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1001 Path# /dev/disk2
2025-04-04T16:54:49.053274Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3
2025-04-04T16:54:49.053297Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1000 Path# /dev/disk1
2025-04-04T16:54:49.053318Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1001 Path# /dev/disk2
2025-04-04T16:54:49.053338Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3
2025-04-04T16:54:49.053359Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1000 Path# /dev/disk1
2025-04-04T16:54:49.053381Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1001 Path# /dev/disk2
2025-04-04T16:54:49.053403Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3
2025-04-04T16:54:49.053432Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1000 Path# /dev/disk1
2025-04-04T16:54:49.053456Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1001 Path# /dev/disk2
2025-04-04T16:54:49.053480Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1002 Path# /dev/disk3
2025-04-04T16:54:49.053501Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1000 Path# /dev/disk1
2025-04-04T16:54:49.053528Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1001 Path# /dev/disk2
2025-04-04T16:54:49.053551Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3
2025-04-04T16:54:49.053574Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1000 Path# /dev/disk1
2025-04-04T16:54:49.053597Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1001 Path# /dev/disk2
2025-04-04T16:54:49.053620Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3
2025-04-04T16:54:49.053646Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1000 Path# /dev/disk1
2025-04-04T16:54:49.053670Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1001 Path# /dev/disk2
2025-04-04T16:54:49.053695Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3
2025-04-04T16:54:49.053717Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1000 Path# /dev/disk1
2025-04-04T16:54:49.053741Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1001 Path# /dev/disk2
2025-04-04T16:54:49.053765Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3
2025-04-04T16:54:49.053790Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1000 Path# /dev/disk1
2025-04-04T16:54:49.053816Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1001 Path# /dev/disk2
2025-04-04T16:54:49.053844Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3
2025-04-04T16:54:49.053874Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1000 Path# /dev/disk1
2025-04-04T16:54:49.053899Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1001 Path# /dev/disk2
2025-04-04T16:54:49.053923Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3
2025-04-04T16:54:49.053946Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1000 Path# /dev/disk1
2025-04-04T16:54:49.053968Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1001 Path# /dev/disk2
2025-04-04T16:54:49.053989Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1002 Path# /dev/disk3
2025-04-04T16:54:49.074438Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } }
2025-04-04T16:54:49.252379Z node 71 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.178036s
2025-04-04T16:54:49.252497Z node 71 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.178193s
2025-04-04T16:54:49.271522Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } }
2025-04-04T16:54:49.331427Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD]
Test command err:
Trying to start YDB, gRPC: 22549, MsgBus: 62857
2025-04-04T16:53:10.311933Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502083572638730:2132];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:10.311973Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001573/r3tmp/tmp015mQy/pdisk_1.dat
2025-04-04T16:53:13.518396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:14.530363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:15.115642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:53:15.126636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:53:15.230016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:53:15.318107Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502083572638730:2132];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:15.318157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
TServer::EnableGrpc on GrpcPort 22549, node 1
2025-04-04T16:53:15.494616Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T16:53:15.494638Z node 1 :GRPC_SERVER
WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T16:53:15.546579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:16.311503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:53:16.311527Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:53:16.311534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:53:16.312009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T16:53:16.558437Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:53:16.610423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:16.610455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TClient is connected to server localhost:62857
TClient is connected to server localhost:62857
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:53:22.138351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:53:22.167116Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T16:53:22.212696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:53:22.537891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:53:24.362376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:53:24.580552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:53:30.838818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T16:53:30.838846Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:53:34.201170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502186651855647:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:53:34.201285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:53:34.996088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T16:53:35.112689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T16:53:35.240848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T16:53:35.405507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T16:53:35.544372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T16:53:36.024253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T16:53:36.344100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502195241790793:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:53:36.344179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:53:36.344515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502195241790798:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:53:36.348716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T16:53:36.365406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502195241790800:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T16:53:36.458125Z node 1 :TX_PROXY ERROR: Actor# [1:7489502195241790851:3530] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 17962, MsgBus: 3629
2025-04-04T16:53:50.887475Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502256375067004:2125];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:51.058761Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001573/r3tmp/tmpqHLSwF/pdisk_1.dat
2025-04-04T16:53:53.326909Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:53.353710Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:53:54.160964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:53:54.161054Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:53:54.179963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 17962, node 2
2025-04-04T16:53:55.858871Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502256375067004:2125];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:55.858923Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:53:55.884669Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:53:55.885050Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:53:55.885057Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:53:55.885147Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3629
TClient is connected to server localhost:3629
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { ... fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489502425389199213:2083];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:54:35.202537Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:54:35.943578Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502446864037438:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:35.943684Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:36.010176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T16:54:36.124697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T16:54:36.236764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T16:54:36.337506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T16:54:36.440428Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T16:54:36.686885Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T16:54:36.825481Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502451159005271:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:36.825612Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:36.826726Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502451159005276:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:36.838731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T16:54:36.877193Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502451159005278:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T16:54:36.959833Z node 5 :TX_PROXY ERROR: Actor# [5:7489502451159005339:3465] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 14854, MsgBus: 7375
2025-04-04T16:54:42.007953Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502473694886852:2216];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001573/r3tmp/tmpWzUhis/pdisk_1.dat
2025-04-04T16:54:42.129896Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T16:54:42.234402Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:54:42.234539Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:54:42.246612Z node 6 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:54:42.269948Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 14854, node 6
2025-04-04T16:54:42.500107Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:54:42.500154Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:54:42.500170Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:54:42.500361Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7375
TClient is connected to server localhost:7375
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:54:43.513231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:54:43.548478Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:54:43.716853Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:54:44.209516Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:54:44.377826Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:54:46.970642Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502473694886852:2216];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:54:46.970733Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:54:50.011347Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502508054626845:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:50.011446Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:50.054924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T16:54:50.154411Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T16:54:50.249569Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T16:54:50.386215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T16:54:50.523501Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T16:54:50.581690Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T16:54:50.670892Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502512349594665:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:50.670988Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:50.671378Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502512349594670:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:54:50.683425Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T16:54:50.712955Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502512349594672:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T16:54:50.788075Z node 6 :TX_PROXY ERROR: Actor# [6:7489502512349594727:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TopPartitionsFollowers [GOOD]
Test command err:
2025-04-04T16:51:27.833751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501639871305154:2271];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:51:27.834274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c1b/r3tmp/tmpMkykZF/pdisk_1.dat
2025-04-04T16:51:28.338361Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:51:28.343251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:51:28.343340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:51:28.347453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20050, node 1
2025-04-04T16:51:28.507811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:51:28.507828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:51:28.507839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:51:28.507942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1579
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:51:28.851005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:51:28.866846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T16:51:31.048141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501657051174752:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:31.048367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:31.048759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501657051174779:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:51:31.058367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-04T16:51:31.068333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501657051174781:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-04T16:51:31.155460Z node 1 :TX_PROXY ERROR: Actor# [1:7489501657051174832:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T16:51:31.478441Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0vdwh33t0wmgg7epeb20nd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk0OWI0NWItNWY3ZGUyMS1jMjQ1ODI0Zi02M2RjN2IyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T16:51:31.511443Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489501657051174867:2340], owner: [1:7489501657051174863:2338], scan id: 0, table id: [72057594046644480:1:0:ds_vslots]
2025-04-04T16:51:31.514922Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489501657051174867:2340], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1
2025-04-04T16:51:31.530797Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489501657051174867:2340], row count: 1, finished: 1
2025-04-04T16:51:31.530862Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489501657051174867:2340], owner: [1:7489501657051174863:2338], scan id: 0, table id: [72057594046644480:1:0:ds_vslots]
2025-04-04T16:51:31.560612Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785491477, txId: 281474976710660] shutting down
2025-04-04T16:51:32.792066Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0ve0383v01g4vtztqvme89, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ3ZGUwOGItNWQzMjZhY2YtYzdhZGU3MTItN2VlMGMwODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root
2025-04-04T16:51:32.796263Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489501661346142207:2354], owner: [1:7489501661346142203:2352], scan id: 0, table id: [72057594046644480:1:0:ds_vslots]
2025-04-04T16:51:32.797683Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489501661346142207:2354], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1
2025-04-04T16:51:32.797975Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489501661346142207:2354], row count: 1, finished: 1
2025-04-04T16:51:32.798003Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489501661346142207:2354], owner: [1:7489501661346142203:2352], scan id: 0, table id: [72057594046644480:1:0:ds_vslots]
2025-04-04T16:51:32.800855Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785492790, txId: 281474976710662] shutting down
2025-04-04T16:51:32.833824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501639871305154:2271];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:51:32.833892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:51:34.053918Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0ve19v8dkefk93swhgfmv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIzOWY4OWItM2JhNjJlZDktNGU2MjI5NmEtZjhlZTVkMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T16:51:34.056581Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489501669936076845:2369], owner: [1:7489501669936076842:2367], scan id: 0, table id: [72057594046644480:1:0:ds_vslots]
2025-04-04T16:51:34.060583Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489501669936076845:2369], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1
2025-04-04T16:51:34.061163Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489501669936076845:2369], row count: 2, finished: 1
2025-04-04T16:51:34.061196Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489501669936076845:2369], owner: [1:7489501669936076842:2367], scan id: 0, table id: [72057594046644480:1:0:ds_vslots]
2025-04-04T16:51:34.064694Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785494052, txId: 281474976710664] shutting down
2025-04-04T16:51:36.051257Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501679740325822:2074];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:51:36.051320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c1b/r3tmp/tmp2cXsYC/pdisk_1.dat
2025-04-04T16:51:36.413655Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:51:36.443784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:51:36.443863Z
node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:51:36.451607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3830, node 2
2025-04-04T16:51:36.886071Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:51:36.886092Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:51:36.886098Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:51:36.886203Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22086
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:51:37.174969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... 28525536:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T16:54:52.627483Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T16:54:52.627547Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [17:7489502432028525536:2204], Recipient [17:7489502432028525536:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T16:54:52.627564Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T16:54:52.648249Z node 17 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0vm3495e9s16k19skd6se4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=17&id=NmZkZGRkYzUtYWQyZmIwYzUtM2Q5Y2JjNmYtMjgxMDRmYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root
2025-04-04T16:54:52.658182Z node 21 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1743785691000000 Rank: 0 } InclusiveFrom: true To { IntervalEndUs: 1743785691000000 Rank: 4294967295 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 2, bytes# 154, next#
2025-04-04T16:54:52.652646Z node 17 :SYSTEM_VIEWS INFO: Scan started, actor: [17:7489502522222840206:2465], owner: [17:7489502522222840203:2463], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute]
2025-04-04T16:54:52.655942Z node 17 :SYSTEM_VIEWS INFO: Scan prepared, actor: [17:7489502522222840206:2465], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2
2025-04-04T16:54:52.661052Z node 17 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [17:7489502522222840206:2465], row count: 2, finished: 1
2025-04-04T16:54:52.661102Z node 17 :SYSTEM_VIEWS INFO: Scan finished, actor: [17:7489502522222840206:2465], owner: [17:7489502522222840203:2463], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute]
2025-04-04T16:54:52.672761Z node 17 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [17:7489502427733557931:2094], database# , query hash# 13052368464442696334, cpu time# 341184
2025-04-04T16:54:52.673772Z node 17 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785692646, txId: 281474976710676] shutting down
2025-04-04T16:54:52.709058Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [17:7489502470683231880:2637], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-04-04T16:54:52.709104Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-04-04T16:54:52.709128Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480
2025-04-04T16:54:52.709374Z node 17 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 20
2025-04-04T16:54:52.709985Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-04T16:54:52.712663Z node 21 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [21:7489502473275999869:2289], Recipient [21:7489502473275999892:2324]: NKikimr::TEvTablet::TEvTabletDead
2025-04-04T16:54:52.712772Z node 21 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894
2025-04-04T16:54:52.709343Z node 20 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [20:7489502472763200666:2150], Recipient [20:7489502472763200700:2299]: NKikimr::TEvTablet::TEvTabletDead
2025-04-04T16:54:52.709481Z node 20 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894
2025-04-04T16:54:52.714044Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [17:7489502470683231888:2645], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [17:7489502470683231888:2645] ServerId: [21:7489502468981032372:2161] }
2025-04-04T16:54:52.714090Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-04T16:54:52.714116Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037892, from:72057594046644480 is reset
2025-04-04T16:54:52.714210Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [17:7489502470683231886:2643], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037890 ClientId: [17:7489502470683231886:2643] ServerId: [21:7489502473275999760:2221] }
2025-04-04T16:54:52.714222Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-04T16:54:52.714237Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037890, from:72057594046644480 is reset
2025-04-04T16:54:52.714315Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [17:7489502470683231884:2641], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037888 ClientId: [17:7489502470683231884:2641] ServerId: [21:7489502468981032346:2146] }
2025-04-04T16:54:52.714329Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-04T16:54:52.714342Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset
2025-04-04T16:54:52.714408Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [17:7489502470683231887:2644], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037891 ClientId: [17:7489502470683231887:2644] ServerId: [21:7489502468981032335:2144] }
2025-04-04T16:54:52.714422Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-04T16:54:52.714439Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037891, from:72057594046644480 is reset
2025-04-04T16:54:52.714496Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [17:7489502470683231889:2646], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [17:7489502470683231889:2646] ServerId: [21:7489502468981032347:2147] }
2025-04-04T16:54:52.714510Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-04T16:54:52.714523Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037893, from:72057594046644480 is reset
2025-04-04T16:54:52.714580Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [17:7489502470683231885:2642], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037889 ClientId: [17:7489502470683231885:2642] ServerId: [21:7489502468981032348:2148] }
2025-04-04T16:54:52.714591Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed
2025-04-04T16:54:52.714622Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72075186224037889, from:72057594046644480 is reset
2025-04-04T16:54:52.714667Z node 17 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 21
2025-04-04T16:54:52.714674Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [17:7489502466388264520:2590], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-04-04T16:54:52.714689Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-04-04T16:54:52.714700Z node 17 :FLAT_TX_SCHEMESHARD
TRACE: Server pipe is reset, at schemeshard: 72057594046644480
2025-04-04T16:54:52.715917Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-04-04T16:54:52.712026Z node 20 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [20:7489502472763200622:2120], Recipient [20:7489502472763200638:2297]: NKikimr::TEvTablet::TEvTabletDead
2025-04-04T16:54:52.712466Z node 20 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894
2025-04-04T16:54:52.712573Z node 20 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894
2025-04-04T16:54:52.714791Z node 21 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [21:7489502473275999867:2287], Recipient [21:7489502473275999891:2323]: NKikimr::TEvTablet::TEvTabletDead
2025-04-04T16:54:52.714882Z node 21 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894
2025-04-04T16:54:52.718577Z node 21 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [21:7489502464686064727:2076], processor id# 72075186224037893, database# /Root/Tenant1
2025-04-04T16:54:52.719178Z node 21 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268829696, Sender [21:7489502468981032276:2114], Recipient [21:7489502468981032326:2300]: NKikimr::TEvTablet::TEvTabletDead
2025-04-04T16:54:52.720232Z node 21 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T16:54:52.726011Z node 17 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[21:7489502464686064959:2108], Type=268959746
2025-04-04T16:54:52.730777Z node 20 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [20:7489502464173265744:2073], processor id# 72075186224037893, database# /Root/Tenant1
2025-04-04T16:54:52.736696Z node 20 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [20:7489502464173265744:2073], database# /Root/Tenant1, processor id# 72075186224037893
2025-04-04T16:54:52.737303Z node 21 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [21:7489502464686064727:2076], database# /Root/Tenant1, processor id# 72075186224037893
2025-04-04T16:54:52.738856Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [17:7489502474978199284:2702], Recipient [17:7489502432028525536:2204]: NKikimr::TEvTabletPipe::TEvServerDisconnected
2025-04-04T16:54:52.738887Z node 17 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected
2025-04-04T16:54:52.738899Z node 17 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480
2025-04-04T16:54:54.001476Z node 20 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [20:7489502464173265744:2073], interval end# 2025-04-04T16:54:54.000000Z, event interval end# 2025-04-04T16:54:54.000000Z
2025-04-04T16:54:54.001505Z node 20 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [20:7489502425518560007:2063], interval end# 2025-04-04T16:54:54.000000Z, event interval end# 2025-04-04T16:54:54.000000Z
2025-04-04T16:54:54.001532Z node 20 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [20:7489502464173265744:2073], query logs count# 0, processor ids count# 1, processor id to database count# 1
2025-04-04T16:54:54.001549Z node 20 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [20:7489502425518560007:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0
2025-04-04T16:54:54.002675Z node 18 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id#
[18:7489502425649279866:2063], interval end# 2025-04-04T16:54:54.000000Z, event interval end# 2025-04-04T16:54:54.000000Z
2025-04-04T16:54:54.002727Z node 18 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [18:7489502425649279866:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0
2025-04-04T16:54:55.154636Z node 18 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [18:7489502425649279866:2063]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD]
Test command err:
Trying to start YDB, gRPC: 8463, MsgBus: 24041
2025-04-04T16:53:01.366592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502042016154503:2200];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:01.382520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001596/r3tmp/tmp6APWEa/pdisk_1.dat
2025-04-04T16:53:07.366130Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502042016154503:2200];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:07.426763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:53:10.301003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:10.302192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:12.124739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:53:12.125870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:53:12.201556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:53:12.675003Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:53:12.694900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:12.694929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TServer::EnableGrpc on GrpcPort 8463, node 1
2025-04-04T16:53:13.758782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:53:13.758804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:53:13.758809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:53:13.758897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24041
TClient is connected to server localhost:24041
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:53:14.413578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:53:14.436577Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T16:53:16.580062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502106440664483:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:16.580178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:16.580647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502106440664495:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:16.584371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:53:16.603297Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:53:16.603593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502106440664497:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:53:16.710122Z node 1 :TX_PROXY ERROR: Actor# [1:7489502106440664549:2347] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23030, MsgBus: 27406 2025-04-04T16:53:17.988425Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502113430117155:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:17.988454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001596/r3tmp/tmpz9ot1e/pdisk_1.dat 2025-04-04T16:53:18.120470Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23030, node 2 2025-04-04T16:53:18.189295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:18.189366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:18.195008Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:18.398696Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:18.398718Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:18.398725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:18.398832Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27406 TClient is connected to server localhost:27406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:53:20.980072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:21.012042Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:53:21.122126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:21.957839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:22.420180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:22.501174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:22.990712Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502113430117155:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:22.990749Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:28.562483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502160674759204:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:28.563502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:29.072022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:29.397462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:29.849109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... 0.303467Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:40.726223Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502468114865378:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:40.726344Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:40.752517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.811619Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.876833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.934514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.999937Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.093730Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.222937Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502472409833190:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.223042Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.226727Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502472409833196:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.230868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:41.243416Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502472409833198:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:41.331936Z node 5 :TX_PROXY ERROR: Actor# [5:7489502472409833256:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17725, MsgBus: 8447 2025-04-04T16:54:44.735350Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502485238580436:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:44.735418Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001596/r3tmp/tmp4roK4Y/pdisk_1.dat 2025-04-04T16:54:44.977192Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:45.023833Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:45.023948Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:45.027678Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17725, node 6 2025-04-04T16:54:45.162853Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:45.162878Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:45.162887Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:45.163036Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8447 TClient is connected to server localhost:8447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:45.897085Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:45.907382Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:45.915337Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:46.028513Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:46.261339Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:46.448819Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:49.737094Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502485238580436:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:49.737191Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:51.511769Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502515303353293:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:51.511894Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:51.649927Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:51.757869Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:51.852678Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.141958Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.246470Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.427580Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.568126Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502519598321128:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.568292Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.568634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502519598321133:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.576038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:52.612182Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502519598321135:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:52.699708Z node 6 :TX_PROXY ERROR: Actor# [6:7489502519598321188:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::DeleteByKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 31950, MsgBus: 4368 2025-04-04T16:53:23.802223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502137361318376:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:23.895416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00150b/r3tmp/tmpt6RSak/pdisk_1.dat 2025-04-04T16:53:25.146389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:28.095344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:28.169029Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:28.631623Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502137361318376:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:28.631691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:29.368276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:29.460096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:29.460173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:29.527915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:29.882223Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.125780s 2025-04-04T16:53:29.882294Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.125874s TServer::EnableGrpc on GrpcPort 31950, node 1 2025-04-04T16:53:31.791036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:31.791208Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:31.791219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:31.791329Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4368 TClient is connected to server localhost:4368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:34.086655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:34.136483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:34.325750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:34.622722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:35.824610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:42.885600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:42.885630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:45.460905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502231850600667:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:45.460999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.208852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.348947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.485680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.641843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.721710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.820582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.972576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502240440535811:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.972642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.973050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502240440535816:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.989321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:48.091670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502240440535818:2504], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:48.214495Z node 1 :TX_PROXY ERROR: Actor# [1:7489502244735503178:3530] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12213, MsgBus: 29509 2025-04-04T16:54:00.638972Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502297992149623:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00150b/r3tmp/tmplg1A96/pdisk_1.dat 2025-04-04T16:54:00.817093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:00.935383Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.964648Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.964753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.968908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12213, node 2 2025-04-04T16:54:01.195998Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:01.196025Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:01.196034Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:01.196179Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29509 TClient is connected to server localhost:29509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:02.327882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propos ... 
ne=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489502444473934856:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:39.222464Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:41.135739Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502474538707711:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.135845Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.236993Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.320013Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.368889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.418925Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.478007Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.558678Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:41.690637Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502474538708238:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.690743Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.691103Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502474538708243:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:41.708261Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:41.742832Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502474538708245:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:54:41.813924Z node 5 :TX_PROXY ERROR: Actor# [5:7489502474538708312:3465] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19155, MsgBus: 14878 2025-04-04T16:54:45.647455Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502488757645276:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:45.647528Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00150b/r3tmp/tmpb09j7R/pdisk_1.dat 2025-04-04T16:54:45.858174Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:45.891614Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:45.891812Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:45.893261Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19155, node 6 2025-04-04T16:54:46.043139Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:46.043167Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:46.043178Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:46.043347Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14878 TClient is connected to server localhost:14878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:46.888755Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:46.920217Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:47.069378Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:47.696175Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:47.937887Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:50.650729Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502488757645276:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:50.650817Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:51.903907Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502514527450832:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:51.904032Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:51.966404Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.060759Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.143236Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.238464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.328693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.427300Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.541791Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502518822418653:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.541920Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.542685Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502518822418658:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.549381Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:52.570899Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502518822418660:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:52.642508Z node 6 :TX_PROXY ERROR: Actor# [6:7489502518822418725:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |76.4%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |76.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] >> TableCreation::SimpleUpdateTable [GOOD] >> YdbTableSplit::RenameTablesAndSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-04-04T16:54:18.743562Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502376444282002:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:18.743740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea5/r3tmp/tmp3y2Oz9/pdisk_1.dat 2025-04-04T16:54:19.466656Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:19.473449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:19.473540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:19.476848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22483 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:19.945276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:19.968668Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:19.996172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.255523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.389121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:25.316914Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502405154645805:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:25.316960Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea5/r3tmp/tmp0ffttQ/pdisk_1.dat 2025-04-04T16:54:25.515189Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:25.525206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:25.525334Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:25.527611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13265 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:25.791713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:25.799098Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:25.815097Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:25.819516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:25.951923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:26.060474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:31.061950Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489502432398811207:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:31.062036Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea5/r3tmp/tmpc67G4n/pdisk_1.dat 2025-04-04T16:54:31.318112Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:31.318210Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:31.321258Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:31.324767Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18628 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:31.532241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:31.539318Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:31.549999Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:31.562462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:31.642875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:31.737934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:36.624110Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489502452002423753:2207];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea5/r3tmp/tmppauM9W/pdisk_1.dat 2025-04-04T16:54:36.816443Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:37.076432Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:37.114673Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:37.114778Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:37.123751Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14676 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:37.595754Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:37.615310Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:37.626200Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:37.632236Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:37.712788Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:37.785565Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:42.011641Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489502475928683514:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:42.011698Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea5/r3tmp/tmp5puXrO/pdisk_1.dat 2025-04-04T16:54:42.222038Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:42.226361Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:42.226466Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:42.229392Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28006 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:42.473409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:42.482172Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:42.492375Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:42.497444Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:42.633762Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:42.719763Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:46.749174Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502493693208151:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea5/r3tmp/tmpqcpqb0/pdisk_1.dat 2025-04-04T16:54:46.905464Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:46.968110Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:46.988743Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:46.988855Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:47.009042Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1226 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:47.944002Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:47.962881Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:47.974871Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T16:54:47.987890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:48.152681Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:48.306675Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.493673Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502519001808646:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:52.495032Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea5/r3tmp/tmpZ7bqAw/pdisk_1.dat 2025-04-04T16:54:52.789334Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:52.789439Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:52.791301Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:52.818350Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62068 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:53.281302Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.293616Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.311357Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:53.318745Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.428090Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.509336Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
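Each of the five bootstrap sequences above (nodes 3 through 7) proposes the same schema work: one ESchemeOpAlterSubDomain followed by three ESchemeOpCreateTable suboperations at schemeshard 72057594046644480. The log never shows the schemas of the tables being created; a minimal YQL DDL of the kind that yields one such ESchemeOpCreateTable proposal, with the table name and column set assumed purely for illustration, would be:

    -- Hypothetical sketch: the name and columns are not taken from the log.
    CREATE TABLE `/dc-1/example_table` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );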
>> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium >> SystemView::AuthPermissions_ResultOrder [GOOD] >> SystemView::AuthPermissions_Selects |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2025-04-04T16:54:46.628590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502494904519854:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:46.629010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001635/r3tmp/tmpmn7dPU/pdisk_1.dat 2025-04-04T16:54:47.382112Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:47.406013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:47.406141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:47.415389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23183 TServer::EnableGrpc on GrpcPort 15031, node 1 2025-04-04T16:54:48.258616Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:48.258639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:48.258646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:48.258798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:48.818790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:48.847976Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:51.373740Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T16:54:51.376685Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T16:54:51.380547Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T16:54:51.380588Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T16:54:51.380605Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T16:54:51.380641Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T16:54:51.380723Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:51.380783Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:51.381977Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:51.382007Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:51.383580Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T16:54:51.383598Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T16:54:51.383620Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-04T16:54:51.383625Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-04T16:54:51.383633Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-04T16:54:51.383648Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-04T16:54:51.383790Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-04T16:54:51.383821Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-04T16:54:51.383839Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
Full table path:/dc-1/.metadata/script_execution_leases 2025-04-04T16:54:51.387042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T16:54:51.389354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:54:51.395843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:54:51.413165Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-04T16:54:51.413225Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-04-04T16:54:51.414325Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-04T16:54:51.414352Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-04-04T16:54:51.415182Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-04T16:54:51.415209Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710660 2025-04-04T16:54:51.581152Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-04T16:54:51.623414Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502494904519854:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:51.623470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:51.659737Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-04T16:54:51.659914Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-04T16:54:51.664834Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-04T16:54:51.728960Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-04T16:54:51.743346Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-04T16:54:51.747348Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: c800d5ae-f78b1475-d9a7ba77-8863f81c, Bootstrap. 
Database: /dc-1 2025-04-04T16:54:51.763850Z node 1 :KQP_PROXY DEBUG: Request has 18445000288017.787793s seconds to be completed 2025-04-04T16:54:51.766340Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NDNmMGY1MzUtOTMxOWIwZTAtNDAyZjViMjMtNzY0MmY2ODg=, workerId: [1:7489502516379357103:2335], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T16:54:51.766457Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T16:54:51.767418Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: c800d5ae-f78b1475-d9a7ba77-8863f81c, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-04T16:54:51.767951Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDNmMGY1MzUtOTMxOWIwZTAtNDAyZjViMjMtNzY0MmY2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7489502516379357103:2335] 2025-04-04T16:54:51.767977Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7489502516379357105:2472] 2025-04-04T16:54:51.769970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502516379357106:2337], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:51.770069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:51.770389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502516379357118:2340], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:51.773606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-04T16:54:51.784230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [ ... 502546825867677:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.311307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-04-04T16:54:58.330418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489502546825867679:2340], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T16:54:58.407172Z node 2 :TX_PROXY ERROR: Actor# [2:7489502546825867722:2498] txid# 281474976715662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:58.810450Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502525351030294:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:58.810562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:58.860046Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7489502546825867663:2335], selfId: [2:7489502525351030504:2269], source: [2:7489502546825867662:2334] 2025-04-04T16:54:58.860595Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDAyMTc2ZTMtYzczMDZhZTItYzc3N2Q4MmMtNDVhNDNhNTY=, TxId: 2025-04-04T16:54:58.860632Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDAyMTc2ZTMtYzczMDZhZTItYzc3N2Q4MmMtNDVhNDNhNTY=, TxId: 2025-04-04T16:54:58.860646Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6. Result: SUCCESS. Issues: 2025-04-04T16:54:58.873963Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OWYzMzViNjgtMTNjNzhlMWEtMTljN2EyOWYtYjk2NGFjMDE=, workerId: [2:7489502546825867776:2351], database: dc-1, longSession: 1, local sessions count: 2 2025-04-04T16:54:58.874154Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T16:54:58.874667Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NDAyMTc2ZTMtYzczMDZhZTItYzc3N2Q4MmMtNDVhNDNhNTY=, workerId: [2:7489502546825867662:2334], local sessions count: 1 2025-04-04T16:54:58.874804Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OWYzMzViNjgtMTNjNzhlMWEtMTljN2EyOWYtYjk2NGFjMDE=, CurrentExecutionId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [2:7489502546825867776:2351] 2025-04-04T16:54:58.874839Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7489502546825867780:2536] 2025-04-04T16:54:58.931948Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr0vm9kk6xerat4v913y9ek3", Request has 18445000288010.619696s seconds to be completed 2025-04-04T16:54:58.934224Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr0vm9kk6xerat4v913y9ek3", Created new session, sessionId: ydb://session/3?node_id=2&id=YzhmOGI0ZmUtOTE5MWRjZWYtYjFkODYxZTMtNjRlNTBhNGM=, workerId: [2:7489502546825867797:2361], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T16:54:58.934382Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr0vm9kk6xerat4v913y9ek3 2025-04-04T16:54:58.939757Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-04T16:54:58.939781Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-04T16:54:58.939810Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-04T16:54:58.942710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-04-04T16:54:58.944711Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-04T16:54:58.944739Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-04-04T16:54:58.954765Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, Bootstrap. Database: /dc-1 2025-04-04T16:54:58.956451Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7489502546825867659:2463], selfId: [2:7489502525351030504:2269], source: [2:7489502546825867776:2351] 2025-04-04T16:54:58.956548Z node 2 :KQP_PROXY DEBUG: Request has 18445000288010.595080s seconds to be completed 2025-04-04T16:54:58.966678Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=M2RkZTMzNmItOWYwMGU3NjktYTFkNzM1MjUtYjYxYWVjYzM=, workerId: [2:7489502546825867836:2363], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-04T16:54:58.966819Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T16:54:58.967292Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-04T16:54:58.967622Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=M2RkZTMzNmItOWYwMGU3NjktYTFkNzM1MjUtYjYxYWVjYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
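The RunDataQuery texts above are flattened onto single log lines; the same statements, re-laid out for readability with no content added, are:

    -- TCreateScriptOperationQuery::OnRunQuery, as logged above
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $run_script_actor_id AS Text;
    DECLARE $execution_status AS Int32;
    DECLARE $execution_mode AS Int32;
    DECLARE $query_text AS Text;
    DECLARE $syntax AS Int32;
    DECLARE $meta AS JsonDocument;
    DECLARE $lease_duration AS Interval;
    DECLARE $execution_meta_ttl AS Interval;

    UPSERT INTO `.metadata/script_executions`
        (database, execution_id, run_script_actor_id, execution_status,
         execution_mode, start_ts, query_text, syntax, meta, expire_at)
    VALUES ($database, $execution_id, $run_script_actor_id, $execution_status,
            $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta,
            CurrentUtcTimestamp() + $execution_meta_ttl);

    UPSERT INTO `.metadata/script_execution_leases`
        (database, execution_id, lease_deadline, lease_generation, expire_at)
    VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1,
            CurrentUtcTimestamp() + $execution_meta_ttl);

    -- TSaveScriptExecutionResultMetaQuery::OnRunQuery, as logged above
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $result_set_metas AS JsonDocument;

    UPDATE `.metadata/script_executions`
    SET result_set_metas = $result_set_metas
    WHERE database = $database AND execution_id = $execution_id;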
Send request to target, requestId: 8, targetId: [2:7489502546825867836:2363] 2025-04-04T16:54:58.967657Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7489502546825867863:2578] 2025-04-04T16:54:59.012238Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-04-04T16:54:59.090307Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T16:54:59.090997Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-04-04T16:54:59.091051Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-04-04T16:54:59.092412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.093385Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715666 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-04-04T16:54:59.093410Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715666 2025-04-04T16:54:59.159658Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-04-04T16:54:59.232886Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T16:54:59.262935Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YzhmOGI0ZmUtOTE5MWRjZWYtYjFkODYxZTMtNjRlNTBhNGM=, workerId: [2:7489502546825867797:2361], local sessions count: 2 2025-04-04T16:54:59.302801Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:59.355543Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7489502546825867862:2365], selfId: [2:7489502525351030504:2269], source: [2:7489502546825867836:2363] 2025-04-04T16:54:59.356063Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2RkZTMzNmItOWYwMGU3NjktYTFkNzM1MjUtYjYxYWVjYzM=, TxId: 2025-04-04T16:54:59.356100Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2RkZTMzNmItOWYwMGU3NjktYTFkNzM1MjUtYjYxYWVjYzM=, TxId: 2025-04-04T16:54:59.356226Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, start saving rows range [0; 1) 2025-04-04T16:54:59.356289Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, Bootstrap. 
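The recurring "Request has 18445000288010.619696s seconds to be completed" lines are consistent with an unset client deadline of TInstant::Max(), i.e. 2^64 - 1 microseconds from the epoch; this reading is an inference from the arithmetic below, not something the log states:

    2^64 us                           = 18446744073709.551616 s
    Unix time at 2025-04-04T16:54:58  ~  1743785698.93 s
    difference                        ~ 18445000288010.62 s   (logged: 18445000288010.619696s)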
Database: /dc-1 2025-04-04T16:54:59.357645Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=M2RkZTMzNmItOWYwMGU3NjktYTFkNzM1MjUtYjYxYWVjYzM=, workerId: [2:7489502546825867836:2363], local sessions count: 1 2025-04-04T16:54:59.357680Z node 2 :KQP_PROXY DEBUG: Request has 18445000288010.193945s seconds to be completed 2025-04-04T16:54:59.362173Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjZkZjk3MzQtMzU0MDA1YTktMzEwZDFkYWEtOTRhNDg4MmI=, workerId: [2:7489502551120835258:2379], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T16:54:59.362326Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T16:54:59.362793Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 3acdfe80-8e94b8a2-55ca3011-5951b6e6, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-04T16:54:59.366928Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjZkZjk3MzQtMzU0MDA1YTktMzEwZDFkYWEtOTRhNDg4MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7489502551120835258:2379] 2025-04-04T16:54:59.366962Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7489502551120835260:2652] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 11187, MsgBus: 4896 2025-04-04T16:53:22.636631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502132227556295:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:22.639237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00151e/r3tmp/tmpQjqMGG/pdisk_1.dat 2025-04-04T16:53:27.633161Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502132227556295:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:27.633205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:29.916099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:29.938447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:31.239200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:31.258615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:32.755512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:32.758460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:32.861279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:33.116166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:33.119175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:33.128717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11187, node 1 2025-04-04T16:53:34.254498Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:34.254525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:34.254531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:34.254660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4896 TClient is connected to server localhost:4896 WaitRootIsUp 'Root'... 
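In the TSaveScriptExecutionResultQuery text logged in the TableCreation::SimpleUpdateTable output above, the angle-bracketed type arguments did not survive capture: the log reads "DECLARE $expire_at AS Optional;" and "DECLARE $items AS List>;". A plausible reconstruction follows; the struct field names come from the query's own SELECT list, while the concrete types (Timestamp, Int64, String) are assumptions:

    -- TSaveScriptExecutionResultQuery::OnRunQuery, reconstructed sketch.
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $result_set_id AS Int32;
    DECLARE $expire_at AS Optional<Timestamp>;   -- logged as "Optional;"
    DECLARE $items AS List<Struct<               -- logged as "List>;"
        row_id: Int64,
        result_set: String,
        accumulated_size: Int64>>;

    UPSERT INTO `.metadata/result_sets`
    SELECT $database as database, $execution_id as execution_id,
           $result_set_id as result_set_id, T.row_id as row_id,
           $expire_at as expire_at, T.result_set as result_set,
           T.accumulated_size as accumulated_size
    FROM AS_TABLE($items) AS T;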
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:36.776169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:46.603680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:46.603711Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:48.518773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502243896706702:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:48.519203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:50.558844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:53:52.856563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502261076576011:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:52.856991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:52.862434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502261076576016:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:52.883955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:53:53.047588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502261076576018:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:53:53.156373Z node 1 :TX_PROXY ERROR: Actor# [1:7489502265371543367:2459] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:54.671187Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502269666510707:2389], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-04T16:53:54.672329Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mjk4NzAxNTYtMTU2N2IwODEtNjUyMGU4NTQtZTYzNmEyZTc=, ActorId: [1:7489502239601739397:2352], ActorState: ExecuteState, TraceId: 01jr0vjapv79a7hznyxwxdp55w, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T16:53:54.751861Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502269666510720:2395], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:47: Error: Failed to convert 'Value': Null to String
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T16:53:54.753007Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mjk4NzAxNTYtMTU2N2IwODEtNjUyMGU4NTQtZTYzNmEyZTc=, ActorId: [1:7489502239601739397:2352], ActorState: ExecuteState, TraceId: 01jr0vjaw56dd3qd1k00was9d8, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 15827, MsgBus: 14090 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00151e/r3tmp/tmpQj3uQu/pdisk_1.dat 2025-04-04T16:54:00.173365Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:00.276357Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.283846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.283922Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.287681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15827, node 2 2025-04-04T16:54:00.486832Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:00.486854Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:00.486860Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:00.486983Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14090 TClient is connected to server localhost:14090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:01.208546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:06.159047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502320868025457:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don ... 4:40.480907Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.523106Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.608317Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.707240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.778388Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:40.878136Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502470011579289:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:40.878267Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:40.878489Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502470011579294:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:40.883842Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:40.903765Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502470011579296:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:54:40.973560Z node 5 :TX_PROXY ERROR: Actor# [5:7489502470011579351:3468] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:42.461369Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2453, MsgBus: 32263 2025-04-04T16:54:45.554349Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502491546031364:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00151e/r3tmp/tmp7xeooL/pdisk_1.dat 2025-04-04T16:54:45.730852Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:45.823929Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:45.837923Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:45.838041Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:45.839872Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2453, node 6 2025-04-04T16:54:45.942894Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:45.942921Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:45.942931Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:45.943071Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32263 TClient is connected to server localhost:32263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:46.743596Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:46.766523Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:46.790077Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:46.984149Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:47.349817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:47.576470Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:50.520289Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502491546031364:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:50.520389Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:53.464343Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502525905771341:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.464497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.635594Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.802374Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.906797Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.010414Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.139038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.245137Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.409302Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502530200739174:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.409594Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.410286Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502530200739180:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.432015Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:54.492548Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502530200739183:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:54.554926Z node 6 :TX_PROXY ERROR: Actor# [6:7489502530200739241:3477] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:57.067996Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.698897Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785697724, txId: 281474976710673] shutting down 2025-04-04T16:54:57.969705Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785698004, txId: 281474976710675] shutting down 2025-04-04T16:54:58.143871Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785698172, txId: 281474976710677] shutting down >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-04T16:55:01.467839Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.467874Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.467903Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.468400Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T16:55:01.468459Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.468489Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.469907Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007308s 2025-04-04T16:55:01.470570Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.474637Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T16:55:01.474727Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.475893Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.475926Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.475948Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.476478Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-04T16:55:01.476523Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.476552Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.476644Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007646s 2025-04-04T16:55:01.477343Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.479402Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T16:55:01.479483Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.481677Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.481707Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.481728Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.490807Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-04T16:55:01.490872Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.490904Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.490994Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.204378s 2025-04-04T16:55:01.494962Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.502529Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T16:55:01.502679Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.509846Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.509876Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.509900Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.511068Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-04T16:55:01.511116Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.511144Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.511244Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.166000s 2025-04-04T16:55:01.511759Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.513121Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T16:55:01.513210Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.514488Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.514522Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.514553Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.530596Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.543707Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:01.577298Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.577826Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-04T16:55:01.577872Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.577895Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.577975Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.221508s 2025-04-04T16:55:01.578234Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-04T16:55:01.589312Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.589346Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.589372Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.596317Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.602578Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:01.603324Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.606854Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:01.706258Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.706528Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T16:55:01.706596Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:01.706679Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-04T16:55:01.706752Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T16:55:01.810550Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T16:55:01.810763Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-04T16:55:01.812286Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.812317Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.812341Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.812755Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.813351Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:01.813577Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.818403Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:01.919810Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.920086Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T16:55:01.920159Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:01.920203Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-04T16:55:01.920289Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-04T16:55:01.920408Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T16:55:01.925822Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-04T16:55:01.925945Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T16:55:01.926081Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> KqpRanges::IsNotNullInJsonValue2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-04T16:55:01.658219Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.658685Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.658717Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.674951Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.678842Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T16:55:01.678929Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.680214Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.680235Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.680254Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.686532Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.687735Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T16:55:01.687805Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.689220Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.689249Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.689274Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.692905Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T16:55:01.692976Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.693018Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.693222Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-04T16:55:01.694536Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.694565Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.694591Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.695806Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-04T16:55:01.695848Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.695872Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.695938Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-04T16:55:01.704564Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T16:55:01.704605Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T16:55:01.704634Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.705148Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:01.705725Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:01.719952Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T16:55:01.720484Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:01.720980Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-04-04T16:55:01.725518Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-04T16:55:01.725828Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:01.725869Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T16:55:01.725898Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T16:55:01.725929Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-04T16:55:01.725962Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-04T16:55:01.725980Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-04T16:55:01.726000Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-04T16:55:01.726028Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-04T16:55:01.726069Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-04T16:55:01.726090Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-04T16:55:01.726107Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-04T16:55:01.726125Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-04T16:55:01.726141Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-04T16:55:01.726157Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-04T16:55:01.726174Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-04T16:55:01.726208Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-04T16:55:01.726290Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-04T16:55:01.726312Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-04T16:55:01.726331Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-04T16:55:01.726350Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-04T16:55:01.726385Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-04T16:55:01.726408Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-04T16:55:01.726427Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-04T16:55:01.726449Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-04T16:55:01.726468Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-04T16:55:01.726497Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-04T16:55:01.726517Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-04T16:55:01.726534Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-04T16:55:01.726550Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-04T16:55:01.726567Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-04T16:55:01.726583Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-04T16:55:01.726633Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-04T16:55:01.726704Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-04T16:55:01.726724Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-04T16:55:01.726740Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-04T16:55:01.726810Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-04T16:55:01.726833Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-04T16:55:01.726850Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-04T16:55:01.726872Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-04T16:55:01.726908Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-04T16:55:01.726930Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-04T16:55:01.726946Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-04T16:55:01.726961Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-04T16:55:01.726977Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-04T16:55:01.726993Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-04T16:55:01.727009Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-04T16:55:01.727027Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-04T16:55:01.727059Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-04T16:55:01.727076Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-04T16:55:01.727097Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-04T16:55:01.727154Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-04T16:55:01.730169Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-04T16:55:01.730412Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-04T16:55:01.730453Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-04T16:55:01.730475Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-04T16:55:01.730498Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-04T16:55:01.730527Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-04T16:55:01.730546Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-04T16:55:01.730643Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-04T16:55:01.730668Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-04T16:55:01.730702Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-04T16:55:01.730719Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-04T16:55:01.730749Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-04T16:55:01.730772Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-04T16:55:01.730807Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-04T16:55:01.730823Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-04T16:55:01.730839Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-04T16:55:01.730869Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-04T16:55:01.730918Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-04T16:55:01.730938Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-04T16:55:01.730953Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-04T16:55:01.730975Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-04T16:55:01.730992Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-04T16:55:01.731009Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-04T16:55:01.731037Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-04T16:55:01.731062Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-04T16:55:01.731078Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-04T16:55:01.731098Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-04T16:55:01.731115Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-04T16:55:01.731132Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-04T16:55:01.731164Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-04T16:55:01.731182Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-04T16:55:01.731198Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-04T16:55:01.731221Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-04T16:55:01.731287Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-04T16:55:01.731315Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-04T16:55:01.731337Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-04T16:55:01.731355Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-04T16:55:01.731371Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-04T16:55:01.731389Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-04T16:55:01.731418Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-04T16:55:01.731444Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-04T16:55:01.731460Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-04T16:55:01.731482Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-04T16:55:01.731499Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-04T16:55:01.731516Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-04T16:55:01.731549Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-04T16:55:01.731566Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-04T16:55:01.731583Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-04T16:55:01.731607Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-04T16:55:01.731625Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-04T16:55:01.731641Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-04T16:55:01.731698Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-04T16:55:01.731858Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T16:55:01.733463Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.733494Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.733519Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:01.739340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-04T16:55:01.739905Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:01.740083Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.750556Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:01.841467Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:01.846679Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T16:55:01.846767Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:01.846809Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-04T16:55:01.846887Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T16:55:02.054351Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-04T16:55:02.159579Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-04T16:55:02.159720Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T16:55:02.159868Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-04T16:55:02.161550Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.161575Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.161603Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:02.162119Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:02.163065Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:02.163806Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.166956Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:02.272563Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.273026Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T16:55:02.273087Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:02.273127Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-04T16:55:02.273215Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-04-04T16:55:02.273305Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T16:55:02.273598Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-04T16:55:02.273692Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-04-04T16:55:02.274738Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpNewEngine::PagingNoPredicateExtract ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-04-04T16:55:02.120339Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.120369Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.120397Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:02.134566Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:02.135528Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:02.166648Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.167412Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:02.169723Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.169743Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.169760Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:02.170550Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:02.171100Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:02.171605Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.172223Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:02.172816Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T16:55:02.173957Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.173995Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.174014Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:02.185119Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:02.187960Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:02.188148Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.188563Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-04T16:55:02.189348Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.191827Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T16:55:02.194374Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:02.194458Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T16:55:02.203499Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.203532Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.203561Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:02.214046Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:02.231231Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:02.231472Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.234506Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-04-04T16:55:02.235740Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T16:55:02.235974Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T16:55:02.237375Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T16:55:02.237602Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T16:55:02.242362Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:02.242418Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T16:55:02.242458Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T16:55:02.242646Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-04T16:55:02.242687Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T16:55:02.242712Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T16:55:02.242743Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T16:55:02.242891Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-04T16:55:02.242986Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-04T16:55:02.243006Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-04T16:55:02.243026Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T16:55:02.243192Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-04T16:55:02.243217Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-04T16:55:02.243236Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-04T16:55:02.243262Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T16:55:02.243367Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-04T16:55:02.247733Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.247791Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.247812Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:02.248247Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-04T16:55:02.248951Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:02.249148Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.251627Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-04T16:55:02.252675Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T16:55:02.252908Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T16:55:02.253481Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T16:55:02.253746Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T16:55:02.254670Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:02.254706Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T16:55:02.254737Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T16:55:02.254757Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T16:55:02.254793Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T16:55:02.255044Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-04-04T16:55:02.255141Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-04T16:55:02.255163Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-04T16:55:02.255180Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-04T16:55:02.255198Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-04T16:55:02.255226Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T16:55:02.255359Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-04-04T16:55:02.259939Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.259978Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.260007Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T16:55:02.260319Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T16:55:02.267860Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T16:55:02.268071Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T16:55:02.271521Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T16:55:02.273125Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T16:55:02.273944Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T16:55:02.274525Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-04T16:55:02.274674Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T16:55:02.274785Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T16:55:02.274821Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T16:55:02.274844Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-04T16:55:02.274863Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-04T16:55:02.274899Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-04T16:55:02.274930Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-04T16:55:02.275098Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-04T16:55:02.275230Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |76.5%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |76.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> CdcStreamChangeCollector::UpsertToSameKey |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |76.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::IsNotNullInJsonValue2 [GOOD] Test command err: Trying to start YDB, gRPC: 12015, MsgBus: 29758 2025-04-04T16:53:18.506530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502117920271839:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:18.506892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001520/r3tmp/tmpnreUHJ/pdisk_1.dat 2025-04-04T16:53:22.489373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:22.498116Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:22.511339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:22.511430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:22.528654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12015, node 1 2025-04-04T16:53:23.325601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:23.325621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:23.325626Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:23.345076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:53:23.505834Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502117920271839:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:23.505880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:29758 TClient is connected to server localhost:29758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:35.254786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:36.341252Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
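
The KQP_WORKLOAD_SERVICE warnings that follow are the expected bootstrap sequence for the default resource pool at /Root/.metadata/workload_manager/pools/default: the pool fetcher reports NOT_FOUND, a TPoolCreatorActor proposes ESchemeOpCreateResourcePool, retries on "Transaction ... completed, doublechecking", and treats the TX_PROXY "path exist, request accepts it" error as success when a concurrent creator wins the race. A minimal, self-contained C++ sketch of that create-or-accept-existing pattern (CreatePoolStatus and ProposeCreateDefaultPool are hypothetical stand-ins for illustration, not the real schemeshard API):

    // Minimal, runnable sketch of the create-then-doublecheck pattern above.
    // CreatePoolStatus and ProposeCreateDefaultPool are hypothetical stand-ins.
    #include <chrono>
    #include <iostream>
    #include <thread>

    enum class CreatePoolStatus { Success, AlreadyExists, Doublecheck };

    // Stub that simulates losing the creation race: the first proposal asks the
    // caller to doublecheck, the second finds the pool created by someone else.
    CreatePoolStatus ProposeCreateDefaultPool() {
        static int calls = 0;
        return ++calls == 1 ? CreatePoolStatus::Doublecheck
                            : CreatePoolStatus::AlreadyExists;
    }

    bool EnsureDefaultPool(int maxAttempts = 10) {
        auto delay = std::chrono::milliseconds(50);
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            switch (ProposeCreateDefaultPool()) {
                case CreatePoolStatus::Success:
                    return true;  // we created the pool ourselves
                case CreatePoolStatus::AlreadyExists:
                    // "path exist, request accepts it": a concurrent creator won
                    // the race, which is just as good as creating it ourselves.
                    return true;
                case CreatePoolStatus::Doublecheck:
                    // "Transaction ... completed, doublechecking": back off and
                    // re-propose, as the TPoolCreatorActor scheduled retry does.
                    std::this_thread::sleep_for(delay);
                    delay *= 2;
                    break;
            }
        }
        return false;
    }

    int main() {
        std::cout << (EnsureDefaultPool() ? "pool ready\n" : "gave up\n");
    }
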
2025-04-04T16:53:36.360313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.697654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:37.387291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:37.503388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:37.503412Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:38.346897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:46.045673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502233884390662:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:46.801639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:48.988326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:49.279741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:49.387616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:49.536571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:50.326972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:50.440122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:51.222462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502259654195051:2512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:51.222736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:51.225895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502259654195062:2515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:51.290680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:51.405265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502259654195064:2516], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:51.512866Z node 1 :TX_PROXY ERROR: Actor# [1:7489502259654195126:3569] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:58.808930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:00.061326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:54:00.310152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T16:54:00.572956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:54:01.041206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7069, MsgBus: 3169 2025-04-04T16:54:02.706482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502306379079701:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001520/r3tmp/tmp0pKu0q/pdisk_1.dat 2025-04-04T16:54:02.981019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:03.078772Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:03.143449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:03.143538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:03.151438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7069, node 2 2025-04-04T16:54:03.339674Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:03.339698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:03.339705Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:03.339826Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3169 TClient is connected to server localhost:3169 WaitRootIsUp 'Root'... 
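
The read-session traces earlier in this output show the client reconnect loop: on INTERNAL_ERROR, TIMEOUT, or TRANSPORT_UNAVAILABLE the session logs "Reconnecting session to cluster cluster in ...s" with a small, non-monotonic delay (0.007308s, 0.204378s, 0.166000s, 0.221508s). The log does not spell out the retry policy; the sketch below assumes capped exponential backoff with full jitter, which is consistent with those delays:

    // Jittered, capped exponential backoff consistent with the reconnect delays
    // above; the actual SDK policy is not shown in this log, so the constants
    // and the "full jitter" choice here are assumptions.
    #include <algorithm>
    #include <chrono>
    #include <cmath>
    #include <cstdio>
    #include <random>

    std::chrono::duration<double> NextReconnectDelay(int attempt) {
        static std::mt19937 rng{std::random_device{}()};
        const double base = 0.005;  // assumed 5 ms initial delay
        const double cap = 30.0;    // assumed 30 s upper bound
        const double upper = std::min(cap, base * std::pow(2.0, attempt));
        std::uniform_real_distribution<double> jitter(0.0, upper);
        return std::chrono::duration<double>(jitter(rng));  // "full jitter"
    }

    int main() {
        for (int attempt = 0; attempt < 6; ++attempt)
            std::printf("attempt %d: wait %.6fs\n",
                        attempt, NextReconnectDelay(attempt).count());
    }
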
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Childre ... olId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.458799Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.459545Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502489604235305:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.463579Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:45.476575Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502489604235307:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:45.586055Z node 5 :TX_PROXY ERROR: Actor# [5:7489502489604235363:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:46.938967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:47.445108Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:54:47.877783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T16:54:48.378994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:54:49.033484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22314, MsgBus: 9663 2025-04-04T16:54:50.975244Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502513110535908:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:50.975294Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001520/r3tmp/tmpZo4pTu/pdisk_1.dat 2025-04-04T16:54:51.247424Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:51.251554Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:51.251708Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:51.254929Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22314, node 6 2025-04-04T16:54:51.370928Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:51.370953Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:51.370963Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:51.371100Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9663 TClient is connected to server localhost:9663 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:52.225568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.235276Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:52.256889Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.396550Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.663657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.869113Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:55.816351Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502534585374148:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:55.816501Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:55.896739Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.975864Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502513110535908:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:55.975930Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:56.011811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:56.090531Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:56.143183Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:56.202727Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:56.259839Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:56.361565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502538880341965:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:56.361795Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:56.362826Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502538880341970:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:56.368234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:56.383941Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502538880341973:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:56.484185Z node 6 :TX_PROXY ERROR: Actor# [6:7489502538880342028:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:58.208205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.774185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.133209Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.531631Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:55:00.600229Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 >> AsyncIndexChangeCollector::UpsertToSameKey >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 5080, MsgBus: 25469 2025-04-04T16:53:20.428417Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502124113538665:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:20.501027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00153a/r3tmp/tmpWBDiQQ/pdisk_1.dat 2025-04-04T16:53:22.462408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:22.779600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:22.782033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:22.782113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:22.820632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5080, node 1 2025-04-04T16:53:25.302496Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502124113538665:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:25.302825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:26.263364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:26.263383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:26.263389Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:26.263799Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25469 TClient is connected to server localhost:25469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:36.394969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:36.414990Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:53:37.667939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:37.667964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:43.882742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502222897787079:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:43.882870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:46.876518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:53:48.061076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:53:48.699899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:53:49.627965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480 2025-04-04T16:53:50.377258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-04T16:53:50.758199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:53:51.380790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480 2025-04-04T16:53:51.875673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-04T16:53:52.479421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480 2025-04-04T16:53:52.715968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480 2025-04-04T16:53:52.774211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480 2025-04-04T16:53:52.935003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480 2025-04-04T16:53:53.008490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:53.131506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480 2025-04-04T16:53:53.259189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480 2025-04-04T16:53:53.442968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480 2025-04-04T16:53:53.817217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480 2025-04-04T16:53:54.065108Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr0vj0977p0h2aahrs528egb", SessionId: ydb://session/3?node_id=1&id=M2UzODQyZTEtNTg0NzUzYmMtMzJmZWVkYmYtYTlmNDA3NWE=, Slow query, duration: 10.183685s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n --!syntax_v1\n\n CREATE TABLE `object_table`\n (\n object_id utf8,\n role utf8,\n id utf8 not NULL,\n primary key (id)\n );\n\n ALTER TABLE `object_table` ADD INDEX `object_id_index` GLOBAL ON (object_id);\n ALTER TABLE `object_table` ADD INDEX `role_index` GLOBAL ON (role);\n\n CREATE TABLE `role_table`\n (\n granted_by_role utf8,\n granted_role utf8,\n role_type utf8,\n role utf8,\n id utf8 not NULL,\n primary key (id)\n );\n\n ALTER TABLE `role_table` ADD INDEX `granted_by_role_index` GLOBAL ON (granted_by_role);\n ALTER TABLE `role_table` ADD INDEX `granted_role_index` GLOBAL ON (granted_role);\n ALTER TABLE `role_table` ADD INDEX `role_index` GLOBAL ON (role);\n\n CREATE TABLE `access_table`\n (\n endpoints utf8,\n name utf8,\n class utf8,\n type utf8,\n id utf8 not NULL,\n primary key (id)\n );\n\n ALTER TABLE `access_table` ADD INDEX `endpoints_index` GLOBAL ON (endpoints);\n ALTER TABLE `access_table` ADD INDEX `class_index` GLOBAL ON (class);\n ", parameters: 0b 2025-04-04T16:53:54.170395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502270142428780:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:54.170460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:54.596663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502270142429074:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:54.596769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:54.598399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502270142429079:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:54.619997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-04T16:53:54.737250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ... 93251Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:43.678428Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502480914068607:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:43.678558Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:43.761870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:43.822281Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:43.871238Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:43.924370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:44.007329Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:44.085270Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:44.198815Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502485209036429:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.198963Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.199376Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502485209036434:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.205023Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:44.221443Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502485209036436:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:54:44.297966Z node 5 :TX_PROXY ERROR: Actor# [5:7489502485209036491:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13192, MsgBus: 23770 2025-04-04T16:54:49.998390Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502508179217197:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:49.998872Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00153a/r3tmp/tmpeJ71mq/pdisk_1.dat 2025-04-04T16:54:50.347915Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:50.478939Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:50.479081Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:50.480791Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13192, node 6 2025-04-04T16:54:50.669298Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:50.669328Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:50.669339Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:50.669516Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23770 TClient is connected to server localhost:23770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:51.804246Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:51.813961Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:51.824007Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:51.929060Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.442120Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.599358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:54.993098Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502508179217197:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:55.120929Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:56.695819Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502538243990037:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:56.695943Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:56.806489Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:56.916808Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.055332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.135956Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.234115Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.328411Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.475572Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502542538957873:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:57.475668Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:57.475853Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502542538957878:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:57.481107Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:57.508385Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502542538957880:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:57.598573Z node 6 :TX_PROXY ERROR: Actor# [6:7489502542538957941:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> AsyncIndexChangeCollector::DeleteNothing >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD] >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30466, MsgBus: 21511 2025-04-04T16:53:17.226912Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502109961984707:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:17.456266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00154a/r3tmp/tmpkGnod7/pdisk_1.dat 2025-04-04T16:53:20.467099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:21.482354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:21.975569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:21.975692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:21.985341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:22.083137Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502109961984707:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:22.083323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:22.148075Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30466, node 1 2025-04-04T16:53:22.177874Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:53:22.187396Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:53:22.482901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:22.482944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:22.482952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:22.483065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21511 TClient is connected to server localhost:21511 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:29.060883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:29.891708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:31.936918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.793704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.871232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:36.902365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502195861332326:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.902477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:37.102748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:37.102770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:38.358747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.486596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.685764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.903276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:39.171092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:39.513802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:40.892035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502213041202095:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:40.892093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:40.894648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502213041202100:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:40.933823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:41.131485Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:53:41.131792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502213041202102:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:41.205454Z node 1 :TX_PROXY ERROR: Actor# [1:7489502217336169453:3529] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:51.011086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:53:52.712627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:53:53.117846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T16:53:53.935397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:53:57.430070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28307, MsgBus: 12440 2025-04-04T16:54:01.205778Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502301302401260:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:01.205831Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00154a/r3tmp/tmpomiu5r/pdisk_1.dat 2025-04-04T16:54:01.465128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:01.465227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:01.469098Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:01.486211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28307, node 2 2025-04-04T16:54:01.589824Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:01.589858Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:01.589867Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:01.589995Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12440 TClient is connected to server localhost:12440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ... 
: [5:7489502486904000482:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.086508Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.090839Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502486904000487:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.094632Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:44.106964Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502486904000489:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:44.201669Z node 5 :TX_PROXY ERROR: Actor# [5:7489502486904000544:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:45.713749Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:46.157458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:54:46.433453Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T16:54:46.755407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:54:47.943969Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15879, MsgBus: 24944 2025-04-04T16:54:51.843333Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502517486797520:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:51.843402Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00154a/r3tmp/tmphoqAPO/pdisk_1.dat 2025-04-04T16:54:52.064494Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:52.081174Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:52.081307Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:52.082777Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15879, node 6 2025-04-04T16:54:52.274905Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:52.274933Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:52.274944Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:52.275097Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24944 TClient is connected to server localhost:24944 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:53.343240Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.371827Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.486700Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.735114Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.877568Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:56.848990Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502517486797520:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:56.849076Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:58.392558Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502547551570356:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.392694Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.465134Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.605733Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.717745Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.815168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.900135Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.008594Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.292430Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502551846538203:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.292571Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.293459Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502551846538208:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.300242Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:59.345954Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502551846538210:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:59.447129Z node 6 :TX_PROXY ERROR: Actor# [6:7489502551846538271:3473] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":6,"FinishTimeMs":1743785702445,"TaskId":1,"Host":"ghrun-mup5cka3jq","ComputeTimeUs":147}],"CpuTimeUs":844}],"UseLlvm":"undefined","Tasks":1,"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Join2"}],"BaseTimeMs":1743785702445,"NodesScanShards":[],"CpuTimeUs":{"Count":1,"Sum":844,"Max":844,"Min":844}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":528017,"CpuTimeUs":518156},"ProcessCpuTimeUs":2339,"TotalDurationUs":545373,"ResourcePoolId":"default","QueuedTimeUs":722},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":0.844,"A-Cpu":0.844,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} >> CdcStreamChangeCollector::InsertSingleRow >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> BindQueue::Basic >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> KqpProxy::NodeDisconnectedTest [GOOD] >> TLocksTest::Range_IncorrectDot2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5418, MsgBus: 64613 2025-04-04T16:53:14.464160Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502099094999347:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:14.464573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001548/r3tmp/tmpROlb7D/pdisk_1.dat 2025-04-04T16:53:15.205841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:15.205953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:15.241419Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:15.288651Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5418, node 1 2025-04-04T16:53:15.510701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:15.510719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:15.510729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:15.510850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64613 TClient is connected to server localhost:64613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:16.377494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:16.394714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:53:16.409677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:16.633749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:16.856652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:16.945652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:53:19.458777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502099094999347:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:19.458831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:20.806604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502124864804773:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:20.806712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:21.009918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:21.088172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:21.231758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:21.339426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:21.462607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:21.806228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:22.021257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502133454739924:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:22.021357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:22.021799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502133454739929:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:22.026736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:22.049429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502133454739931:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:22.145731Z node 1 :TX_PROXY ERROR: Actor# [1:7489502133454739997:3482] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27729, MsgBus: 29203 2025-04-04T16:53:33.069294Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502180996281447:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:33.069490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001548/r3tmp/tmpM7i48W/pdisk_1.dat 2025-04-04T16:53:34.547538Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:34.609680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:34.631604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:34.631678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:34.633664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27729, node 2 2025-04-04T16:53:36.414020Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:36.414048Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:36.414054Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:36.414161Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29203 TClient is connected to server localhost:29203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:36.876047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:53:36.914325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:36.989947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:37.423570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... 59824Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:44.671755Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502486822827584:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.672248Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:44.700636Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:44.803407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:44.882838Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:44.977808Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:45.101088Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:45.205367Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:45.387580Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502491117795414:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.387692Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.388107Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502491117795419:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.393317Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:45.415198Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502491117795421:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:45.496566Z node 6 :TX_PROXY ERROR: Actor# [6:7489502491117795478:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12350, MsgBus: 25756 2025-04-04T16:54:49.951938Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502505546095738:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:49.951994Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001548/r3tmp/tmpOTUUFs/pdisk_1.dat 2025-04-04T16:54:50.405915Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:50.419527Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:50.419725Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:50.421951Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12350, node 7 2025-04-04T16:54:50.640104Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:50.640133Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:50.640145Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:50.640346Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25756 TClient is connected to server localhost:25756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:52.220930Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:52.234877Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:52.245336Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.445485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:52.991835Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:53.152139Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:54.956488Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489502505546095738:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:54.956566Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:58.163085Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502544200803154:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.163203Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.320557Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.454757Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.567421Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.711534Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.817970Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.354642Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.854479Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502548495771014:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.854651Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.854943Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502548495771019:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.864083Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:59.918431Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489502548495771021:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:59.996685Z node 7 :TX_PROXY ERROR: Actor# [7:7489502548495771078:3489] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] Test command err: 2025-04-04T16:54:47.113008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502499849657148:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:47.127178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162f/r3tmp/tmpaiwy0b/pdisk_1.dat 2025-04-04T16:54:48.294525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:48.330552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:48.332972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:48.333065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:48.341284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25890 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:49.447232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:52.118410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502499849657148:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:52.118510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:52.431744Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T16:54:52.433070Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T16:54:52.441817Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NjkzNjgxMzYtZmFmMmI3YWYtMjU3ZTIxNC04MmQ0N2VmNg==, workerId: [1:7489502521324494171:2311], database: , longSession: 0, local sessions count: 1 2025-04-04T16:54:52.441886Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T16:54:52.442131Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NjkzNjgxMzYtZmFmMmI3YWYtMjU3ZTIxNC04MmQ0N2VmNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7489502521324494171:2311] 2025-04-04T16:54:52.442152Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-04-04T16:54:52.442210Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T16:54:52.446921Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T16:54:52.446980Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T16:54:52.450387Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T16:54:52.454032Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjkzNjgxMzYtZmFmMmI3YWYtMjU3ZTIxNC04MmQ0N2VmNg==, ActorId: [1:7489502521324494171:2311], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-04-04T16:54:52.454727Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:52.454803Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:52.454847Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:52.454879Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:52.454927Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-04T16:54:52.455625Z node 1 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=1&id=NjkzNjgxMzYtZmFmMmI3YWYtMjU3ZTIxNC04MmQ0N2VmNg== status: TIMEOUT round: 0 2025-04-04T16:54:52.455713Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T16:54:52.455779Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7489502508439592243:2288], selfId: [1:7489502499849657319:2278], source: [1:7489502521324494171:2311] 2025-04-04T16:54:52.456904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502521324494173:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.457011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.506612Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-04T16:54:52.506638Z node 1 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 2 2025-04-04T16:55:02.211456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:02.211643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:02.211705Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:55:02.214095Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:02.214516Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:02.214731Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162f/r3tmp/tmpkgP6KW/pdisk_1.dat 2025-04-04T16:55:02.657942Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5462 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1140:2687] 2025-04-04T16:55:03.141558Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=M2MwN2ZhYTYtOGEzMTUxMjQtNWIxMDM1OGQtMTk3NTM3MGI=, workerId: [3:1141:2374], database: , longSession: 1, local sessions count: 1 2025-04-04T16:55:03.141756Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=M2MwN2ZhYTYtOGEzMTUxMjQtNWIxMDM1OGQtMTk3NTM3MGI= 2025-04-04T16:55:03.151229Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=M2MwN2ZhYTYtOGEzMTUxMjQtNWIxMDM1OGQtMTk3NTM3MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2025-04-04T16:55:03.151316Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:03.152065Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=M2MwN2ZhYTYtOGEzMTUxMjQtNWIxMDM1OGQtMTk3NTM3MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1141:2374] 2025-04-04T16:55:03.152130Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:03.640452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1142:2688], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:03.640734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:03.641195Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1144:2375], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:03.641375Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:03.691145Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(3) 2025-04-04T16:55:03.691263Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 3 sessionId: ydb://session/3?node_id=3&id=M2MwN2ZhYTYtOGEzMTUxMjQtNWIxMDM1OGQtMTk3NTM3MGI= status: TIMEOUT round: 0 2025-04-04T16:55:03.691396Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-04-04T16:55:03.691429Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=3&id=M2MwN2ZhYTYtOGEzMTUxMjQtNWIxMDM1OGQtMTk3NTM3MGI= status: TIMEOUT round: 0 2025-04-04T16:55:03.691655Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2MwN2ZhYTYtOGEzMTUxMjQtNWIxMDM1OGQtMTk3NTM3MGI=, ActorId: [3:1141:2374], ActorState: ExecuteState, TraceId: 01jr0vmdqg212v1xd9t7nvnbnj, Create QueryResponse for e ... DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZDZlYTIzMzAtZGFkYmI2ZDQtZmI5NGYzMGUtZmQ3ZjgxZWQ=, workerId: [3:1410:2517], database: , longSession: 1, local sessions count: 56 2025-04-04T16:55:06.046219Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T16:55:06.051389Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 57, sender: [2:1140:2687], trace_id: 2025-04-04T16:55:06.051598Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 57 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:06.062951Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(57) 2025-04-04T16:55:06.063033Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 57 sessionId: ydb://session/3?node_id=3&id=ZDZlYTIzMzAtZGFkYmI2ZDQtZmI5NGYzMGUtZmQ3ZjgxZWQ= status: TIMEOUT round: 0 2025-04-04T16:55:06.063167Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 57, sender: [2:1140:2687], selfId: [2:204:2170], source: [2:204:2170] 2025-04-04T16:55:06.064973Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=NDQ2MzAxN2QtYjJjMjFhMTYtNzk3MDk0OTQtZDk3YWFjM2Q=, workerId: [3:1411:2518], database: , longSession: 1, local sessions count: 57 2025-04-04T16:55:06.065127Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=NDQ2MzAxN2QtYjJjMjFhMTYtNzk3MDk0OTQtZDk3YWFjM2Q= 2025-04-04T16:55:06.069680Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NDQ2MzAxN2QtYjJjMjFhMTYtNzk3MDk0OTQtZDk3YWFjM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 58, targetId: [3:8678280833929343339:121] 2025-04-04T16:55:06.069757Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 58 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:06.070264Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=NDQ2MzAxN2QtYjJjMjFhMTYtNzk3MDk0OTQtZDk3YWFjM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 87, targetId: [3:1411:2518] 2025-04-04T16:55:06.070329Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 87 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:06.071969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1412:2750], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.072072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.104053Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1413:2519], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.104325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.119055Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(87) 2025-04-04T16:55:06.119152Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 87 sessionId: ydb://session/3?node_id=3&id=NDQ2MzAxN2QtYjJjMjFhMTYtNzk3MDk0OTQtZDk3YWFjM2Q= status: TIMEOUT round: 0 2025-04-04T16:55:06.119250Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(58) 2025-04-04T16:55:06.119295Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 58 sessionId: ydb://session/3?node_id=3&id=NDQ2MzAxN2QtYjJjMjFhMTYtNzk3MDk0OTQtZDk3YWFjM2Q= status: TIMEOUT round: 0 2025-04-04T16:55:06.119441Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDQ2MzAxN2QtYjJjMjFhMTYtNzk3MDk0OTQtZDk3YWFjM2Q=, ActorId: [3:1411:2518], ActorState: ExecuteState, TraceId: 01jr0vmgjp84e0c3zqgv2wzq0f, Create QueryResponse for error on request, msg: 2025-04-04T16:55:06.119584Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 58, sender: [2:1140:2687], selfId: [2:204:2170], source: [2:204:2170] 2025-04-04T16:55:06.121739Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 87, sender: [2:204:2170], selfId: [3:234:2126], source: [3:1411:2518] 2025-04-04T16:55:06.121942Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 58 2025-04-04T16:55:06.124298Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YjBjNjA3NGQtZDAxNTIwYjItZjQ2NWQ0YTEtNDc5OWU4NjY=, workerId: [3:1418:2522], database: , longSession: 1, local sessions count: 58 2025-04-04T16:55:06.124456Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T16:55:06.124867Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 59, sender: [2:1140:2687], trace_id: 2025-04-04T16:55:06.125006Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 59 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:06.125144Z node 3 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=3&id=YjBjNjA3NGQtZDAxNTIwYjItZjQ2NWQ0YTEtNDc5OWU4NjY=, rpc ctrl: [0:0:0], sameNode: 0, trace_id: 2025-04-04T16:55:06.125320Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 59, sender: [2:1140:2687], selfId: [2:204:2170], source: [3:234:2126] 2025-04-04T16:55:06.126985Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZTAyNzA5MDgtZmI3OTRmYS1lZjFiYjM2NC0zNjIxZWJkZg==, workerId: [3:1419:2523], database: , longSession: 1, local sessions count: 59 2025-04-04T16:55:06.127131Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=ZTAyNzA5MDgtZmI3OTRmYS1lZjFiYjM2NC0zNjIxZWJkZg== 2025-04-04T16:55:06.127656Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZTAyNzA5MDgtZmI3OTRmYS1lZjFiYjM2NC0zNjIxZWJkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 60, targetId: [3:8678280833929343339:121] 2025-04-04T16:55:06.127714Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 60 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:06.132375Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZTAyNzA5MDgtZmI3OTRmYS1lZjFiYjM2NC0zNjIxZWJkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 90, targetId: [3:1419:2523] 2025-04-04T16:55:06.132428Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 90 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:06.134062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1420:2752], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.134264Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.223249Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1422:2524], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.223502Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.240409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:06.240563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:06.263552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:06.263697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:06.284157Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T16:55:06.284864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:06.285296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:06.298783Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(90) 2025-04-04T16:55:06.298869Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 90 sessionId: ydb://session/3?node_id=3&id=ZTAyNzA5MDgtZmI3OTRmYS1lZjFiYjM2NC0zNjIxZWJkZg== status: TIMEOUT round: 0 2025-04-04T16:55:06.298966Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(59) 2025-04-04T16:55:06.298991Z node 2 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 59 2025-04-04T16:55:06.299102Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTAyNzA5MDgtZmI3OTRmYS1lZjFiYjM2NC0zNjIxZWJkZg==, ActorId: [3:1419:2523], ActorState: ExecuteState, TraceId: 01jr0vmgmm1ck1bs53pnstdgsp, Create QueryResponse for error on request, msg: 2025-04-04T16:55:06.301073Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(60) 2025-04-04T16:55:06.301137Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 60 sessionId: ydb://session/3?node_id=3&id=ZTAyNzA5MDgtZmI3OTRmYS1lZjFiYjM2NC0zNjIxZWJkZg== status: TIMEOUT round: 0 2025-04-04T16:55:06.301270Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 60, sender: [2:1140:2687], selfId: [2:204:2170], source: [2:204:2170] 2025-04-04T16:55:06.301497Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 90, sender: [2:204:2170], selfId: [3:234:2126], source: [3:1419:2523] 2025-04-04T16:55:06.301676Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 60 2025-04-04T16:55:06.303925Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YWE1MzcwZTktODEyZDYzMzctNzUxNjI1MTUtZmUzYTdhNDY=, workerId: [3:1441:2527], database: , longSession: 1, local sessions count: 60 2025-04-04T16:55:06.304070Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T16:55:06.304512Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 61, sender: [2:1140:2687], trace_id: 2025-04-04T16:55:06.304632Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 61 timeout: 0.001000s actor id: [0:0:0] 2025-04-04T16:55:06.331516Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(61) 2025-04-04T16:55:06.331630Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 61 sessionId: ydb://session/3?node_id=3&id=YWE1MzcwZTktODEyZDYzMzctNzUxNjI1MTUtZmUzYTdhNDY= status: TIMEOUT round: 0 2025-04-04T16:55:06.331763Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 61, sender: [2:1140:2687], selfId: [2:204:2170], source: 
[2:204:2170] >> KqpSort::TopParameterFilter [GOOD] >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> TBlobStorageWardenTest::TestHttpMonPage >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2025-04-04T16:55:08.243828Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:08.252044Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:08.253596Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:08.253689Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:08.255067Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:08.255216Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d61/r3tmp/tmpA3uWwT/pdisk_1.dat 2025-04-04T16:55:09.209903Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:477:2459] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1300:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:09.210080Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1300:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:09.210122Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1300:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:09.210151Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1300:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:09.210177Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1300:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:09.210221Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1300:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:09.210267Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1300:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:09.210309Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1300:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:09.210387Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement 
record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1300:1] Marker# BPG33 2025-04-04T16:55:09.210433Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1300:1] Marker# BPG32 2025-04-04T16:55:09.210477Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1300:2] Marker# BPG33 2025-04-04T16:55:09.210501Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1300:2] Marker# BPG32 2025-04-04T16:55:09.210531Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1300:3] Marker# BPG33 2025-04-04T16:55:09.210558Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1300:3] Marker# BPG32 2025-04-04T16:55:09.210762Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:45:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1300:3] FDS# 1300 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:09.210870Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:38:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1300:2] FDS# 1300 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:09.210918Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:59:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1300:1] FDS# 1300 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:09.223440Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1300:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90236 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:09.223691Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1300:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90236 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-04T16:55:09.223807Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1300:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90236 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:09.223886Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1300:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-04T16:55:09.223961Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1300:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:09.224162Z node 1 
:BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.214 sample PartId# [72057594037932033:2:8:0:0:1300:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.215 sample PartId# [72057594037932033:2:8:0:0:1300:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.215 sample PartId# [72057594037932033:2:8:0:0:1300:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 13.792 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 14.005 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 14.108 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:09.258633Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2025-04-04T16:55:09.260852Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2025-04-04T16:55:09.261196Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2025-04-04T16:55:09.261369Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID"} Marker# DSP31
>> SystemView::QueryStatsAllTables [GOOD]
>> KqpNewEngine::LookupColumns [GOOD]
>> SystemView::QueryStatsRetries
>> TBlobStorageWardenTest::TestSendUsefulMonitoring
>> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD]
Test command err:
2025-04-04T16:53:44.114816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502228344473539:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:44.207335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eab/r3tmp/tmppCquIa/pdisk_1.dat
2025-04-04T16:53:49.046779Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502228344473539:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:49.046838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:53:52.766951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:52.766977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:53.826542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:53.967293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:55.505811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:55.505841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:56.095155Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:53:56.372700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:53:56.372779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:53:56.416207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:22007
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:00.261438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.290435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.304581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:00.309730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.625245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:00.693300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eab/r3tmp/tmp5kHWRx/pdisk_1.dat 2025-04-04T16:54:04.777568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:04.922607Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:04.987040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:04.987115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:04.995386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17277 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:05.335774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:05.350814Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:05.378220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:05.472724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:05.544395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:11.031301Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489502342285914065:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:11.031336Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eab/r3tmp/tmpkFojlP/pdisk_1.dat 2025-04-04T16:54:11.283747Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:11.312081Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:11.312177Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:11.319320Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26103 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:11.720837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:11.735694Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:11.758607Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:11.770163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:11.964568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:12.077787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:17.955128Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489502371599098579:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eab/r3tmp/tmpqFj4u2/pdisk_1.dat ... 1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T16:54:39.111427Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:54:39.150538Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:39.159212Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:39.381333Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:39.481549Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:44.969620Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489502484512693763:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:45.003508Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eab/r3tmp/tmpXLvB0N/pdisk_1.dat 2025-04-04T16:54:45.269231Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:45.356497Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:45.356610Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:45.367618Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6857 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:45.766272Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:45.782951Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:45.816155Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:45.834347Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:45.947051Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:46.071520Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eab/r3tmp/tmpziiRIH/pdisk_1.dat 2025-04-04T16:54:53.941726Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:54.028627Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:54.057525Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:54.057631Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:54.062595Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4905 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:54.444207Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:54.462457Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:54.501371Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:54:54.615094Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:54.712064Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:55:01.512903Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489502559107545724:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:01.588934Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001eab/r3tmp/tmpcjbN2J/pdisk_1.dat 2025-04-04T16:55:01.842495Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:01.884283Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:01.884405Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:01.886438Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62838 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:55:02.349943Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.362931Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.384554Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.485764Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.604898Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> AsyncIndexChangeCollector::DeleteNothing [GOOD]
>> AsyncIndexChangeCollector::DeleteSingleRow
>> CdcStreamChangeCollector::UpsertToSameKey [GOOD]
>> CdcStreamChangeCollector::UpsertToSameKeyWithImages
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD]
Test command err:
2025-04-04T16:55:09.409024Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-04T16:55:09.411597Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-04T16:55:09.413490Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-04T16:55:09.413596Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-04T16:55:09.417490Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0
2025-04-04T16:55:09.420169Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0
tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d4a/r3tmp/tmpxaEdb3/pdisk_1.dat
2025-04-04T16:55:10.118011Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:542:2461] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1289:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13
2025-04-04T16:55:10.118192Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-04T16:55:10.118256Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51
2025-04-04T16:55:10.118286Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51
2025-04-04T16:55:10.118319Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-04T16:55:10.118349Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51
2025-04-04T16:55:10.118376Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1289:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51
2025-04-04T16:55:10.118422Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1289:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55
2025-04-04T16:55:10.118499Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id#
[72057594037932033:2:8:0:0:1289:1] Marker# BPG33 2025-04-04T16:55:10.118549Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1289:1] Marker# BPG32 2025-04-04T16:55:10.118595Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1289:2] Marker# BPG33 2025-04-04T16:55:10.118642Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1289:2] Marker# BPG32 2025-04-04T16:55:10.118674Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1289:3] Marker# BPG33 2025-04-04T16:55:10.118705Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1289:3] Marker# BPG32 2025-04-04T16:55:10.118903Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:64:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1289:3] FDS# 1289 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:10.118982Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:57:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1289:2] FDS# 1289 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:10.119031Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:78:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1289:1] FDS# 1289 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:10.124232Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1289:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90149 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:10.124541Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1289:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90149 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-04T16:55:10.124658Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1289:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90149 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:10.124744Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1289:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-04T16:55:10.124807Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1289:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:10.125027Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history 
GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.219 sample PartId# [72057594037932033:2:8:0:0:1289:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.219 sample PartId# [72057594037932033:2:8:0:0:1289:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.22 sample PartId# [72057594037932033:2:8:0:0:1289:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 6.493 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 6.737 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 6.851 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:10.231038Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:587:2498] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:10.231217Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:10.231265Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:10.231295Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:10.231325Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:10.231351Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:10.231376Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:10.231416Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:10.231491Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-04-04T16:55:10.231537Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-04-04T16:55:10.231582Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-04-04T16:55:10.231609Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-04-04T16:55:10.231639Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-04-04T16:55:10.231683Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-04-04T16:55:10.231863Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:57:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS 
ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:10.231935Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:78:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:10.231984Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:71:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:10.239835Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:10.240078Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-04T16:55:10.240170Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000 ... ituation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:10.304353Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:10.304413Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-04T16:55:10.304451Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-04T16:55:10.304577Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:592:2502] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:10.324018Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-04T16:55:10.324170Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-04T16:55:10.324236Z node 1 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:10.324371Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# 
TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.648 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 20.14 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:10.324982Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:55:10.325037Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-04T16:55:10.325139Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-04T16:55:10.325791Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/i5tq/001d4a/r3tmp/tmpxaEdb3//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-04T16:55:10.331075Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-04T16:55:10.331137Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:55:10.333254Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2104] Create Queue# [2:605:2105] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:10.333436Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2104] Create Queue# [2:606:2106] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:10.333564Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2104] Create Queue# [2:607:2107] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:10.333686Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2104] Create Queue# [2:608:2108] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:10.333806Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2104] Create Queue# [2:609:2109] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:10.333921Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2104] Create Queue# [2:610:2110] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:10.334056Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2104] Create Queue# [2:611:2111] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:10.334084Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:55:10.339663Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:10.339953Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:10.340021Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 
GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:10.340077Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:10.340263Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:10.340349Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:10.340419Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:10.340450Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-04T16:55:10.340484Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-04T16:55:10.340661Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [2:612:2112] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-04T16:55:10.340720Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-04T16:55:10.340894Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:605:2105] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 10931551296169177755 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-04T16:55:10.350957Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-04T16:55:10.351041Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-04T16:55:10.351447Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31
Sending TEvPut
2025-04-04T16:55:10.351659Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31
Sending TEvPut
2025-04-04T16:55:10.351994Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [1:613:2512] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13
2025-04-04T16:55:10.352154Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-04T16:55:10.352202Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55
2025-04-04T16:55:10.352262Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33
2025-04-04T16:55:10.352301Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32
2025-04-04T16:55:10.352437Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:592:2502] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0
2025-04-04T16:55:10.352712Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03
2025-04-04T16:55:10.353032Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01
2025-04-04T16:55:10.353130Z node 1 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12
2025-04-04T16:55:10.353190Z node 1 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21
2025-04-04T16:55:10.353311Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.628 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } ] }
2025-04-04T16:55:10.353699Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:605:2105] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD]
Test command err:
Trying to start YDB, gRPC: 22484, MsgBus: 29157
2025-04-04T16:53:20.825797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502127436414123:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:20.826103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001533/r3tmp/tmpQGyu4R/pdisk_1.dat
2025-04-04T16:53:22.503590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:23.489324Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:53:23.961446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:23.969142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:53:23.969234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:53:23.981034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22484, node 1
2025-04-04T16:53:25.826500Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502127436414123:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:25.826908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:53:26.564400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:53:26.564428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:53:26.564435Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:53:26.565055Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29157
TClient is connected to server localhost:29157
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:53:36.509509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:36.526797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:53:36.537713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:36.784034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:37.021111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:37.142136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:38.444875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:38.445197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:47.266560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502243400533016:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:47.266670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:50.515818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:50.819454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:51.017993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:51.117262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:51.232808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:51.740976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:52.397424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502264875370094:2514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:52.397584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:52.402948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502264875370101:2517], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:52.426011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:52.529674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502264875370104:2518], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:52.601387Z node 1 :TX_PROXY ERROR: Actor# [1:7489502264875370158:3581] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19324, MsgBus: 2454 2025-04-04T16:54:02.310304Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502307106985815:2190];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:02.310435Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001533/r3tmp/tmp1W95Eh/pdisk_1.dat 2025-04-04T16:54:02.645814Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:02.673164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:02.678624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:02.680967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19324, node 2 2025-04-04T16:54:02.943598Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:02.943628Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:02.943635Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:02.943755Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2454 TClient is connected to server localhost:2454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:04.245853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:04.276611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:0 ... 
SchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:45.264916Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:45.378737Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502490580075281:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.378854Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.379402Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502490580075286:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:45.383521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:45.402468Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502490580075288:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:45.501239Z node 5 :TX_PROXY ERROR: Actor# [5:7489502490580075343:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:47.377646Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.326421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:54:53.326466Z node 5 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 23246, MsgBus: 22468 2025-04-04T16:54:57.291482Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502540983638255:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001533/r3tmp/tmpbSbmib/pdisk_1.dat 2025-04-04T16:54:57.412738Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:57.705225Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:57.761633Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:57.761761Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:57.764402Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23246, node 6 2025-04-04T16:54:58.050821Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:58.050854Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:58.050867Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:58.051049Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22468 TClient is connected to server localhost:22468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:54:59.348149Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:59.370329Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:59.614110Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:00.039074Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:00.195828Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.274360Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502540983638255:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:02.274445Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:04.620209Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502571048410957:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:04.620345Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:04.729902Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:04.778121Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:04.868158Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:04.960154Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:05.041850Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:05.156057Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:05.333057Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502575343378790:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:05.333215Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:05.333542Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502575343378795:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:05.347929Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:05.375518Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502575343378797:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:05.432443Z node 6 :TX_PROXY ERROR: Actor# [6:7489502575343378850:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
(
(declare $limit (DataType 'Uint64))
(declare $value (DataType 'Int32))
(let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:2" '"" '1))
(let $2 '('"Key" '"Value1" '"Value2"))
(let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '()))
(let $4 (DataType 'Int32))
(let $5 (Min (Uint64 '"1001") $limit))
(let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4))))
(let $7 '('('"_logical_id" '497) '('"_id" '"ccc76a7f-6ddf1d3c-39a9f382-332c8d9c") '('"_wide_channels" $6)))
(let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7))
(let $9 (DqCnUnionAll (TDqOutput $8 '"0")))
(let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '510) '('"_id" '"1f89a3b7-9e394b8-2ec724e0-ab115c6a"))))
(let $11 (DqCnResult (TDqOutput $10 '"0") '()))
(return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query"))))
)
>> TBlobStorageWardenTest::TestDeleteStoragePool
>> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD]
>> AsyncIndexChangeCollector::UpsertToSameKey [GOOD]
>> AsyncIndexChangeCollector::UpsertWithoutIndexedValue
>> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD]
>> KqpNewEngine::DeleteByKey [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LookupColumns [GOOD]
Test command err: Trying to start YDB, gRPC: 10981, MsgBus: 62342 2025-04-04T16:53:05.461489Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502062764809858:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:05.510632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:53:10.609150Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502062764809858:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:10.609240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001575/r3tmp/tmpw7Itso/pdisk_1.dat 2025-04-04T16:53:11.282635Z node 1 :METADATA_PROVIDER ERROR:
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:11.672247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:12.774274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:12.774303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:13.782400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:13.782434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:14.294224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:14.298887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:14.331903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:15.222131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:15.239888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:15.239914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 10981, node 1 2025-04-04T16:53:16.935868Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:16.935889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:16.935895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:16.936005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62342 TClient is connected to server localhost:62342 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:22.648972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:22.684122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:53:22.735810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:24.000995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:25.754689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:27.235377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:29.827069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:29.827095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:34.621461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502187318863301:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:34.626531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:35.779218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:35.941770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.196332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.357368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.434701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.524441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.629070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502195908798443:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.629160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.629545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502195908798448:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.633722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:36.653372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502195908798450:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:36.753410Z node 1 :TX_PROXY ERROR: Actor# [1:7489502195908798507:3529] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29681, MsgBus: 14111 2025-04-04T16:53:50.698445Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502255500103318:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:50.698530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001575/r3tmp/tmpTT0BUA/pdisk_1.dat 2025-04-04T16:53:51.883756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:52.906432Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:53.039759Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:53.120527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:53.120609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:53.125911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29681, node 2 2025-04-04T16:53:55.112415Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:55.112438Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:55.112443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T1 ... .150483Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:52.008099Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502521033649410:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.008241Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.140875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.187441Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.315732Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.394768Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.493918Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.627779Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:52.768917Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502521033649945:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.769031Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.769472Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502521033649950:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:52.775205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:52.808039Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502521033649952:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:52.865972Z node 6 :TX_PROXY ERROR: Actor# [6:7489502521033650006:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7420, MsgBus: 16346 2025-04-04T16:54:57.657778Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502542699858482:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:57.657888Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001575/r3tmp/tmpprBD4m/pdisk_1.dat 2025-04-04T16:54:57.996605Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:58.059174Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:58.059317Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:58.062092Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7420, node 7 2025-04-04T16:54:58.238408Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:58.238445Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:58.238455Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:58.238688Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16346 TClient is connected to server localhost:16346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:59.459890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:59.482552Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:54:59.501778Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:59.695790Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:00.335993Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:00.587945Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.658492Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489502542699858482:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:02.658597Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:05.950413Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502577059598642:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:05.950536Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.035984Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.085679Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.174814Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.260233Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.326725Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.387796Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.512351Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502581354566470:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.512475Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.512929Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502581354566475:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.522377Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:06.539302Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489502581354566477:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:55:06.636265Z node 7 :TX_PROXY ERROR: Actor# [7:7489502581354566532:3467] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> AsyncIndexChangeCollector::UpsertSingleRow [GOOD]
>> AsyncIndexChangeCollector::UpsertManyRows
>> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD]
>> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings
>> TBlobStorageWardenTest::TestHttpMonPage [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD]
Test command err: 2025-04-04T16:55:10.200148Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.203475Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.205212Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.205356Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.210974Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.215017Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d48/r3tmp/tmpEJAyoC/pdisk_1.dat Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2025-04-04T16:55:10.993744Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001d48/r3tmp/tmpRDrSFO//new_pdisk.dat": no such file. PDiskId# 1001 2025-04-04T16:55:10.994636Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001d48/r3tmp/tmpRDrSFO//new_pdisk.dat": no such file.
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001d48/r3tmp/tmpRDrSFO//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18295376670569302561 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1001 2025-04-04T16:55:11.019082Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] bootstrap ActorId# [1:541:2460] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:356:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:11.019211Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:356:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:11.019246Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:356:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:11.019268Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:356:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:11.019293Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:356:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:11.019319Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:356:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:11.019342Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:356:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:11.019378Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:356:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:11.019438Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:356:1] Marker# BPG33 2025-04-04T16:55:11.019495Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:356:1] Marker# BPG32 2025-04-04T16:55:11.019528Z node 1 :BS_PROXY_PUT DEBUG: 
[e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:356:2] Marker# BPG33 2025-04-04T16:55:11.019547Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:356:2] Marker# BPG32 2025-04-04T16:55:11.019569Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:356:3] Marker# BPG33 2025-04-04T16:55:11.019586Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:356:3] Marker# BPG32 2025-04-04T16:55:11.019704Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:64:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:356:3] FDS# 356 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:11.019751Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:57:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:356:2] FDS# 356 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:11.019789Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:78:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:356:1] FDS# 356 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:11.025969Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:356:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:11.026198Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:356:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-04T16:55:11.026284Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:356:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:11.026359Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:356:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-04T16:55:11.026435Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:356:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:11.026640Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.868 sample PartId# [72057594037932033:2:8:0:0:356:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.868 sample PartId# 
[72057594037932033:2:8:0:0:356:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.868 sample PartId# [72057594037932033:2:8:0:0:356:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 7.114 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 7.272 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 7.364 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] }
|76.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
>> CdcStreamChangeCollector::UpsertManyRows [GOOD]
>> CdcStreamChangeCollector::UpsertIntoTwoStreams
>> VectorIndexBuildTest::BaseCase [GOOD]
>> CdcStreamChangeCollector::InsertSingleRow [GOOD]
>> CdcStreamChangeCollector::InsertSingleUuidRow
>> SystemView::PartitionStatsTtlFields [GOOD]
>> SystemView::PartitionStatsFields
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD]
Test command err: 2025-04-04T16:55:11.345826Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:11.348034Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:11.349453Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:11.349540Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:11.353039Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:11.355431Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d35/r3tmp/tmppOZQ51/pdisk_1.dat 2025-04-04T16:55:12.071111Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:542:2461] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:12.071293Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.071339Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.071371Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.071401Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.071430Z node 1 :BS_PROXY_PUT DEBUG:
[5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.071459Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.071502Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:12.071579Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG33 2025-04-04T16:55:12.071629Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG32 2025-04-04T16:55:12.071673Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG33 2025-04-04T16:55:12.071703Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG32 2025-04-04T16:55:12.071735Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG33 2025-04-04T16:55:12.071766Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG32 2025-04-04T16:55:12.071966Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:64:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.072047Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:57:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.072101Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:78:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.079906Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:12.080174Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:12.080273Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# 
[2000000:1:0:1:0] Marker# BPP01 2025-04-04T16:55:12.080360Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-04T16:55:12.080427Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:12.080674Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.2 sample PartId# [72057594037932033:2:8:0:0:1294:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.201 sample PartId# [72057594037932033:2:8:0:0:1294:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.201 sample PartId# [72057594037932033:2:8:0:0:1294:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 9.056 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 9.276 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 9.371 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:12.183067Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:587:2498] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:12.183244Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.183295Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.183329Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.183357Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.183387Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.183416Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.183452Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:12.183531Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-04-04T16:55:12.183583Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-04-04T16:55:12.183632Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-04-04T16:55:12.183660Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] 
Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-04-04T16:55:12.183690Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-04-04T16:55:12.183721Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-04-04T16:55:12.183887Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:57:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.183962Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:78:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.184014Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:71:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.185903Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:12.186091Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-04T16:55:12.186184Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [20000 ... 
] Create Queue# [1:593:2503] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.236609Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:588:2499] Create Queue# [1:594:2504] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.236725Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:588:2499] Create Queue# [1:595:2505] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.236845Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:588:2499] Create Queue# [1:596:2506] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.236967Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:588:2499] Create Queue# [1:597:2507] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.237086Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:588:2499] Create Queue# [1:598:2508] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.237122Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:55:12.237924Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.238060Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.238138Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.238214Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.238327Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.238425Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.238484Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} 
Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.238512Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-04T16:55:12.238552Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-04T16:55:12.238622Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-04T16:55:12.239556Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] bootstrap ActorId# [1:599:2509] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:12.239696Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.239744Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:12.239806Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-04T16:55:12.239845Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-04T16:55:12.239970Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:592:2502] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.248915Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-04T16:55:12.249055Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-04T16:55:12.249115Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:12.249248Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.584 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 9.574 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:12.249822Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:55:12.249875Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-04T16:55:12.250020Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# 
DSP17 2025-04-04T16:55:12.250698Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/i5tq/001d35/r3tmp/tmppOZQ51//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-04T16:55:12.251828Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-04T16:55:12.251884Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:55:12.254038Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:601:2104] Create Queue# [2:603:2105] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.254201Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:601:2104] Create Queue# [2:604:2106] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.258590Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:601:2104] Create Queue# [2:605:2107] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.258808Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:601:2104] Create Queue# [2:606:2108] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.258943Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:601:2104] Create Queue# [2:607:2109] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.259077Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:601:2104] Create Queue# [2:608:2110] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.259221Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:601:2104] Create Queue# [2:609:2111] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:12.259273Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:55:12.260577Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.260923Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.261003Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.261182Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.261239Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 
127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.261298Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.261356Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:12.261390Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-04T16:55:12.261425Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-04T16:55:12.261631Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:603:2105] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2025-04-04T16:55:10.864881Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.866355Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.875299Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.875871Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.876010Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:10.885730Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d3e/r3tmp/tmpdg6PAK/pdisk_1.dat 2025-04-04T16:55:12.169759Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d3e/r3tmp/tmpXvlzV6/pdisk_1.dat 2025-04-04T16:55:12.243563Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.243674Z node 2 
:BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.318045Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.318348Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.319102Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> SystemView::AuthEffectivePermissions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1867, MsgBus: 31721 2025-04-04T16:53:12.707746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502089668926379:2148];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00155f/r3tmp/tmpI80eCZ/pdisk_1.dat 2025-04-04T16:53:15.605712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:53:17.102476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:17.360321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:17.360428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:17.371385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:17.708691Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502089668926379:2148];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:17.708739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:17.728978Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1867, node 1 2025-04-04T16:53:17.817396Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:53:19.133359Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:19.153084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:19.153103Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:19.153544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31721 TClient is connected 
to server localhost:31721 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:22.459943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:22.479096Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:22.505038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:53:24.051853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:25.100514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:25.512864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.718444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:32.718466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:34.019296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502184158208631:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:34.019400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:34.308250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.354907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.413044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.492141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.543408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.618043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.953655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502184158209168:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:34.953717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:34.954071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502184158209174:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:34.976015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:35.178168Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:53:35.178405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502184158209176:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:35.287255Z node 1 :TX_PROXY ERROR: Actor# [1:7489502188453176563:3526] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:39.409473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:53:39.667598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T16:53:40.328702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15206, MsgBus: 17954 2025-04-04T16:53:59.781673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502293844002130:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:59.904733Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00155f/r3tmp/tmpO7jYnf/pdisk_1.dat 2025-04-04T16:54:00.265826Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.267162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.267260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.279796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15206, node 2 2025-04-04T16:54:00.493630Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:00.493654Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:00.493663Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:00.493802Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17954 TClient is connected to server localhost:17954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446 ... 
502524278848722:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.623684Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.678745Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.792720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.884633Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.937384Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.990114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.089116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.207936Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502528573816540:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.208072Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.208552Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489502528573816545:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.215953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:54.240240Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502528573816547:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:54.314805Z node 5 :TX_PROXY ERROR: Actor# [5:7489502528573816603:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:56.340164Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26394, MsgBus: 27836 2025-04-04T16:54:59.614983Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502549504139833:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00155f/r3tmp/tmpjuvafr/pdisk_1.dat 2025-04-04T16:54:59.814824Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:55:00.232940Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:00.238679Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:00.238836Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:00.242588Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26394, node 6 2025-04-04T16:55:00.460036Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:00.460070Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:00.460083Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:00.460295Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27836 TClient is connected to server localhost:27836 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:01.326235Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:01.359719Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:01.589300Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:01.989160Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.116566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:04.562768Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502549504139833:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:04.562849Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:06.119683Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502579568912519:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.119804Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.188519Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.277116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.343956Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.488488Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.549784Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.618586Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.725283Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502579568913043:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.725414Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.726073Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502579568913048:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.730880Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:06.756076Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502579568913050:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:06.826321Z node 6 :TX_PROXY ERROR: Actor# [6:7489502579568913105:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:08.522713Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteByKey [GOOD] Test command err: Trying to start YDB, gRPC: 3888, MsgBus: 4292 2025-04-04T16:53:20.616280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502125749347025:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:20.616338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001535/r3tmp/tmpINMtRy/pdisk_1.dat 2025-04-04T16:53:22.440700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:22.616486Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3888, node 1 2025-04-04T16:53:22.697528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:22.710741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:22.774631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:24.018761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:24.018781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:24.018788Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:24.019815Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:53:25.630977Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502125749347025:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:25.645984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:4292 TClient is connected to server localhost:4292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:36.353065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:37.612121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:37.612449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:44.787174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502220238628294:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:44.869337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:45.133118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:53:46.623201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502237418497596:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:46.623284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:46.647102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502237418497601:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:46.680037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:53:46.739018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502237418497603:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:53:46.839712Z node 1 :TX_PROXY ERROR: Actor# [1:7489502237418497655:2468] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:55.851622Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 15594, MsgBus: 7045 2025-04-04T16:53:59.949015Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502292512927131:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:59.949150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001535/r3tmp/tmpEqW4oZ/pdisk_1.dat 2025-04-04T16:54:00.459968Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.463288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.463388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.467669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15594, node 2 2025-04-04T16:54:00.701787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:00.701810Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:00.701819Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:00.701983Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7045 TClient is connected to server localhost:7045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:01.668502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:01.690857Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:54:01.701304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:01.835438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:54:02.396824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:54:02.598835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:04.948307Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502292512927131:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:04.948369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:06.153130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502322577699840:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:06.153251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:06.223679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:06.279370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:06.332898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboper ... se.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:54.700170Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502527853764210:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.700304Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:54.777427Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.888946Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:54.984006Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.079235Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.175300Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.335143Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.475666Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502532148732037:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:55.475804Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:55.476112Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502532148732042:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:55.481754Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:55.526417Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502532148732044:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:55.588095Z node 6 :TX_PROXY ERROR: Actor# [6:7489502532148732102:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:57.214402Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29823, MsgBus: 28088 2025-04-04T16:54:59.337317Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502552315576822:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001535/r3tmp/tmp2Yv7Rj/pdisk_1.dat 2025-04-04T16:54:59.524349Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:59.931224Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:00.007269Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:00.007397Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:00.031156Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29823, node 7 2025-04-04T16:55:00.249028Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:00.249056Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:00.249067Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:00.249247Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28088 TClient is connected to server localhost:28088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:01.554942Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:01.574529Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:01.724264Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.097114Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.246474Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:04.256885Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489502552315576822:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:04.256974Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:06.120732Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502582380349497:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.121378Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.229547Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.279795Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.344611Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.440251Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.506165Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.601806Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.783748Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502582380350026:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.783915Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.784532Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502582380350031:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.789104Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:06.813345Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489502582380350033:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:06.870160Z node 7 :TX_PROXY ERROR: Actor# [7:7489502582380350088:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> SystemView::AuthOwners_ResultOrder [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:54:47.022630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:54:47.022754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:47.022822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:54:47.022859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:54:47.022910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:54:47.022941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:54:47.023012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:54:47.023090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:54:47.023457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:54:47.319884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:54:47.319951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:47.372025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:54:47.372309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:54:47.372511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:54:47.408107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:54:47.408506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:54:47.409225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.410580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:54:47.422727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:47.424139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:47.424228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:47.424453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:54:47.424526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:47.424567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:54:47.424711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.443183Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:54:47.868314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:54:47.868610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.868897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:54:47.869156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:54:47.869218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.872033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.872185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:54:47.872433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.872489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:54:47.872519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:54:47.872564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:54:47.874834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.874888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-04-04T16:54:47.874923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:54:47.876791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.876843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.876883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:47.876955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.889480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:54:47.905924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:54:47.906158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:54:47.907297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:54:47.907464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:54:47.907530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:47.907815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:54:47.907881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:54:47.908088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:54:47.908187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:54:47.913579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:54:47.913640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:54:47.913846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:54:47.913885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:54:47.914234Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:54:47.914330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:54:47.914437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:47.914492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.914563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:54:47.914628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.914668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:54:47.914717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:54:47.914754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:54:47.914788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:54:47.914884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:54:47.914941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:54:47.914974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:54:47.916959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:47.917078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:54:47.917111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T16:55:12.567645Z node 1 :TX_DATASHARD INFO: 72075186233409568 Reporting state Offline to schemeshard 72075186233409561 2025-04-04T16:55:12.568027Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [1:4604:6263], Recipient [1:4615:6272]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T16:55:12.568541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409561, message: Source { RawX1: 4615 RawX2: 4294973568 } TabletId: 72075186233409568 State: 4 2025-04-04T16:55:12.568640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409568, state: Offline, at schemeshard: 72075186233409561 2025-04-04T16:55:12.569004Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [1:4936:6579], Recipient [1:4615:6272]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72075186233409561 Status: OK ServerId: [1:4937:6580] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T16:55:12.569046Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T16:55:12.575634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409561:8 hive 72057594037968897 at ss 72075186233409561 2025-04-04T16:55:12.575925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269552133, Sender [1:3350:5090], Recipient [1:4615:6272]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72075186233409561 State: 4 2025-04-04T16:55:12.575970Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-04-04T16:55:12.576004Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409568 state Offline 2025-04-04T16:55:12.576378Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:4936:6579], Recipient [1:4615:6272]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409561 ClientId: [1:4936:6579] ServerId: [1:4937:6580] } 2025-04-04T16:55:12.576414Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T16:55:12.576849Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409561 ShardLocalIdx: 8 TxId_Deprecated: 8 TabletID: 72075186233409568 2025-04-04T16:55:12.577193Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:4604:6263], Recipient [1:4615:6272]: NKikimr::TEvTablet::TEvTabletDead 2025-04-04T16:55:12.577467Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409568 2025-04-04T16:55:12.577601Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409568 2025-04-04T16:55:12.579110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72075186233409561 ShardLocalIdx: 8, at schemeshard: 72075186233409561 2025-04-04T16:55:12.579426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 7] was 1 Forgetting tablet 72075186233409568 2025-04-04T16:55:12.585329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409561 2025-04-04T16:55:12.585403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409561, LocalPathId: 7], at schemeshard: 72075186233409561 
2025-04-04T16:55:12.585502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 4 2025-04-04T16:55:12.591635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409561:8 2025-04-04T16:55:12.591711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:8 tabletId 72075186233409568 2025-04-04T16:55:12.593586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409561 2025-04-04T16:55:12.662145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1900, transactions count in step: 1, at schemeshard: 72075186233409561 2025-04-04T16:55:12.662341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735762 AckTo { RawX1: 0 RawX2: 0 } } Step: 1900 MediatorID: 72075186233409563 TabletID: 72075186233409561, at schemeshard: 72075186233409561 2025-04-04T16:55:12.662417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDropLock TPropose opId# 281474976735762:0 HandleReply TEvOperationPlan: step# 1900 2025-04-04T16:55:12.662461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735762:0 128 -> 240 2025-04-04T16:55:12.671575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735762:0, at schemeshard: 72075186233409561 2025-04-04T16:55:12.671644Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDone opId# 281474976735762:0 ProgressState 2025-04-04T16:55:12.671717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-04-04T16:55:12.671743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-04T16:55:12.671767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-04-04T16:55:12.671786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-04T16:55:12.671815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735762, ready parts: 1/1, is published: true 2025-04-04T16:55:12.671875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3350:5090] message: TxId: 281474976735762 2025-04-04T16:55:12.671905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-04-04T16:55:12.671928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735762:0 2025-04-04T16:55:12.671948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735762:0 2025-04-04T16:55:12.672023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409561, LocalPathId: 2] was 4 2025-04-04T16:55:12.676031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735762 2025-04-04T16:55:12.676137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735762 2025-04-04T16:55:12.676224Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfoId: 115 2025-04-04T16:55:12.676322Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfo: 
TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4227:5921], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:55:12.684465Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-04T16:55:12.684592Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4227:5921], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:55:12.684664Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T16:55:12.701753Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-04-04T16:55:12.701890Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4227:5921], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:55:12.701931Z node 1 :BUILD_INDEX TRACE: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2025-04-04T16:55:12.702112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-04-04T16:55:12.702157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:4363:6035] TestWaitNotification: OK eventTxId 115 2025-04-04T16:55:12.712866Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2025-04-04T16:55:12.713166Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2025-04-04T16:55:13.297548Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:13.299110Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:13.299946Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:13.300340Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:13.300453Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:13.302939Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d20/r3tmp/tmp8heNff/pdisk_1.dat 2025-04-04T16:55:14.089175Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:477:2459] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1291:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:14.089332Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.089381Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] 
restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.089405Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.089424Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.089445Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.089466Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.089497Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1291:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:14.089572Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG33 2025-04-04T16:55:14.089608Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG32 2025-04-04T16:55:14.089644Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG33 2025-04-04T16:55:14.089666Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG32 2025-04-04T16:55:14.089691Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG33 2025-04-04T16:55:14.089710Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG32 2025-04-04T16:55:14.089869Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:45:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:3] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.089917Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:38:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:2] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.089950Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:59:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:1] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.099718Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:14.099975Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 
MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-04T16:55:14.100043Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:14.100106Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-04T16:55:14.100161Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:14.100322Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.975 sample PartId# [72057594037932033:2:8:0:0:1291:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.975 sample PartId# [72057594037932033:2:8:0:0:1291:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.975 sample PartId# [72057594037932033:2:8:0:0:1291:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 10.825 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 11.002 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 11.066 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:14.174345Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:522:2496] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:14.174525Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.174569Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.174599Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.174642Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.174670Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.174700Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:14.174740Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:14.174814Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# 
[72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-04-04T16:55:14.174859Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-04-04T16:55:14.174901Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-04-04T16:55:14.174927Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-04-04T16:55:14.174958Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-04-04T16:55:14.174983Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-04-04T16:55:14.175149Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:38:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.175222Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:59:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.175406Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:52:2096] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.178301Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:14.178676Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:14.178800Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [ ... 
Y_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:10:0:0:238:1] Marker# BPG32 2025-04-04T16:55:14.217425Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG33 2025-04-04T16:55:14.217449Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG32 2025-04-04T16:55:14.217482Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG33 2025-04-04T16:55:14.217505Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG32 2025-04-04T16:55:14.217663Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:59:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:3] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.217728Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:52:2096] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:2] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.217775Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:45:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:1] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:14.235014Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 11 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 12 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-04T16:55:14.235238Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-04T16:55:14.235322Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:14.235397Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Result# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-04T16:55:14.235477Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:14.235656Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ 
TEvVPut{ TimestampMs# 1.047 sample PartId# [72057594037932033:2:10:0:0:238:3] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.047 sample PartId# [72057594037932033:2:10:0:0:238:2] QueryCount# 1 VDiskId# [2000000:1:0:2:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.048 sample PartId# [72057594037932033:2:10:0:0:238:1] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 18.341 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 18.515 VDiskId# [2000000:1:0:2:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 18.597 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:14.237061Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:55:14.237111Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:55:14.241298Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:523:2497] Create Queue# [1:527:2500] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:14.241441Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:523:2497] Create Queue# [1:528:2501] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:14.241553Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:523:2497] Create Queue# [1:529:2502] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:14.241678Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:523:2497] Create Queue# [1:530:2503] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:14.241783Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:523:2497] Create Queue# [1:531:2504] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:14.241886Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:523:2497] Create Queue# [1:532:2505] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:14.241990Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:523:2497] Create Queue# [1:533:2506] targetNodeId# 1 Marker# DSP01 2025-04-04T16:55:14.242015Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:55:14.242688Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:14.242822Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:14.242911Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:14.242987Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 
MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:14.243039Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:14.243087Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:14.243138Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:14.243161Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-04T16:55:14.243194Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-04-04T16:55:14.243330Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] bootstrap ActorId# [1:534:2507] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-04T16:55:14.243380Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-04T16:55:14.243573Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:527:2500] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 2195907473047179377 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-04T16:55:14.251018Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-04T16:55:14.251099Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-04T16:55:14.251476Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:527:2500] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2025-04-04T16:55:14.255229Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-04-04T16:55:14.255994Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] bootstrap ActorId# [1:536:2509] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-04T16:55:14.256064Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-04T16:55:14.256249Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:527:2500] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 16666227723845892201 MsgQoS { 
ExtQueueId: PutTabletLog } cookie# 0 2025-04-04T16:55:14.262928Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-04T16:55:14.263011Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-04-04T16:55:14.263533Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] bootstrap ActorId# [1:537:2510] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-04T16:55:14.263596Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-04-04T16:55:14.263780Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:527:2500] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 5996346386562483777 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-04T16:55:14.272714Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-04-04T16:55:14.272806Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Result# TEvBlockResult {Status# OK} Marker# DSPB04 >> KqpNewEngine::PagingNoPredicateExtract [GOOD] |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] Test command err: Trying to start YDB, gRPC: 7382, MsgBus: 15067 2025-04-04T16:53:21.687878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502131180012240:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:21.687913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001526/r3tmp/tmpLA7jUE/pdisk_1.dat 2025-04-04T16:53:22.664311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:22.664430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:22.728462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:23.354485Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7382, node 1 2025-04-04T16:53:24.330833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:24.382888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:24.382910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:24.382916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:24.383023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:53:26.693329Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502131180012240:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:26.694715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:15067 TClient is connected to server localhost:15067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:32.915482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.963813Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:53:32.976061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:33.250800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:34.003039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:53:34.091082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.083366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:38.083394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:40.947678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502212784392655:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:40.947769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:42.685017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:42.850053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:43.056529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:43.247239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:43.678077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:43.872851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:44.755710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502229964262434:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:44.755811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:44.756198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502229964262443:2510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:44.759391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:44.780722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:53:44.793920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502229964262454:2511], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:44.910325Z node 1 :TX_PROXY ERROR: Actor# [1:7489502229964262506:3546] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8405, MsgBus: 9160 2025-04-04T16:54:00.347525Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502298466864885:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001526/r3tmp/tmp3pk0Cd/pdisk_1.dat 2025-04-04T16:54:00.495679Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:00.655998Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.711209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.711312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.712897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8405, node 2 2025-04-04T16:54:00.950981Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:00.951010Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:00.951019Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:00.951168Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9160 TClient is connected to server localhost:9160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:02.358030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:02.375465Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:54:02.392934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTab ... 
502524959725109:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.275726Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.341454Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.408933Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.482347Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.543672Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.602640Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.673799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:53.775162Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502524959725627:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.775259Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.775512Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502524959725632:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:53.780961Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:53.802636Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502524959725634:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:53.872690Z node 6 :TX_PROXY ERROR: Actor# [6:7489502524959725688:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:55.997908Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61028, MsgBus: 63293 2025-04-04T16:55:00.296530Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502554785677404:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001526/r3tmp/tmpSHiONJ/pdisk_1.dat 2025-04-04T16:55:00.612993Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:55:00.860014Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:00.923947Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:00.924133Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:00.931809Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61028, node 7 2025-04-04T16:55:01.066918Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:01.066951Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:01.066962Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:01.067141Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63293 TClient is connected to server localhost:63293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:55:02.249895Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:02.310332Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.473452Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.760629Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.893238Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:05.306437Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489502554785677404:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:05.306559Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:06.677374Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502580555482849:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.677558Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.719103Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.840665Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.910801Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:07.015538Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:07.140220Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:07.310121Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:07.488018Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502584850450678:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:07.488207Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:07.491008Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502584850450683:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:07.507869Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:07.547356Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489502584850450686:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:07.644528Z node 7 :TX_PROXY ERROR: Actor# [7:7489502584850450746:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:09.827961Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] >> TVectorIndexTests::CreateTableCoveredEmbedding >> TVectorIndexTests::CreateTableWithError >> TAsyncIndexTests::CreateTable |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2025-04-04T16:55:12.192058Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.196719Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.197993Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.198115Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.199645Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:55:12.199807Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d2c/r3tmp/tmp62rZzl/pdisk_1.dat 2025-04-04T16:55:12.782094Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:477:2459] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1296:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:12.782260Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.782314Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0]
restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.782340Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.782367Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.782391Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.782416Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1296:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.782453Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1296:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:12.782529Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG33 2025-04-04T16:55:12.782570Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1296:1] Marker# BPG32 2025-04-04T16:55:12.782626Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG33 2025-04-04T16:55:12.782654Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1296:2] Marker# BPG32 2025-04-04T16:55:12.782683Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG33 2025-04-04T16:55:12.782708Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1296:3] Marker# BPG32 2025-04-04T16:55:12.782871Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:45:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:3] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.782937Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:38:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:2] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.783021Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:59:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1296:1] FDS# 1296 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.788383Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:12.788590Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 
MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-04-04T16:55:12.788676Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1296:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90204 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-04-04T16:55:12.788751Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-04-04T16:55:12.788810Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1296:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:12.789015Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.12 sample PartId# [72057594037932033:2:8:0:0:1296:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.121 sample PartId# [72057594037932033:2:8:0:0:1296:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.121 sample PartId# [72057594037932033:2:8:0:0:1296:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 6.533 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 6.695 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 6.776 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-04-04T16:55:12.842538Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:522:2496] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:12.842727Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.842777Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.842819Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.842846Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.842871Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.842891Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:12.842919Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:12.842968Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# 
[72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-04-04T16:55:12.843006Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-04-04T16:55:12.843041Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-04-04T16:55:12.843069Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-04-04T16:55:12.843097Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-04-04T16:55:12.843121Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-04-04T16:55:12.843240Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:38:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.843287Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:59:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.843318Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:52:2096] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:12.844902Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-04-04T16:55:12.845055Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-04-04T16:55:12.845152Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000 ... 
ituation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:15.117878Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:15.117943Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-04-04T16:55:15.117992Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-04-04T16:55:15.118129Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:592:2502] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:15.134236Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-04T16:55:15.134528Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-04-04T16:55:15.134625Z node 2 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:15.134796Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.674 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 16.954 VDiskId# [82000002:1:0:0:0] NodeId# 2 Status# OK } ] } 2025-04-04T16:55:15.135513Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:55:15.135578Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-04-04T16:55:15.135743Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-04-04T16:55:15.136482Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/i5tq/001d2c/r3tmp/tmp7gH2o3//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-04-04T16:55:15.137578Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-04-04T16:55:15.137637Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:55:15.144311Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:603:2104] Create Queue# [3:605:2105] targetNodeId# 2 Marker# DSP01 2025-04-04T16:55:15.144545Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:603:2104] Create Queue# [3:606:2106] targetNodeId# 2 Marker# DSP01 2025-04-04T16:55:15.144676Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:603:2104] Create Queue# [3:607:2107] targetNodeId# 2 Marker# DSP01 
2025-04-04T16:55:15.144801Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:603:2104] Create Queue# [3:608:2108] targetNodeId# 2 Marker# DSP01 2025-04-04T16:55:15.144933Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:603:2104] Create Queue# [3:609:2109] targetNodeId# 2 Marker# DSP01 2025-04-04T16:55:15.145062Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:603:2104] Create Queue# [3:610:2110] targetNodeId# 2 Marker# DSP01 2025-04-04T16:55:15.145191Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:603:2104] Create Queue# [3:611:2111] targetNodeId# 2 Marker# DSP01 2025-04-04T16:55:15.145234Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:55:15.151110Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:15.151475Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:15.151566Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:15.151767Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:15.151873Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:15.151956Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:15.152013Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-04-04T16:55:15.152042Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-04-04T16:55:15.152093Z node 3 :BS_PROXY INFO: Group# 2181038082 
SetStateWork Marker# DSP15 2025-04-04T16:55:15.152304Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [3:612:2112] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-04-04T16:55:15.152373Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2025-04-04T16:55:15.152599Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:605:2105] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 7523176930701069867 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-04-04T16:55:15.159099Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2025-04-04T16:55:15.159197Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-04-04T16:55:15.159659Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-04T16:55:15.159916Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-04-04T16:55:15.160349Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [2:613:2512] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-04-04T16:55:15.160522Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:55:15.160582Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:55:15.160647Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-04-04T16:55:15.160703Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-04-04T16:55:15.160853Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:592:2502] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:55:15.161152Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T16:55:15.161495Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-04-04T16:55:15.161600Z node 2 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-04-04T16:55:15.161671Z node 2 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:55:15.161792Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.745 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } ] } 2025-04-04T16:55:15.166352Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:605:2105] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthEffectivePermissions [GOOD] Test command err: 2025-04-04T16:51:28.355561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501642706572420:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.355828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c46/r3tmp/tmpqFP9RC/pdisk_1.dat 2025-04-04T16:51:28.973504Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.986469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.986581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.000568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13142, node 1 2025-04-04T16:51:29.211373Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:29.211396Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:29.211403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:29.211520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:29.671765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:51:29.838966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T16:51:29.839148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.839223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T16:51:29.839373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-04T16:51:29.839535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T16:51:29.839616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:51:29.839631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.839705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T16:51:29.839730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T16:51:29.847265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-04T16:51:29.847418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-04-04T16:51:29.847594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T16:51:29.847608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T16:51:29.847777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T16:51:29.847865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T16:51:29.847884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489501647001540173:2388], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-04T16:51:29.847919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489501647001540173:2388], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-04T16:51:29.847954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at 
schemeshard: 72057594046644480 2025-04-04T16:51:29.847972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.847989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 waiting...2025-04-04T16:51:29.848017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2025-04-04T16:51:29.856736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:51:29.858423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.858503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.858535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-04T16:51:29.858552Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-04T16:51:29.858571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T16:51:29.858797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.858850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.858858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-04T16:51:29.858868Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-04T16:51:29.858877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T16:51:29.858912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-04T16:51:29.859030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:51:29.859038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-04T16:51:29.859056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:51:29.859993Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-04-04T16:51:29.860072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-04-04T16:51:29.866897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.866970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.867432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743785489908, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:51:29.867561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743785489908 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T16:51:29.867605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.867794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-04T16:51:29.867827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.867944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T16:51:29.867975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T16:51:29.868023Z node 1 :FLAT_TX_SCHEMESHARD INF ... 
ctRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [default] }] } 2025-04-04T16:55:09.532174Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594792188032:2409], row count: 1, finished: 0 2025-04-04T16:55:09.546462Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:09.554407Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:55:09.554599Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594792188032:2409], row count: 5, finished: 0 2025-04-04T16:55:09.558620Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:09.560775Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:09.560845Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594792188032:2409], row count: 1, finished: 0 2025-04-04T16:55:09.561444Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:09.574782Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: 
[72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:55:09.574874Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594792188032:2409], row count: 1, finished: 0 2025-04-04T16:55:09.578598Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7489502594792188032:2409], owner: [26:7489502594792188028:2407], scan id: 0, table id: [72057594046644480:1:0:auth_effective_permissions] 2025-04-04T16:55:09.582660Z node 26 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [26:7489502530367676589:2123], database# , query hash# 11342553055430868283, cpu time# 376592 2025-04-04T16:55:09.583428Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785709498, txId: 281474976715676] shutting down 2025-04-04T16:55:09.619344Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:55:09.619386Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:09.750458Z node 29 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded top size# 0, time# 2025-04-04T16:55:09.750341Z 2025-04-04T16:55:09.761783Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr0vmm1y8r6d2fr12agpcrve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=Yjc2ZjA0NzYtMWFjY2IwMjctODhlNGZiMTEtODUxNmY1ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:09.768563Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7489502594792188077:2418], owner: [26:7489502594792188074:2416], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-04-04T16:55:09.773726Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7489502594792188077:2418], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2 2025-04-04T16:55:09.773772Z node 26 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root/Tenant1 tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-04T16:55:09.773861Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:09.774369Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [72075186224037888:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 }] Groups: [] } Children [Dir2,Table1] }] } 2025-04-04T16:55:09.774440Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594792188077:2418], row count: 1, finished: 0 2025-04-04T16:55:09.774630Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:09.775781Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [72075186224037888:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:09.775864Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594792188077:2418], row count: 2, finished: 0 2025-04-04T16:55:09.776133Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
Root/Tenant1/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:09.777670Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [72075186224037888:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:55:09.777712Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594792188077:2418], row count: 1, finished: 0 2025-04-04T16:55:09.778517Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7489502594792188077:2418], owner: [26:7489502594792188074:2416], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions] 2025-04-04T16:55:09.781089Z node 26 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [26:7489502530367676589:2123], database# , query hash# 17325808444334437222, cpu time# 134974 2025-04-04T16:55:09.781784Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785709761, txId: 281474976715678] shutting down 2025-04-04T16:55:09.813180Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 27 2025-04-04T16:55:09.814091Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:09.814447Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 28 2025-04-04T16:55:09.814756Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:09.815187Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 29 2025-04-04T16:55:09.815626Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:09.815833Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 30 2025-04-04T16:55:09.816328Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:09.818880Z node 29 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:09.816912Z node 27 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
>> AsyncIndexChangeCollector::DeleteSingleRow [GOOD]
>> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthOwners_ResultOrder [GOOD]
Test command err:
2025-04-04T16:51:28.098131Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501643179045863:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.098171Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c22/r3tmp/tmpTCttSW/pdisk_1.dat 2025-04-04T16:51:28.802681Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.829864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.829985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.843282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10336, node 1 2025-04-04T16:51:29.304419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:29.304440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:29.304448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:29.304603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8017 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:29.592585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:51:29.757537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T16:51:29.757792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.757871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T16:51:29.758000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-04T16:51:29.758161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T16:51:29.758767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:51:29.758797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.758895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T16:51:29.758945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 waiting... 
2025-04-04T16:51:29.764245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-04T16:51:29.764408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: root@builtin, status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-04-04T16:51:29.764602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T16:51:29.764619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T16:51:29.764733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T16:51:29.764831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T16:51:29.764885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489501643179046458:2387], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-04T16:51:29.764905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489501643179046458:2387], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-04T16:51:29.764951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.764967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.764990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.765014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2025-04-04T16:51:29.769764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:51:29.772337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.772431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.772445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-04T16:51:29.772459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-04T16:51:29.772475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 
2025-04-04T16:51:29.772734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.772771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.772776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-04T16:51:29.772784Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-04T16:51:29.772792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T16:51:29.772820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-04T16:51:29.772928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-04-04T16:51:29.772997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-04-04T16:51:29.774777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:51:29.774799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-04T16:51:29.774819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:51:29.779023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.779099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.779212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743785489817, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:51:29.779318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743785489817 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T16:51:29.779353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.779527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-04T16:51:29.779557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.779662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T16:51:29.779730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04 ... node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir3 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.078436Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir3 TableId: [72057594046644480:16:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:10.078496Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.085688Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir11 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.086784Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir11 TableId: [72057594046644480:17:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir] }] } 2025-04-04T16:55:10.086842Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.087182Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir11/SubDir TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.089638Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir11/SubDir TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:10.089691Z node 26 :SYSTEM_VIEWS DEBUG: 
Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.111621Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.113347Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2,SubDir3] }] } 2025-04-04T16:55:10.113435Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.115898Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.119398Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2/SubDir1 TableId: [72057594046644480:13:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:10.119466Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.122383Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2/SubDir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.130504Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2/SubDir2 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:10.130573Z 
node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.133213Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2/SubDir3 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.137446Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2/SubDir3 TableId: [72057594046644480:15:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:10.137506Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.140291Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:10.143578Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:55:10.143634Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502594787754276:2421], row count: 1, finished: 0 2025-04-04T16:55:10.151290Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7489502594787754276:2421], owner: [26:7489502594787754272:2419], scan id: 0, table id: [72057594046644480:1:0:auth_owners] 2025-04-04T16:55:10.159292Z node 26 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [26:7489502517478340605:2107], database# , query hash# 16785711142410628055, cpu time# 364371 2025-04-04T16:55:10.160063Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785709954, txId: 281474976710678] shutting down 2025-04-04T16:55:10.208674Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 29 2025-04-04T16:55:10.209529Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:10.209672Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 27 2025-04-04T16:55:10.210068Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connected -> 
Disconnected 2025-04-04T16:55:10.210149Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 30 2025-04-04T16:55:10.217890Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:10.212097Z node 28 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:10.223587Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 28 2025-04-04T16:55:10.224475Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:10.222691Z node 29 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:10.262813Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[28:7489502529899518976:2102], Type=268959746 2025-04-04T16:55:10.262876Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[28:7489502529899518976:2102], Type=268959746 2025-04-04T16:55:10.262910Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[28:7489502529899518976:2102], Type=268959746 2025-04-04T16:55:10.262945Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[28:7489502529899518976:2102], Type=268959746
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD]
Test command err:
Delete nodeId# 58 Add nodeId# 101 Pick Disable nodeId# 86 Enable nodeId# 86 Disable nodeId# 30 Disable nodeId# 62 Delete nodeId# 62 Disable nodeId# 91 Pick Enable nodeId# 91 Pick Disable nodeId# 88 Pick Pick Delete nodeId# 42 Delete nodeId# 5 Enable nodeId# 88 Enable nodeId# 30 Add nodeId# 102 Disable nodeId# 35 Disable nodeId# 85 Enable nodeId# 85 Pick Enable nodeId# 35 Delete nodeId# 74 Add nodeId# 103 Add nodeId# 104 Pick Add nodeId# 105 Disable nodeId# 60 Delete nodeId# 81 Enable nodeId# 60 Pick Delete nodeId# 2 Pick Pick Disable nodeId# 72 Pick Pick Enable nodeId# 72 Add nodeId# 106 Pick Disable nodeId# 100 Disable nodeId# 4 Pick Add nodeId# 107 Enable nodeId# 4 Pick Pick Delete nodeId# 67 Pick Delete nodeId# 82 Add nodeId# 108 Pick Pick Disable nodeId# 39 Add nodeId# 109 Enable nodeId# 39 Disable nodeId# 23 Delete nodeId# 92 Disable nodeId# 28 Disable nodeId# 48 Delete nodeId# 71 Add nodeId# 110 Add nodeId# 111 Pick Enable nodeId# 100 Pick Pick Enable nodeId# 28 Delete nodeId# 68 Pick Delete nodeId# 47 Enable nodeId# 48 Disable nodeId# 59 Disable nodeId# 109 Add nodeId# 112 Enable nodeId# 59 Pick Delete nodeId# 112 Delete nodeId# 111 Pick Enable nodeId# 23 Add nodeId# 113 Add nodeId# 114 Enable nodeId# 109 Pick Delete nodeId# 35 Pick Delete nodeId# 73 Pick Pick Disable nodeId# 60 Pick Disable nodeId# 20 Add nodeId# 115 Add nodeId# 116 Pick Delete nodeId# 25 Add nodeId# 117 Enable nodeId# 60 Disable nodeId# 96 Delete nodeId# 88 Enable nodeId# 96 Add nodeId# 118 Delete nodeId# 37 Pick Enable nodeId# 20 Add nodeId# 119 Delete nodeId# 94 Delete nodeId# 114 Pick Delete nodeId# 38 Pick Disable nodeId# 60 Enable nodeId# 60 Disable nodeId# 79 Add nodeId# 120 Enable nodeId# 79 Delete nodeId# 54 Disable nodeId# 118 Enable nodeId# 118 Add nodeId# 121 Delete nodeId# 10 Pick Pick Pick Pick Delete nodeId# 51 Disable nodeId# 103 Add nodeId# 122 Delete nodeId# 11 Enable nodeId# 103 Disable nodeId# 91 Pick Delete nodeId# 108 Pick Add nodeId# 123 Pick Pick Delete nodeId# 6 Add nodeId# 124 Enable 
nodeId# 91 Disable nodeId# 122 Delete nodeId# 44 Enable nodeId# 122 Delete nodeId# 120 Add nodeId# 125 Delete nodeId# 104 Pick Add nodeId# 126 Delete nodeId# 107 Delete nodeId# 46 Disable nodeId# 28 Delete nodeId# 7 Enable nodeId# 28 Add nodeId# 127 Disable nodeId# 19 Add nodeId# 128 Disable nodeId# 8 Pick Enable nodeId# 8 Pick Disable nodeId# 15 Add nodeId# 129 Add nodeId# 130 Disable nodeId# 64 Enable nodeId# 19 Disable nodeId# 105 Pick Enable nodeId# 105 Delete nodeId# 23 Disable nodeId# 77 Disable nodeId# 86 Pick Add nodeId# 131 Pick Pick Add nodeId# 132 Delete nodeId# 72 Pick Delete nodeId# 59 Add nodeId# 133 Add nodeId# 134 Pick Enable nodeId# 86 Enable nodeId# 64 Pick Enable nodeId# 15 Disable nodeId# 124 Enable nodeId# 77 Enable nodeId# 124 Pick Delete nodeId# 84 Pick Delete nodeId# 118 Add nodeId# 135 Delete nodeId# 36 Delete nodeId# 3 Add nodeId# 136 Disable nodeId# 98 Delete nodeId# 89 Delete nodeId# 1 Delete nodeId# 63 Enable nodeId# 98 Add nodeId# 137 Pick Add nodeId# 138 Add nodeId# 139 Add nodeId# 140 Add nodeId# 141 Pick Delete nodeId# 130 Disable nodeId# 77 Delete nodeId# 69 Delete nodeId# 20 Disable nodeId# 45 Disable nodeId# 121 Delete nodeId# 29 Disable nodeId# 138 Disable nodeId# 79 Pick Delete nodeId# 133 Add nodeId# 142 Add nodeId# 143 Enable nodeId# 79 Pick Add nodeId# 144 Disable nodeId# 134 Pick Pick Disable nodeId# 140 Add nodeId# 145 Pick Enable nodeId# 134 Enable nodeId# 140 Disable nodeId# 132 Delete nodeId# 39 Disable nodeId# 95 Disable nodeId# 87 Add nodeId# 146 Pick Add nodeId# 147 Enable nodeId# 95 Delete nodeId# 115 Pick Pick Enable nodeId# 77 Disable nodeId# 55 Disable nodeId# 76 Disable nodeId# 102 Disable nodeId# 119 Delete nodeId# 8 Disable nodeId# 142 Pick Add nodeId# 148 Delete nodeId# 142 Pick Disable nodeId# 128 Disable nodeId# 57 Pick Enable nodeId# 87 Disable nodeId# 109 Pick Pick Enable nodeId# 76 Enable nodeId# 55 Delete nodeId# 101 Pick Pick Delete nodeId# 125 Pick Enable nodeId# 119 Enable nodeId# 128 Pick Disable nodeId# 100 Add nodeId# 149 Pick Disable nodeId# 30 Delete nodeId# 143 Delete nodeId# 18 Disable nodeId# 119 Delete nodeId# 66 Delete nodeId# 70 Disable nodeId# 17 Delete nodeId# 134 Pick Delete nodeId# 131 Delete nodeId# 121 Pick Add nodeId# 150 Delete nodeId# 80 Disable nodeId# 135 Delete nodeId# 132 Pick Disable nodeId# 55 Add nodeId# 151 Add nodeId# 152 Add nodeId# 153 Pick Enable nodeId# 135 Disable nodeId# 21 Enable nodeId# 45 Disable nodeId# 34 Disable nodeId# 147 Enable nodeId# 138 Pick Add nodeId# 154 Pick Pick Add nodeId# 155 Add nodeId# 156 Delete nodeId# 33 Enable nodeId# 55 Delete nodeId# 135 Add nodeId# 157 Disable nodeId# 123 Enable nodeId# 21 Disable nodeId# 83 Disable nodeId# 117 Disable nodeId# 103 Enable nodeId# 123 Pick Pick Enable nodeId# 57 Delete nodeId# 91 Enable nodeId# 102 Pick Disable nodeId# 43 Pick Enable nodeId# 109 Pick Pick Add nodeId# 158 Pick Add nodeId# 159 Enable nodeId# 147 Pick Disable nodeId# 123 Disable nodeId# 156 Pick Pick Add nodeId# 160 Enable nodeId# 156 Disable nodeId# 19 Disable nodeId# 48 Enable nodeId# 103 Pick Delete nodeId# 145 Enable nodeId# 17 Pick Disable nodeId# 13 Add nodeId# 161 Add nodeId# 162 Add nodeId# 163 Pick Disable nodeId# 151 Enable nodeId# 123 Disable nodeId# 113 Disable nodeId# 41 Add nodeId# 164 Delete nodeId# 34 Enable nodeId# 43 Add nodeId# 165 Delete nodeId# 55 Add nodeId# 166 Add nodeId# 167 Enable nodeId# 119 Pick Add nodeId# 168 Pick Pick Enable nodeId# 151 Enable nodeId# 13 Enable nodeId# 83 Disable nodeId# 22 Enable nodeId# 41 Disable nodeId# 148 Enable 
nodeId# 148 Pick Disable nodeId# 166 Delete nodeId# 77 Add nodeId# 169 Add nodeId# 170 Enable nodeId# 117 Pick Pick Disable nodeId# 151 Pick Delete nodeId# 170 Pick Delete nodeId# 164 Disable nodeId# 95 Add nodeId# 171 Enable nodeId# 30 Add nodeId# 172 Delete nodeId# 98 Pick Disable nodeId# 31 Enable nodeId# 113 Add nodeId# 173 Pick Enable nodeId# 100 Disable nodeId# 41 Delete nodeId# 161 Add nodeId# 174 Delete nodeId# 106 Add nodeId# 175 Enable nodeId# 41 Enable nodeId# 95 Pick Pick Add nodeId# 176 Pick Disable nodeId# 86 Delete nodeId# 75 Enable nodeId# 86 Disable nodeId# 79 Add nodeId# 177 Delete nodeId# 79 Add nodeId# 178 Pick Add nodeId# 179 Enable nodeId# 166 Pick Disable nodeId# 140 Delete nodeId# 166 Pick Enable nodeId# 151 Add nodeId# 180 Disable nodeId# 24 Disable nodeId# 137 Enable nodeId# 140 Disable nodeId# 49 Disable nodeId# 140 Delete nodeId# 146 Disable nodeId# 155 Delete nodeId# 103 Enable nodeId# 48 Pick Enable nodeId# 137 Pick Delete nodeId# 12 Disable nodeId# 157 Delete nodeId# 150 Pick Enable nodeId# 24 Disable nodeId# 61 Delete nodeId# 139 Disable nodeId# 90 Pick Delete nodeId# 100 Enable nodeId# 19 Delete nodeId# 144 Enable nodeId# 157 Pick Enable nodeId# 61 Enable nodeId# 155 Delete nodeId# 56 Enable nodeId# 140 Enable nodeId# 22 Enable nodeId# 90 Disable nodeId# 41 Disable nodeId# 156 Delete nodeId# 129 Delete nodeId# 163 Add nodeId# 181 Disable nodeId# 50 Disable nodeId# 90 Enable nodeId# 156 Pick Delete nodeId# 116 Add nodeId# 182 Disable nodeId# 160 Pick Add nodeId# 183 Enable nodeId# 41 Enable nodeId# 50 Disable nodeId# 14 Add nodeId# 184 Enable nodeId# 14 Disable nodeId# 87 Delete nodeId# 172 Disable nodeId# 179 Disable nodeId# 117 Add nodeId# 185 Enable nodeId# 160 Delete nodeId# 151 Disable nodeId# 140 Add nodeId# 186 Enable nodeId# 179 Pick Delete nodeId# 22 Pick Pick Enable nodeId# 87 Delete nodeId# 178 Delete nodeId# 160 Enable nodeId# 90 Pick Add nodeId# 187 Pick Pick Pick Add nodeId# 188 Disable nodeId# 85 Delete nodeId# 168 Delete nodeId# 153 Disable nodeId# 123 Pick Pick Enable nodeId# 140 Disable nodeId# 127 Add nodeId# 189 Delete nodeId# 48 Enable nodeId# 117 Add nodeId# 190 Disable nodeId# 64 Enable nodeId# 123 Enable nodeId# 127 Enable nodeId# 64 Delete nodeId# 105 Enable nodeId# 85 Add nodeId# 191 Disable nodeId# 180 Enable nodeId# 180 Delete nodeId# 165 Add nodeId# 192 Disable nodeId# 149 Add nodeId# 193 Delete nodeId# 60 Disable nodeId# 158 Disable nodeId# 189 Enable nodeId# 149 Delete nodeId# 16 Add nodeId# 194 Pick Enable nodeId# 189 Pick Add nodeId# 195 Add nodeId# 196 Add nodeId# 197 Delete nodeId# 19 Delete nodeId# 13 Pick Pick Disable nodeId# 41 Delete nodeId# 21 Disable nodeId# 196 Disable nodeId# 26 Add nodeId# 198 Delete nodeId# 147 Disable nodeId# 45 Add nodeId# 199 Disable nodeId# 61 Disable nodeId# 126 Delete nodeId# 64 Delete nodeId# 148 Disable nodeId# 175 Add nodeId# 200 Delete nodeId# 154 Disable nodeId# 136 Pick Add nodeId# 201 Enable nodeId# 196 Enable nodeId# 41 Pick Delete nodeId# 87 Add nodeId# 202 Delete nodeId# 78 Add nodeId# 203 Add nodeId# 204 Pick Pick Disable nodeId# 171 Add nodeId# 205 Disable nodeId# 185 Disable nodeId# 169 Disable nodeId# 96 Pick Pick Add nodeId# 206 Delete nodeId# 177 Enable nodeId# 61 Pick Disable nodeId# 97 Add nodeId# 207 Add nodeId# 208 Add nodeId# 209 Disable nodeId# 191 Add nodeId# 210 Enable nodeId# 126 Add nodeId# 211 Disable nodeId# 205 Enable nodeId# 49 Enable nodeId# 26 Enable nodeId# 97 Pick Delete nodeId# 99 Disable nodeId# 141 Delete nodeId# 4 Disable nodeId# 41 Disable nodeId# 188 
Delete nodeId# 208 Add nodeId# 212 Delete nodeId# 49 Add nodeId# 213 Delete nodeId# 205 Disable nodeId# 209 Delete nodeId# 61 Pick Add nodeId# 214 Add nodeId# 215 Pick Delete nodeId# 30 Disable nodeId# 109 Add nodeId# 216 Disable nodeId# 137 Pick Disable nodeId# 199 Delete nodeId# 27 Delete nodeId# 93 Disable nodeId# 193 Disable nodeId# 195 Add nodeId# 217 Pick Pick Delete nodeId# 207 Add nodeId# 218 Delete nodeId# 188 Disable nodeId# 52 Disable nodeId# 200 Delete nodeId# 181 Delete nodeId# 86 Enable nodeId# 136 Enable nodeId# 45 Delete nodeId# 155 Pick Add nodeId# 219 Pick Delete nodeId# 159 Enable nodeId# 31 Add nodeId# 220 Pick Disable nodeId# 220 Pick Add nodeId# 221 Enable nodeId# 193 Add nodeId# 222 Pick Pick Add nodeId# 223 Pick Disable nodeId# 183 Add nodeId# 224 Delete nodeId# 149 Delete nodeId# 137 Pick Delete nodeId# 109 Add nodeId# 225 Disable nodeId# 219 Delete nodeId# 212 Delete nodeId# 179 Disable nodeId# 31 Enable nodeId# 171 Disable nodeId# 193 Enable nodeId# 41 Pick Enable nodeId# 175 Delete nodeId# 220 Enable nodeId# 158 Pick Disable nodeId# 215 Enable nodeId# 31 Add nodeId# 226 Add nodeId# 227 Add nodeId# 228 Pick Delete nodeId# 41 Pick Enable nodeId# 141 Pick Pick Delete nodeId# 158 Add nodeId# 229 Pick Disable nodeId# 14 Add nodeId# 230 Add nodeId# 231 Delete nodeId# 24 Add nodeId# 232 Enable nodeId# 191 Delete nodeId# 198 Enable nodeId# 183 Add nodeId# 233 Disable nodeId# 184 Delete nodeId# 127 Enable nodeId# 200 Enable nodeId# 185 Disable nodeId# 119 Disable nodeId# 26 Pick Pick Pick Add nodeId# 234 Enable nodeId# 96 Pick Enable nodeId# 199 Disable nodeId# 206 Disable nodeId# 211 Add nodeId# 235 Delete nodeId# 128 Enable nodeId# 52 Enable nodeId# 14 Delete nodeId# 180 Enable nodeId# 209 Disable nodeId# 226 Pick Enable nodeId# 226 Delete nodeId# 197 Pick Enable nodeId# 219 Disable nodeId# 173 Pick Add nodeId# 236 Delete nodeId# 185 Enable nodeId# 195 Enable nodeId# 169 Disable nodeId# 235 Pick Pick Pick Add nodeId# 237 Pick Delete nodeId# 193 Pick Disable nodeId# 45 Enable nodeId# 215 Add nodeId# 238 Pick Pick Disable nodeId# 219 Enable nodeId# 45 Add nodeId# 239 Disable nodeId# 237 Delete nodeId# 196 Add nodeId# 240 Add nodeId# 241 Disable nodeId# 215 Add nodeId# 242 Enable nodeId# 184 Add nodeId# 243 Disable nodeId# 218 Enable nodeId# 218 Enable nodeId# 237 Delete nodeId# 214 Pick Pick Disable nodeId# 191 Enable nodeId# 191 Pick Enable nodeId# 235 Disable nodeId# 240 Add nodeId# 244 Enable nodeId# 211 Delete nodeId# 211 Disable nodeId# 140 Enable nodeId# 240 Disable nodeId# 122 Disable nodeId# 138 Add nodeId# 245 Add nodeId# 246 Add nodeId# 247 Enable nodeId# 173 Delete nodeId# 43 Delete nodeId# 183 Delete nodeId# 199 Disable nodeId# 201 Pick Add nodeId# 248 Pick Pick Pick Add nodeId# 249 Delete nodeId# 138 Enable nodeId# 206 Enable nodeId# 26 Pick Pick Enable nodeId# 122 Add nodeId# 250 Delete nodeI ... 
[... several thousand randomized stress-test operations elided: interleaved Add/Delete/Enable/Disable steps over nodeId# values 19985-20311, with Pick steps between them ...]
>> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PagingNoPredicateExtract [GOOD] Test command err: Trying to start YDB, gRPC: 21366, MsgBus: 11431 2025-04-04T16:53:13.924062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502096951894454:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:13.925639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00155e/r3tmp/tmpW8Rcru/pdisk_1.dat 2025-04-04T16:53:16.962734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:18.124177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:18.217203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:18.217905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:18.241076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:18.302027Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21366, node 1 2025-04-04T16:53:18.542739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:18.542761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:18.542767Z
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:18.542867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:53:18.923670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502096951894454:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:18.923718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:11431 TClient is connected to server localhost:11431 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:23.903749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:23.966121Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:23.991768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:53:27.117291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:30.135239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:30.905362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:33.290724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:33.290908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:36.388879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502195736143992:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.388990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.654993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.695679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.749781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.783632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:36.820592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:37.046165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:37.179671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502200031111810:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:37.179959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:37.182867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502200031111815:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:37.201272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:37.259771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502200031111817:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:37.379668Z node 1 :TX_PROXY ERROR: Actor# [1:7489502200031111878:3507] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9747, MsgBus: 18406 2025-04-04T16:53:53.665249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502267087034809:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:53.672484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00155e/r3tmp/tmpnW4vQf/pdisk_1.dat 2025-04-04T16:53:55.970218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:55.979941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:55.982708Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:55.987467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:55.994781Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9747, node 2 2025-04-04T16:53:58.109929Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:58.109954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:58.109962Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:58.122932Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:53:58.668546Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502267087034809:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:58.668603Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:18406 TClient is connected to server localhost:18406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594 ... 
ne=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502508978467862:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:54.194688Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:57.957467Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502543338207894:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:57.957603Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.013703Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.095774Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.154553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.202630Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.257471Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.369162Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.496239Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502547633175712:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.496382Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.496721Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502547633175717:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.504578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:58.537447Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502547633175719:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:54:58.624268Z node 6 :TX_PROXY ERROR: Actor# [6:7489502547633175775:3466] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17205, MsgBus: 27876 2025-04-04T16:55:03.439190Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502567230783385:2161];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00155e/r3tmp/tmp8tQy6H/pdisk_1.dat 2025-04-04T16:55:03.811843Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:55:03.956460Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:04.090695Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:04.090829Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:04.099768Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17205, node 7 2025-04-04T16:55:04.362960Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:04.362987Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:04.363000Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:04.363173Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27876 TClient is connected to server localhost:27876 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:05.641143Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:05.672414Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:05.872220Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:06.105025Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:06.223294Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:08.426415Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489502567230783385:2161];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:08.426513Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:10.233269Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502597295556128:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:10.233397Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:10.344719Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.461385Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.564942Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.675271Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.773617Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.868321Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.952993Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502597295556653:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:10.953147Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:10.953458Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502597295556658:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:10.958633Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:10.978468Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489502597295556660:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:11.066643Z node 7 :TX_PROXY ERROR: Actor# [7:7489502601590524019:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-04-04T16:54:20.351828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:54:20.359440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:20.359536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f8/r3tmp/tmpi2NI10/pdisk_1.dat 2025-04-04T16:54:22.623293Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.140216s 2025-04-04T16:54:22.623450Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.140400s 2025-04-04T16:54:22.737468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:54:23.006752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:23.202473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:23.202659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:23.242849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:23.574809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:54:23.812533Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:54:23.813610Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:54:23.830498Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:54:23.830928Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:54:23.897062Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:54:24.058371Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:54:24.058693Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:54:24.072112Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:54:24.072228Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:54:24.072289Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:54:24.085557Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:54:24.085756Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:54:24.085869Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:54:24.098983Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 
2025-04-04T16:54:24.208342Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:54:24.266428Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:54:24.266683Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:54:24.266723Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:54:24.266759Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:54:24.266794Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:54:24.267055Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:54:24.267144Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:54:24.267493Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:54:24.267612Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:54:24.267745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:54:24.267805Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:54:24.301966Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:54:24.302065Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:54:24.302104Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:54:24.302147Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:54:24.302221Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:54:24.302431Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:54:24.302480Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:54:24.318392Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:54:24.334499Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:54:24.334604Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:54:24.334774Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:54:24.354404Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:54:24.354524Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:54:24.370630Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:54:24.370741Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is 
ExecutedNoMoreRestarts 2025-04-04T16:54:24.370791Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:54:24.370835Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T16:54:24.370873Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:54:24.402480Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:54:24.402581Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:54:24.402652Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:54:24.402701Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:54:24.425954Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:54:24.426050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:54:24.426090Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:54:24.426138Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:54:24.426192Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:54:24.462712Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T16:54:24.462798Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:54:24.473769Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:54:24.473859Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:54:24.473912Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:54:24.473960Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T16:54:24.474023Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:54:24.649214Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:54:24.649288Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:54:24.649334Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T16:54:24.649703Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 
2025-04-04T16:54:24.649737Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T16:54:24.649878Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:54:24.649925Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T16:54:24.649973Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T16:54:24.650012Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T16:54:24.674459Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:54:24.674573Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:54:24.675560Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipi ... chemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-04T16:55:14.877836Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:1002:2818], 1001} after executionsCount# 1 2025-04-04T16:55:14.877893Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1002:2818], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T16:55:14.878028Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1002:2818], 1001} finished in read 2025-04-04T16:55:14.878099Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T16:55:14.878137Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T16:55:14.878168Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T16:55:14.878198Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-04T16:55:14.878291Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T16:55:14.878320Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T16:55:14.878348Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-04T16:55:14.878385Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T16:55:14.878497Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T16:55:14.879596Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1008:2824], Recipient [6:716:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.879653Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.879694Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1007:2823], serverId# [6:1008:2824], sessionId# [0:0:0] 2025-04-04T16:55:14.879789Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:1006:2822], 
Recipient [6:716:2596]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-04T16:55:14.880617Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1011:2827], Recipient [6:716:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.880664Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.880702Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1010:2826], serverId# [6:1011:2827], sessionId# [0:0:0] 2025-04-04T16:55:14.880901Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1009:2825], Recipient [6:716:2596]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-04T16:55:14.880993Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-04T16:55:14.881038Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T16:55:14.881072Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-04T16:55:14.881125Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-04-04T16:55:14.881202Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-04T16:55:14.881237Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-04-04T16:55:14.881267Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-04T16:55:14.881294Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-04T16:55:14.881346Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-04-04T16:55:14.881381Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-04T16:55:14.881407Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-04T16:55:14.881434Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-04-04T16:55:14.881460Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-04-04T16:55:14.881550Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-04T16:55:14.881723Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:1009:2825], 1002} after executionsCount# 1 2025-04-04T16:55:14.881771Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1009:2825], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T16:55:14.881878Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1009:2825], 1002} finished in read 2025-04-04T16:55:14.882802Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-04T16:55:14.882864Z node 6 :TX_DATASHARD TRACE: Advance 
execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-04-04T16:55:14.882912Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T16:55:14.882943Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-04-04T16:55:14.883001Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-04-04T16:55:14.883028Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T16:55:14.883055Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-04-04T16:55:14.883104Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-04T16:55:14.883203Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-04T16:55:14.884174Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1015:2831], Recipient [6:714:2594]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.884223Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.884261Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1014:2830], serverId# [6:1015:2831], sessionId# [0:0:0] 2025-04-04T16:55:14.884390Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:1013:2829], Recipient [6:714:2594]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-04T16:55:14.885196Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1018:2834], Recipient [6:714:2594]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.885242Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:55:14.885277Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1017:2833], serverId# [6:1018:2834], sessionId# [0:0:0] 2025-04-04T16:55:14.982804Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1016:2832], Recipient [6:714:2594]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-04T16:55:14.982967Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-04T16:55:14.983022Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T16:55:14.983062Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-04-04T16:55:14.983124Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-04-04T16:55:14.983214Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-04T16:55:14.983247Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-04-04T16:55:14.983279Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-04T16:55:14.983309Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-04T16:55:14.983359Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-04-04T16:55:14.983396Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-04T16:55:14.983422Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-04T16:55:14.983446Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-04-04T16:55:14.983472Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-04-04T16:55:14.983566Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-04T16:55:14.983704Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:1016:2832], 1003} after executionsCount# 1 2025-04-04T16:55:14.983751Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1016:2832], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T16:55:14.983818Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1016:2832], 1003} finished in read 2025-04-04T16:55:14.983871Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-04T16:55:14.983896Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-04-04T16:55:14.983920Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-04-04T16:55:14.983943Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-04-04T16:55:14.983989Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-04-04T16:55:14.984012Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-04-04T16:55:14.984039Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-04-04T16:55:14.984069Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-04T16:55:14.984161Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:16.905792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:16.905924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.905972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:16.906033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:16.906084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:16.906119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:16.906189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.906302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:16.906701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:17.005827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:17.005899Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:17.022654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:17.022967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:17.023185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:17.040367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:17.040745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:17.041539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.045655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:17.053780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:17.055586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:17.055645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:17.055786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.063761Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:17.214092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:17.214319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.214519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:17.214703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:17.214748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.219476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.219644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:17.219914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.219992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:17.220057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:17.220101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:17.222460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.222522Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:17.222551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:17.224407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.224494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.224536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.224584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.227311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:17.229330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:17.229566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:17.230941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.231105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.231161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.231505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:17.231588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.231770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:17.231922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:17.236453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:17.236507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:17.236671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.236709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:17.237075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.237127Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:17.237239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.237278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.237334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.237390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-04T16:55:17.237429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:17.237476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.237516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:17.237555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:17.237646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:17.237685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:17.237725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:17.240192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:17.240357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:17.240398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T16:55:17.240441Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T16:55:17.240492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:17.240648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T16:55:17.245124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T16:55:17.245755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T16:55:17.247750Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T16:55:17.269091Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T16:55:17.271836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:17.272271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-04T16:55:17.272415Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-04-04T16:55:17.272457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-04-04T16:55:17.273311Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:55:17.276742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.276884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-04-04T16:55:17.277371Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T16:55:17.280270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:17.280649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-04-04T16:55:17.280789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-04T16:55:17.280868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-04-04T16:55:17.283331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.283517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.7%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:16.748065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:16.748195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.748241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:16.748290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:16.748334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:16.748364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:16.748423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.748512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:16.748855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:16.841604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:16.841671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:16.860217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:16.860518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:16.860711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:16.876195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:16.876485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:16.877118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:16.877974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:16.884341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:16.885754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:16.885818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:16.886007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:16.886063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:16.886120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:16.886261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:16.894938Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:17.047044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:17.047296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.047506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:17.047739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:17.047793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.051802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.053266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:17.053545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.053640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:17.053698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:17.053736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:17.055758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:17.057705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.057762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.057803Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.057859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.060601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:17.061947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:17.062068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:17.062885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.063007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.063042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.063249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:17.063297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.063410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:17.063479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:17.064967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:17.065003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:17.065137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.065166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:17.065447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.065512Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:17.065605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.065635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-04T16:55:17.065680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.065719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.065751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:17.065780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.065808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:17.065842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:17.065899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:17.065926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:17.065952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:17.067252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:17.067356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:17.067386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... L: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { 
GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.520093Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:55:17.520315Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 228us result status StatusSuccess 2025-04-04T16:55:17.520721Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 
IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.521357Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:55:17.521519Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 168us result status StatusSuccess 2025-04-04T16:55:17.521774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> SystemView::AuthPermissions_Selects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 
2025-04-04T16:55:16.939055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:16.939206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.939253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:16.939304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:16.939357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:16.939385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:16.939457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.939523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:16.939884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:17.037877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:17.037944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:17.055715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:17.055954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:17.056140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:17.074987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:17.075292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:17.075915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.076831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:17.083133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.089938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:17.090038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.090290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:17.090349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:17.090407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:17.090559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-04-04T16:55:17.099403Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:17.245481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:17.245757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.245991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:17.246211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:17.246295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.248863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.249026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:17.249233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.249304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:17.249364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:17.249406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:17.251695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.251755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:17.251792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:17.254171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.254274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.254326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.254379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.258021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:17.260186Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:17.260357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:17.261302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.261448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.261500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.261794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:17.261858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.262001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:17.262079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:17.264313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:17.264361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:17.264509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.264550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:17.264903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.264963Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:17.265059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.265093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.265135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.265173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.265208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:17.265244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.265278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:17.265306Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:17.265390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:17.265425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:17.265459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:17.267293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:17.267396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:17.267432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... atency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1342 } } 2025-04-04T16:55:17.559871Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1342 } } 2025-04-04T16:55:17.560726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:17.560765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-04T16:55:17.560882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:17.560926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T16:55:17.560985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:17.561032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.561057Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.561083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T16:55:17.561111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T16:55:17.573452Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:17.573679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:17.574005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:17.574161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:17.574642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:17.574771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:17.574807Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-04T16:55:17.574902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T16:55:17.574956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T16:55:17.574995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T16:55:17.575029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T16:55:17.575062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-04T16:55:17.576385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:17.576732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.576964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.577383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.577444Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T16:55:17.577512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T16:55:17.577539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:17.577590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T16:55:17.577617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:17.577642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-04T16:55:17.577711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-04-04T16:55:17.577756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:17.577800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T16:55:17.577834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T16:55:17.577990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:55:17.578029Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-04T16:55:17.578048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-04T16:55:17.578076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:55:17.578098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-04T16:55:17.578132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-04T16:55:17.578177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T16:55:17.584616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:55:17.584677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-04-04T16:55:17.585229Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:55:17.585472Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 293us result status StatusSuccess 2025-04-04T16:55:17.586281Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::OnlineBuild |76.7%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 22152, MsgBus: 64889 2025-04-04T16:54:16.919587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502367563765508:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:16.920124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00133e/r3tmp/tmpAELOjV/pdisk_1.dat 2025-04-04T16:54:17.727721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:17.729890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:17.729987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:17.764707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:18.001032Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.149046s 2025-04-04T16:54:18.001115Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.149157s TServer::EnableGrpc on GrpcPort 22152, node 1 2025-04-04T16:54:18.726413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:18.726439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:18.726447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:18.726617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64889 TClient is connected to server localhost:64889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:54:19.798931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:19.844212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.172467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.539545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.666814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:21.962123Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502367563765508:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:21.962221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:22.198510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502393333570928:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:22.198660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:24.599741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:24.648229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:24.723793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:24.762610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:24.854395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:24.940788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:25.214412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502406218473374:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:25.214543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:25.215236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502406218473380:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:25.261131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:25.294431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:54:25.294854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502406218473383:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:25.392408Z node 1 :TX_PROXY ERROR: Actor# [1:7489502406218473439:3483] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:30.184240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:30.252364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T16:54:30.270586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710672, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31638, MsgBus: 3035 2025-04-04T16:54:33.887911Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502438051508396:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:33.888707Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00133e/r3tmp/tmpYmfE3H/pdisk_1.dat 2025-04-04T16:54:34.303790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:34.303908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:34.306089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31638, node 2 2025-04-04T16:54:34.370118Z node 2 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:54:34.383485Z node 2 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:54:34.424584Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:34.597690Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:34.597717Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:34.597725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:34.597838Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3035 TClient is connected to server localhost:3035 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) ... 425453Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:49.738525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-04T16:54:50.681604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:54:50.815790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.215524Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01jr0vktcje7ae3kh871mc5kd8", SessionId: ydb://session/3?node_id=2&id=OTlhNzlmZDAtOGZkMTY3LTJiMDQ0Mjg2LWYxNGRjNjcy, Slow query, duration: 11.867453s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "CREATE OBJECT mySaSecretId (TYPE SECRET) WITH value = `mySaSecretValue`;", parameters: 0b 2025-04-04T16:54:55.646991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710726:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.664233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710727:0, at schemeshard: 72057594046644480 2025-04-04T16:54:55.674814Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710727, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6729, MsgBus: 20142 2025-04-04T16:54:57.110753Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489502542300727524:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00133e/r3tmp/tmpLCgzn3/pdisk_1.dat 2025-04-04T16:54:57.223401Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:57.438052Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:57.440592Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:57.440702Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-04T16:54:57.442403Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6729, node 3 2025-04-04T16:54:57.587568Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:57.587593Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:57.587601Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:57.587726Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20142 TClient is connected to server localhost:20142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:58.263637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:58.283782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:58.488510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:58.800079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:58.982126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:02.095795Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489502542300727524:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:02.095899Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:03.525437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489502568070532931:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:03.525552Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:03.586875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:03.794970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:03.927214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:03.986487Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:04.099623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:04.236759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:04.392088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489502572365500772:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:04.392257Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:04.392539Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489502572365500778:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:04.397445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:04.422948Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489502572365500780:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:04.523820Z node 3 :TX_PROXY ERROR: Actor# [3:7489502572365500837:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:06.146972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-04T16:55:06.952583Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:07.726528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:1, at schemeshard: 72057594046644480 2025-04-04T16:55:08.732044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-04T16:55:09.529153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.385685Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-04T16:55:11.129814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:11.212807Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:12.401663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:55:12.401701Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:16.497968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710754:0, at schemeshard: 72057594046644480 >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD] Test command err: Trying to start YDB, gRPC: 25657, MsgBus: 11336 2025-04-04T16:53:18.030213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502114972765542:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:18.033537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001512/r3tmp/tmpLrVNRI/pdisk_1.dat 2025-04-04T16:53:18.578164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:18.590106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:18.590199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:18.598068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25657, node 1 2025-04-04T16:53:20.549502Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:20.549528Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:20.549534Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:20.549928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11336 2025-04-04T16:53:23.031306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502114972765542:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:23.031353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:11336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:26.421857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:27.200447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:30.189932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.675926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:53:32.809631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:33.557396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:33.557768Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:36.809425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502192282178580:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.809558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:38.307141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.427922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.660885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.833407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.918141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:39.635255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:40.493339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502209462048361:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:40.493410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:40.493701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502209462048366:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:40.515326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:40.789953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502209462048368:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:40.925715Z node 1 :TX_PROXY ERROR: Actor# [1:7489502209462048424:3538] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:48.217951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:53:53.776592Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785633660, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 4041, MsgBus: 16990 2025-04-04T16:53:58.792096Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502288881174964:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:58.792141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001512/r3tmp/tmpfd1MUr/pdisk_1.dat 2025-04-04T16:54:00.014629Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:00.122436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.122568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.125243Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.151768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4041, node 2 2025-04-04T16:54:00.406904Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:00.406927Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:00.406934Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:00.407042Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16990 TClient is connected to server localhost:16990 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:01.395723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:01.441136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting. ... sues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.599056Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.680525Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.778789Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.863071Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.975386Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.171977Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:59.347304Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502550692920187:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.347480Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.347910Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502550692920192:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:59.355575Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:59.433864Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502550692920194:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:54:59.532057Z node 6 :TX_PROXY ERROR: Actor# [6:7489502550692920265:3479] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:02.379450Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:04.577563Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785704598, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 22019, MsgBus: 18276 2025-04-04T16:55:05.713407Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502575905630618:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:05.713478Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001512/r3tmp/tmpdArkER/pdisk_1.dat 2025-04-04T16:55:06.057174Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:06.100031Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:06.100159Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:06.105794Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22019, node 7 2025-04-04T16:55:06.331250Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:06.331283Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:06.331297Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:06.331507Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18276 TClient is connected to server localhost:18276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:55:07.650894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:07.681877Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:07.800713Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:08.178749Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:08.343846Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:10.716796Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489502575905630618:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:10.716898Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:12.987555Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502605970403483:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:12.987715Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:13.049265Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:13.114832Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:13.169632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:13.258992Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:13.350604Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:13.533137Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:13.663886Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502610265371322:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:13.664021Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:13.664305Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502610265371327:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:13.670298Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:13.692461Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489502610265371329:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:13.771051Z node 7 :TX_PROXY ERROR: Actor# [7:7489502610265371396:3471] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:15.361372Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:16.886030Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785716911, txId: 281474976710673] shutting down |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TLocksTest::BrokenSameShardLock [GOOD] >> TUniqueIndexTests::CreateTable |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:53:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:56:2057] recipient: [1:50:2094] Leader for TabletID 72057594037927937 is [1:55:2096] sender: [1:73:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:53:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:56:2057] recipient: [2:50:2094] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:73:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:75:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:77:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:55:2096] sender: [2:79:2057] recipient: [2:78:2109] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:81:2057] recipient: [2:78:2109] !Reboot 72057594037927937 (actor [2:55:2096]) rebooted! !Reboot 72057594037927937 (actor [2:55:2096]) tablet resolver refreshed! new actor is[2:80:2110] Leader for TabletID 72057594037927937 is [2:80:2110] sender: [2:134:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:53:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:56:2057] recipient: [3:50:2094] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:73:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:75:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:78:2057] recipient: [3:77:2109] Leader for TabletID 72057594037927937 is [3:55:2096] sender: [3:79:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:81:2057] recipient: [3:77:2109] !Reboot 72057594037927937 (actor [3:55:2096]) rebooted! !Reboot 72057594037927937 (actor [3:55:2096]) tablet resolver refreshed! new actor is[3:80:2110] Leader for TabletID 72057594037927937 is [3:80:2110] sender: [3:134:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:53:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:56:2057] recipient: [4:49:2094] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:73:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:55:2096] sender: [4:80:2057] recipient: [4:78:2109] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:82:2057] recipient: [4:78:2109] !Reboot 72057594037927937 (actor [4:55:2096]) rebooted! !Reboot 72057594037927937 (actor [4:55:2096]) tablet resolver refreshed! new actor is[4:81:2110] Leader for TabletID 72057594037927937 is [4:81:2110] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:53:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:56:2057] recipient: [5:49:2094] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:73:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:79:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:55:2096] sender: [5:83:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:85:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:55:2096]) rebooted! !Reboot 72057594037927937 (actor [5:55:2096]) tablet resolver refreshed! new actor is[5:84:2113] Leader for TabletID 72057594037927937 is [5:84:2113] sender: [5:138:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:53:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:56:2057] recipient: [6:50:2094] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:73:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:79:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:82:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:55:2096] sender: [6:83:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:85:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:55:2096]) rebooted! !Reboot 72057594037927937 (actor [6:55:2096]) tablet resolver refreshed! new actor is[6:84:2113] Leader for TabletID 72057594037927937 is [6:84:2113] sender: [6:138:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:53:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:56:2057] recipient: [7:50:2094] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:73:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:83:2057] recipient: [7:82:2112] Leader for TabletID 72057594037927937 is [7:55:2096] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:86:2057] recipient: [7:82:2112] !Reboot 72057594037927937 (actor [7:55:2096]) rebooted! !Reboot 72057594037927937 (actor [7:55:2096]) tablet resolver refreshed! new actor is[7:85:2113] Leader for TabletID 72057594037927937 is [7:85:2113] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:53:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:56:2057] recipient: [8:49:2094] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:73:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:85:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:55:2096] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:55:2096]) rebooted! !Reboot 72057594037927937 (actor [8:55:2096]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:53:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:56:2057] recipient: [9:50:2094] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:73:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:55:2096]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:82:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:55:2096] sender: [9:86:2057] recipient: [9:84:2114] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:88:2057] recipient: [9:84:2114] !Reboot 72057594037927937 (actor [9:55:2096]) rebooted! !Reboot 72057594037927937 (actor [9:55:2096]) tablet resolver refreshed! new actor is[9:87:2115] Leader for TabletID 72057594037927937 is [9:87:2115] sender: [9:141:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:53:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:56:2057] recipient: [10:50:2094] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:73:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:55:2096] sender: [10:87:2057] recipient: [10:85:2114] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:89:2057] recipient: [10:85:2114] !Reboot 72057594037927937 (actor [10:55:2096]) rebooted! !Reboot 72057594037927937 (actor [10:55:2096]) tablet resolver refreshed! new actor is[10:88:2115] Leader for TabletID 72057594037927937 is [10:88:2115] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:53:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:56:2057] recipient: [11:50:2094] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:73:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:55:2096]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:84:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:55:2096] sender: [11:88:2057] recipient: [11:86:2115] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:90:2057] recipient: [11:86:2115] !Reboot 72057594037927937 (actor [11:55:2096]) rebooted! !Reboot 72057594037927937 (actor [11:55:2096]) tablet resolver refreshed! new actor is[11:89:2116] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:109:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:53:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:56:2057] recipient: [12:49:2094] Leader for TabletID 72057594037927937 is [12:55:2096] sender: [12:73:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:55:2 ... 
2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [106:53:2057] recipient: [106:50:2094] Leader for TabletID 72057594037927937 is [106:55:2096] sender: [106:56:2057] recipient: [106:50:2094] Leader for TabletID 72057594037927937 is [106:55:2096] sender: [106:73:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:55:2096]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:55:2096] sender: [106:126:2057] recipient: [106:36:2083] Leader for TabletID 72057594037927937 is [106:55:2096] sender: [106:129:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:55:2096] sender: [106:130:2057] recipient: [106:128:2147] Leader for TabletID 72057594037927937 is [106:131:2148] sender: [106:132:2057] recipient: [106:128:2147] !Reboot 72057594037927937 (actor [106:55:2096]) rebooted! !Reboot 72057594037927937 (actor [106:55:2096]) tablet resolver refreshed! new actor is[106:131:2148] Leader for TabletID 72057594037927937 is [106:131:2148] sender: [106:151:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:53:2057] recipient: [107:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:53:2057] recipient: [107:50:2094] Leader for TabletID 72057594037927937 is [107:55:2096] sender: [107:56:2057] recipient: [107:50:2094] Leader for TabletID 72057594037927937 is [107:55:2096] sender: [107:73:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:55:2096] sender: [107:129:2057] recipient: [107:36:2083] Leader for TabletID 72057594037927937 is [107:55:2096] sender: [107:132:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:55:2096] sender: [107:133:2057] recipient: [107:131:2150] Leader for TabletID 72057594037927937 is [107:134:2151] sender: [107:135:2057] recipient: [107:131:2150] !Reboot 72057594037927937 (actor [107:55:2096]) rebooted! !Reboot 72057594037927937 (actor [107:55:2096]) tablet resolver refreshed! new actor is[107:134:2151] Leader for TabletID 72057594037927937 is [107:134:2151] sender: [107:188:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:53:2057] recipient: [108:51:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:53:2057] recipient: [108:51:2094] Leader for TabletID 72057594037927937 is [108:55:2096] sender: [108:56:2057] recipient: [108:51:2094] Leader for TabletID 72057594037927937 is [108:55:2096] sender: [108:73:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:55:2096] sender: [108:129:2057] recipient: [108:36:2083] Leader for TabletID 72057594037927937 is [108:55:2096] sender: [108:132:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:55:2096] sender: [108:133:2057] recipient: [108:131:2150] Leader for TabletID 72057594037927937 is [108:134:2151] sender: [108:135:2057] recipient: [108:131:2150] !Reboot 72057594037927937 (actor [108:55:2096]) rebooted! !Reboot 72057594037927937 (actor [108:55:2096]) tablet resolver refreshed! 
new actor is[108:134:2151] Leader for TabletID 72057594037927937 is [108:134:2151] sender: [108:188:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:53:2057] recipient: [109:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:53:2057] recipient: [109:49:2094] Leader for TabletID 72057594037927937 is [109:55:2096] sender: [109:56:2057] recipient: [109:49:2094] Leader for TabletID 72057594037927937 is [109:55:2096] sender: [109:73:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:55:2096] sender: [109:130:2057] recipient: [109:36:2083] Leader for TabletID 72057594037927937 is [109:55:2096] sender: [109:133:2057] recipient: [109:132:2150] Leader for TabletID 72057594037927937 is [109:55:2096] sender: [109:134:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:135:2151] sender: [109:136:2057] recipient: [109:132:2150] !Reboot 72057594037927937 (actor [109:55:2096]) rebooted! !Reboot 72057594037927937 (actor [109:55:2096]) tablet resolver refreshed! new actor is[109:135:2151] Leader for TabletID 72057594037927937 is [109:135:2151] sender: [109:153:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:53:2057] recipient: [110:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:53:2057] recipient: [110:50:2094] Leader for TabletID 72057594037927937 is [110:55:2096] sender: [110:56:2057] recipient: [110:50:2094] Leader for TabletID 72057594037927937 is [110:55:2096] sender: [110:73:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:55:2096] sender: [110:132:2057] recipient: [110:36:2083] Leader for TabletID 72057594037927937 is [110:55:2096] sender: [110:135:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:55:2096] sender: [110:136:2057] recipient: [110:134:2152] Leader for TabletID 72057594037927937 is [110:137:2153] sender: [110:138:2057] recipient: [110:134:2152] !Reboot 72057594037927937 (actor [110:55:2096]) rebooted! !Reboot 72057594037927937 (actor [110:55:2096]) tablet resolver refreshed! new actor is[110:137:2153] Leader for TabletID 72057594037927937 is [110:137:2153] sender: [110:191:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:53:2057] recipient: [111:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:53:2057] recipient: [111:50:2094] Leader for TabletID 72057594037927937 is [111:55:2096] sender: [111:56:2057] recipient: [111:50:2094] Leader for TabletID 72057594037927937 is [111:55:2096] sender: [111:73:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:55:2096]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:55:2096] sender: [111:132:2057] recipient: [111:36:2083] Leader for TabletID 72057594037927937 is [111:55:2096] sender: [111:135:2057] recipient: [111:134:2152] Leader for TabletID 72057594037927937 is [111:55:2096] sender: [111:136:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:137:2153] sender: [111:138:2057] recipient: [111:134:2152] !Reboot 72057594037927937 (actor [111:55:2096]) rebooted! !Reboot 72057594037927937 (actor [111:55:2096]) tablet resolver refreshed! 
new actor is[111:137:2153] Leader for TabletID 72057594037927937 is [111:137:2153] sender: [111:191:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:53:2057] recipient: [112:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:53:2057] recipient: [112:50:2094] Leader for TabletID 72057594037927937 is [112:55:2096] sender: [112:56:2057] recipient: [112:50:2094] Leader for TabletID 72057594037927937 is [112:55:2096] sender: [112:73:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:55:2096]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:55:2096] sender: [112:133:2057] recipient: [112:36:2083] Leader for TabletID 72057594037927937 is [112:55:2096] sender: [112:136:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:55:2096] sender: [112:137:2057] recipient: [112:135:2152] Leader for TabletID 72057594037927937 is [112:138:2153] sender: [112:139:2057] recipient: [112:135:2152] !Reboot 72057594037927937 (actor [112:55:2096]) rebooted! !Reboot 72057594037927937 (actor [112:55:2096]) tablet resolver refreshed! new actor is[112:138:2153] Leader for TabletID 72057594037927937 is [112:138:2153] sender: [112:192:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:53:2057] recipient: [113:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:53:2057] recipient: [113:49:2094] Leader for TabletID 72057594037927937 is [113:55:2096] sender: [113:56:2057] recipient: [113:49:2094] Leader for TabletID 72057594037927937 is [113:55:2096] sender: [113:73:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:55:2096]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:55:2096] sender: [113:136:2057] recipient: [113:36:2083] Leader for TabletID 72057594037927937 is [113:55:2096] sender: [113:138:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:55:2096] sender: [113:140:2057] recipient: [113:139:2155] Leader for TabletID 72057594037927937 is [113:141:2156] sender: [113:142:2057] recipient: [113:139:2155] !Reboot 72057594037927937 (actor [113:55:2096]) rebooted! !Reboot 72057594037927937 (actor [113:55:2096]) tablet resolver refreshed! new actor is[113:141:2156] Leader for TabletID 72057594037927937 is [113:141:2156] sender: [113:195:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:53:2057] recipient: [114:49:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:53:2057] recipient: [114:49:2094] Leader for TabletID 72057594037927937 is [114:55:2096] sender: [114:56:2057] recipient: [114:49:2094] Leader for TabletID 72057594037927937 is [114:55:2096] sender: [114:73:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:55:2096]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:55:2096] sender: [114:136:2057] recipient: [114:36:2083] Leader for TabletID 72057594037927937 is [114:55:2096] sender: [114:139:2057] recipient: [114:138:2155] Leader for TabletID 72057594037927937 is [114:55:2096] sender: [114:140:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:141:2156] sender: [114:142:2057] recipient: [114:138:2155] !Reboot 72057594037927937 (actor [114:55:2096]) rebooted! !Reboot 72057594037927937 (actor [114:55:2096]) tablet resolver refreshed! 
new actor is[114:141:2156] Leader for TabletID 72057594037927937 is [114:141:2156] sender: [114:195:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:53:2057] recipient: [115:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:53:2057] recipient: [115:50:2094] Leader for TabletID 72057594037927937 is [115:55:2096] sender: [115:56:2057] recipient: [115:50:2094] Leader for TabletID 72057594037927937 is [115:55:2096] sender: [115:73:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:55:2096]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:55:2096] sender: [115:137:2057] recipient: [115:36:2083] Leader for TabletID 72057594037927937 is [115:55:2096] sender: [115:140:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:55:2096] sender: [115:141:2057] recipient: [115:139:2155] Leader for TabletID 72057594037927937 is [115:142:2156] sender: [115:143:2057] recipient: [115:139:2155] !Reboot 72057594037927937 (actor [115:55:2096]) rebooted! !Reboot 72057594037927937 (actor [115:55:2096]) tablet resolver refreshed! new actor is[115:142:2156] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:53:2057] recipient: [116:50:2094] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:53:2057] recipient: [116:50:2094] Leader for TabletID 72057594037927937 is [116:55:2096] sender: [116:56:2057] recipient: [116:50:2094] Leader for TabletID 72057594037927937 is [116:55:2096] sender: [116:73:2057] recipient: [116:14:2061] |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Selects [GOOD] Test command err: 2025-04-04T16:51:28.382162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501642404867804:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.382335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bfc/r3tmp/tmplhlNaK/pdisk_1.dat 2025-04-04T16:51:28.969510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.969613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.972560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.016826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27089, node 1 2025-04-04T16:51:29.045468Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:29.046003Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:29.219305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:29.219324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:29.219332Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:29.219444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28695 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:29.632316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.657002Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:51:29.771915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T16:51:29.772105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.772170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T16:51:29.772320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-04T16:51:29.772435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T16:51:29.772798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:51:29.772818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.772866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T16:51:29.772888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T16:51:29.774420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-04T16:51:29.774580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, 
database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-04-04T16:51:29.774734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T16:51:29.774748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T16:51:29.775064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T16:51:29.775133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T16:51:29.775151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489501646699835527:2405], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-04T16:51:29.775172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489501646699835527:2405], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-04T16:51:29.775206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.775237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.775283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.775310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 
2025-04-04T16:51:29.779314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:51:29.780511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.780574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.780586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-04T16:51:29.780599Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-04T16:51:29.780609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T16:51:29.780749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.780770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.780773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-04-04T16:51:29.780794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-04T16:51:29.780800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T16:51:29.780834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-04T16:51:29.780950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:51:29.780958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-04-04T16:51:29.780976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:51:29.783036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-04-04T16:51:29.783160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-04-04T16:51:29.783234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 
281474976710658 2025-04-04T16:51:29.783269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-04-04T16:51:29.784916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743785489831, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:51:29.784999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743785489831 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T16:51:29.785031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.785159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-04-04T16:51:29.785199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-04-04T16:51:29.785301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG ... :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded top size# 0, time# 2025-04-04T16:55:14.412854Z 2025-04-04T16:55:14.642066Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jr0vmrmz995cad1vxn34ab1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=ODg3OTY4MzMtMzQxMzAxYjktNDczMDYxZDUtM2QxYTAxZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:14.645631Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7489502615572774372:2438], owner: [26:7489502615572774369:2436], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-04T16:55:14.656953Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7489502615572774372:2438], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:14.657010Z node 26 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-04T16:55:14.657113Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:14.657667Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-04-04T16:55:14.657742Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502615572774372:2438], row count: 0, finished: 0 
2025-04-04T16:55:14.658026Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:14.658407Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-04-04T16:55:14.658462Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502615572774372:2438], row count: 0, finished: 0 2025-04-04T16:55:14.661702Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:14.662345Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:14.662456Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502615572774372:2438], row count: 2, finished: 0 2025-04-04T16:55:14.662650Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7489502615572774372:2438], owner: [26:7489502615572774369:2436], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-04T16:55:14.668915Z node 26 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [26:7489502564033164604:2083], database# , query hash# 3187945588805523718, cpu time# 315809 2025-04-04T16:55:14.669889Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785714640, txId: 281474976715687] shutting down 2025-04-04T16:55:14.899176Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jr0vms00cr0pmhgxk6zjd7qd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=ZDA1NDAzNDEtZWZkYzU5YTAtNDQ3NDlmOTEtM2VkOGZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:14.911103Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7489502615572774412:2447], owner: [26:7489502615572774409:2445], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-04T16:55:14.915324Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7489502615572774412:2447], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:14.915381Z node 26 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-04-04T16:55:14.915511Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:14.916173Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-04-04T16:55:14.916248Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502615572774412:2447], row count: 0, finished: 0 2025-04-04T16:55:14.916486Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:14.916853Z node 26 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-04-04T16:55:14.916919Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502615572774412:2447], row count: 0, finished: 0 2025-04-04T16:55:14.917015Z node 26 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:55:14.917405Z node 26 :SYSTEM_VIEWS 
TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-04-04T16:55:14.917495Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502615572774412:2447], row count: 1, finished: 0 2025-04-04T16:55:14.917677Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7489502615572774412:2447], owner: [26:7489502615572774409:2445], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-04-04T16:55:14.922368Z node 26 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [26:7489502564033164604:2083], database# , query hash# 15123460272068726277, cpu time# 218469 2025-04-04T16:55:14.926737Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785714893, txId: 281474976715689] shutting down 2025-04-04T16:55:14.943277Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 29 2025-04-04T16:55:14.943753Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:14.943887Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 28 2025-04-04T16:55:14.944339Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:14.944567Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 27 2025-04-04T16:55:14.944944Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:14.944523Z node 27 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:14.950564Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 30 2025-04-04T16:55:14.951608Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T16:55:14.946670Z node 30 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:14.963700Z node 26 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[30:7489502569240123816:2106], Type=268959746 >> TAsyncIndexTests::Decimal |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:19.255077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:19.255195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:19.255238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:19.255290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:19.255334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:19.255363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:19.255433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:19.255541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:19.255903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:19.344425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:19.344482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:19.367301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:19.367569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:19.367730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:19.385059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:19.385324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:19.385932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:19.394492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:19.404232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:19.405622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:19.405691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:19.405888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:19.405941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:19.406003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:19.406151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.423235Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:19.607541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:19.607837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.608045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:19.608288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:19.608347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.611069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:19.611219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:19.611434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.611502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:19.611572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:19.611619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:19.613876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.613948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:19.613986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:19.616698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.616800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.616844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:19.616906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:19.620595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:19.622881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:19.623099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:19.624263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:19.624441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:19.624495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:19.624808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:19.624885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:19.625053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:19.625571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:19.647907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:19.647977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:19.648176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:19.648221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:19.648643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:19.648727Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:19.648854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:19.648890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:19.648962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:19.649011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-04T16:55:19.649053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:19.649095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:19.649130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:19.649167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:19.649250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:19.649299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:19.649334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:19.660240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:19.660375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:19.660413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2025-04-04T16:55:20.367120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-04-04T16:55:20.367188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-04T16:55:20.367244Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2025-04-04T16:55:20.367383Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-04-04T16:55:20.367723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 
2025-04-04T16:55:20.367786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-04-04T16:55:20.367834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-04-04T16:55:20.367944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:20.370973Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T16:55:20.371062Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:55:20.371363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-04T16:55:20.371484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-04T16:55:20.371620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-04T16:55:20.371645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-04T16:55:20.371675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-04T16:55:20.371892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.371997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:20.372049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply 
TEvOperationPlan: step# 5000006 2025-04-04T16:55:20.372087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-04T16:55:20.373797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.373841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-04T16:55:20.373900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T16:55:20.373932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T16:55:20.373968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T16:55:20.374000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T16:55:20.374030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-04T16:55:20.374071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:122:2148] message: TxId: 281474976710760 2025-04-04T16:55:20.374100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T16:55:20.374123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-04T16:55:20.374142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-04T16:55:20.374187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-04T16:55:20.378464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-04T16:55:20.378541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-04T16:55:20.378600Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-04T16:55:20.378693Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:55:20.380424Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T16:55:20.380516Z node 1 :BUILD_INDEX DEBUG: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:55:20.380571Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T16:55:20.382032Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T16:55:20.382123Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T16:55:20.382157Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-04T16:55:20.382273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:55:20.382308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:474:2435] TestWaitNotification: OK eventTxId 102 >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> TUniqueIndexTests::CreateTable [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TAsyncIndexTests::Decimal [GOOD] >> TVectorIndexTests::CreateTableMultiColumn >> SystemView::QueryStatsRetries [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> SystemView::StoragePoolsFields >> 
TAsyncIndexTests::MergeBothWithReboots[TabletReboots] >> TVectorIndexTests::CreateTable >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] |76.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.9%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:20.670056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:20.670170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:20.670210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:20.670576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:20.670648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:20.670678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:20.670747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:20.670821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:20.671175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:20.771598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:20.771650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:20.789695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:20.789933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:20.790124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:20.802044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:20.802407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:20.803121Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.818458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:20.824091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.825457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:20.825523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.825706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:20.825758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:20.825811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:20.826008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.841620Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:20.998714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:20.999007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.999225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:20.999443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:20.999500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.003319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.003480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:21.003725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.003805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:21.003871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:21.003918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:21.006033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.006097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:21.006135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:21.008175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.008261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.008314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:21.008372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.012199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:21.014294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:21.014468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:21.015506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.015654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:21.015707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:21.016008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:21.016074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:21.016228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:21.016384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:21.018878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:21.018932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:21.019125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-04T16:55:21.019165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:21.019502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.019564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:21.019670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:21.019702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.019745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:21.019793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.019828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:21.019890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.019926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:21.019954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:21.020025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:21.020064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:21.020106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:21.021977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:21.022085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:21.022140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.336839Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.336878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T16:55:21.336913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-04T16:55:21.337355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:21.337397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-04T16:55:21.337533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:21.337574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T16:55:21.337636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:21.337674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.337700Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.337745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T16:55:21.337786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T16:55:21.343931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.344022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.352441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.352581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.352670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.352722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.352806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.353092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.353270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.353311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-04T16:55:21.353405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T16:55:21.353439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T16:55:21.353476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T16:55:21.353513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T16:55:21.353565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-04T16:55:21.353832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.353863Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T16:55:21.353929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T16:55:21.353962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:21.353990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T16:55:21.354011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:21.354032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-04T16:55:21.354106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-04-04T16:55:21.354148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:21.354193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T16:55:21.354873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T16:55:21.355087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:55:21.355139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-04T16:55:21.355165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-04T16:55:21.355253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:55:21.355287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-04T16:55:21.355308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-04T16:55:21.355352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T16:55:21.357722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:55:21.357780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-04-04T16:55:21.358327Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:55:21.358584Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 289us result status StatusSuccess 2025-04-04T16:55:21.359423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:21.341791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:21.341892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:21.341931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:21.341999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:21.342058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:21.342088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:21.342149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:21.342222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:21.342563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:21.430145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:21.430205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:21.442777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:21.443075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:21.443246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState 
as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:21.454673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:21.454995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:21.455606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.456499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:21.462079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:21.463514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:21.463579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:21.463771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:21.463830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:21.463885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:21.464039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.470646Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:21.602103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:21.602437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.602708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:21.602936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:21.602982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.607338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.607499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:21.607737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.607810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:21.607863Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:21.607898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:21.611084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.611151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:21.611179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:21.613397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.613485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.613610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:21.613659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.617127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:21.619511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:21.619725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:21.620830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.621095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:21.621142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:21.621409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:21.621464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:21.621594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:21.621660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:21.624020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:21.624073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:21.624276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:21.624319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:21.624784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.624850Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:21.624953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:21.624989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.625035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:21.625074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.625109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:21.625154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:21.625186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:21.625217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:21.625295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:21.625330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:21.625360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:21.627332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:21.627463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:21.627501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.928275Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.928308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T16:55:21.928345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-04T16:55:21.929065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:21.929108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-04T16:55:21.929211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:21.929247Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T16:55:21.929314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T16:55:21.929355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:21.929381Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.929412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T16:55:21.929443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T16:55:21.936610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.937134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.944046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.944437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.944784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.945532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:55:21.945778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.946283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T16:55:21.946344Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-04T16:55:21.946460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T16:55:21.946495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T16:55:21.946554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T16:55:21.946590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T16:55:21.946642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-04T16:55:21.947072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.947508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:55:21.947566Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T16:55:21.947634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T16:55:21.947667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:21.947698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T16:55:21.947722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:21.947764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-04T16:55:21.947852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-04-04T16:55:21.947903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T16:55:21.947970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T16:55:21.948005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T16:55:21.948130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:55:21.948173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-04T16:55:21.948195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-04T16:55:21.948224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:55:21.948246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-04T16:55:21.948266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-04T16:55:21.948309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T16:55:21.951469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:55:21.951533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-04-04T16:55:21.952158Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:55:21.952455Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 322us result status StatusSuccess 2025-04-04T16:55:21.953364Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 
UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-04-04T16:54:14.569145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502356283288472:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:14.575783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea6/r3tmp/tmp2JPrZY/pdisk_1.dat 2025-04-04T16:54:15.592206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:15.592678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:15.592749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:15.663272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:15.675349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:24766 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T16:54:16.288000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:16.339627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:16.360231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:16.731258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:16.854905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea6/r3tmp/tmpI7u7em/pdisk_1.dat 2025-04-04T16:54:20.222608Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:20.414488Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:20.434139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:20.434705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:20.439464Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64562 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:20.785629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.794638Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:20.808013Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:54:20.814789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.924922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:20.997778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:26.558287Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489502408201135250:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea6/r3tmp/tmptbNNCU/pdisk_1.dat 2025-04-04T16:54:26.774181Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:26.816641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:26.816763Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:26.823759Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:26.852875Z node 3 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:28037 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:54:27.401962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:27.409921Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:27.426224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:54:27.508540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:27.575749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:32.993325Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489502436288285140:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea6/r3tmp/tmpC2bl97/pdisk_1.dat 2025-04-04T16:54:33.295337Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:33.600494Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:33.663096Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:33.663183Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:33.675288Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27075 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDes ... 7968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3815 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T16:54:55.619208Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:54:55.678928Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:55.905285Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:54:56.030457Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:00.344722Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489502553542618104:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:00.344787Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea6/r3tmp/tmpx8oAP6/pdisk_1.dat 2025-04-04T16:55:00.828691Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:00.831208Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:00.831319Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:00.842034Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24998 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:55:01.437987Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:01.462489Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:01.490804Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T16:55:01.503361Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:01.666953Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:01.746959Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea6/r3tmp/tmpJb8hAq/pdisk_1.dat 2025-04-04T16:55:07.890426Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:07.931025Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:07.944720Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:07.944823Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:07.949104Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25212 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:55:08.343818Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:08.370367Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:55:08.376966Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:08.517693Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:08.623190Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:13.950545Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489502612580150727:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:13.950825Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea6/r3tmp/tmpjPKcVL/pdisk_1.dat 2025-04-04T16:55:14.133755Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:14.155412Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:14.155545Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:14.158124Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9998 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T16:55:14.524127Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:14.553443Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T16:55:14.560851Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:14.654303Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:14.739510Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] Test command err: Trying to start YDB, gRPC: 8126, MsgBus: 17917 2025-04-04T16:53:17.149177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502111065728924:2194];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014fc/r3tmp/tmpMo6fBv/pdisk_1.dat 2025-04-04T16:53:17.493471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:53:17.881770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:17.893947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:17.894045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:17.909600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8126, node 1 2025-04-04T16:53:18.131527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:18.131547Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:18.131553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:18.131658Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17917 TClient is connected to server localhost:17917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:21.131014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:21.807325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:53:21.854042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:53:22.141035Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502111065728924:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:22.141071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:22.480144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:22.976735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:23.942418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.866471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:32.866522Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:33.597480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502179785207383:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:33.598585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:34.448068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.540521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.627634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.723313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.780277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.899614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:35.384895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502188375142527:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:35.384991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:35.388582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502188375142533:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:35.400730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:35.467295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502188375142535:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:35.549588Z node 1 :TX_PROXY ERROR: Actor# [1:7489502188375142593:3525] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:38.171108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.739646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T16:53:39.204737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
<main>: Warning: Type annotation, code: 1030
<main>:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:5:26: Warning: At function: Filter, At function: Coalesce
<main>:5:49: Warning: At function: SqlIn
<main>:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
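The YQL warning above (code 1108) names its own remedy. A minimal, hypothetical sketch of applying it: the pragma name is quoted verbatim from the warning, while the table path and column below are invented for illustration and are not part of this test run.

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

SELECT key, value
FROM `/Root/SomeTable`           -- hypothetical table path
WHERE nullable_col IN (1, 2, 3); -- IN over a nullable column, the case the warning flags

With the pragma enabled, IN follows ANSI semantics for empty or nullable item collections, so under three-valued logic an expression like 1 IN (2, NULL) evaluates to NULL rather than to a plain boolean; the legacy behavior the warning guards against can differ, which is the "unexpected result" code 1108 refers to.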
<main>: Warning: Type annotation, code: 1030
<main>:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:5:26: Warning: At function: Filter, At function: Coalesce
<main>:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 23532, MsgBus: 25254 2025-04-04T16:53:59.602688Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502291929303139:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:59.602745Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014fc/r3tmp/tmpt6s3UI/pdisk_1.dat 2025-04-04T16:54:00.186462Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.207769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.207868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.211365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23532, node 2 2025-04-04T16:54:00.499145Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:00.499180Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:00.499188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:00.499347Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25254 TClient is connected to server localhost:25254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" Eff ... lId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:57.353073Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:57.373266Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489502543729025134:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:57.464918Z node 5 :TX_PROXY ERROR: Actor# [5:7489502543729025189:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:00.056627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:00.320815Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T16:55:00.567523Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
<main>: Warning: Type annotation, code: 1030
<main>:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:6:22: Warning: At function: Filter, At function: Coalesce
<main>:7:31: Warning: At function: SqlIn
:7:31: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 62657, MsgBus: 26241 2025-04-04T16:55:05.750179Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489502578482315808:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:05.755057Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014fc/r3tmp/tmp79bjNi/pdisk_1.dat 2025-04-04T16:55:06.020796Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:06.057562Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:06.074697Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:06.079404Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62657, node 6 2025-04-04T16:55:06.374989Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:06.375019Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:06.375032Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:06.375222Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26241 TClient is connected to server localhost:26241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:07.646206Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:07.666899Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:07.685890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:07.914196Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:08.191292Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:08.303232Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:10.753312Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489502578482315808:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:10.753416Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:12.410132Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502608547088594:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:12.410331Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:12.481069Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:12.540090Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:12.584211Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:12.658170Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:12.704082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:12.853246Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:12.949714Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502608547089121:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:12.949881Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:12.955504Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489502608547089126:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:12.971817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:12.998481Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489502608547089128:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:13.079131Z node 6 :TX_PROXY ERROR: Actor# [6:7489502612842056480:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:15.087265Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:15.181407Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T16:55:15.319205Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
<main>: Warning: Type annotation, code: 1030
<main>:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:6:26: Warning: At function: Filter, At function: Coalesce
<main>:7:37: Warning: At function: SqlIn
<main>:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
<main>: Warning: Type annotation, code: 1030
<main>:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:6:26: Warning: At function: Filter, At function: Coalesce
<main>:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> Cdc::Drop[YdsRunner] [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpRanges::NoFullScanAtDNFPredicate |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> Cdc::Drop[TopicRunner] >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |76.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |76.9%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> TVectorIndexTests::CreateTableMultiColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> TVectorIndexTests::CreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: 2025-04-04T16:55:08.818194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:08.818878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:08.818952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ef9/r3tmp/tmppdxrRa/pdisk_1.dat 2025-04-04T16:55:09.407423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:09.466573Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:09.469901Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-04T16:55:09.510045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:09.510224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:09.522221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:09.619293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:09.670445Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:55:09.670730Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:09.728270Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:09.728431Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:09.730331Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:55:09.730500Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:55:09.730562Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:55:09.731183Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:09.731416Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:09.731526Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:55:09.743409Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:09.778220Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:55:09.780187Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:09.780307Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:55:09.780335Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:09.780377Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:55:09.780413Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-04T16:55:09.780823Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:55:09.780916Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:55:09.781015Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:09.781052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:09.781096Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:55:09.781138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:09.781212Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:55:09.781611Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:09.782374Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:55:09.782510Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:55:09.784324Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:09.795181Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:09.795296Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:09.938408Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T16:55:09.944667Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:09.944765Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:09.945681Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:09.945743Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:09.945812Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:09.946111Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:09.946367Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:09.947127Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:09.947204Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:55:09.949916Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:55:09.950425Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:09.952367Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:09.952438Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:09.953645Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:55:09.953717Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:09.955587Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:09.955965Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:09.956006Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:09.956058Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:09.956118Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:09.956170Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:09.956262Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:09.963398Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:09.963637Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:09.963694Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:09.975905Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:09.976091Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:09.976157Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-04T16:55:09.976210Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T16:55:09.977750Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:10.004246Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:10.291973Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:10.292035Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:10.292195Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:10.292228Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-04T16:55:10.292274Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:10.292451Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-04T16:55:10.292553Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:10.292761Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:10.293390Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:10.361901Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T16:55:10.361980Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:10.362020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:10.362077Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:21.475190Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:21.475909Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:55:21.476006Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:21.477384Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:21.477445Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:21.477502Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:21.477571Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:21.477626Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:21.477722Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:21.478479Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:21.481015Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:21.481089Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:21.481506Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:21.492454Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:21.492648Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:21.492708Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 
72075186224037888 txId# 281474976715658 2025-04-04T16:55:21.492749Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T16:55:21.494530Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:21.519898Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:21.730170Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:21.730299Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:21.730650Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:21.730708Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:21.730762Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:21.731013Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-04T16:55:21.731168Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:21.731508Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:21.734406Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:21.775085Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T16:55:21.775222Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:21.775282Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:21.775341Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:21.775426Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:21.775502Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-04T16:55:21.775606Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:21.778480Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-04T16:55:21.778589Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:21.785517Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:885:2723], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:21.785622Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:896:2728], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:21.786064Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:21.791701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:55:21.799490Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:21.966994Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:21.972044Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:899:2731], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:55:21.999734Z node 3 :TX_PROXY ERROR: Actor# [3:955:2768] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:22.107282Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vmzxq3zcjx82p7bny4e9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mzc1MDU1NDktYTA2OTVkMWMtZDkzM2VlMWYtMWU1ZWJkODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:22.108013Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:982:2786], serverId# [3:983:2787], sessionId# [0:0:0] 2025-04-04T16:55:22.108262Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:22.110112Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743785722109991 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:22.121456Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:22.121652Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:22.121724Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:22.277353Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vn08e3pj33v2q3rakgbhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDBkNjNiZWItODkzZWRjMzAtOWY2OGRiMTUtNGQ5ZGMxYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:22.277819Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:22.278739Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743785722278645 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:22.289872Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:22.290029Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:22.290083Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:22.388774Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jr0vn0dp8vvw36ks872pejvt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjlmY2ZhOTQtMjVlOWY1ZTQtODM5YjYzZTAtMzQzMWEzODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:22.389282Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:22.390685Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743785722390540 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:22.403034Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:22.403222Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:22.403271Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:22.405476Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1021:2818], serverId# [3:1022:2819], sessionId# [0:0:0] 2025-04-04T16:55:22.412612Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1023:2820], serverId# [3:1024:2821], sessionId# [0:0:0] >> YdbTableSplit::SplitByLoadWithReads [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:22.693212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:22.693351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.693389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:22.693436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:22.693469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:22.693494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:22.693548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.693627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:22.693993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:22.773271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:22.773334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:22.787502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:22.787716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:22.787875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:22.799879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:22.800139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:22.800655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.801522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:22.807509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.809002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:22.809077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.809309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:22.809369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:22.809426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:22.809579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.817582Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:22.987871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:22.988116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.988342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:22.988587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:22.988654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.991065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.991224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:22.991435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.991527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:22.991587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:22.991626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:22.993774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.993843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:22.993888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:22.995923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.995996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.996042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.996102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.000256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:23.005230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:23.005446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:23.006654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.006838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:23.006902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.007251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:23.007334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.007517Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:23.007612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:23.010040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.010096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.010333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.010384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:23.010879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.010955Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:23.011066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.011103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.011155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.011202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.011243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:23.011295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.011336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:23.011374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:23.011472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:23.011525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:23.011567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:23.013626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:23.013763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:23.013803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
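The DescribePath output that follows reports idx_vector as an EIndexTypeGlobalVectorKmeansTree index on the embedding column, covering covered1 and covered2, with settings metric: DISTANCE_COSINE, vector_type: VECTOR_TYPE_FLOAT, vector_dimension: 1024, backed by the private indexImplLevelTable and indexImplPostingTable tables. For orientation, a hedged sketch of the user-facing YQL that would request an index with those settings; the test itself builds the index through the schemeshard directly, the vector_kmeans_tree option spelling follows the documented syntax and is an assumption here, and the connection parameters are placeholders:

    # A hedged sketch, not code from this unittest: a user-level request for
    # the same index settings the describe output below reports. The
    # vector_kmeans_tree option spelling is assumed from the documented YQL
    # syntax; endpoint, database, table and column names are placeholders
    # mirroring the log.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")
    driver.wait(timeout=5)
    pool = ydb.QuerySessionPool(driver)

    pool.execute_with_retries("""
        ALTER TABLE vectors ADD INDEX idx_vector
        GLOBAL USING vector_kmeans_tree
        ON (embedding) COVER (covered1, covered2)
        WITH (distance=cosine, vector_type="float", vector_dimension=1024);
    """)

    pool.stop()
    driver.stop()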
Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 
MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:23.454285Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:55:23.454562Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 288us result status StatusSuccess 2025-04-04T16:55:23.455093Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:23.456687Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:55:23.457034Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 1.09ms result status StatusSuccess 2025-04-04T16:55:23.457392Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" 
TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:22.987746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:22.987844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.987877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:22.987914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:22.987946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:22.987965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:22.988004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 
0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.988054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:22.988333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:23.056493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:23.056535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:23.065715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:23.065907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:23.066043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:23.075876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:23.076085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:23.076638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.077268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:23.082106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.083342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.083405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.083591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:23.083640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.083682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:23.083837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.095971Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:23.225265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:23.225493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.225657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:23.225821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:23.225868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.228979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.229118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:23.229312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.229380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:23.229441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:23.229483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:23.232310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.232368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:23.232404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:23.235543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.235624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.235699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.235755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.245626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:23.259772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:23.259929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:23.260654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.260775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:23.260810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.261023Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:23.261069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.261181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:23.261244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:23.263431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.263483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.263659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.263700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:23.264064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.264131Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:23.264221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.264252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.264294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.264331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.264366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:23.264404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.264436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:23.264463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:23.264526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:23.264564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:23.264597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:23.266464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:23.266576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:23.266630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
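The boot sequence above repeats, for TVectorIndexTests::CreateTable, the schemeshard lifecycle already seen in the previous test case: TxInitSchema and TTxUpgradeSchema, the AlterSubDomain operation stepping through states 2 -> 3 -> 128 -> 240 as the fake coordinator plans it at step 5000001, then publication to the scheme board. From a client, the closest analogue to the TTxDescribeScheme checks in these tests is a table describe that lists the index; a minimal sketch under assumed placeholder connection parameters (the private indexImpl* tables are reached only through the index, not described directly):

    # An illustrative sketch, not part of this unittest: list the indexes of
    # the user table, much like the TTxDescribeScheme checks in this log.
    # Assumes the ydb Python SDK table client; endpoint and database are
    # placeholders.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")
    driver.wait(timeout=5)

    pool = ydb.SessionPool(driver)
    desc = pool.retry_operation_sync(
        lambda session: session.describe_table("/MyRoot/vectors")
    )
    for index in desc.indexes:
        # Mirrors TableIndex { Name: "idx_vector" ... } in the describe output.
        print(index.name)

    pool.stop()
    driver.stop()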
:55:23.763412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T16:55:23.763436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T16:55:23.764201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.764266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.764288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:23.765702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.765775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.765799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:23.765825Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T16:55:23.765865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:23.766340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.766434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.766462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:23.767172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-04-04T16:55:23.767220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:23.767517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-04T16:55:23.767661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-04-04T16:55:23.767694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-04-04T16:55:23.767728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-04-04T16:55:23.767766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 
2025-04-04T16:55:23.767810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: false 2025-04-04T16:55:23.768157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.768230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.768290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:23.768958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.769018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.769039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:23.769070Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T16:55:23.769112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T16:55:23.770415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.770490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:23.770527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:23.770556Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-04T16:55:23.770583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T16:55:23.770659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-04T16:55:23.771529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-04T16:55:23.771573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:23.771794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T16:55:23.771875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress 
is 3/4 2025-04-04T16:55:23.771900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-04T16:55:23.771943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-04T16:55:23.771969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-04T16:55:23.771997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-04T16:55:23.772417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.772457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:23.772615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:55:23.772704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-04T16:55:23.772730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-04T16:55:23.772757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-04T16:55:23.772778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-04T16:55:23.772800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-04T16:55:23.772864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:415:2372] message: TxId: 102 2025-04-04T16:55:23.772909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-04T16:55:23.772963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T16:55:23.772994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T16:55:23.773094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:55:23.773131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-04T16:55:23.773166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-04T16:55:23.773208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:55:23.773250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-04T16:55:23.773272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-04T16:55:23.773311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T16:55:23.773333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-04T16:55:23.773348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-04T16:55:23.773375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-04T16:55:23.773684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2025-04-04T16:55:23.773799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:23.773825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:23.776206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:23.776249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:23.776329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:23.776380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:23.778200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:23.778395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:55:23.778449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:534:2484] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] Test command err: 2025-04-04T16:52:00.489861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501780960121789:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:00.489988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:52:00.588132Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501780014021313:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:00.588188Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:52:01.093276Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:52:01.135495Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001521/r3tmp/tmprvMz3I/pdisk_1.dat 2025-04-04T16:52:01.490026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:01.539359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:01.539465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:01.547620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:01.585133Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:01.587199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:01.587287Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:01.599100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:01.602313Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 TServer::EnableGrpc on GrpcPort 29137, node 1 2025-04-04T16:52:01.628608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:01.652508Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:52:01.658593Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:52:01.821370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001521/r3tmp/yandexjuGWQ6.tmp 2025-04-04T16:52:01.821397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001521/r3tmp/yandexjuGWQ6.tmp 2025-04-04T16:52:01.821678Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001521/r3tmp/yandexjuGWQ6.tmp 2025-04-04T16:52:01.821789Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:01.979952Z INFO: TTestServer started on Port 28348 GrpcPort 29137 TClient is connected to server localhost:28348 PQClient connected to localhost:29137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:52:02.971847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:03.812541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:52:04.923865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
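Each test in this suite boots a single-node server and then blocks until the root scheme entry is describable ("WaitRootIsUp 'Root'"), re-issuing the Ls call while the "waiting..." lines are printed. Below is a generic sketch of that readiness loop; the describe call is abstracted behind a callback because the real test TClient interface is not part of this log.

```cpp
// Sketch of the "WaitRootIsUp" readiness loop: poll a describe ("Ls") call
// until it reports SUCCESS or a deadline passes. The callback is a stand-in
// for the real test TClient.
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

bool WaitPathIsUp(const std::function<bool()>& lsSucceeded,
                  std::chrono::milliseconds timeout,
                  std::chrono::milliseconds step = std::chrono::milliseconds{100}) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (lsSucceeded())                  // Ls response: StatusCode: SUCCESS
            return true;
        std::this_thread::sleep_for(step);  // the log's "waiting..." phase
    }
    return false;
}

int main() {
    int attempts = 0;
    // Stand-in describe call: "succeeds" on the third poll.
    const bool up = WaitPathIsUp([&] { return ++attempts >= 3; },
                                 std::chrono::seconds{5});
    std::cout << (up ? "root is up\n" : "timed out\n");
}
```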
2025-04-04T16:52:05.371097Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501780960121789:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:05.371145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:52:05.591362Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501780014021313:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:52:05.592225Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... waiting... 2025-04-04T16:52:10.705512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501822963694693:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:10.705656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489501822963694667:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:10.714375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:10.738010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T16:52:10.835745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489501822963694696:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T16:52:11.014482Z node 2 :TX_PROXY ERROR: Actor# [2:7489501822963694727:2190] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:52:13.137468Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489501828204763190:2356], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:52:13.147515Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501827258662030:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:52:13.231524Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTU1NTk5YWYtYTllYWU0NmYtNDk2ZDYxNi0yYTM1ZWZhNA==, ActorId: [2:7489501822963694665:2322], ActorState: ExecuteState, TraceId: 01jr0vf59s014vbz1esnxt594m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:52:13.408186Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:52:13.682608Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDliNzc0MTQtNTU4MzViODAtNDA5Y2E1ODQtNTNiOWFlNjQ=, ActorId: [1:7489501823909795828:2349], ActorState: ExecuteState, TraceId: 01jr0vf5ky7agt79ygz7stmvgw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:52:13.685124Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:52:13.747912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:52:14.694812Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489501840143563966:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:52:14.696750Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Njk3MGNjMGItMTEzNjI3ZDEtYjUzOTk5ZDctNTMwZDBhZWQ=, ActorId: [2:7489501840143563959:2342], ActorState: ExecuteState, TraceId: 01jr0vf8yg5qf9pd26rxxpfgsf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:52:14.710754Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position ... ue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:55:07.143088Z node 9 :KQP_PROXY ERROR: TraceId: "01jr0vmbkg36yew3kj1nebbzjk", Request deadline has expired for 1.167815s seconds 2025-04-04T16:55:07.180848Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7489502586689581936:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:55:07.182061Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=NDdhZTkwNGItNjQ2ZDk5ZGQtODA5MmIyOWItYjBlM2I5OGY=, ActorId: [9:7489502586689581918:2352], ActorState: ExecuteState, TraceId: 01jr0vmhm67bk625p6cg7645xs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:55:07.185139Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:55:07.211074Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:07.425965Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:07.689751Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T16:55:08.332945Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vmjbe42v00q6dtwjz1bxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWZhY2UzOTQtZmY3NWFmYzMtZGRlNmQxZDgtNmY1ODViY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7489502590984549632:3108] 2025-04-04T16:55:13.994072Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:55:13.994110Z node 9 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. 
Ok 2025-04-04T16:55:14.330252Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T16:55:14.330288Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T16:55:14.330301Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T16:55:14.330327Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) InitTable: SourceId=A_Source TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-04T16:55:14.336605Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-04-04T16:55:15.751038Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-04T16:55:16.668945Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-04T16:55:17.702456Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-04T16:55:18.646061Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-04T16:55:19.852422Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-04T16:55:20.713323Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7489502548034874968:2069], Recipient [9:7489502616754353815:3335]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-04T16:55:20.713369Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) StartKqpSession 2025-04-04T16:55:20.727827Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7489502548034874996:2085], Recipient [9:7489502616754353815:3335]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=YTcxMGI0MmUtY2UwZjI1Y2ItMWY1ZDMxOWMtZGVhMjE3ZDY=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-04T16:55:20.727870Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) Select from the table 2025-04-04T16:55:21.011300Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7489502548034874996:2085], Recipient [9:7489502616754353815:3335]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=YTcxMGI0MmUtY2UwZjI1Y2ItMWY1ZDMxOWMtZGVhMjE3ZDY=" PreparedQuery: "e4934f0e-d1422a36-8ca284cb-796199d2" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jr0vmz4nc173sqs35p4pa6zj" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 161 2025-04-04T16:55:21.011459Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) Selected from table PartitionId=(NULL) SeqNo=(NULL) 2025-04-04T16:55:21.011480Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) OnPartitionChosen 2025-04-04T16:55:21.011498Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) Update the table 2025-04-04T16:55:21.265210Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7489502548034874996:2085], Recipient [9:7489502616754353815:3335]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=YTcxMGI0MmUtY2UwZjI1Y2ItMWY1ZDMxOWMtZGVhMjE3ZDY=" PreparedQuery: "c1095407-b49bb99a-b306e3a-22b2c21c" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 147 2025-04-04T16:55:21.265248Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) HandleUpdate PartitionPersisted=0 Status=SUCCESS Received TEvChooseResult: 0 2025-04-04T16:55:21.265282Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T16:55:21.265319Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489502616754353815:3335] (SourceId=A_Source, PreferedPartition=0) Start idle 2025-04-04T16:55:21.605991Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7489502646819126293:2761] TxId: 
281474976715719. Ctx: { TraceId: 01jr0vmz7e7cngt8r8b7r1p8tq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YjExYWEyZjUtYzBlNWI1OGUtZThkNTUxZjctNGI1Y2YzZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-04-04T16:55:21.606162Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7489502646819126299:2773], TxId: 281474976715719, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=9&id=YjExYWEyZjUtYzBlNWI1OGUtZThkNTUxZjctNGI1Y2YzZDc=. TraceId : 01jr0vmz7e7cngt8r8b7r1p8tq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [9:7489502646819126293:2761], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T16:55:21.606460Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7489502646819126300:2774], TxId: 281474976715719, task: 4. Ctx: { TraceId : 01jr0vmz7e7cngt8r8b7r1p8tq. SessionId : ydb://session/3?node_id=9&id=YjExYWEyZjUtYzBlNWI1OGUtZThkNTUxZjctNGI1Y2YzZDc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7489502646819126293:2761], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> SystemView::PartitionStatsFields [FAIL] >> SystemView::PDisksFields |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-04-04T16:54:54.282979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502527867974823:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:54.284681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c30/r3tmp/tmpsGGheT/pdisk_1.dat 2025-04-04T16:54:54.943026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:54.967132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:54.967291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:55.001916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12990, node 1 2025-04-04T16:54:55.561414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:55.561437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:55.561446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:55.561569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:56.114737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
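The TPartitionChooser trace above makes its decision in a fixed order: it first consults `//Root/.metadata/TopicPartitionsMapping` for the SourceId (the TTableHelper SelectQuery), finds no row ("Selected from table PartitionId=(NULL) SeqNo=(NULL)"), honours the caller's PreferedPartition=0, and then upserts the mapping (UpdateQuery) before going idle. A condensed sketch of that decision order is shown below; it is illustrative only, and std::hash stands in for whatever hashing YDB actually applies in the unmapped, no-preference case.

```cpp
// Condensed sketch of the partition choice seen in the trace above: prefer an
// explicitly requested partition, then the partition stored for this SourceId,
// then a hash. Illustrative only; std::hash is a stand-in for YDB's hashing.
#include <cstdint>
#include <functional>
#include <iostream>
#include <optional>
#include <string>

uint32_t ChoosePartition(const std::string& sourceId,
                         std::optional<uint32_t> preferredPartition,
                         std::optional<uint32_t> storedPartition,
                         uint32_t partitionCount) {
    if (preferredPartition)
        return *preferredPartition;   // PreferedPartition=0 in the trace
    if (storedPartition)
        return *storedPartition;      // sticky SourceId -> partition mapping
    return static_cast<uint32_t>(
        std::hash<std::string>{}(sourceId) % partitionCount);
}

int main() {
    // Mirrors the trace: no stored row yet, caller prefers partition 0.
    std::cout << ChoosePartition("A_Source", 0u, std::nullopt, 10) << "\n"; // 0
}
```

The sticky mapping is what keeps writes from one producer (SourceId) landing on one partition across sessions, which is why the chooser bothers with a table round-trip before answering.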
TClient is connected to server localhost:25421 2025-04-04T16:54:59.282575Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502527867974823:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:59.282681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:00.152314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502553637779676:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:00.152486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:00.522675Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.348453s 2025-04-04T16:55:00.522739Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.352387s 2025-04-04T16:55:00.585080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:55:00.930598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502553637779868:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:00.930699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:00.937829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785700832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785700832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T16:55:01.374573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502557932747275:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.374690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.374743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502557932747280:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.466638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502557932747313:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.466646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502557932747315:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.466730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.501237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502557932747338:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.501339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502557932747340:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.501393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:01.522561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:55:01.522868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:01.522905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-04T16:55:01.522987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:01.523024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-04T16:55:01.523090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:01.523178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-04T16:55:01.523895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 1 -> 128 2025-04-04T16:55:01.524594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:01.524617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T16:55:01.525949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710661:0, path# /Root/.metadata/workload_ ... tx: { TraceId: 01jr0vmzedfqp5qk81t2t6q4c2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk0NTJkOTItMWZkNThhMDYtZTQ3YzI1ZjgtMTQ1YmU5MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.303345Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714962. Ctx: { TraceId: 01jr0vmzeg2pky59hm7f5jqqeh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJkNTE3ZjMtMTIwMzRjZTMtMjA5OGVkOTItY2NlNmY5Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.306627Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714963. Ctx: { TraceId: 01jr0vmzek65xe31d86fkhezmd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE4NDY5NmEtYTllMDZlY2EtN2VmMTNlY2EtNTJkYTRjNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.314503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T16:55:21.314583Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714964. Ctx: { TraceId: 01jr0vmzezacb89dr5f0hmg377, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY5NDQyYmYtYWZhYzcwZC0yZWQzZTk2ZC05NTM5MGVjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:21.314656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T16:55:21.316646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714965. Ctx: { TraceId: 01jr0vmzf12yr8dyqbd5s5trxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjM2ZjZiMTEtMzc4NGRhN2UtNDFmNzM1MzgtM2Q5NjY1NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.317070Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714966. Ctx: { TraceId: 01jr0vmzf1cbraz65wbfx23tx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VhNzc1ZTYtNjhjMGVhYmUtMmM3ZDRmOS0xNmNkYmQyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.317138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714967. Ctx: { TraceId: 01jr0vmzf1ff20qsr775x4n6eg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM5MzYxMDctZGI1ZWJlNmUtMTU4NWNkYmItNmQ5YjcwMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.318496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T16:55:21.325911Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714971. Ctx: { TraceId: 01jr0vmzf7991apv70ypmeq8je, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJkNTE3ZjMtMTIwMzRjZTMtMjA5OGVkOTItY2NlNmY5Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.326065Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714968. Ctx: { TraceId: 01jr0vmzf72r1sh5q8tzsw48sq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJmOTkzMGItNDM4YmZmN2MtNzM5N2E1ZjktZTA3ZjVkNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.326660Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714969. Ctx: { TraceId: 01jr0vmzf7ethndb3kd5a9sexh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk0NTJkOTItMWZkNThhMDYtZTQ3YzI1ZjgtMTQ1YmU5MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.326731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714970. Ctx: { TraceId: 01jr0vmzf8551br7b09hwdnhcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI4YmE5ZTAtMjI3ZTdlNmYtNTFiZGZiMDAtNTIxNjRkNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.327056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714972. Ctx: { TraceId: 01jr0vmzf87v416cjbh6shwawh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE4NDY5NmEtYTllMDZlY2EtN2VmMTNlY2EtNTJkYTRjNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.327236Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714973. Ctx: { TraceId: 01jr0vmzf8ctm7nt5e9znmpbqd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTcxMTMzOTAtY2U4NjY1NzktMmEyMzY2MWUtODJmZWM1N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.335919Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714974. 
Ctx: { TraceId: 01jr0vmzfh23rghw97rh2ps2rh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM5MzYxMDctZGI1ZWJlNmUtMTU4NWNkYmItNmQ5YjcwMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-04T16:55:21.340235Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714975. Ctx: { TraceId: 01jr0vmzfre8x5t770xbe0j65t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjM2ZjZiMTEtMzc4NGRhN2UtNDFmNzM1MzgtM2Q5NjY1NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.346904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714976. Ctx: { TraceId: 01jr0vmzfy1veadqkydtjrbhw1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY5NDQyYmYtYWZhYzcwZC0yZWQzZTk2ZC05NTM5MGVjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.348589Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714978. Ctx: { TraceId: 01jr0vmzg06qc8hsnarps860mq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJkNTE3ZjMtMTIwMzRjZTMtMjA5OGVkOTItY2NlNmY5Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.349107Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714977. Ctx: { TraceId: 01jr0vmzg0byzbrprxewa2p3mz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VhNzc1ZTYtNjhjMGVhYmUtMmM3ZDRmOS0xNmNkYmQyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785700832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T16:55:21.351939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714979. Ctx: { TraceId: 01jr0vmzg251exbchvj2bc356k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk0NTJkOTItMWZkNThhMDYtZTQ3YzI1ZjgtMTQ1YmU5MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.358432Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714980. Ctx: { TraceId: 01jr0vmzg7dcefjpcbgvakrb3m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE4NDY5NmEtYTllMDZlY2EtN2VmMTNlY2EtNTJkYTRjNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.358545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714981. Ctx: { TraceId: 01jr0vmzg77ffrbnv43x4xjzy0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTcxMTMzOTAtY2U4NjY1NzktMmEyMzY2MWUtODJmZWM1N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.368005Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714982. 
Ctx: { TraceId: 01jr0vmzgfesnrv9egkdy60y55, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJmOTkzMGItNDM4YmZmN2MtNzM5N2E1ZjktZTA3ZjVkNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.368042Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714983. Ctx: { TraceId: 01jr0vmzgcff190txbefmykdnw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjM2ZjZiMTEtMzc4NGRhN2UtNDFmNzM1MzgtM2Q5NjY1NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.368580Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714984. Ctx: { TraceId: 01jr0vmzgea9967fecew1fw2jz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM5MzYxMDctZGI1ZWJlNmUtMTU4NWNkYmItNmQ5YjcwMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.368610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976714985. Ctx: { TraceId: 01jr0vmzgc699f1vscv0fys3zz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI4YmE5ZTAtMjI3ZTdlNmYtNTFiZGZiMDAtNTIxNjRkNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785700832 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-04-04T16:55:22.327173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 2240 rowCount 27 cpuUsage 0 2025-04-04T16:55:22.329705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 19392 rowCount 215 cpuUsage 0 Table has 2 shards 2025-04-04T16:55:22.427194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-04T16:55:22.427363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 27, DataSize 2240 2025-04-04T16:55:22.427518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 215, DataSize 19392 2025-04-04T16:55:22.427735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2025-04-04T16:54:52.810378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502519347493413:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:52.810445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c4e/r3tmp/tmpdAFJn2/pdisk_1.dat 2025-04-04T16:54:53.576473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:53.576609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:53.584040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:53.588412Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4721, node 1 2025-04-04T16:54:53.756432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:53.756452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:53.756462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:53.756576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19977 WaitRootIsUp 'Root'... 
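The SplitByLoadWithDeletes run above ends with the Foo table reporting two shards (TablePartitionVersion 1 -> 2, "Table has 2 shards") after sustained write load on a single shard; the SplitByLoadWithReads log that follows exercises the same mechanism under reads. As a hypothetical illustration of the setting that enables this behaviour, the YQL below (embedded as a C++ string) uses YDB's documented ALTER TABLE partitioning options; the test itself configures the table programmatically, so the exact statement is an assumption, not taken from this log.

```cpp
// Hypothetical configuration for the behaviour observed above: with
// load-based auto-partitioning enabled, a hot single-shard table splits
// (here: 1 shard -> 2 shards). Option names follow YDB's documented
// ALTER TABLE syntax; the table path matches the test's /Root/Foo.
const char* kEnableSplitByLoad = R"sql(
    ALTER TABLE `/Root/Foo` SET (
        AUTO_PARTITIONING_BY_LOAD = ENABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1
    );
)sql";
```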
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:54:54.074300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19977 2025-04-04T16:54:57.543030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502540822330881:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:57.543165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:57.761678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.810590Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502519347493413:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:54:57.810715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:54:58.130639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502545117298367:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.130726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.148219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785697962 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785697962 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T16:54:58.339306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502545117298450:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.339405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.339711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502545117298455:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.422577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:54:58.422902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:54:58.422937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-04T16:54:58.423064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:54:58.423092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-04T16:54:58.423169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:54:58.423239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2025-04-04T16:54:58.423575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 1 -> 128 2025-04-04T16:54:58.423855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:54:58.423873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T16:54:58.509067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2025-04-04T16:54:58.509380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T16:54:58.510097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T16:54:58.510184Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710660:2 ProgressState, at schemeshard: 72057594046644480 2025-04-04T16:54:58.510424Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710660:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T16:54:58.510461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502545117298510:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.510462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502545117298507:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.510522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502545117298511:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:58.510565Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710660:3, ProgressState 2025-04-04T16:54:58.510624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access ... TraceId: 01jr0vmwkb77etstrw4thc07yn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzYjFmMTktOTE3MTAwMGItOWEzNjYzNC00MjJjYWIyZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.387758Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721383. Ctx: { TraceId: 01jr0vmwkbdgb0pp53kv92dyjc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIxNzlhNTgtMTE4Y2E3MTItOTJiMjhjZjMtYWNmOWFlNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.388650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721384. Ctx: { TraceId: 01jr0vmwkb7gbs171eres67maf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1NDgzNGEtYTM0ZGY5YjctNjc1MjNiNTAtNzA0N2Y2NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.389244Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721385. Ctx: { TraceId: 01jr0vmwkd55fbcnbm1y5pv4yv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyMjhlYjktN2QwZTJlYWEtY2NkOGIyMDUtYmUwNDFlM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.394525Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702140:2367], TxId: 281474976721330, task: 1, CA Id [1:7489502631016702138:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:18.396202Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721386. Ctx: { TraceId: 01jr0vmwkm786azy7kmv309kzy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEwMzMxZmQtYTY5M2EwMzUtYmQxNDFlNTktNTIyZTQ1ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.397497Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721387. Ctx: { TraceId: 01jr0vmwkt2870m7dt3ga407sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdkZWYzOTAtZTUxYWY2NmQtMjNiMThmMWYtMjgxMzY2OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.400276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721388. Ctx: { TraceId: 01jr0vmwkweqk372g8prfzyme0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM3ZWY0MGYtYjM5ZTE4NTEtZDQ3MWQwNTUtNmMwMWVhZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.400485Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721390. Ctx: { TraceId: 01jr0vmwkw0ehsnvwxbcrfrscq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJlOGRhZWMtOTRiNzVjMzgtYWMyNjJiODYtZTI0NzkxMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.400977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721389. Ctx: { TraceId: 01jr0vmwkwb4h67pxjaz0xztv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzYjFmMTktOTE3MTAwMGItOWEzNjYzNC00MjJjYWIyZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.407276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721391. Ctx: { TraceId: 01jr0vmwm46m6yqyzyxhr77tcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIxNzlhNTgtMTE4Y2E3MTItOTJiMjhjZjMtYWNmOWFlNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:18.408124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721392. Ctx: { TraceId: 01jr0vmwm45m2xa9bj8h1c0skh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1NDgzNGEtYTM0ZGY5YjctNjc1MjNiNTAtNzA0N2Y2NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.413382Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721393. Ctx: { TraceId: 01jr0vmwmbdtc84dydt718tv7s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyMjhlYjktN2QwZTJlYWEtY2NkOGIyMDUtYmUwNDFlM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.419832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721394. Ctx: { TraceId: 01jr0vmwmh99j9xghkv1sfrfw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdkZWYzOTAtZTUxYWY2NmQtMjNiMThmMWYtMjgxMzY2OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.420655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721395. Ctx: { TraceId: 01jr0vmwmhbw9tmmfyr89cgmch, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGEwMzMxZmQtYTY5M2EwMzUtYmQxNDFlNTktNTIyZTQ1ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785697962 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T16:55:18.432974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721396. Ctx: { TraceId: 01jr0vmwmk62ezzhtzg6tw76k3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM3ZWY0MGYtYjM5ZTE4NTEtZDQ3MWQwNTUtNmMwMWVhZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.446100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721400. Ctx: { TraceId: 01jr0vmwmz1vpx1sr9w1dwtbvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyMjhlYjktN2QwZTJlYWEtY2NkOGIyMDUtYmUwNDFlM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.447149Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721397. Ctx: { TraceId: 01jr0vmwmrc5na8vxzgnmrcc6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQzYjFmMTktOTE3MTAwMGItOWEzNjYzNC00MjJjYWIyZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.447687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721398. Ctx: { TraceId: 01jr0vmwmy1hhtgst54mptsb8z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmIxNzlhNTgtMTE4Y2E3MTItOTJiMjhjZjMtYWNmOWFlNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:18.448229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721399. Ctx: { TraceId: 01jr0vmwmqc640jvbbewqf3npn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJlOGRhZWMtOTRiNzVjMzgtYWMyNjJiODYtZTI0NzkxMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.464813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721401. Ctx: { TraceId: 01jr0vmwmyak5kmye2pxd71ph7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1NDgzNGEtYTM0ZGY5YjctNjc1MjNiNTAtNzA0N2Y2NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:18.478342Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702064:2379], TxId: 281474976721324, task: 1, CA Id [1:7489502631016702062:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:18.661393Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702140:2367], TxId: 281474976721330, task: 1, CA Id [1:7489502631016702138:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:18.822208Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702064:2379], TxId: 281474976721324, task: 1, CA Id [1:7489502631016702062:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:19.050339Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702140:2367], TxId: 281474976721330, task: 1, CA Id [1:7489502631016702138:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:19.313444Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702064:2379], TxId: 281474976721324, task: 1, CA Id [1:7489502631016702062:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:19.965166Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702140:2367], TxId: 281474976721330, task: 1, CA Id [1:7489502631016702138:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:20.266090Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702064:2379], TxId: 281474976721324, task: 1, CA Id [1:7489502631016702062:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:20.922372Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702140:2367], TxId: 281474976721330, task: 1, CA Id [1:7489502631016702138:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:21.156665Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702064:2379], TxId: 281474976721324, task: 1, CA Id [1:7489502631016702062:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:21.763244Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702064:2379], TxId: 281474976721324, task: 1, CA Id [1:7489502631016702062:2379]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-04-04T16:55:21.870980Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489502631016702140:2367], TxId: 281474976721330, task: 1, CA Id [1:7489502631016702138:2367]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785697962 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards 2025-04-04T16:55:23.071360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-04T16:55:23.109332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 >> GenericFederatedQuery::YdbManagedSelectAll >> GenericFederatedQuery::YdbFilterPushdown >> GenericFederatedQuery::YdbManagedSelectConstant >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::ClickHouseManagedSelectAll |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DescribeStream >> KqpScanSpilling::SelfJoin |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-04-04T16:55:08.856069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:08.856566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:08.856646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b93/r3tmp/tmptSM0A7/pdisk_1.dat 2025-04-04T16:55:09.285350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:09.348952Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:09.393358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:09.393509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:09.405052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:09.498289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:09.547728Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-04-04T16:55:09.548036Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:09.601887Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:09.602036Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:09.606127Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:55:09.606230Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:55:09.606357Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:55:09.606822Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:09.607075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:09.607171Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:701:2577] in generation 1 2025-04-04T16:55:09.608818Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-04-04T16:55:09.609033Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:09.628160Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:09.628273Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:09.629694Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:55:09.629759Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:55:09.629817Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:55:09.630099Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:09.630207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:09.630298Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-04-04T16:55:09.643001Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:09.675434Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:55:09.675691Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:09.675820Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-04T16:55:09.675861Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:09.675899Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:55:09.675933Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:09.676215Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:09.676250Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:55:09.676299Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:09.676351Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-04T16:55:09.676374Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:55:09.676395Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:55:09.676416Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:09.676812Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:55:09.676935Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:55:09.677034Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:09.677085Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:09.677129Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:55:09.677167Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:09.677210Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:55:09.677255Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:55:09.677697Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-04-04T16:55:09.677762Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:09.677794Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:09.677830Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:55:09.677878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:55:09.678033Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-04T16:55:09.678222Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:55:09.678407Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:55:09.678867Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-04-04T16:55:09.679058Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:55:09.679254Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:09.679311Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T16:55:09.683008Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:09.683097Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:09.694014Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:09.694117Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:09.694191Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:55:09.694222Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:09.858963Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:738:2617], serverId# [1:741:2620], sessionId# [0:0:0] 2025-04-04T16:55:09.859333Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-04-04T16:55:09.869147Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:09.869234Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:09.869614Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T16:55:09.869652Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:09.869811Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:09.869857Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:09.869908Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:09.870154Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:09.870351Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-04T16:55:09.870476Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:09.870505Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:09.870560Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T16:55:09.870759Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:09.870869Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:09.871800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:09.871876Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T16:55:09.873784Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T ... 2025-04-04T16:55:26.869953Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:26.869985Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:26.870017Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T16:55:26.870227Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:26.870369Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:26.870502Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:26.870569Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T16:55:26.871161Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:55:26.871646Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:26.872996Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:26.873071Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:55:26.873535Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:55:26.873945Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:26.876930Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:26.877005Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:26.878836Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 
2025-04-04T16:55:26.878952Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:26.879031Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-04T16:55:26.879067Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:26.879862Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-04T16:55:26.879921Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:55:26.881222Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:26.881278Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:26.881345Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:26.881426Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:26.881495Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:26.881599Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:26.883314Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:55:26.883378Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:55:26.883414Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-04T16:55:26.883475Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:26.883523Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:26.883598Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:26.883980Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:26.884062Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:26.888417Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:26.888504Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:26.888709Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:26.889237Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:26.889620Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-04T16:55:26.889667Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 
72075186224037889 2025-04-04T16:55:26.901261Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:786:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:26.901392Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:796:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:26.901878Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:26.908196Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:55:26.917100Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:26.917252Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:27.092951Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:27.093116Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:27.097168Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:800:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:55:27.138859Z node 4 :TX_PROXY ERROR: Actor# [4:878:2712] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:27.269320Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vn4xk66949dxhyk0ax7ft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZmY3MzYwODQtOGQ0OGE2MmQtYTcwNjdiNTctYTdkNjUxYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:27.269990Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:943:2744], serverId# [4:944:2745], sessionId# [0:0:0] 2025-04-04T16:55:27.270293Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:55:27.272028Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743785727271915 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:27.285238Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:55:27.285445Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-04T16:55:27.285521Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:27.377374Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vn59t4k6g43pksc7j7sye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWZlMDk3ZmEtMjcxZTUzOWItNjQzZjcwYmItNjUyMzQ2ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:27.377760Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:55:27.379438Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743785727379309 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:27.379689Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743785727379309 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:27.391004Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:55:27.391185Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-04T16:55:27.391230Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:27.395397Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:980:2777], serverId# [4:981:2778], sessionId# [0:0:0] 2025-04-04T16:55:27.403062Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:982:2779], serverId# [4:983:2780], sessionId# [0:0:0] >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |77.0%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-04-04T16:55:10.026006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:10.026574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:10.026662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bd3/r3tmp/tmppFhmfE/pdisk_1.dat 2025-04-04T16:55:10.509025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.556562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:10.604119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:10.604292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:10.619665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:10.729326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.866421Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-04-04T16:55:10.866880Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:10.950346Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:10.950510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:10.952242Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:55:10.952331Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:55:10.952403Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:55:10.952759Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:10.952956Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:10.953078Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:701:2577] in generation 1 2025-04-04T16:55:10.954627Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-04-04T16:55:10.954836Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:10.965247Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:10.965362Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:10.966809Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:55:10.966888Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:55:10.966940Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:55:10.967233Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:10.967345Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:10.967410Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-04-04T16:55:10.978532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:11.004594Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:55:11.004851Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:11.004985Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-04T16:55:11.005028Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:11.005067Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:55:11.005102Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.005425Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:11.005467Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:55:11.005527Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:11.005592Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-04T16:55:11.005617Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:55:11.005640Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:55:11.005671Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:11.006160Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:55:11.008220Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:55:11.008404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.008451Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:11.008496Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:55:11.008568Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:11.008623Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:55:11.008714Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:55:11.009244Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-04-04T16:55:11.009312Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:11.009343Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:11.009375Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:55:11.009443Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:55:11.009611Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-04T16:55:11.009855Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:55:11.009962Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:55:11.010465Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-04-04T16:55:11.010703Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:55:11.010897Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:11.010969Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T16:55:11.013025Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:11.013115Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:11.026076Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:11.026199Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:11.026329Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:55:11.026391Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:11.185776Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:738:2617], serverId# [1:741:2620], sessionId# [0:0:0] 2025-04-04T16:55:11.186091Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-04-04T16:55:11.200359Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:11.200464Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.200849Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T16:55:11.200888Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:11.201631Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.201686Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:11.201740Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:11.203582Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:11.203812Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-04T16:55:11.203999Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:11.204739Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:11.204806Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T16:55:11.205071Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:11.205230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:11.207287Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.207400Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T16:55:11.211188Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T ... 24037890 2025-04-04T16:55:28.181841Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:28.181925Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:28.181995Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T16:55:28.182631Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:55:28.182673Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T16:55:28.182710Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-04T16:55:28.182763Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:28.182821Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:28.182891Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T16:55:28.187769Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:28.188275Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:28.188820Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-04T16:55:28.188902Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T16:55:28.189632Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:28.190265Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2025-04-04T16:55:28.190319Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:28.190750Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-04T16:55:28.190797Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-04T16:55:28.207158Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:837:2696], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.207277Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:846:2701], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.207360Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.213001Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:55:28.219416Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:28.219547Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T16:55:28.220130Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:28.377128Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:28.377219Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:28.377257Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T16:55:28.379639Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:851:2704], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:55:28.414470Z node 4 :TX_PROXY ERROR: Actor# [4:933:2755] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:28.739541Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vn66da49fzqfbxap2fmrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTg2YmEwYS04ZTA2YTI0NC0yMjI1MDFmNy1kYmY5ZjgzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:28.750689Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1048:2808], serverId# [4:1049:2809], sessionId# [0:0:0] 2025-04-04T16:55:28.751318Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T16:55:28.761033Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vn66da49fzqfbxap2fmrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTg2YmEwYS04ZTA2YTI0NC0yMjI1MDFmNy1kYmY5ZjgzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:28.766280Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vn66da49fzqfbxap2fmrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTg2YmEwYS04ZTA2YTI0NC0yMjI1MDFmNy1kYmY5ZjgzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:28.766881Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:55:28.768606Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743785728768520 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:28.768797Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743785728768520 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:28.780019Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:55:28.780173Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-04T16:55:28.780321Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-04T16:55:28.780399Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:28.781601Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2025-04-04T16:55:28.781680Z node 4 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:28.878225Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vn6rh2dkea96fsw7zpz6v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTI1MzMxZWEtZDkyNjYxOWQtODI0YjVmMGItZjc2ZDZhMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:28.878702Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:55:28.879983Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743785728879906 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:28.880135Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1743785728879906 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:28.880206Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1743785728879906 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:28.880263Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1743785728879906 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T16:55:28.898872Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:55:28.899091Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-04T16:55:28.899149Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:28.904029Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1104:2854], serverId# [4:1105:2855], sessionId# [0:0:0] 2025-04-04T16:55:28.911717Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1106:2856], serverId# [4:1107:2857], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 
2025-04-04T16:55:09.793474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:09.794029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:09.794100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bad/r3tmp/tmpLlkajJ/pdisk_1.dat 2025-04-04T16:55:10.228312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.277035Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:10.328847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:10.328996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:10.341245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:10.431101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.482201Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-04-04T16:55:10.482528Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:10.532873Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:10.533046Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:10.534832Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:55:10.534921Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:55:10.534986Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:55:10.535277Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:10.535429Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:10.535491Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:701:2577] in generation 1 2025-04-04T16:55:10.536873Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-04-04T16:55:10.537080Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:10.545093Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:10.545177Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:10.546180Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:55:10.546226Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:55:10.546290Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:55:10.546540Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:10.546642Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:10.546690Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-04-04T16:55:10.559157Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:10.595734Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:55:10.595992Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:10.596118Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-04T16:55:10.596154Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:10.596190Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:55:10.596226Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:10.596525Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:10.596562Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:55:10.596611Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:10.596667Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-04T16:55:10.596689Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:55:10.596710Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:55:10.596731Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:10.598301Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:55:10.598483Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:55:10.598658Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:10.598698Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:10.598741Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:55:10.598782Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:10.598824Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:55:10.598889Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:55:10.599412Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-04-04T16:55:10.599478Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:10.599517Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:10.599553Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:55:10.599601Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:55:10.599773Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-04T16:55:10.600014Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:55:10.600115Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:55:10.600551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-04-04T16:55:10.600727Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:55:10.600886Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:10.600942Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T16:55:10.602988Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:10.603287Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:55:10.615102Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:10.615215Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:10.615296Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:55:10.615343Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:10.788557Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:738:2617], serverId# [1:741:2620], sessionId# [0:0:0] 2025-04-04T16:55:10.788984Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-04-04T16:55:10.815098Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:10.815203Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:10.815613Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T16:55:10.815640Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:55:10.815808Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:10.815856Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:10.815910Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:10.816245Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:10.816413Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-04T16:55:10.816538Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:55:10.816566Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:10.816621Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T16:55:10.816813Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:10.816925Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:10.817947Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:10.818038Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T16:55:10.820409Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T ... :55:27.967748Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:27.967794Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:27.967833Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:27.968029Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:27.968140Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:27.968523Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:27.968573Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:55:27.968985Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:55:27.969391Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:27.970813Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:27.970858Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:27.971625Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:55:27.971693Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:27.972605Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:27.972854Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:27.972889Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:27.972928Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:27.972981Z node 4 
:TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:27.973024Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:27.973090Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:27.976352Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:27.976494Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:27.976554Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:27.993867Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:27.994022Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:27.994067Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-04T16:55:27.994099Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T16:55:27.994911Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:28.021343Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:28.231334Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:28.231404Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:28.231565Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:28.231618Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:28.231668Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:28.231834Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-04T16:55:28.231945Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:28.232183Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:28.232856Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:28.273558Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T16:55:28.273670Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:28.273718Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:28.273786Z node 4 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:28.273877Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:28.273954Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-04T16:55:28.274074Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:28.276420Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-04T16:55:28.276514Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:28.284679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:884:2723], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.284836Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.284926Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:893:2728], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.290961Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:55:28.298503Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:28.456838Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:28.459968Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:898:2731], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:55:28.486757Z node 4 :TX_PROXY ERROR: Actor# [4:954:2768] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:28.577142Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vn68t8x25wy3h3876r6kw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjQ4MTBhY2YtNjIzZTBkNTgtNzQwMWUwZGUtOGFkMWExNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:28.577769Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:981:2786], serverId# [4:982:2787], sessionId# [0:0:0] 2025-04-04T16:55:28.578009Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:28.579578Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743785728579462 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:28.590995Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:28.591209Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:28.591285Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:28.678064Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vn6jk6n4rtb47a61qd9yt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjI2YWM5MjMtMTVhNzllNjUtOGE1OTAwYy00NTZjNTdkMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:28.678506Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:28.679626Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743785728679511 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:28.691308Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:28.691433Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:28.691479Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:28.693447Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1005:2806], serverId# [4:1006:2807], sessionId# [0:0:0] 2025-04-04T16:55:28.700051Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1007:2808], serverId# [4:1008:2809], sessionId# [0:0:0] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> KqpScanSpilling::HandleErrorsCorrectly >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> CdcStreamChangeCollector::NewImage [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 28584, MsgBus: 10167 2025-04-04T16:55:26.797902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502665090623690:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.798385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d90/r3tmp/tmpA1JXnY/pdisk_1.dat 2025-04-04T16:55:27.283608Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:27.301400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.301522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.304735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28584, node 1 2025-04-04T16:55:27.417522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.417548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.417555Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.417691Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10167 TClient is connected to server localhost:10167 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:28.035960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:28.051890Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] >> SystemView::PDisksFields [GOOD] |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |77.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-04-04T16:55:10.658732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:10.659261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:10.659333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b86/r3tmp/tmpQpWyfI/pdisk_1.dat 2025-04-04T16:55:11.102313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:11.167618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:11.176746Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-04T16:55:11.214594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:11.214758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:11.226493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:11.317759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:11.364336Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:55:11.364727Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:11.414512Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:11.414704Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:11.416664Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:55:11.416833Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:55:11.416919Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:55:11.417347Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:11.417531Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:11.417623Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:55:11.428533Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:11.458554Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:55:11.458790Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:11.458922Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:55:11.458960Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:11.459000Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:55:11.459037Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-04T16:55:11.459474Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:55:11.459584Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:55:11.459696Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.459734Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:11.459786Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:55:11.459847Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:11.459918Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:55:11.460399Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:11.460595Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:55:11.460682Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:55:11.462486Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:11.473393Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:11.473507Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:11.621777Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T16:55:11.625925Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:11.626034Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.631266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.631334Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:11.631403Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:11.631697Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:11.631847Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:11.632437Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.632517Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:55:11.634507Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:55:11.635004Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:11.636936Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:11.636994Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.638151Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:55:11.638234Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:11.643575Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:11.643968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:11.644016Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:11.644066Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:11.644141Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:11.644200Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:11.644318Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.666526Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:11.666761Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:11.666835Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:11.679292Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:11.679468Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:11.679520Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-04T16:55:11.679565Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T16:55:11.681085Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:11.711219Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:12.022523Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:12.022601Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:12.022915Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:12.022970Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-04T16:55:12.023041Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:12.023259Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-04T16:55:12.023429Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:12.023751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:12.024617Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:12.095821Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T16:55:12.095945Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:12.096014Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:12.096083Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... ode 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:29.712913Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:29.712996Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:29.719961Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:55:29.720101Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:29.721975Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:29.722505Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:29.722562Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:29.722645Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:29.722737Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:29.722814Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:29.722928Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:29.725544Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:29.725740Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:29.725806Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:29.738137Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:29.738347Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 
2025-04-04T16:55:29.738415Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-04T16:55:29.738463Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T16:55:29.739430Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:29.764423Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:29.974151Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:29.974218Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:29.974407Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:29.974459Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:29.974510Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:29.974767Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-04T16:55:29.974927Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:29.975263Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:29.976118Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:30.028902Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T16:55:30.029001Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:30.029040Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:30.029085Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:30.029162Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:30.029223Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-04T16:55:30.029356Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:30.032252Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-04T16:55:30.032349Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:30.041299Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:884:2723], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.041428Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.041495Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:893:2728], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.051386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:55:30.059007Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:30.230012Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:30.236357Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:898:2731], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:55:30.264990Z node 4 :TX_PROXY ERROR: Actor# [4:954:2768] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:30.527893Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vn7zqez5n4pa6ep4yvr6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWUxYTllMmEtOWM3OGJkNDMtNDMzNzJmMDAtZTFlZThkNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:30.531821Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:995:2795], serverId# [4:996:2796], sessionId# [0:0:0] 2025-04-04T16:55:30.532358Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T16:55:30.536704Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vn7zqez5n4pa6ep4yvr6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWUxYTllMmEtOWM3OGJkNDMtNDMzNzJmMDAtZTFlZThkNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:30.540641Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vn7zqez5n4pa6ep4yvr6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWUxYTllMmEtOWM3OGJkNDMtNDMzNzJmMDAtZTFlZThkNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:30.541226Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:30.542755Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743785730542601 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:30.554951Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:30.555081Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2025-04-04T16:55:30.555186Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:30.555260Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:30.556306Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2025-04-04T16:55:30.556389Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:30.647620Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vn8g0f4vsn7zmnmfmzkb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmM3ZmQyODctN2EwNmYwMC00OGE4MzA5MC05NTU1N2M5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:30.648056Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:30.649240Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743785730649125 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:30.660536Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:30.660695Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:30.660748Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:30.662751Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1030:2820], serverId# [4:1031:2821], sessionId# [0:0:0] 2025-04-04T16:55:30.669016Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1032:2822], serverId# [4:1033:2823], sessionId# [0:0:0] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId >> KqpScanSpilling::SpillingPragmaParseError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2025-04-04T16:52:30.228765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:30.229837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:30.235031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c2/r3tmp/tmp0YXWZd/pdisk_1.dat 2025-04-04T16:52:34.607987Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4506, node 1 2025-04-04T16:52:37.055661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:37.056121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:37.056478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:37.058425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:37.061465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:37.222868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:37.222991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:37.238496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1204 2025-04-04T16:52:39.303635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:47.440085Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T16:52:47.513797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:47.514168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:47.568524Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:47.578930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:47.968437Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.971673Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.982404Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.982865Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.983001Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.983340Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.983441Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.983577Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.983672Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:48.223800Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:48.223918Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:48.298330Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:48.475905Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:48.529728Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:52:48.529849Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:52:48.565436Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:52:48.565661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:52:48.565904Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T16:52:48.565993Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:52:48.566051Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T16:52:48.566116Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T16:52:48.566179Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T16:52:48.566260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:52:48.567445Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:52:48.606679Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:48.606800Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:48.631702Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T16:52:48.647090Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T16:52:48.647305Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T16:52:48.675520Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T16:52:48.737639Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T16:52:48.737712Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T16:52:48.737800Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T16:52:48.756196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T16:52:48.781039Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T16:52:48.781223Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T16:52:49.091470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T16:52:49.338184Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T16:52:49.439796Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:52:50.686823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:50.686950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:50.705711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T16:52:51.186224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:52:51.186708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:52:51.187066Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:52:51.187241Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:52:51.187409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:52:51.187555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:52:51.187677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:52:51.187808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:52:51.187942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:52:51.188124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:52:51.188262Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:52:51.188401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:52:51.287018Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:52:51.287128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TT ... [72075186224037894] Subscribed for config changes 2025-04-04T16:55:30.103412Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:30.103474Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:30.103580Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:55:30.104959Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:30.105036Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:30.107280Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T16:55:30.184556Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:30.184737Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T16:55:30.185472Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8753:6601], server id = [2:8758:6606], tablet id = 72075186224037899, status = OK 2025-04-04T16:55:30.185598Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8753:6601], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.187014Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8754:6602], server id = [2:8759:6607], tablet id = 72075186224037900, status = OK 2025-04-04T16:55:30.187101Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8754:6602], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.187439Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8755:6603], server id = [2:8760:6608], tablet id = 72075186224037901, status = OK 2025-04-04T16:55:30.187490Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8755:6603], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.188528Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8756:6604], server id = [2:8761:6609], tablet id = 72075186224037902, status = OK 2025-04-04T16:55:30.188602Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8756:6604], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.189347Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8757:6605], server id = [2:8763:6611], tablet id = 72075186224037903, status = OK 2025-04-04T16:55:30.189403Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8757:6605], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.190018Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T16:55:30.191499Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T16:55:30.191895Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8753:6601], server id = [2:8758:6606], tablet id = 72075186224037899 2025-04-04T16:55:30.191939Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.192443Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T16:55:30.193093Z 
node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8754:6602], server id = [2:8759:6607], tablet id = 72075186224037900 2025-04-04T16:55:30.193122Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.193380Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T16:55:30.193613Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8755:6603], server id = [2:8760:6608], tablet id = 72075186224037901 2025-04-04T16:55:30.193637Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.193922Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8766:6614], server id = [2:8770:6618], tablet id = 72075186224037904, status = OK 2025-04-04T16:55:30.194010Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8766:6614], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.194291Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T16:55:30.195146Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8769:6617], server id = [2:8772:6620], tablet id = 72075186224037905, status = OK 2025-04-04T16:55:30.195215Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8769:6617], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.195474Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8756:6604], server id = [2:8761:6609], tablet id = 72075186224037902 2025-04-04T16:55:30.195533Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.196402Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8771:6619], server id = [2:8774:6622], tablet id = 72075186224037906, status = OK 2025-04-04T16:55:30.196466Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8771:6619], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.196772Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8757:6605], server id = [2:8763:6611], tablet id = 72075186224037903 2025-04-04T16:55:30.196799Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.197058Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8773:6621], server id = [2:8775:6623], tablet id = 72075186224037907, status = OK 2025-04-04T16:55:30.197112Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8773:6621], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.197879Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T16:55:30.198045Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8776:6624], server id = [2:8778:6626], tablet id = 72075186224037908, status = OK 2025-04-04T16:55:30.198099Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8776:6624], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:30.199092Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T16:55:30.199903Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8766:6614], server id = [2:8770:6618], tablet id = 72075186224037904 2025-04-04T16:55:30.199933Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.200164Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T16:55:30.200271Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 
2, client id = [2:8769:6617], server id = [2:8772:6620], tablet id = 72075186224037905 2025-04-04T16:55:30.200297Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.200628Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T16:55:30.200819Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8771:6619], server id = [2:8774:6622], tablet id = 72075186224037906 2025-04-04T16:55:30.200843Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.200996Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T16:55:30.201042Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:30.201295Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:30.201507Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:30.201786Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:30.204572Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8773:6621], server id = [2:8775:6623], tablet id = 72075186224037907 2025-04-04T16:55:30.204613Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.205060Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8776:6624], server id = [2:8778:6626], tablet id = 72075186224037908 2025-04-04T16:55:30.205088Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:30.205495Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:30.250925Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8799:6647]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:30.251404Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:55:30.251483Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8799:6647], StatRequests.size() = 1 2025-04-04T16:55:30.479075Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzA3MjVlZmMtNjFkYmRjNDQtZjJlZGYxZS1mNGUyZTJlNw==, TxId: 2025-04-04T16:55:30.479151Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzA3MjVlZmMtNjFkYmRjNDQtZjJlZGYxZS1mNGUyZTJlNw==, TxId: 2025-04-04T16:55:30.479764Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:30.492699Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8809:6653] 2025-04-04T16:55:30.492815Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8705:6569], server id = [2:8809:6653], tablet id = 72075186224037894, status = OK 2025-04-04T16:55:30.492949Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8809:6653], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-04T16:55:30.493105Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = 
[2:8810:6654] 2025-04-04T16:55:30.493174Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8810:6654], schemeshard id = 72075186224037897 2025-04-04T16:55:30.508376Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:30.508460Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T16:55:30.603299Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8813:6657]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:30.603612Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T16:55:30.603663Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T16:55:30.606586Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T16:55:30.606688Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T16:55:30.606741Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T16:55:30.613525Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |77.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 16622, MsgBus: 18775 2025-04-04T16:53:16.979499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502107931630147:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:16.979526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00154c/r3tmp/tmpS45RTd/pdisk_1.dat 2025-04-04T16:53:21.478640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:21.827806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:21.863192Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:21.863275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:21.868414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16622, node 1 2025-04-04T16:53:22.007276Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502107931630147:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:22.007357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:22.210824Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:22.210850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:22.210857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:22.211007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18775 TClient is connected to server localhost:18775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:29.496807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:29.906834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:31.341475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.533446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:32.741442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
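The FLAT_TX_SCHEMESHARD warnings interleaved with the "waiting..." markers above accompany each fixture table the test creates during setup; "propose itself is undo unsafe" only means the sub-operation cannot be rolled back once proposed, not that it failed. Each such ESchemeOpCreateTable corresponds to ordinary YQL DDL along the lines of the following sketch (table name reused from the queries later in this output; the column types are assumptions inferred from the printed results, not taken from the test source):

    --!syntax_v1
    -- Hypothetical fixture DDL; each statement like this yields one
    -- ESchemeOpCreateTable sub-operation (and one warning) in the log above.
    CREATE TABLE `/Root/TestDNF` (
        Key1 Uint64,
        Key2 Uint64,
        Value Uint32,
        PRIMARY KEY (Key1, Key2)
    );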
2025-04-04T16:53:36.519708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502193830977900:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.519793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:36.746542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:36.746565Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:37.044041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:53:37.179896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:53:37.229136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:53:37.645961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.253418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.456634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:53:38.854526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502202420913059:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:38.854633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:38.855409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502202420913065:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:38.884274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:53:39.060393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502202420913067:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:53:39.179906Z node 1 :TX_PROXY ERROR: Actor# [1:7489502206715880422:3513] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:53:45.844654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.065055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:53:47.973270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T16:53:49.118156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:53:52.355717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9744, MsgBus: 3191 2025-04-04T16:54:00.267627Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502295983357094:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00154c/r3tmp/tmpFG1SVi/pdisk_1.dat 2025-04-04T16:54:00.392217Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:00.572846Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:00.617872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:00.620899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:00.623808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9744, node 2 2025-04-04T16:54:00.878874Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:00.878892Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:00.878900Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:00.879017Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3191 TClient is connected to server localhost:3191 WaitRootIsUp 'Root'... 
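The sequence just above — KQP_WORKLOAD_SERVICE reporting "Resource pool default not found", a scheduled retry with "Transaction ... completed, doublechecking", and a TX_PROXY "path exist" error — is a benign startup race rather than a failure: the first queries arrive before the default workload-manager pool exists, several actors try to create it lazily, one wins, and the rest find the path already present and proceed. The object they converge on is the kind created by YQL like this sketch (statement form and parameter values are assumptions; the real default pool is created internally by the workload service):

    --!syntax_v1
    -- Hypothetical equivalent of the lazy default-pool creation; racing it
    -- reproduces the "path exist, request accepts it" outcome seen above.
    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- -1 = unlimited (assumed default)
        QUEUE_SIZE = -1
    );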
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:19.013851Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:19.145722Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489502637838473583:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:19.145881Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:19.150419Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489502637838473588:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:19.155431Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:19.216424Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7489502637838473590:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:19.302176Z node 8 :TX_PROXY ERROR: Actor# [8:7489502637838473649:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:21.063965Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:22.190715Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785722224, txId: 281474976710673] shutting down ---------QUERY---------- --!syntax_v1 SELECT * FROM `/Root/TableWithIntKey` WHERE Key1 IN (1, 2, 100, 101, 102, 200, 201, 201, 1000, 1001, 1002, 2000, 2001, 2002) AND (Key1 > 2000) ORDER BY Key1; ---------RESULT--------- [[[2001];#];[[2002];[2]]] ------------------------ Trying to start YDB, gRPC: 5569, MsgBus: 24663 2025-04-04T16:55:23.420334Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489502652465758856:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:23.420450Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00154c/r3tmp/tmp2Ssat9/pdisk_1.dat 2025-04-04T16:55:23.660489Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:23.664766Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:23.664909Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:23.669614Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5569, node 9 2025-04-04T16:55:23.756837Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:23.756864Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:23.756874Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:23.757076Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24663 TClient is connected to server localhost:24663 WaitRootIsUp 'Root'... 
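The ---------QUERY---------- block above is the heart of the no-full-scan check: the optimizer intersects the IN-list over Key1 with the range predicate Key1 > 2000, so only the point lookups 2001 and 2002 survive, which matches the printed result [[[2001];#];[[2002];[2]]] (# is NULL). A fixture consistent with that output could look like this sketch (the second column's name, its width, and the non-matching rows are assumptions):

    --!syntax_v1
    -- Hypothetical data behind the result above: (2001, NULL) and (2002, 2)
    -- are the only rows whose Key1 is both in the IN-list and > 2000.
    CREATE TABLE `/Root/TableWithIntKey` (
        Key1 Int32,
        Value Int32,
        PRIMARY KEY (Key1)
    );
    UPSERT INTO `/Root/TableWithIntKey` (Key1, Value) VALUES
        (1, 1), (100, 1), (2001, NULL), (2002, 2);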
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:24.455583Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:24.475739Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:24.564212Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:24.807973Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:24.912484Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:28.420714Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7489502652465758856:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:28.420830Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:28.776581Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7489502673940597113:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.776724Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:28.818068Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:28.875672Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:28.936012Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:29.014856Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:29.070517Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:29.139644Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:29.214031Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7489502678235564927:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.214206Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.214652Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7489502678235564932:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.220148Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:29.289220Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7489502678235564934:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:29.360806Z node 9 :TX_PROXY ERROR: Actor# [9:7489502678235564992:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:31.027861Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.886295Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785731919, txId: 281474976710673] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2025-04-04T16:55:32.491484Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785732528, txId: 281474976710675] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/i5tq/000cd8/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 20516, MsgBus: 16021 2025-04-04T16:55:27.201442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502671981665865:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:27.201532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000cd8/r3tmp/tmpdb5ob6/pdisk_1.dat 2025-04-04T16:55:27.656444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.656553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.664671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:27.666821Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20516, node 1 2025-04-04T16:55:27.758843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.758866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.758884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.759003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16021 TClient is connected to server localhost:16021 WaitRootIsUp 'Root'... 
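The two TestDNF query/result pairs printed above (at the end of the KqpRanges::NoFullScanAtDNFPredicate output, before the spilling test begins) show the same mechanism applied to a DNF predicate: Key1 = 1 AND (Key2 = 100 OR Key2 = 300) is rewritten into point reads on (1, 100) and (1, 300), and widening the condition to Key2 IN (100, 300, 400) adds exactly one more row. Data consistent with both results would be (assuming a TestDNF table keyed by (Key1, Key2) with a Uint32 Value column):

    --!syntax_v1
    -- Hypothetical rows matching the printed results:
    --   Key2 IN (100, 300)      -> Values 5, 9
    --   Key2 IN (100, 300, 400) -> Values 5, 9, 10
    UPSERT INTO `/Root/TestDNF` (Key1, Key2, Value) VALUES
        (1, 100, 5u), (1, 300, 9u), (1, 400, 10u);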
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:28.288642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:28.310378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:28.439044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:28.598256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:28.670942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:30.499191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502684866569552:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.499373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.834685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:30.878823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:30.929800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:30.978446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.048715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.091502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.182139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502689161537368:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:31.182227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:31.183209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502689161537373:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:31.188120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:31.209066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502689161537375:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:31.286147Z node 1 :TX_PROXY ERROR: Actor# [1:7489502689161537433:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:32.202387Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502671981665865:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:32.202464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (StructType '('"Key" $3) '('"Value" $5))) (let $7 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($21) (block '( (let $22 (lambda '($23) (block '( (let $24 (VariantType (TupleType $6 $6))) (let $25 (Variant $23 '0 $24)) (let $26 (Variant $23 '1 $24)) (return $25 $26) )))) (return (FromFlow (MultiMap (ToFlow $21) $22))) ))) '('('"_logical_id" '700) '('"_id" '"8c7037ad-a01080bf-e1172771-b4986e06")))) (let $8 (DqCnUnionAll (TDqOutput $7 '1))) (let $9 '('('"_logical_id" '545) '('"_id" '"19704a7a-650d6c54-bfa05c0b-cf09c8d9") '('"_wide_channels" $6))) (let $10 (DqPhyStage '($8) (lambda '($27) (block '( (let $28 (lambda '($29) (Member $29 '"Key") (Member $29 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $27) $28))) ))) $9)) (let $11 (DqCnMap (TDqOutput $7 '0))) (let $12 (DqCnBroadcast (TDqOutput $10 '0))) (let $13 (StructType '('"t1.Key" $3) '('"t1.Value" $5) '('"t2.Key" $3) '('"t2.Value" $5))) (let $14 '('('"_logical_id" '615) '('"_id" '"6a542138-40a263fb-91b85c40-45117fcf") '('"_wide_channels" $13))) (let $15 (DqPhyStage '($11 $12) (lambda '($30 $31) (block '( (let $32 '('Many 'Hashed 'Compact)) (let $33 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $31) (lambda '($36 $37) (Exists $37))) (lambda '($38 $39) (IfPresent $39 (lambda '($40) (Just '($40 (AsStruct '('"Key" $38) '('"Value" $39))))) (Nothing (OptionalType (TupleType $4 $6)))))) (lambda '($41) (Nth $41 '0)) (lambda '($42) (Nth $42 '1)) $32)) (let $34 (Sort (FlatMap $33 (lambda '($43) (block '( (let $44 '('"Value")) (let $45 '('"Key" '"t1.Key" '"Value" '"t1.Value")) (let $46 '('"Key" '"t2.Key" '"Value" '"t2.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $30) (lambda '($47) (Exists (Member $47 '"Value")))) $43 'Inner $44 $44 $45 $46 '('"t1.Value") '('"t2.Value"))) )))) (Bool 'true) (lambda '($48) (Member $48 '"t1.Key")))) (let $35 (lambda '($49) (Member $49 '"t1.Key") (Member $49 '"t1.Value") (Member $49 '"t2.Key") (Member $49 '"t2.Value"))) (return (FromFlow (ExpandMap $34 $35))) ))) $14)) (let $16 (DqCnMerge (TDqOutput $15 '0) '('('0 '"Asc")))) (let $17 (DqPhyStage '($16) (lambda '($50) (FromFlow (NarrowMap (ToFlow $50) (lambda '($51 $52 $53 $54) (AsStruct '('"t1.Key" $51) '('"t1.Value" $52) '('"t2.Key" $53) '('"t2.Value" $54)))))) '('('"_logical_id" '627) '('"_id" '"f95bf45e-234508d3-4e069359-5ff61682")))) (let $18 '($7 $10 $15 $17)) (let $19 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $20 (DqCnResult (TDqOutput $17 '0) $19)) (return 
(KqpPhysicalQuery '((KqpPhysicalTx $18 '($20) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $13) '0 '0)) '('('"type" '"query")))) ) |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScanSpilling::SelfJoin [GOOD] >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] >> CdcStreamChangeCollector::OldImage [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/i5tq/000c71/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 20780, MsgBus: 29336 2025-04-04T16:55:29.110719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502677510498613:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:29.111279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c71/r3tmp/tmpwA2RIB/pdisk_1.dat 2025-04-04T16:55:29.550067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:29.568138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:29.568318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:29.570572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20780, node 1 2025-04-04T16:55:29.657826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:29.657846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:29.657852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:29.658012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29336 TClient is connected to server localhost:29336 WaitRootIsUp 'Root'... 
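The (KqpPhysicalQuery ...) s-expression in the SelfJoinQueryService output above is the compiled plan of a broadcast map-join of /Root/KeyValue with itself: one stage multiplexes the scan into two identical flows, the broadcast side is squeezed into a compact hash dict keyed by Value (rows with NULL Value are filtered out, consistent with an inner join on a nullable column), MapJoinCore joins the streams, and the output is sorted and merged by t1.Key ascending. That plan is consistent with a self-join of roughly this shape (a hedged reconstruction from the plan, not the test's literal query text):

    --!syntax_v1
    -- Reconstructed from the plan: inner join on Value, output columns
    -- t1.Key, t1.Value, t2.Key, t2.Value, ordered by t1.Key.
    SELECT t1.Key, t1.Value, t2.Key, t2.Value
    FROM `/Root/KeyValue` AS t1
    INNER JOIN `/Root/KeyValue` AS t2
        ON t1.Value = t2.Value
    ORDER BY t1.Key;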
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:30.287597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:30.331504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:55:30.483765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:30.682370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:55:30.766137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:32.643695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502690395402154:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:32.643872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.001464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.042936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.086485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.160503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.195961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.265786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.335755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502694690369972:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.335851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.336143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502694690369977:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.340425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:33.351271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502694690369979:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:33.458760Z node 1 :TX_PROXY ERROR: Actor# [1:7489502694690370035:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:34.101488Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502677510498613:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:34.122460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:36.168417Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2025-04-04T16:55:36.168690Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-04-04T16:55:36.168811Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=2;input_channels_count=2; 2025-04-04T16:55:36.168862Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-04-04T16:55:36.169637Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:46 :META:Table { TableId { OwnerId: 72057594046644480 TableId: 6 } TablePath: "/Root/KeyValue" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Columns { Id: 1 Name: "Key" Type: 4 } Columns { Id: 2 Name: "Value" Type: 4097 } KeyColumnTypes: 4 Reads { ShardId: 72075186224037911 KeyRanges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } } ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC EnableShardsSequentialScan: true KeyColumnTypeInfos { } ReadType: ROWS 2025-04-04T16:55:36.169741Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272490:2553], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [1:7489502707575272490:2553], task: 1 2025-04-04T16:55:36.169766Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272490:2553], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Set periodic stats 0.100000s 2025-04-04T16:55:36.169788Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272490:2553], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
EVLOGKQP START 2025-04-04T16:55:36.170713Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7489502707575272498:3861] at service [1:7597699455116079460:27756] 2025-04-04T16:55:36.170743Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7489502707575272499:3862] at service [1:7597699455116079460:27756] 2025-04-04T16:55:36.170931Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272491:2554], TxId: 281474976710682, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7489502707575272491:2554], task: 2 2025-04-04T16:55:36.170971Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272491:2554], TxId: 281474976710682, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2025-04-04T16:55:36.171642Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272491:2554], TxId: 281474976710682, task: 2. Ctx: { SessionId : ydb://session/3? ... xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:55:36.204134Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:55:36.204143Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.204208Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.204232Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [1:7489502707575272485:2548] TaskId: 4 Stats: CpuTimeUs: 1237 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 732 InputRows: 1 InputBytes: 50 OutputRows: 1 OutputBytes: 50 ResultRows: 1 ResultBytes: 50 ComputeCpuTimeUs: 164 BuildCpuTimeUs: 568 HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743785736180 } MaxMemoryUsage: 104857600 2025-04-04T16:55:36.204247Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. 
CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.204300Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:55:36.204330Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.204334Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:55:36.204350Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.204459Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:55:36.204480Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.204522Z node 1 :KQP_COMPUTE DEBUG: [CloseFile async] from: [1:7489502707575272498:3861], waitTime: 0.000419s 2025-04-04T16:55:36.204656Z node 1 :KQP_COMPUTE DEBUG: [CloseFile async] from: [1:7489502707575272499:3862], waitTime: 0.000563s 2025-04-04T16:55:36.204760Z node 1 :KQP_COMPUTE DEBUG: [CloseFileResponse] from: [1:7489502707575272498:3861] 2025-04-04T16:55:36.204794Z node 1 :KQP_COMPUTE DEBUG: [CloseFileResponse] from: [1:7489502707575272499:3862] 2025-04-04T16:55:36.205490Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.205643Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. 
CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:55:36.205661Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:55:36.205676Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.205751Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.205852Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:55:36.205894Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.205911Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:55:36.205923Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.207369Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.207556Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. 
CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.207641Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.207855Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:55:36.207871Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.207926Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:55:36.208004Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272493:2556], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CustomerSuppliedId : . TraceId : 01jr0vndh5228q5qmza89b8h03. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:55:36.208042Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489502707575272492:2555], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jr0vndh5228q5qmza89b8h03. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTA2Yjg5YzgtOTVkMmIzOTMtYTM1YmY4ZC0xMTE0Nzc5Yg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
CA StateFunc 271646927 |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] Test command err: Trying to start YDB, gRPC: 21935, MsgBus: 29023 2025-04-04T16:55:33.487656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502695966102695:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:33.487819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d7b/r3tmp/tmpONBAgp/pdisk_1.dat 2025-04-04T16:55:34.006114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:34.006285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:34.007988Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:34.011704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21935, node 1 2025-04-04T16:55:34.115838Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:34.115865Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:34.115901Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:34.116050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29023 TClient is connected to server localhost:29023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:34.818064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
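For reference, the physical plan dumped earlier for KqpScanSpilling::SelfJoin (two reads of /Root/KeyValue feeding a MapJoinCore inner join on Value, followed by a Sort on t1.Key) corresponds to a plain self-join. Below is a minimal sketch of an equivalent query issued through the public YDB C++ SDK; it is an illustration, not the test's actual code: the endpoint and database are assumptions (the test runs against its own in-process server on the gRPC port shown above), and SDK header paths vary between releases.

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <iostream>

    int main() {
        using namespace NYdb;
        using namespace NYdb::NTable;

        // Assumed endpoint/database for illustration only.
        auto driver = TDriver(TDriverConfig()
            .SetEndpoint("localhost:2136")
            .SetDatabase("/Root"));
        TTableClient client(driver);

        // Self-join on Value, ordered by t1.Key -- the shape suggested by
        // the MapJoinCore/Sort stages in the plan above.
        auto status = client.RetryOperationSync([](TSession session) -> TStatus {
            auto result = session.ExecuteDataQuery(R"(
                SELECT t1.Key AS k1, t1.Value AS v1,
                       t2.Key AS k2, t2.Value AS v2
                FROM KeyValue AS t1
                INNER JOIN KeyValue AS t2 ON t1.Value = t2.Value
                ORDER BY k1;
            )", TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx())
                .GetValueSync();
            return result;  // TDataQueryResult derives from TStatus
        });

        if (!status.IsSuccess()) {
            std::cerr << status.GetIssues().ToString().c_str() << "\n";
        }
        driver.Stop(true);
        return status.IsSuccess() ? 0 : 1;
    }

Executing a join of this shape under the tight memory limits of the spilling test suite is consistent with the Register LocalFileSpillingActor / CloseFile entries visible in the log above, where intermediate join state is spilled to local files.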
------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 3885, MsgBus: 10907 2025-04-04T16:55:26.443103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502668516656329:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.443173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e4b/r3tmp/tmphzvq5q/pdisk_1.dat 2025-04-04T16:55:26.838465Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3885, node 1 2025-04-04T16:55:26.867407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:26.867467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:26.875558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:26.921779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:26.921801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:26.921815Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:26.921952Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10907 TClient is connected to server localhost:10907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:27.589104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:29.697252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502681401558886:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.697397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.935990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.057494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502685696526303:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.057589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.057990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502685696526309:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.062298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.078226Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-04T16:55:30.078824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502685696526311:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:30.140047Z node 1 :TX_PROXY ERROR: Actor# [1:7489502685696526351:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:30.740226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.202708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:31.443080Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502668516656329:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.443153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:31.772723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:32.280908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:32.899493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.424866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:33.496785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.715802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 63253, MsgBus: 27777 2025-04-04T16:55:26.812472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502666318637068:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.823536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d8a/r3tmp/tmpHH0nCP/pdisk_1.dat 2025-04-04T16:55:27.167477Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63253, node 1 2025-04-04T16:55:27.236939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.238042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.242441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:27.263646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.263667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.263673Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.263836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27777 TClient is connected to server localhost:27777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:27.851219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:27.862649Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:29.888651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502679203539548:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.888791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.081474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.202554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683498506968:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.202679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.203075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683498506973:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.212458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.224952Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-04T16:55:30.225240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502683498506975:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:30.299707Z node 1 :TX_PROXY ERROR: Actor# [1:7489502683498507015:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:30.914330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.403932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:31.809829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502666318637068:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.809914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:31.929479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:32.484669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.183121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.758895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:33.807927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.186164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] Test command err: Trying to start YDB, gRPC: 11295, MsgBus: 11171 2025-04-04T16:55:26.632445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502666468771823:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.632565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e33/r3tmp/tmpTd05XT/pdisk_1.dat 2025-04-04T16:55:27.077383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:27.086316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.086434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.092389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11295, node 1 2025-04-04T16:55:27.282260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.282281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.282287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.282382Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11171 TClient is connected to server localhost:11171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:27.840548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:27.864246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:30.184519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683648641678:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.184713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.455304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.618151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683648641800:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.618264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.618588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683648641805:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.622383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.633552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502683648641807:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:30.726526Z node 1 :TX_PROXY ERROR: Actor# [1:7489502683648641873:2409] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:31.288732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.634888Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502666468771823:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.634951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:31.861140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:32.405003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:32.975457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.519745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T16:55:34.090002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:34.152753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.447993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2025-04-04T16:55:10.653744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:10.654577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:10.654696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bbe/r3tmp/tmpc8PjbI/pdisk_1.dat 2025-04-04T16:55:11.098320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:11.145162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:11.149029Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-04T16:55:11.190106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:11.190325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:11.203864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:11.309505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:11.389062Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:55:11.389383Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:11.465846Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:11.465990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:55:11.467692Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:55:11.467850Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:55:11.467910Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:55:11.468305Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:55:11.468460Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:55:11.468565Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:55:11.479420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:55:11.519383Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:55:11.519643Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:55:11.519797Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:55:11.519843Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:11.519880Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:55:11.519925Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-04-04T16:55:11.520457Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:55:11.520584Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:55:11.520753Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.520801Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:11.520851Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:55:11.520921Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:11.521001Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:55:11.521542Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:11.521808Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:55:11.521913Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:55:11.524090Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:11.534921Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:11.535035Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:55:11.683488Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T16:55:11.688117Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:11.688219Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.689681Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.689851Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:11.689951Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:11.690280Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:55:11.690470Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:11.691250Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:11.691374Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:55:11.693497Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:55:11.693902Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:11.695451Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:11.695494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.696493Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:55:11.696553Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:11.697661Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:11.697939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:11.697970Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:11.698006Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:11.698060Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:11.698105Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:11.698194Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:11.703966Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:11.704181Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:11.704244Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:11.721535Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:11.721743Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:55:11.721806Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-04T16:55:11.721847Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T16:55:11.723630Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:11.749934Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:12.056988Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:12.057070Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:12.057322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:12.057368Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-04-04T16:55:12.057461Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:12.057680Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-04T16:55:12.057819Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:12.058119Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:12.069733Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:12.155809Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T16:55:12.155917Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:12.155982Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:12.156030Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... ode 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:36.450138Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:55:36.450217Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:36.451772Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:55:36.451865Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:36.453475Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:36.453947Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:36.454000Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:55:36.454070Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:55:36.454159Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:36.454225Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:55:36.454533Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:36.457195Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:55:36.457430Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:55:36.457504Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:36.472414Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:36.472585Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 
2025-04-04T16:55:36.472640Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-04-04T16:55:36.472678Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T16:55:36.473548Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:36.502943Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:36.745325Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:55:36.745407Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:36.745610Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:36.745670Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:55:36.745746Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T16:55:36.745992Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-04-04T16:55:36.746140Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:55:36.746536Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:55:36.747414Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:55:36.795174Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T16:55:36.795294Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:36.795345Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:55:36.795396Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:36.795482Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:55:36.795570Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-04-04T16:55:36.795715Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:36.798147Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-04-04T16:55:36.798326Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:55:36.820008Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:884:2723], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.820172Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.820267Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:893:2728], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.835328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:55:36.854716Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:37.024203Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:55:37.028368Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:898:2731], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:55:37.055458Z node 4 :TX_PROXY ERROR: Actor# [4:954:2768] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:37.350331Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vnekd6b3g2yx9bbys0rb6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzIxNjA2MmYtNzJkNzBmOTMtZDI5NmEyZjktZDkwYjJhNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:37.355162Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:995:2795], serverId# [4:996:2796], sessionId# [0:0:0] 2025-04-04T16:55:37.355654Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T16:55:37.360839Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vnekd6b3g2yx9bbys0rb6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzIxNjA2MmYtNzJkNzBmOTMtZDI5NmEyZjktZDkwYjJhNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:37.365775Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vnekd6b3g2yx9bbys0rb6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzIxNjA2MmYtNzJkNzBmOTMtZDI5NmEyZjktZDkwYjJhNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:37.366344Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:37.368030Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743785737367888 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:37.379367Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:37.379507Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2025-04-04T16:55:37.379629Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:37.379721Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:37.380774Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2025-04-04T16:55:37.380855Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:37.488592Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vnf59ckpyvskb6favsfw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODExNDM5NzQtOWE0NGI5ZjEtNjViMTg1ZWEtMzllYjUyOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:37.489071Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:55:37.490446Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743785737490319 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:55:37.501879Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:55:37.502056Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-04-04T16:55:37.502109Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:55:37.504501Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1030:2820], serverId# [4:1031:2821], sessionId# [0:0:0] 2025-04-04T16:55:37.512659Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1032:2822], serverId# [4:1033:2823], sessionId# [0:0:0] >> SystemView::StoragePoolsRanges [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer >> TColumnShardTestReadWrite::ReadStale >> KqpScanSpilling::SpillingPragmaParseError [GOOD] >> TColumnShardTestReadWrite::WriteReadDuplicate |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TColumnShardTestReadWrite::WriteReadModifications >> TBSV::CleanupDroppedVolumesOnRestart >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 21515, MsgBus: 64207 2025-04-04T16:55:26.203168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502667544428558:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.203262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e63/r3tmp/tmpnFshPB/pdisk_1.dat 2025-04-04T16:55:26.515731Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21515, node 1 2025-04-04T16:55:26.593586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:26.593708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:26.596001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:26.613701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:26.613730Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:26.613739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:26.613893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64207 TClient is connected to server localhost:64207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:27.329178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:27.345515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:29.408364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502680429331114:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.408533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.635691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:29.758024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502680429331236:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.758092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.759097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502680429331241:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:29.762781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:29.771317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502680429331243:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:29.847008Z node 1 :TX_PROXY ERROR: Actor# [1:7489502680429331283:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:30.501411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:30.983908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:31.203167Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502667544428558:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.203302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:31.539873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:32.079801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:32.683647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.275438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:33.328367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.356313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.396245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.398135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.399729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2025-04-04T16:55:38.152763Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785738184, txId: 281474976710751] shutting down >> TColumnShardTestReadWrite::ReadStale [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/i5tq/000b9f/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 3439, MsgBus: 12838 2025-04-04T16:55:33.541272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502694651815853:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:33.541626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b9f/r3tmp/tmpwj84Gk/pdisk_1.dat 2025-04-04T16:55:33.987599Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:34.019653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:34.019799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:34.022276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3439, node 1 2025-04-04T16:55:34.151258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:34.151280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:34.151287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:34.151441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12838 TClient is connected to server localhost:12838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:34.840976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
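The GenericFederatedQuery blocks above exercise the connector handshake in a fixed order: DescribeTable returns the schema, ListSplits partitions the scan, and ReadSplits streams Arrow IPC batches; each "CRAB Expected"/"CRAB Actual" pair is the mock connector asserting that the outgoing protobuf request matches the expectation exactly. The data_source_instance in those requests (localhost:2136, database pgdb, basic auth as crab, TLS, NATIVE protocol) corresponds to a user-side external data source definition. A minimal YQL sketch, assuming the documented CREATE EXTERNAL DATA SOURCE syntax for a YDB-type source; the object and secret names are illustrative and not taken from the test sources:

    -- Secret holding the connector password (name is illustrative)
    CREATE OBJECT crab_password (TYPE SECRET) WITH value = "qwerty12345";

    -- External YDB source matching the logged data_source_instance
    CREATE EXTERNAL DATA SOURCE external_ydb WITH (
        SOURCE_TYPE = "Ydb",
        LOCATION = "localhost:2136",
        DATABASE_NAME = "pgdb",
        AUTH_METHOD = "BASIC",
        LOGIN = "crab",
        PASSWORD_SECRET_NAME = "crab_password",
        USE_TLS = "TRUE"
    );

    -- A read like this drives the DescribeTable/ListSplits/ReadSplits sequence above
    SELECT * FROM external_ydb.example_1;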
2025-04-04T16:55:34.873696Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:34.957053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:35.169560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:35.368427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:35.478820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:37.288757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502711831686613:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:37.288888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:37.652978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.700059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.787405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.842647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.917566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.963201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:38.058360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502716126654433:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:38.058446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:38.058907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502716126654438:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:38.062982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:38.079722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502716126654440:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:38.157218Z node 1 :TX_PROXY ERROR: Actor# [1:7489502716126654495:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:38.528440Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502694651815853:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:38.551808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:39.224320Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502720421622056:2493], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2025-04-04T16:55:39.224572Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTBiMjliMTgtYzU4NDlhN2QtZGY1OGE3NzUtZjRmZWVkMTU=, ActorId: [1:7489502720421622049:2489], ActorState: ExecuteState, TraceId: 01jr0vngxr2knjcga0d24gw87q, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TBSV::ShouldLimitBlockStoreVolumeDropRate |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::StoragePoolsRanges [GOOD] Test command err: 2025-04-04T16:51:28.283058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501644646806065:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.283121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c02/r3tmp/tmpXBiv8f/pdisk_1.dat 2025-04-04T16:51:28.919377Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:28.920662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.920753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.932131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12250, node 1 2025-04-04T16:51:29.095262Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:29.095275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:29.095280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:29.095355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
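The GENERIC_ERROR above from KqpScanSpilling::SpillingPragmaParseError is a deliberate parse failure: per the error text, the enum EEnabledSpillingNodes accepts only 'None', 'GraceJoin', 'Aggregation', or 'All', and the test submits 'GraceJoin1'. A minimal sketch of the pragma involved, assuming the ydb.EnableSpillingNodes spelling used by KQP sessions; only the value set comes from the error text:

    PRAGMA ydb.EnableSpillingNodes = "GraceJoin1"; -- rejected at compile time: key not found in EEnabledSpillingNodes
    PRAGMA ydb.EnableSpillingNodes = "GraceJoin";  -- accepted: spill only grace-join nodes
    PRAGMA ydb.EnableSpillingNodes = "All";        -- accepted: spill wherever supported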
2025-04-04T16:51:29.489329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.557699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.582651Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501647121870808:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.582692Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2025-04-04T16:51:29.646614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.646687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.662893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.662986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.667105Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-04T16:51:29.671533Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T16:51:29.696198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.697414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.944167Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-04T16:51:29.944631Z node 4 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-04T16:51:29.944699Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-04T16:51:30.024393Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:30.025342Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-04T16:51:30.025366Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] tablet is offline 2025-04-04T16:51:30.049617Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2025-04-04T16:51:30.072540Z node 4 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-04-04T16:51:30.074890Z node 4 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-04-04T16:51:30.136808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
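The recurring NOT_FOUND warnings for pool "default" across every test block in this section are the workload manager lazily creating /Root/.metadata/workload_manager/pools/default on first query: the fetch fails, a retry is scheduled, ESchemeOpCreateResourcePool runs ("Transaction ... completed, doublechecking"), and a concurrent creator is tolerated via the "path exist, request accepts it" check, so the TX_PROXY ERROR that follows is benign in these runs. A hedged sketch of declaring a pool explicitly instead of relying on the implicit default, assuming the CREATE RESOURCE POOL DDL of recent YDB releases; the name and limits are illustrative:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries admitted at once
        QUEUE_SIZE = 100              -- requests allowed to wait for admission
    );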
2025-04-04T16:51:30.259036Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489501652958794841:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:30.259138Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2025-04-04T16:51:30.393432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:30.393580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:30.418974Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T16:51:30.421236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:30.421334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:30.450780Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:30.474796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:30.491109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:30.946886Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:31.114373Z node 2 :SYSTEM_VIEWS INFO: [72075186224037899] OnActivateExecutor 2025-04-04T16:51:31.114505Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Execute 2025-04-04T16:51:31.272768Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-04T16:51:31.272951Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:31.283515Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Complete 2025-04-04T16:51:31.283539Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] tablet is offline 2025-04-04T16:51:31.290612Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxConfigure::Execute: database# /Root/Tenant2 2025-04-04T16:51:31.379335Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxConfigure::Complete 2025-04-04T16:51:31.431403Z node 2 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 3], sysview processor id# 72075186224037899 2025-04-04T16:51:31.946879Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:32.274617Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:32.949686Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:33.286469Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501644646806065:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.286537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:33.408384Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:33.954325Z node 4 :SYSTEM_VIEWS DEBUG: 
NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:34.406582Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:34.583941Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489501647121870808:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.584007Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:34.955290Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:35.282956Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489501652958794841:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:35.296322Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:35.410639Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:35.959204Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:36.426387Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:36.961358Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:37.426398Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:37.978666Z node 4 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T16:51:38.114925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501687596480418:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.115095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.119693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501687596480453:2351], DatabaseId: /Root, PoolId: d ... ur snapshot: [step: 1743785728589, txId: 281474976710660] shutting down 2025-04-04T16:55:29.823436Z node 57 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[57:7489502678721371842:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:29.823601Z node 57 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c02/r3tmp/tmpUrIBvh/pdisk_1.dat 2025-04-04T16:55:30.032682Z node 57 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:30.059369Z node 57 :HIVE WARN: HIVE#72057594037968897 Node(57, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:30.059529Z node 57 :HIVE WARN: HIVE#72057594037968897 Node(57, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:30.064018Z node 57 :HIVE WARN: HIVE#72057594037968897 Node(57, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1422, node 57 2025-04-04T16:55:30.190795Z node 57 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:30.190830Z node 57 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:30.190844Z node 57 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:30.191051Z node 57 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:30.776666Z node 57 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:34.819195Z node 57 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[57:7489502678721371842:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:34.819306Z node 57 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:36.696991Z node 57 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [57:7489502708786143578:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.697180Z node 57 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.697810Z node 57 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [57:7489502708786143590:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.705732Z node 57 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:55:36.729407Z node 57 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [57:7489502708786143592:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:55:36.807301Z node 57 :TX_PROXY ERROR: Actor# [57:7489502708786143643:2343] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:36.980171Z node 57 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vn8rm2mdntt9z1mqn0k6n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=57&id=ZjRhZmY3ZDQtMjViZWZiMjYtYzE5Y2QxMGQtZDJhMTI1NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:36.984278Z node 57 :SYSTEM_VIEWS INFO: Scan started, actor: [57:7489502708786143682:2348], owner: [57:7489502708786143679:2346], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:36.991440Z node 57 :SYSTEM_VIEWS INFO: Scan prepared, actor: [57:7489502708786143682:2348], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:36.996644Z node 57 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [57:7489502708786143682:2348], row count: 4, finished: 1 2025-04-04T16:55:36.996711Z node 57 :SYSTEM_VIEWS INFO: Scan finished, actor: [57:7489502708786143682:2348], owner: [57:7489502708786143679:2346], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:37.038579Z node 57 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785736977, txId: 281474976715660] shutting down 2025-04-04T16:55:37.308802Z node 57 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vnevb5j5hz935bwesbw1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=57&id=ZjgzYTkwYWYtNmI3MThkYzItOWI1ZjY5MWYtY2MxYzk3NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:37.312629Z node 57 :SYSTEM_VIEWS INFO: Scan started, actor: [57:7489502713081111018:2360], owner: [57:7489502713081111014:2358], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:37.314379Z node 57 :SYSTEM_VIEWS INFO: Scan prepared, actor: [57:7489502713081111018:2360], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:37.314820Z node 57 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [57:7489502713081111018:2360], row count: 2, finished: 1 2025-04-04T16:55:37.314855Z node 57 :SYSTEM_VIEWS INFO: Scan finished, actor: [57:7489502713081111018:2360], owner: [57:7489502713081111014:2358], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:37.331852Z node 57 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785737305, txId: 281474976715662] shutting down 2025-04-04T16:55:37.599296Z node 57 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vnf4b3h7rr64rwhcmqzkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=57&id=OWU5YTdlYmQtZmY2OGFmNzAtZmRmNjg1NTEtYzYzNDAwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:37.601898Z node 57 :SYSTEM_VIEWS INFO: Scan started, actor: [57:7489502713081111050:2369], owner: [57:7489502713081111046:2367], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:37.602965Z node 57 :SYSTEM_VIEWS INFO: Scan prepared, actor: [57:7489502713081111050:2369], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:37.603359Z node 57 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [57:7489502713081111050:2369], row count: 3, finished: 1 2025-04-04T16:55:37.603399Z node 57 :SYSTEM_VIEWS INFO: Scan finished, actor: [57:7489502713081111050:2369], owner: [57:7489502713081111046:2367], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:37.611377Z node 57 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785737594, txId: 281474976715664] shutting down 2025-04-04T16:55:38.056366Z node 57 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vnfd648em744r3s3cc5qm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=57&id=ZWY4MjNmZjUtODk5MzYxNWUtYzI3YmU5Yi03MjZkNzI4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:38.060771Z node 57 :SYSTEM_VIEWS INFO: Scan started, actor: [57:7489502717376078385:2381], owner: [57:7489502717376078381:2379], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:38.070503Z node 57 :SYSTEM_VIEWS INFO: Scan prepared, actor: [57:7489502717376078385:2381], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:38.072177Z node 57 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [57:7489502717376078385:2381], row count: 2, finished: 1 2025-04-04T16:55:38.072227Z node 57 :SYSTEM_VIEWS INFO: Scan finished, actor: [57:7489502717376078385:2381], owner: [57:7489502717376078381:2379], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:38.079423Z node 57 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785738054, txId: 281474976715666] shutting down 2025-04-04T16:55:38.441989Z node 57 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr0vnfvpbjn1dn31rdz40w6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=57&id=MmNlOWY5NWYtYzQ2ODBkNDEtZjNlYWRiNGEtOWNiOWE0ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:38.444974Z node 57 :SYSTEM_VIEWS INFO: Scan started, actor: [57:7489502717376078416:2390], owner: [57:7489502717376078413:2388], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:38.447928Z node 57 :SYSTEM_VIEWS INFO: Scan prepared, actor: [57:7489502717376078416:2390], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:38.448384Z node 57 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [57:7489502717376078416:2390], row count: 3, finished: 1 2025-04-04T16:55:38.448434Z node 57 :SYSTEM_VIEWS INFO: Scan finished, actor: [57:7489502717376078416:2390], owner: [57:7489502717376078413:2388], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-04-04T16:55:38.452919Z node 57 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785738440, txId: 281474976715668] shutting down >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-04-04T16:55:39.991572Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:40.125706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:40.177532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:40.177887Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:40.187743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:40.187998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:40.188276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:40.188396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:40.188541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:40.188661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:40.188779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:40.188891Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:40.189037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:40.189246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.189368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:40.189480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:40.226320Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:40.226649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:40.226738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:40.226961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:40.227157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:40.227249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:40.227317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:40.227423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:40.227497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:40.227550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:40.227591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:40.227800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:40.227879Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:40.227921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:40.227950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:40.228044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:40.228148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:40.228214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:40.228247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:40.228329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:40.228370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:40.228402Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:40.228455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:40.228495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:40.228536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:40.229007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T16:55:40.229161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=64; 2025-04-04T16:55:40.229272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-04T16:55:40.229360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-04T16:55:40.229591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:40.229674Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:40.229714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:40.229956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:40.230021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.230068Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.230306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:40.230367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:40.230404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:40.230645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:40.230695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:40.230729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:40.230869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:40.230925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:40.230986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
_id=1;broken=0; 2025-04-04T16:55:40.942457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:211;event=finished_tx;tx_id=100; 2025-04-04T16:55:40.961630Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-04-04T16:55:40.961781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-04-04T16:55:40.961948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=77304; 2025-04-04T16:55:40.966045Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::a40c9924-117511f0-9a4ea312-87c663b9; 2025-04-04T16:55:40.966129Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-04T16:55:40.967164Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:752;event=indexation;bytes=77304;blobs_count=9;max_limit=251658240;has_more=0;external_task_id=a40c9924-117511f0-9a4ea312-87c663b9; 2025-04-04T16:55:40.967529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:618;event=start_changes;type=CS::INDEXATION;task_id=a40c9924-117511f0-9a4ea312-87c663b9; 2025-04-04T16:55:40.968019Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=69691;external_task_id=a40c9924-117511f0-9a4ea312-87c663b9;type=CS::INDEXATION;priority=0;; 2025-04-04T16:55:40.968250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=69691;external_task_id=a40c9924-117511f0-9a4ea312-87c663b9;type=CS::INDEXATION;priority=0;; 2025-04-04T16:55:40.968298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=a40c9924-117511f0-9a4ea312-87c663b9;mem=69691;cpu=0; 2025-04-04T16:55:40.968455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=a40c9924-117511f0-9a4ea312-87c663b9;task_id=1;mem=69691;cpu=0; 2025-04-04T16:55:40.968612Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=a40c9924-117511f0-9a4ea312-87c663b9;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=a40c9924-117511f0-9a4ea312-87c663b9; 2025-04-04T16:55:40.988655Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=a40c9924-117511f0-9a4ea312-87c663b9;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-04T16:55:40.988858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-04T16:55:40.989462Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T16:55:40.989509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T16:55:40.989574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:767;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=77304;indexing_debug={task_ids=a40c9924-117511f0-9a4ea312-87c663b9,;}; 2025-04-04T16:55:40.989694Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T16:55:40.989908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T16:55:40.989969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:40.990007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:40.990108Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T16:55:40.990523Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 1 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-04-04T16:55:41.003000Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-04-04T16:55:41.003212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=constructor.cpp:18;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-04-04T16:55:41.003337Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. 
now: 2025-04-04T16:55:41.003293Z; 2025-04-04T16:55:41.021628Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:1:6824:0]; 2025-04-04T16:55:41.021762Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:2:6824:0]; 2025-04-04T16:55:41.035573Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T16:55:41.035826Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-04-04T16:55:41.036735Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-04-04T16:55:41.036869Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T16:55:41.037320Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-04-04T16:55:41.050332Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-04T16:55:41.050421Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;fline=with_appended.cpp:65;portions=1,;task_id=a40c9924-117511f0-9a4ea312-87c663b9; 2025-04-04T16:55:41.050718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::a40c9924-117511f0-9a4ea312-87c663b9; 2025-04-04T16:55:41.050793Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T16:55:41.050877Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:41.050947Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T16:55:41.051021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T16:55:41.051077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:41.051117Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:41.051203Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.993000s; 2025-04-04T16:55:41.051285Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=a40c9924-117511f0-9a4ea312-87c663b9;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T16:55:41.051442Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-04-04T16:55:41.051544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:41.051664Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T16:55:41.054124Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-04-04T16:55:41.054223Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-04-04T16:55:41.054581Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1,6"},"fetch":"1,6"}]}; 2025-04-04T16:55:41.054727Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. 
now: 2025-04-04T16:55:41.054671Z; 2025-04-04T16:55:41.055005Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=a40c9924-117511f0-9a4ea312-87c663b9;mem=69691;cpu=0; >> TBSV::ShardsNotLeftInShardsToDelete |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |77.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:40.864297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:40.864403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:40.864453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:40.864489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:40.864532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:40.864572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:40.864641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:40.864721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:40.865058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:40.967225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:40.967285Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:40.980635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:40.980909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:40.981061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:40.999164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-04-04T16:55:40.999523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:41.000196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:41.001182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:41.008781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:41.010173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:41.010278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:41.010466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:41.010513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:41.010554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:41.010711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.024174Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:41.143125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:41.143311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.143468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:41.143630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:41.143670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.145872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:41.146046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:41.146270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.146328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:41.146383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:41.146423Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:41.148669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.148725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:41.148758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:41.150539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.150593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.150647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:41.150696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:41.154516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:41.156665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:41.156845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:41.157914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:41.158059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:41.158120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:41.158448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:41.158510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:41.158715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:41.158814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:41.160936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:41.160980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 
1] 2025-04-04T16:55:41.161124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:41.161156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:41.161365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.161394Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:41.161453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:41.161477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:41.161521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:41.161557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:41.161584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:41.161616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:41.161639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:41.161669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:41.161733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:41.161764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:41.161788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:41.163485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:41.163624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:41.163663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
-04T16:55:41.346031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.346079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.346152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.358130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:55:41.359555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:41.359835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:41.359890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:41.360161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.360542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:55:41.360607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T16:55:41.360687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:41.363484Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.363537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:41.363573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:41.364107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:467:2058] recipient: [1:15:2062] 2025-04-04T16:55:41.410890Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:55:41.411118Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 242us result status StatusPathDoesNotExist 2025-04-04T16:55:41.411296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty 
ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:55:41.412334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:468:2058] recipient: [1:101:2136] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:471:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:403:2379] sender: [1:472:2058] recipient: [1:470:2430] Leader for TabletID 72057594046678944 is [1:473:2431] sender: [1:474:2058] recipient: [1:470:2430] 2025-04-04T16:55:41.460921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:41.461019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:41.461055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:41.461091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:41.461128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:41.461153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:41.461219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:41.461310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:41.461632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:41.478090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:41.479348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:41.479499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:41.479670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:41.479699Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:41.479880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:41.480563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:41.480654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.480736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.481123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.481216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T16:55:41.481414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at 
schemeshard: 72057594046678944 2025-04-04T16:55:41.481526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.481645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.481733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.481813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.481972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.482279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.482387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.482812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.482896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.483094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.483217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.483320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.483488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.483588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.483739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.484050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.484226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.484323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.484375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T16:55:41.496107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:41.496189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:41.496254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:41.496309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:41.496340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:41.496469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is 
[1:473:2431] sender: [1:532:2058] recipient: [1:15:2062] 2025-04-04T16:55:41.529246Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:55:41.529471Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 250us result status StatusPathDoesNotExist 2025-04-04T16:55:41.529669Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> EvWrite::WriteWithLock >> Cdc::DecimalKey [GOOD] >> Cdc::DropColumn >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-04-04T16:55:40.583778Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:40.694388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:40.720684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:40.720941Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:40.729175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:40.729414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:40.729714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:40.729837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:40.729953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:40.730068Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:40.730169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:40.730314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:40.730433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:40.730569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.730698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:40.730807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:40.764124Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:40.764648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:40.764705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:40.764895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:40.765079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:40.765158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:40.765202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:40.765285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:40.765351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:40.765387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:40.765418Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:40.765605Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:40.765672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:40.765714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:40.765743Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:40.765838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:40.765941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:40.765996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:40.766077Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:40.766153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:40.766187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:40.766219Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:40.766280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:40.766324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:40.766355Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:40.766831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-04-04T16:55:40.766935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-04T16:55:40.767043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-04T16:55:40.767169Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=80; 2025-04-04T16:55:40.767355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:40.767412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:40.767445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:40.767665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:40.767737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.767778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.767946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:40.767994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:40.768027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:40.768240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:40.768290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:40.768322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:40.768434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:40.768487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:40.768527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
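The TTxUpdateSchema pass above walks an ordered chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks), emitting normalizer_init, normalizer_finished and normalizer_switched events for each stage in turn. As a rough illustration of that control flow only — the INormalizer, TNoopNormalizer and RunPipeline names below are hypothetical and are not YDB's actual classes — a sequential pipeline of this shape can be sketched in C++ like so:

// Illustrative sketch only: mirrors the init -> run -> finished -> switch-to-next
// event flow visible in the TTxUpdateSchema log above. Not YDB's real code.
#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string ClassName() const = 0;
    virtual size_t SeqId() const = 0;
    virtual size_t Run() = 0;  // returns the number of chunks it had to repair
};

struct TNoopNormalizer final : INormalizer {
    std::string Name;
    size_t Seq;
    TNoopNormalizer(std::string name, size_t seq) : Name(std::move(name)), Seq(seq) {}
    std::string ClassName() const override { return Name; }
    size_t SeqId() const override { return Seq; }
    size_t Run() override { return 0; }  // corresponds to "0 chunks found" above
};

// Runs each normalizer in order, emitting events analogous to
// normalizer_init / normalizer_finished / normalizer_switched in the log.
void RunPipeline(const std::vector<std::unique_ptr<INormalizer>>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        const auto& n = chain[i];
        std::printf("event=normalizer_init;seq_id=%zu;type=%s\n",
                    n->SeqId(), n->ClassName().c_str());
        std::printf("message=%zu chunks found\n", n->Run());
        std::printf("event=normalizer_finished;description=CLASS_NAME=%s\n",
                    n->ClassName().c_str());
        if (i + 1 < chain.size()) {
            std::printf("event=normalizer_switched;description=CLASS_NAME=%s\n",
                        chain[i + 1]->ClassName().c_str());
        }
    }
}

int main() {
    std::vector<std::unique_ptr<INormalizer>> chain;
    chain.push_back(std::make_unique<TNoopNormalizer>("Granules", 1));
    chain.push_back(std::make_unique<TNoopNormalizer>("Chunks", 2));
    chain.push_back(std::make_unique<TNoopNormalizer>("TablesCleaner", 4));
    RunPipeline(chain);
}

Note how the seq_id values in the log are not contiguous (1, 2, 4, 6, 8, ...): retired normalizer slots keep their identifiers, which is why the sketch carries SeqId explicitly rather than deriving it from position.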
pp:52;scan_step=name=BUILD_STAGE_RESULT;duration=0.000000s;size=0;details={};;scan_step_idx=5; 2025-04-04T16:55:42.226144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T16:55:42.226185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:12;event=apply; 2025-04-04T16:55:42.226220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-04-04T16:55:42.226299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=6;memory=8391908;count=2; 2025-04-04T16:55:42.226844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-04-04T16:55:42.227342Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-04T16:55:42.227455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:55:42.227498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T16:55:42.227563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:42.228067Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T16:55:42.228140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T16:55:42.228193Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-04T16:55:42.228242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=6; 2025-04-04T16:55:42.228296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T16:55:42.228406Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:55:42.228566Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:55:42.228711Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T16:55:42.229017Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[35] (CS::INDEXATION) apply at tablet 9437184 2025-04-04T16:55:42.236622Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 1 2025-04-04T16:55:42.236822Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=2812;raw_bytes=868;count=1;records=10} inactive {blob_bytes=11248;raw_bytes=3472;count=4;records=40} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T16:55:42.237361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:42.237548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:55:42.237684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:55:42.237747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2446] finished for tablet 9437184 2025-04-04T16:55:42.238288Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:424:2442];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["l_task_result"],"t":0.023},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.032}],"full":{"a":1743785742204887,"name":"_full_task","f":1743785742204887,"d_finished":0,"c":0,"l":1743785742237826,"d":32939},"events":[{"name":"bootstrap","f":1743785742205305,"d_finished":6550,"c":1,"l":1743785742211855,"d":6550},{"a":1743785742237328,"name":"ack","f":1743785742237328,"d_finished":0,"c":0,"l":1743785742237826,"d":498},{"a":1743785742237302,"name":"processing","f":1743785742213490,"d_finished":5014,"c":10,"l":1743785742228639,"d":5538},{"name":"ProduceResults","f":1743785742209508,"d_finished":2739,"c":12,"l":1743785742237723,"d":2739},{"a":1743785742237727,"name":"Finish","f":1743785742237727,"d_finished":0,"c":0,"l":1743785742237826,"d":99},{"name":"task_result","f":1743785742213526,"d_finished":4798,"c":10,"l":1743785742228636,"d":4798}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:55:42.238400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:424:2442];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:42.238904Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:424:2442];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["l_task_result"],"t":0.023},{"events":["f_ack","l_ProduceResults","f_Finish"],"t":0.032},{"events":["l_ack","l_processing","l_Finish"],"t":0.033}],"full":{"a":1743785742204887,"name":"_full_task","f":1743785742204887,"d_finished":0,"c":0,"l":1743785742238447,"d":33560},"events":[{"name":"bootstrap","f":1743785742205305,"d_finished":6550,"c":1,"l":1743785742211855,"d":6550},{"a":1743785742237328,"name":"ack","f":1743785742237328,"d_finished":0,"c":0,"l":1743785742238447,"d":1119},{"a":1743785742237302,"name":"processing","f":1743785742213490,"d_finished":5014,"c":10,"l":1743785742228639,"d":6159},{"name":"ProduceResults","f":1743785742209508,"d_finished":2739,"c":12,"l":1743785742237723,"d":2739},{"a":1743785742237727,"name":"Finish","f":1743785742237727,"d_finished":0,"c":0,"l":1743785742238447,"d":720},{"name":"task_result","f":1743785742213526,"d_finished":4798,"c":10,"l":1743785742228636,"d":4798}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:55:42.239001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:42.204034Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=1384;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4196;selected_rows=0; 2025-04-04T16:55:42.239048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:42.239406Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:428:2446];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> Normalizers::EmptyTablesNormalizer |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete 
[GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:42.736896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:42.736999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:42.737051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:42.737089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:42.737128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:42.737180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:42.737239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:42.737307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:42.737642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:42.825247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:42.825300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:42.844190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:42.844450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:42.844590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:42.869802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:42.870151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:42.870814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:42.871811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:42.881911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:42.883331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:42.883412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:42.883612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:42.883675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:42.884506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:42.884703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.891606Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:43.073958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:43.074168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:43.074380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:43.074593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:43.074663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:43.079311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:43.079455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:43.079623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:43.079670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:43.079715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:43.079755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:43.081996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:43.082053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:43.082086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:43.083902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:43.083952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:43.083988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:43.084033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:43.093741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:43.095594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:43.095757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:43.096732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:43.096861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:43.096911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:43.097198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:43.097247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:43.097406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:43.097509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:43.099525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:43.099571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:43.099711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:43.099749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:43.100027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:43.100064Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:43.100146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:43.100178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:43.100238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:43.100274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:43.100313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
1, ready parts: 1/1, is published: false 2025-04-04T16:55:43.100349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:43.100389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:43.100422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:43.100487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:43.100523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:43.100554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:43.102322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:43.102434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:43.102466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T16:55:43.204232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:43.204550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:55:43.204844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T16:55:43.204962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2025-04-04T16:55:43.205571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:43.207930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T16:55:43.208085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-04T16:55:43.209069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:43.209194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:43.209250Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-04T16:55:43.209356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:55:43.209486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:55:43.209525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:55:43.209572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:55:43.209600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:55:43.209706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:43.209762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:55:43.209809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T16:55:43.209859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:55:43.209898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T16:55:43.209939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T16:55:43.210060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:55:43.210100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T16:55:43.210130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T16:55:43.210161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T16:55:43.211975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T16:55:43.212025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T16:55:43.212166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T16:55:43.212203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T16:55:43.212865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T16:55:43.212904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T16:55:43.213048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:43.213090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:43.213254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:55:43.213381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:43.213415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T16:55:43.213452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T16:55:43.213964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:43.214082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:43.214115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:43.214160Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T16:55:43.214204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:55:43.214577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:55:43.214641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T16:55:43.214698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:43.214907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:43.214966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:55:43.214993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:55:43.215035Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T16:55:43.215075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:43.215133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T16:55:43.215986Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-04T16:55:43.216160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:43.216458Z node 
1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-04T16:55:43.217374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T16:55:43.218595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:43.222062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T16:55:43.222441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:55:43.223965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T16:55:43.224054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T16:55:43.224378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T16:55:43.224433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T16:55:43.224790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:55:43.224874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:55:43.224934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:392:2372] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-04T16:55:43.225339Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-04T16:55:43.225416Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> EvWrite::WriteWithLock [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: 
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:55:41.977167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:41.977296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:41.977354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:41.977410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:41.977469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:41.977509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:41.977578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:41.977655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:41.978080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:42.077677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:42.077758Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:42.103794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:42.104109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:42.104304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:42.134293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:42.134738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:42.135487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:42.138141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:42.144823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:42.146429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:42.146524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:42.146798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:42.146857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-04T16:55:42.146907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:42.147065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.157445Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:55:42.320882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:42.321153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.321388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:42.321648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:42.321717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.326610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:42.326826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:42.327066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.327132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:42.327190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:42.327235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:42.331295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.331376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:42.331440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:42.333885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.333954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.333999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:42.334182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:42.338911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to 
coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:42.349244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:42.349535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:42.350947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:42.351151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:42.351244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:42.351635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:42.351707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:42.351920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:42.352069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:42.361245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:42.361331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:42.361565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:42.361617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:42.362001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:42.362061Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:42.362182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:42.362225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:42.362336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:42.362385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:42.362436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: 
false 2025-04-04T16:55:42.362500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:42.362561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:55:42.362608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:55:42.362719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:42.362770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:55:42.362809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:55:42.365130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:42.365283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:55:42.365328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1/1 2025-04-04T16:55:43.930043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:43.931530Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 Forgetting tablet 72075186233409569 2025-04-04T16:55:43.934701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-04T16:55:43.935026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 2025-04-04T16:55:43.935353Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 Forgetting tablet 72075186233409568 2025-04-04T16:55:43.935867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-04T16:55:43.936222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-04T16:55:43.936693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 2025-04-04T16:55:43.937748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-04T16:55:43.951714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-04T16:55:43.951907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, 
partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-04-04T16:55:43.952378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:43.952518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:43.952586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2025-04-04T16:55:43.952736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-04T16:55:43.952884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-04T16:55:43.952933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-04T16:55:43.952972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-04T16:55:43.953005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-04T16:55:43.953073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:43.953164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-04T16:55:43.953218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-04T16:55:43.953288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-04T16:55:43.953327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-04T16:55:43.953361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-04T16:55:43.953494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-04-04T16:55:43.953556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-04T16:55:43.953598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-04-04T16:55:43.953629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-04-04T16:55:43.957447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-04T16:55:43.957509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-04T16:55:43.958408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-04T16:55:43.958462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-04T16:55:43.959176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at 
ss 72057594046678944 2025-04-04T16:55:43.959219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-04-04T16:55:43.959377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:43.959404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:43.959540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-04-04T16:55:43.959659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:43.959690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-04T16:55:43.959728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-04-04T16:55:43.960102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T16:55:43.960165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T16:55:43.960192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-04T16:55:43.960228Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2025-04-04T16:55:43.960268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-04T16:55:43.960719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:55:43.960765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-04T16:55:43.960825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:55:43.961179Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2025-04-04T16:55:43.961426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T16:55:43.961472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T16:55:43.961492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, 
txId: 129 2025-04-04T16:55:43.961510Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-04-04T16:55:43.961540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:43.961601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-04T16:55:43.961911Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-04-04T16:55:43.962116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-04-04T16:55:43.962514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-04-04T16:55:43.964194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-04T16:55:43.965142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T16:55:43.965222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-04T16:55:43.966993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-04T16:55:43.967062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-04-04T16:55:43.967546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-04-04T16:55:43.967583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-04-04T16:55:43.968272Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-04-04T16:55:43.968380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-04-04T16:55:43.968409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1665:3535] TestWaitNotification: OK eventTxId 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-04-04T16:55:43.015467Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:43.139183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:43.167191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:43.167520Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:43.177152Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:43.177379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:43.177642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:43.177791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:43.177942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:43.178052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:43.178155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:43.178272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:43.178397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:43.178559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.178685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:43.178791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:43.211442Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:43.212121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:43.212229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:43.212409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.212585Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:43.212699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:43.212747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:43.212844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:43.212941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:43.212994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:43.213024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:43.213250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.213326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:43.213371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:43.213405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:43.213503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:43.213570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:43.213657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:43.213738Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:43.213798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:43.213825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:43.213849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-04T16:55:43.213886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:43.213912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:43.213942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:43.214312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=72; 2025-04-04T16:55:43.214426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-04-04T16:55:43.214514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-04T16:55:43.214610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-04T16:55:43.214818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:43.214875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:43.214910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:43.215114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:43.215169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.215214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.215378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:43.215424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:43.215456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:43.215719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:43.215770Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:43.215804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:43.215946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:43.215990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:43.216037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T16:55:44.285280Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:44.285635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T16:55:44.285666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T16:55:44.285701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-04-04T16:55:44.285751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=4096;merger=0;interval_id=1; 2025-04-04T16:55:44.285788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T16:55:44.285874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.285896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=4096;finished=1; 2025-04-04T16:55:44.285916Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:44.286066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:44.286192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:4096;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.286222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T16:55:44.286331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=4096; 2025-04-04T16:55:44.286395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=458752;num_rows=4096;batch_columns=key,field; 2025-04-04T16:55:44.286481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:303:2321];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-04T16:55:44.286594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.286713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.286846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.287246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:44.287392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.287526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.287581Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:307:2325] finished for tablet 9437184 2025-04-04T16:55:44.288114Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:303:2321];stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.068},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.07}],"full":{"a":1743785744217175,"name":"_full_task","f":1743785744217175,"d_finished":0,"c":0,"l":1743785744287644,"d":70469},"events":[{"name":"bootstrap","f":1743785744218631,"d_finished":3461,"c":1,"l":1743785744222092,"d":3461},{"a":1743785744287217,"name":"ack","f":1743785744286038,"d_finished":843,"c":1,"l":1743785744286881,"d":1270},{"a":1743785744287194,"name":"processing","f":1743785744225011,"d_finished":32012,"c":9,"l":1743785744286884,"d":32462},{"name":"ProduceResults","f":1743785744220553,"d_finished":2672,"c":12,"l":1743785744287560,"d":2672},{"a":1743785744287568,"name":"Finish","f":1743785744287568,"d_finished":0,"c":0,"l":1743785744287644,"d":76},{"name":"task_result","f":1743785744225047,"d_finished":30964,"c":8,"l":1743785744285944,"d":30964}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.288208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:303:2321];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:44.288666Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:303:2321];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.068},{"events":["l_ProduceResults","f_Finish"],"t":0.07},{"events":["l_ack","l_processing","l_Finish"],"t":0.071}],"full":{"a":1743785744217175,"name":"_full_task","f":1743785744217175,"d_finished":0,"c":0,"l":1743785744288252,"d":71077},"events":[{"name":"bootstrap","f":1743785744218631,"d_finished":3461,"c":1,"l":1743785744222092,"d":3461},{"a":1743785744287217,"name":"ack","f":1743785744286038,"d_finished":843,"c":1,"l":1743785744286881,"d":1878},{"a":1743785744287194,"name":"processing","f":1743785744225011,"d_finished":32012,"c":9,"l":1743785744286884,"d":33070},{"name":"ProduceResults","f":1743785744220553,"d_finished":2672,"c":12,"l":1743785744287560,"d":2672},{"a":1743785744287568,"name":"Finish","f":1743785744287568,"d_finished":0,"c":0,"l":1743785744288252,"d":684},{"name":"task_result","f":1743785744225047,"d_finished":30964,"c":8,"l":1743785744285944,"d":30964}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:44.288773Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:44.216516Z;index_granules=0;index_portions=1;index_batches=176;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=494016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=494016;selected_rows=0; 2025-04-04T16:55:44.288821Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:44.289075Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:307:2325];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime |77.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |77.2%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2025-04-04T16:52:22.420034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2412], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:22.420182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:52:22.420289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019cf/r3tmp/tmpcljUNK/pdisk_1.dat 2025-04-04T16:52:22.910329Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2498, node 1 2025-04-04T16:52:24.001030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:24.001089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:24.001119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:24.001558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:24.017243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:24.373141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:24.373310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:24.397081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64532 2025-04-04T16:52:27.022095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:44.890633Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T16:52:45.180196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:45.180709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:45.239654Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:45.251208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:46.908610Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.909125Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.909851Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.909994Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.910505Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.911093Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.911477Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.911749Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:46.911830Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:47.169697Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:47.169798Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:47.191752Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:47.485683Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:47.699691Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:52:47.699827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:52:47.738907Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:52:47.740385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:52:47.740601Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T16:52:47.740660Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:52:47.740708Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T16:52:47.740756Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T16:52:47.740805Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T16:52:47.740848Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:52:47.741333Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:52:47.823704Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:47.824061Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1864:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:47.835134Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1876:2604] 2025-04-04T16:52:47.860486Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1896:2615] 2025-04-04T16:52:47.861521Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1896:2615], schemeshard id = 72075186224037897 2025-04-04T16:52:47.898207Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T16:52:47.974001Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T16:52:47.974268Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T16:52:47.974667Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T16:52:48.033912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T16:52:48.105446Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T16:52:48.106466Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T16:52:48.620831Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T16:52:48.867948Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T16:52:48.950989Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:52:50.284970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2229:3066], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:50.285109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:50.315172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T16:52:50.719336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:52:50.719555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:52:50.719859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:52:50.719990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:52:50.720110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:52:50.720242Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:52:50.720374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:52:50.720487Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:52:50.720612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:52:50.720725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:52:50.720847Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:52:50.720961Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:52:50.814940Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037903;self_id=[2:2401:2895];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:52:50.815052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:2401:2895];tablet_id=72075186224037903;process=T ... [72075186224037894] Subscribed for config changes 2025-04-04T16:55:42.436867Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:42.436968Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:42.437171Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:55:42.438801Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:42.438891Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:42.439906Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T16:55:42.465195Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:42.465409Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T16:55:42.467049Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8636:6544], server id = [2:8641:6549], tablet id = 72075186224037899, status = OK 2025-04-04T16:55:42.467572Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8636:6544], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.467890Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8637:6545], server id = [2:8642:6550], tablet id = 72075186224037900, status = OK 2025-04-04T16:55:42.467976Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8637:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.471220Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8638:6546], server id = [2:8644:6552], tablet id = 72075186224037901, status = OK 2025-04-04T16:55:42.471357Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8638:6546], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.471569Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8639:6547], server id = [2:8643:6551], tablet id = 72075186224037902, status = OK 2025-04-04T16:55:42.471630Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8639:6547], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.472951Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8640:6548], server id = [2:8647:6555], tablet id = 72075186224037903, status = OK 2025-04-04T16:55:42.473027Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8640:6548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.479046Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T16:55:42.479730Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8636:6544], server id = [2:8641:6549], tablet id = 72075186224037899 2025-04-04T16:55:42.479787Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.481523Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T16:55:42.481939Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8659:6564], server id = [2:8661:6565], 
tablet id = 72075186224037904, status = OK 2025-04-04T16:55:42.482039Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8659:6564], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.482735Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8637:6545], server id = [2:8642:6550], tablet id = 72075186224037900 2025-04-04T16:55:42.482777Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.485005Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T16:55:42.485616Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8638:6546], server id = [2:8644:6552], tablet id = 72075186224037901 2025-04-04T16:55:42.485652Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.486059Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8662:6566], server id = [2:8666:6569], tablet id = 72075186224037905, status = OK 2025-04-04T16:55:42.486162Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8662:6566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.486878Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T16:55:42.488124Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8639:6547], server id = [2:8643:6551], tablet id = 72075186224037902 2025-04-04T16:55:42.488172Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.489242Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8667:6570], server id = [2:8668:6571], tablet id = 72075186224037906, status = OK 2025-04-04T16:55:42.489350Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8667:6570], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.489961Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T16:55:42.491243Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8640:6548], server id = [2:8647:6555], tablet id = 72075186224037903 2025-04-04T16:55:42.491576Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.492337Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8669:6572], server id = [2:8674:6576], tablet id = 72075186224037907, status = OK 2025-04-04T16:55:42.492441Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8669:6572], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.494327Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8673:6575], server id = [2:8677:6579], tablet id = 72075186224037908, status = OK 2025-04-04T16:55:42.494435Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8673:6575], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:42.495377Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T16:55:42.496943Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8659:6564], server id = [2:8661:6565], tablet id = 72075186224037904 2025-04-04T16:55:42.496989Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.502293Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T16:55:42.503641Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8662:6566], server id = [2:8666:6569], tablet id = 72075186224037905 
2025-04-04T16:55:42.503698Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.504466Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T16:55:42.504805Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8667:6570], server id = [2:8668:6571], tablet id = 72075186224037906 2025-04-04T16:55:42.504846Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.506681Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T16:55:42.507310Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8669:6572], server id = [2:8674:6576], tablet id = 72075186224037907 2025-04-04T16:55:42.507352Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.507952Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T16:55:42.508027Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:42.508343Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:42.508604Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:42.508992Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:42.512444Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8673:6575], server id = [2:8677:6579], tablet id = 72075186224037908 2025-04-04T16:55:42.512497Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:42.513471Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:42.544228Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8702:6600]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:42.544621Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:55:42.544714Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8702:6600], StatRequests.size() = 1 2025-04-04T16:55:42.763696Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTllZWJlODMtOWVlY2RjMDMtNzU3MmZjODMtMzZmMzUyYTM=, TxId: 2025-04-04T16:55:42.763782Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTllZWJlODMtOWVlY2RjMDMtNzU3MmZjODMtMzZmMzUyYTM=, TxId: 2025-04-04T16:55:42.764281Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:42.777149Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8712:6606] 2025-04-04T16:55:42.777288Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8586:6512], server id = [2:8712:6606], tablet id = 72075186224037894, status = OK 2025-04-04T16:55:42.777434Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8712:6606], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-04T16:55:42.777670Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, 
pipe server id = [2:8713:6607] 2025-04-04T16:55:42.777763Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8713:6607], schemeshard id = 72075186224037897 2025-04-04T16:55:42.811978Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:42.812107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T16:55:42.897984Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8716:6610]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:42.898429Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T16:55:42.898509Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T16:55:42.902278Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T16:55:42.902382Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T16:55:42.902464Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T16:55:42.927757Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |77.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command err: 2025-04-04T16:52:23.931699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:23.933590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:23.934363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019d0/r3tmp/tmpT4Xtdn/pdisk_1.dat 2025-04-04T16:52:27.149558Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25745, node 1 2025-04-04T16:52:29.516984Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:29.517304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:29.517957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:29.521109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:29.660215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:30.147382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:30.148867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:30.222326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64970 2025-04-04T16:52:36.678503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:48.808371Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T16:52:48.872579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:48.872701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:48.909448Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:48.911505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:49.219998Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.220599Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.221053Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.221199Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.221397Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.221482Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.221554Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.221622Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.221694Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:49.423892Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:49.423996Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:49.444211Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:49.770883Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:49.816185Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:52:49.816301Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:52:49.867324Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:52:49.867783Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:52:49.868032Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T16:52:49.868095Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:52:49.868145Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T16:52:49.868192Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T16:52:49.868244Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T16:52:49.868295Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:52:49.869146Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:52:49.919433Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:49.919545Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:49.930896Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2603] 2025-04-04T16:52:49.935423Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1898:2613] 2025-04-04T16:52:49.935669Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1898:2613], schemeshard id = 72075186224037897 2025-04-04T16:52:49.946539Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T16:52:49.982579Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T16:52:49.982644Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T16:52:49.982713Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T16:52:49.999240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T16:52:50.009107Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T16:52:50.009274Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T16:52:50.259180Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T16:52:50.513761Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T16:52:50.617569Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:52:51.926651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:51.927197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:51.996136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T16:52:52.317715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:52:52.318052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:52:52.318446Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:52:52.318686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:52:52.318839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:52:52.319013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:52:52.319144Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:52:52.319263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:52:52.319397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:52:52.319519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:52:52.319626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:52:52.319758Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:52:52.379009Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:52:52.379359Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 8494Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7128:5238], server id = [2:7129:5239], tablet id = 72075186224037894 2025-04-04T16:55:40.408652Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7257:5315] 2025-04-04T16:55:40.408717Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7257:5315] 2025-04-04T16:55:40.461401Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:55:40.461496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:55:40.462108Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:55:40.463235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:55:40.463590Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-04T16:55:40.463647Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-04T16:55:40.463697Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-04T16:55:40.463737Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-04T16:55:40.463778Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743785740368991 2025-04-04T16:55:40.463817Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-04T16:55:40.463855Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-04T16:55:40.463949Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-04T16:55:40.464026Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:55:40.464142Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-04T16:55:40.464238Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-04T16:55:40.464314Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-04T16:55:40.464381Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:55:40.464537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:40.465375Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:55:40.466124Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:40.466202Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:40.466674Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:55:40.468071Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:40.468134Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:40.471270Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T16:55:40.547538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:40.547814Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T16:55:40.548526Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7304:5346], server id = [2:7305:5347], tablet id = 72075186224037899, status = OK 2025-04-04T16:55:40.548647Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7304:5346], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:40.553672Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T16:55:40.553788Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:40.553971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:40.554181Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:40.554533Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:40.557304Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7304:5346], server id = [2:7305:5347], tablet id = 72075186224037899 2025-04-04T16:55:40.557353Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:40.558003Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:40.587462Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7325:5366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:40.587793Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:55:40.587848Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7325:5366], StatRequests.size() = 1 2025-04-04T16:55:40.741751Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWQ3ZTIxODMtYmI5NTBjMC05ZjJmYWU3OC1mMTBiZDU2Zg==, TxId: 2025-04-04T16:55:40.741836Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWQ3ZTIxODMtYmI5NTBjMC05ZjJmYWU3OC1mMTBiZDU2Zg==, TxId: 2025-04-04T16:55:40.742676Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:40.756194Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7335:5372] 2025-04-04T16:55:40.756475Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7335:5372], schemeshard id = 72075186224037897 2025-04-04T16:55:40.756575Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7257:5315], server id = [2:7336:5373], tablet id = 72075186224037894, status = OK 2025-04-04T16:55:40.756642Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7336:5373] 2025-04-04T16:55:40.756759Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7336:5373], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-04-04T16:55:40.771854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:40.771919Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T16:55:40.848588Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7340:5376] 2025-04-04T16:55:40.849298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2802:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T16:55:40.849364Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2802:3221] 2025-04-04T16:55:40.849443Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T16:55:41.335561Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-04T16:55:41.335663Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T16:55:42.102917Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T16:55:42.103000Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T16:55:42.103047Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T16:55:43.404016Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T16:55:43.404213Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T16:55:43.404277Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:43.405200Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:43.427261Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:43.427804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:43.427893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:43.428540Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T16:55:43.444098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:43.444422Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-04T16:55:43.445307Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7419:5419], server id = [2:7420:5420], tablet id = 72075186224037899, status = OK 2025-04-04T16:55:43.445451Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7419:5419], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:43.447132Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T16:55:43.447278Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:43.447514Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:43.447734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:43.448134Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:43.451998Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7419:5419], server id = [2:7420:5420], tablet id = 72075186224037899 2025-04-04T16:55:43.452081Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:43.452720Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:43.477289Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGJiMDgwMmQtYjEwYTAzNTQtNmRiMjEzMWEtNGI0MDAzNGE=, TxId: 2025-04-04T16:55:43.477404Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGJiMDgwMmQtYjEwYTAzNTQtNmRiMjEzMWEtNGI0MDAzNGE=, TxId: 2025-04-04T16:55:43.478143Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:43.507694Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:43.507839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2802:3221] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] >> TColumnShardTestReadWrite::WriteOverload >> TColumnShardTestReadWrite::WriteStandaloneOverload >> TColumnShardTestReadWrite::WriteRead >> Normalizers::EmptyTablesNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] Test command err: 2025-04-04T16:52:26.408348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:26.409658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:26.422475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019cb/r3tmp/tmp4khjAt/pdisk_1.dat 2025-04-04T16:52:30.008885Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:30.736729Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.102749s 2025-04-04T16:52:30.737739Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.103486s TServer::EnableGrpc on GrpcPort 27228, node 1 2025-04-04T16:52:34.285029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:34.285086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:34.285113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:34.285484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:34.368145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:34.664619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:34.664736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:34.694285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11055 2025-04-04T16:52:38.725733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.784942Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T16:52:49.912806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:49.912927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:49.963934Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:49.971001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:50.295963Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.296614Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.297200Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.297330Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-04-04T16:52:50.297609Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.297693Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.297799Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.297869Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.297951Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:50.527014Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:50.527142Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:50.548181Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:50.736001Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:50.820800Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:52:50.820954Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:52:51.040057Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:52:51.040569Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:52:51.040775Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T16:52:51.040848Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:52:51.040901Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T16:52:51.040953Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T16:52:51.041016Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T16:52:51.041072Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:52:51.041928Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:52:51.091044Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:51.091143Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:51.109861Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2603] 2025-04-04T16:52:51.117983Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1898:2613] 2025-04-04T16:52:51.118204Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1898:2613], schemeshard id = 72075186224037897 2025-04-04T16:52:51.137113Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T16:52:51.176907Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T16:52:51.176965Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T16:52:51.177063Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database/.metadata/_statistics 2025-04-04T16:52:51.212739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T16:52:51.237758Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T16:52:51.237927Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T16:52:51.757946Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T16:52:52.215830Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T16:52:52.327162Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:52:53.425344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:53.425536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:53.447132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T16:52:53.586854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:52:53.587153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:52:53.587513Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:52:53.587658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:52:53.587787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:52:53.587940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:52:53.588064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:52:53.588209Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:52:53.588402Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:52:53.588541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:52:53.588695Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:52:53.588839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2327:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:52:53.620585Z node 2 :TX_COL ... 
ConnectSchemeShard, pipe server id = [2:7243:5308], schemeshard id = 72075186224037897 2025-04-04T16:55:38.556470Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7199:5281], server id = [2:7244:5309], tablet id = 72075186224037894, status = OK 2025-04-04T16:55:38.556527Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7244:5309] 2025-04-04T16:55:38.556631Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7244:5309], node id = 2, have schemeshards count = 1, need schemeshards count = 0 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to 2025-04-04T16:55:38.651312Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7246:5310] 2025-04-04T16:55:38.652306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2805:3224] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T16:55:38.652390Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2805:3224] 2025-04-04T16:55:38.652470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T16:55:39.947099Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T16:55:39.947211Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T16:55:39.947262Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T16:55:39.947327Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T16:55:39.947415Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:39.948347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:39.971867Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:39.972378Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:39.972525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:39.973566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T16:55:40.000251Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:40.000515Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T16:55:40.001175Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7294:5337], server id = [2:7295:5338], tablet id = 72075186224037899, status = OK 2025-04-04T16:55:40.001320Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7294:5337], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:40.019374Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T16:55:40.019569Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:40.019822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:40.020075Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:40.020476Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:40.024157Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7294:5337], server id = [2:7295:5338], tablet id = 72075186224037899 2025-04-04T16:55:40.024232Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:40.025041Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:40.090289Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7315:5357]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:40.091117Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:55:40.091190Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7315:5357], StatRequests.size() = 1 2025-04-04T16:55:40.289602Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDQ4ODgwNjctMzI3NTRlZGYtN2U5NjQzOTgtMzYzZDE5M2I=, TxId: 2025-04-04T16:55:40.289705Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDQ4ODgwNjctMzI3NTRlZGYtN2U5NjQzOTgtMzYzZDE5M2I=, TxId: 2025-04-04T16:55:40.290980Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:40.305763Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:40.305829Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T16:55:40.884438Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T16:55:40.884551Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T16:55:41.585171Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T16:55:41.585261Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-04-04T16:55:41.585857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:41.600191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:41.600661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:41.600735Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T16:55:41.627039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:42.905440Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T16:55:42.905523Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T16:55:42.905563Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T16:55:42.905829Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T16:55:42.906944Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T16:55:42.907084Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T16:55:42.942603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T16:55:44.142342Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T16:55:44.142459Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T16:55:44.142515Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T16:55:45.319155Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T16:55:45.319444Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T16:55:45.336038Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T16:55:45.336213Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T16:55:45.336261Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:45.337091Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:45.359648Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:45.360069Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:45.360146Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:45.360568Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T16:55:45.392802Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:45.393222Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T16:55:45.393958Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7480:5449], server id = [2:7481:5450], tablet id = 72075186224037899, status = OK 2025-04-04T16:55:45.394080Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7480:5449], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:45.395612Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T16:55:45.395713Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:45.395868Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:45.396050Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:45.396340Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:45.398779Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7480:5449], server id = [2:7481:5450], tablet id = 72075186224037899 2025-04-04T16:55:45.398841Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:45.399548Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:45.423562Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDZlMjA1NGYtY2Y0NmY4YjEtZTU4YzRmNjItNzAyMWY3ZTA=, TxId: 2025-04-04T16:55:45.423643Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDZlMjA1NGYtY2Y0NmY4YjEtZTU4YzRmNjItNzAyMWY3ZTA=, TxId: 2025-04-04T16:55:45.424230Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:45.443725Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:45.443825Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2805:3224] >> EvWrite::WriteInTransaction >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |77.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.3%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-04-04T16:55:43.753731Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:43.851389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:43.877282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:43.877647Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:43.885668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-04-04T16:55:43.885968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:43.886168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:43.886381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:43.886521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:43.886669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:43.886788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:43.886883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:43.887027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:43.887171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:43.887317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.887452Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:43.887559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:43.918610Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:43.919509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-04-04T16:55:43.919647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-04T16:55:43.920183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-04-04T16:55:43.920291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-04T16:55:43.920381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-04T16:55:43.920506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=67; 2025-04-04T16:55:43.920701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:43.920778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-04T16:55:43.920824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-04T16:55:43.920979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.921089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:43.921149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:43.921180Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-04T16:55:43.921282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:43.921340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:43.921387Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:43.921437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:43.921618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.921684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:43.921727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:43.921756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:43.921867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:43.921927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:43.921966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:43.922006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:43.922104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:43.922174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:43.922206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:43.922398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:43.922458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:43.922513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:43.922971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-04T16:55:43.923081Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-04T16:55:43.923177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-04T16:55:43.923254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-04-04T16:55:43.923413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:43.923462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:43.923497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:43.923716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:43.923768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.923798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.923976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpda ... 
ogs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T16:55:47.275334Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T16:55:47.275408Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:47.275476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T16:55:47.275587Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T16:55:47.275654Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:47.275697Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:47.275800Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T16:55:47.289848Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:47.290203Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T16:55:47.290331Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T16:55:47.290366Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
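The TX_COLUMNSHARD entries above all share one payload shape: a semicolon-delimited run of key=value fields after the log priority (for example "tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0;"). A minimal Python sketch for pulling fields out of such a line; the format is inferred from this log, not a documented YDB contract:

    # Sketch: parse the semicolon-delimited key=value payload used by
    # TX_COLUMNSHARD entries in this log. Format is an assumption
    # inferred from the lines above.
    def parse_columnshard_fields(line: str) -> dict:
        # The payload starts after the "DEBUG:"/"WARN:" priority marker.
        _, _, payload = line.partition(": ")
        fields = {}
        for part in payload.split(";"):
            key, sep, value = part.partition("=")
            if sep:  # skip free-text fragments that carry no '='
                fields[key.strip()] = value.strip()
        return fields

    entry = ("DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;"
             "event=EnqueueBackgroundActivities;periodic=0;")
    print(parse_columnshard_fields(entry)["event"])
    # -> EnqueueBackgroundActivities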
2025-04-04T16:55:47.290399Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T16:55:47.290479Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T16:55:47.290561Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:47.290650Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T16:55:47.290724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T16:55:47.290785Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:47.290835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:47.290924Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T16:55:47.446920Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-04-04T16:55:47.447125Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-04T16:55:47.447389Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-04T16:55:47.447594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-04T16:55:47.447905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1,2,3"},"fetch":"1,2,3"}]}; 2025-04-04T16:55:47.448064Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 
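The TTxScan entries that follow include a "program_parsed;result={...}" JSON describing the scan program. A short sketch decoding that exact JSON (shape taken from the log line itself, not from a documented schema) to list each processor and the column ids it touches:

    import json

    # Sketch: decode the scan-program JSON from the "program_parsed"
    # entry below; the literal is copied from this log.
    program = json.loads(
        '{"processors":[{"processor":{"internal":{},"type":"Projection",'
        '"input":"1,2,3"},"fetch":"1,2,3"}]}'
    )
    for step in program["processors"]:
        proc = step["processor"]
        print(proc["type"], "columns:", proc["input"].split(","))
    # -> Projection columns: ['1', '2', '3']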
2025-04-04T16:55:47.448812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:401:2414];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:462:2467];trace_detailed=; 2025-04-04T16:55:47.449624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:85;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-04T16:55:47.449859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-04T16:55:47.450264Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:47.450433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:47.450599Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:47.450682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:462:2467] finished for tablet 9437184 2025-04-04T16:55:47.451166Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:460:2466];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743785747448685,"name":"_full_task","f":1743785747448685,"d_finished":0,"c":0,"l":1743785747450750,"d":2065},"events":[{"name":"bootstrap","f":1743785747448965,"d_finished":1073,"c":1,"l":1743785747450038,"d":1073},{"a":1743785747450218,"name":"ack","f":1743785747450218,"d_finished":0,"c":0,"l":1743785747450750,"d":532},{"a":1743785747450195,"name":"processing","f":1743785747450195,"d_finished":0,"c":0,"l":1743785747450750,"d":555},{"name":"ProduceResults","f":1743785747450013,"d_finished":374,"c":2,"l":1743785747450661,"d":374},{"a":1743785747450665,"name":"Finish","f":1743785747450665,"d_finished":0,"c":0,"l":1743785747450750,"d":85}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 
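The scan_finish entry above embeds a "stats={...}" JSON with per-phase timings. Field meanings (f=first, l=last, d/d_finished=duration in microseconds, c=count) are inferred from the values in this log rather than documented output; under that assumption, a small sketch summarizing where the scan spent its time:

    import json

    # Sketch: summarize per-phase timings from a scan_finish stats JSON.
    # Literal below is a shortened sample with the same shape as the
    # stats in the entry above; durations assumed to be microseconds.
    stats_json = ('{"full":{"name":"_full_task","d":2065},'
                  '"events":[{"name":"bootstrap","d_finished":1073},'
                  '{"name":"ProduceResults","d_finished":374}]}')
    stats = json.loads(stats_json)
    total_us = stats["full"]["d"]
    for ev in stats["events"]:
        share = 100.0 * ev["d_finished"] / total_us if total_us else 0.0
        print(f'{ev["name"]:>15}: {ev["d_finished"]:>6} us ({share:.1f}%)')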
2025-04-04T16:55:47.451265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:460:2466];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:47.451606Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:460:2466];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743785747448685,"name":"_full_task","f":1743785747448685,"d_finished":0,"c":0,"l":1743785747451314,"d":2629},"events":[{"name":"bootstrap","f":1743785747448965,"d_finished":1073,"c":1,"l":1743785747450038,"d":1073},{"a":1743785747450218,"name":"ack","f":1743785747450218,"d_finished":0,"c":0,"l":1743785747451314,"d":1096},{"a":1743785747450195,"name":"processing","f":1743785747450195,"d_finished":0,"c":0,"l":1743785747451314,"d":1119},{"name":"ProduceResults","f":1743785747450013,"d_finished":374,"c":2,"l":1743785747450661,"d":374},{"a":1743785747450665,"name":"Finish","f":1743785747450665,"d_finished":0,"c":0,"l":1743785747451314,"d":649}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:47.451684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:47.448018Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T16:55:47.451754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:47.451853Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:462:2467];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TColumnShardTestReadWrite::Write [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 13991, MsgBus: 65396 2025-04-04T16:55:26.633385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502664953599711:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.633716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d98/r3tmp/tmpAtihoq/pdisk_1.dat 2025-04-04T16:55:27.131213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:27.137922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.137997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.140633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13991, node 1 2025-04-04T16:55:27.215149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.215180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.215201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.215354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65396 TClient is connected to server localhost:65396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:27.821441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:27.844888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:30.015375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682133469569:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.015530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.304628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.476567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682133469690:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.476678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.476944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682133469695:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.482598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.498382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502682133469697:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:30.562215Z node 1 :TX_PROXY ERROR: Actor# [1:7489502682133469737:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:31.165558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.620899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:31.638385Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502664953599711:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.638471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:32.196754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:32.776287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.316689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.778334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:33.823854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.873343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } ... 
_id=[2:7489502715337643984:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:37.298418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d98/r3tmp/tmpPb7hHH/pdisk_1.dat 2025-04-04T16:55:37.437640Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30401, node 2 2025-04-04T16:55:37.463491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:37.463578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:37.465649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:37.549477Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:37.549503Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:37.549513Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:37.549631Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16052 TClient is connected to server localhost:16052 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:38.075745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:38.082058Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:55:40.819975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502728222546521:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:40.820088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:40.838363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:40.919236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502728222546642:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:40.919296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:40.919442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502728222546647:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:40.922480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:40.936795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489502728222546649:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:55:41.025165Z node 2 :TX_PROXY ERROR: Actor# [2:7489502732517513985:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:41.561022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:42.166960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:42.298917Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502715337643984:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:42.298997Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:42.797328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:43.391371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:44.046408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-04T16:55:44.632687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:44.680759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:47.110040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715710:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> Normalizers::SchemaVersionsNormalizer >> EvWrite::WriteInTransaction [GOOD] >> Normalizers::PortionsNormalizer >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD] Test command err: 2025-04-04T16:55:48.647223Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:48.740250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:48.760897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:48.761107Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:48.769439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:48.769615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:48.769835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:48.769928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:48.770022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:48.770097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:48.770191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:48.770315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:48.770476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:48.770666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:48.770783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:48.770891Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:48.803956Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:48.805313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:48.805410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:48.805606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:48.805776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:48.805854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:48.805900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:48.806047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:48.806139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:48.806184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:48.806220Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:48.806447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:48.806524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:48.806566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:48.806599Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:48.806730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:48.806806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:48.806879Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:48.806937Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:48.807029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:48.807071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:48.807104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:48.807163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:48.807204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:48.807235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:48.807719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-04T16:55:48.807853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=70; 2025-04-04T16:55:48.807936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-04T16:55:48.808024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-04T16:55:48.808219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:48.808280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:48.808314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:48.808526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:48.808583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:48.808630Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:48.808787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:48.808830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:48.808860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:48.809050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:48.809091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:48.809125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:48.809258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:48.809330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:48.809388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:49.644755Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T16:55:49.644786Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T16:55:49.644841Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-04-04T16:55:49.644883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=2048;merger=0;interval_id=1; 2025-04-04T16:55:49.644934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T16:55:49.645013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.645042Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=2048;finished=1; 2025-04-04T16:55:49.645068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:49.645254Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:49.645394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.645443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T16:55:49.645539Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-04-04T16:55:49.645605Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-04-04T16:55:49.645713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:283:2301];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-04T16:55:49.645876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.645965Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.646092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.646412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:49.646529Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.646643Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.646685Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:287:2305] finished for tablet 9437184 2025-04-04T16:55:49.647158Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:283:2301];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.032},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.034}],"full":{"a":1743785749612447,"name":"_full_task","f":1743785749612447,"d_finished":0,"c":0,"l":1743785749646776,"d":34329},"events":[{"name":"bootstrap","f":1743785749613431,"d_finished":2748,"c":1,"l":1743785749616179,"d":2748},{"a":1743785749646387,"name":"ack","f":1743785749645223,"d_finished":894,"c":1,"l":1743785749646117,"d":1283},{"a":1743785749646365,"name":"processing","f":1743785749618153,"d_finished":15015,"c":9,"l":1743785749646119,"d":15426},{"name":"ProduceResults","f":1743785749615032,"d_finished":2383,"c":12,"l":1743785749646669,"d":2383},{"a":1743785749646671,"name":"Finish","f":1743785749646671,"d_finished":0,"c":0,"l":1743785749646776,"d":105},{"name":"task_result","f":1743785749618176,"d_finished":13961,"c":8,"l":1743785749645100,"d":13961}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.647232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:283:2301];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:49.647626Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:283:2301];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.032},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.034}],"full":{"a":1743785749612447,"name":"_full_task","f":1743785749612447,"d_finished":0,"c":0,"l":1743785749647284,"d":34837},"events":[{"name":"bootstrap","f":1743785749613431,"d_finished":2748,"c":1,"l":1743785749616179,"d":2748},{"a":1743785749646387,"name":"ack","f":1743785749645223,"d_finished":894,"c":1,"l":1743785749646117,"d":1791},{"a":1743785749646365,"name":"processing","f":1743785749618153,"d_finished":15015,"c":9,"l":1743785749646119,"d":15934},{"name":"ProduceResults","f":1743785749615032,"d_finished":2383,"c":12,"l":1743785749646669,"d":2383},{"a":1743785749646671,"name":"Finish","f":1743785749646671,"d_finished":0,"c":0,"l":1743785749647284,"d":613},{"name":"task_result","f":1743785749618176,"d_finished":13961,"c":8,"l":1743785749645100,"d":13961}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T16:55:49.647702Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:49.611842Z;index_granules=0;index_portions=1;index_batches=82;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=238056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=238056;selected_rows=0; 2025-04-04T16:55:49.647747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:49.648051Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:287:2305];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-04-04T16:55:44.152421Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:44.258258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:44.291999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:44.292297Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:44.303302Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:44.303565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:44.303863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:44.304020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:44.304137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:44.304258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:44.304374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:44.304511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:44.304657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:44.304795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.304915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:44.305023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:44.337482Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:44.337755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:44.337828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:44.338034Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:44.338221Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:44.338327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:44.338391Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:44.338486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:44.338570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:44.338638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:44.338673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:44.338878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:44.338955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:44.338998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:44.339039Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:44.339128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:44.339204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:44.339264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:44.339295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:44.339367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:44.339406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:44.339447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
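The stats={...} blob printed by the scan actor at scan_finish (see the Scan [1:287:2305] records earlier in this output) is plain JSON: "full".d appears to be the whole-scan wall time, and each entry under "events" carries a stage name, a busy time d_finished, and a call count c, all seemingly in microseconds (1743785749612447 reads as a plausible epoch-microseconds timestamp). A rough decoder under those assumptions:

import json
import re
import sys

# The blob runs from 'stats={' to the first '};iterator=' after it.
STATS = re.compile(r"stats=(\{.*?\});iterator=", re.S)

def scan_stage_times(log_text):
    for m in STATS.finditer(log_text):
        blob = json.loads(m.group(1))
        total_us = blob["full"]["d"]  # whole-scan duration, assumed microseconds
        for ev in blob["events"]:
            yield ev["name"], ev["d_finished"], ev.get("c", 0), total_us

if __name__ == "__main__":
    for name, busy_us, calls, total_us in scan_stage_times(sys.stdin.read()):
        share = 100.0 * busy_us / total_us if total_us else 0.0
        print(f"{name:<15} {busy_us:>8} us over {calls:>2} calls ({share:4.1f}% of scan)")

On the first blob above this would report, for example, processing at 15015 us across 9 calls out of a 34329 us scan.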
2025-04-04T16:55:44.339495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:44.339534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:44.339589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:44.340061Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2025-04-04T16:55:44.340178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T16:55:44.340279Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2025-04-04T16:55:44.340374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T16:55:44.340574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:44.340663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:44.340705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:44.340897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:44.340950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.340994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.341182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:44.341229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:44.341260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:44.341466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:44.341529Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:44.341572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:44.341700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:44.341742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:44.341800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-04T16:55:49.450815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |77.3%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> TStorageTenantTest::Empty [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> 
TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-04-04T16:55:39.866500Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:39.992750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:40.024458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:40.024854Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:40.037836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-04T16:55:40.038219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:40.044353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:40.044759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:40.044949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:40.045106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:40.045247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:40.045364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:40.045550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:40.045725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:40.045880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.046036Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:40.046158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:40.099493Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:40.100340Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-04T16:55:40.100429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-04T16:55:40.100806Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-04-04T16:55:40.101001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:40.101103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-04T16:55:40.101279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-04T16:55:40.101480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:40.101602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:40.101658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:40.101705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-04T16:55:40.101792Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:40.101851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:40.101894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:40.101934Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:40.102137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 
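Interleaved with the tablet logs are ">> Suite::Test [STATUS]" progress markers; a trailing ">> Suite::Test" without a status marks a test that has only started. A small tally of those markers, assuming this marker shape is stable across the log:

import re
import sys
from collections import Counter

# Marker shape assumed from lines like ">> TColumnShardTestReadWrite::Write [GOOD]".
MARK = re.compile(r">> ([A-Za-z_][\w:]*) \[([A-Z]+)\]")

def tally(log_text):
    # Count occurrences of each status (GOOD, FAIL, ...) across all markers.
    return Counter(status for _, status in MARK.findall(log_text))

if __name__ == "__main__":
    for status, n in tally(sys.stdin.read()).most_common():
        print(f"{status}: {n}")

Run over a full log it gives a quick per-status breakdown without waiting for the final report file.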
2025-04-04T16:55:40.102214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:40.102291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:40.102328Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:40.102438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:40.102502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:40.102538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:40.102567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:40.102678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:40.102735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:40.102788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:40.102878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:40.102933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:40.102961Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:40.103427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-04T16:55:40.103551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=72; 2025-04-04T16:55:40.103639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-04T16:55:40.103725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-04T16:55:40.103903Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:40.103961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:40.103996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:40.104219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:40.104272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.104319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.104494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:40.104538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:40.104575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;eve ... 
sk_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:51.248244Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T16:55:51.248321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T16:55:51.248380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-04T16:55:51.248453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-04T16:55:51.248512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T16:55:51.248614Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.248656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-04T16:55:51.248726Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:51.248960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:51.249146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.249197Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T16:55:51.249335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-04T16:55:51.249413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-04T16:55:51.249546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:492:2495];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-04T16:55:51.249695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.249815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.249946Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.250999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:51.251162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.251296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.251343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:494:2496] finished for tablet 9437184 2025-04-04T16:55:51.251896Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:492:2495];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.343},{"events":["f_ack"],"t":0.344},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.346}],"full":{"a":1743785750904895,"name":"_full_task","f":1743785750904895,"d_finished":0,"c":0,"l":1743785751251407,"d":346512},"events":[{"name":"bootstrap","f":1743785750905107,"d_finished":2678,"c":1,"l":1743785750907785,"d":2678},{"a":1743785751250963,"name":"ack","f":1743785751248930,"d_finished":1045,"c":1,"l":1743785751249975,"d":1489},{"a":1743785751250942,"name":"processing","f":1743785750907900,"d_finished":224323,"c":9,"l":1743785751249979,"d":224788},{"name":"ProduceResults","f":1743785750906503,"d_finished":3018,"c":12,"l":1743785751251325,"d":3018},{"a":1743785751251327,"name":"Finish","f":1743785751251327,"d_finished":0,"c":0,"l":1743785751251407,"d":80},{"name":"task_result","f":1743785750907921,"d_finished":223072,"c":8,"l":1743785751248781,"d":223072}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.251994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:492:2495];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:51.252505Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:492:2495];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.343},{"events":["f_ack"],"t":0.344},{"events":["l_ProduceResults","f_Finish"],"t":0.346},{"events":["l_ack","l_processing","l_Finish"],"t":0.347}],"full":{"a":1743785750904895,"name":"_full_task","f":1743785750904895,"d_finished":0,"c":0,"l":1743785751252047,"d":347152},"events":[{"name":"bootstrap","f":1743785750905107,"d_finished":2678,"c":1,"l":1743785750907785,"d":2678},{"a":1743785751250963,"name":"ack","f":1743785751248930,"d_finished":1045,"c":1,"l":1743785751249975,"d":2129},{"a":1743785751250942,"name":"processing","f":1743785750907900,"d_finished":224323,"c":9,"l":1743785751249979,"d":225428},{"name":"ProduceResults","f":1743785750906503,"d_finished":3018,"c":12,"l":1743785751251325,"d":3018},{"a":1743785751251327,"name":"Finish","f":1743785751251327,"d_finished":0,"c":0,"l":1743785751252047,"d":720},{"name":"task_result","f":1743785750907921,"d_finished":223072,"c":8,"l":1743785751248781,"d":223072}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:51.252596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:50.904294Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-04-04T16:55:51.252650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:51.253039Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:494:2496];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |77.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> 
YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-04-04T16:55:00.006658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502551993391845:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:00.006707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c1a/r3tmp/tmpxcHHXl/pdisk_1.dat 2025-04-04T16:55:00.987763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:00.987858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:00.991832Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:00.994646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65084, node 1 2025-04-04T16:55:01.362577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:01.362617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:01.362625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:01.362740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:01.981954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:13038 2025-04-04T16:55:04.990362Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502551993391845:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:04.990448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:05.210442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502577763196498:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:05.210582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:05.460707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.140811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164012:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.140921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164047:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164048:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164049:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164050:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164051:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164052:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164053:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.148997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164046:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.156066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164072:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.156154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164074:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.156347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.156613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:55:06.159213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164130:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.159280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502582058164132:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.159320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.196750Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164076:2789] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:55:06.196931Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164082:2795] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:55:06.197032Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164080:2793] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:55:06.197129Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164077:2790] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:55:06.197375Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164078:2791] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:55:06.197481Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164079:2792] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:55:06.197567Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164081:2794] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:55:06.209135Z node 1 :TX_PROXY ERROR: Actor# [1:7489502582058164133:2818] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], t ... n/3?node_id=1&id=NmI0MDFiOTMtMzJiNzU3ZGYtNjA2ZmY0YWQtMzllZWVjZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.319977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731285. Ctx: { TraceId: 01jr0vntsy56g26cwc4exr2302, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk4OWQyNTUtNmQxOGM2NzktN2IwNTc2NGQtMmE2NWU1ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.320482Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731286. 
Ctx: { TraceId: 01jr0vntsx76xm3qfcjgc0t741, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODcwN2RkZS1jYzY3ZjU3Mi00YTRjNWIwNC04ZjZmYWUxOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.321161Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731287. Ctx: { TraceId: 01jr0vntt27bteam7e92f28hyr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3ZmNlOTYtYmE1MDUxNjEtM2M2NzJhZDEtZTMwYTBmOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.324593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731288. Ctx: { TraceId: 01jr0vntt4d8695mcxg5c4q21n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE1NmMwYTctMjIwODA1ZmEtZmY5ZTcxYTctZTdhZTYzOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.325385Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731289. Ctx: { TraceId: 01jr0vntt626wfbm4jdr61v9q0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJhN2Q4OTYtZGVlMzI0NTktYTkxMTgwNjktNDA5ODRjODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.325458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731290. Ctx: { TraceId: 01jr0vntt62jvqshn34h6ezwxt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZhZWM2YTMtZmZjZjgwODctNTU2MTlkMTgtM2ZmNDMwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.330327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731291. Ctx: { TraceId: 01jr0vnttc9sn81staptg8mr9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVlODE5ODItMWM4ZTJkYzEtOWQyMDliZTYtNGRjZGY5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.330376Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731292. Ctx: { TraceId: 01jr0vnttfbn5sdrfwp3gawrzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFjNTQ0ZWMtYzU0OTYyY2QtYTVlNzk5MzUtNGUzZGJiOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.333853Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731294. Ctx: { TraceId: 01jr0vntth2mpqtjx8h2pjmdv4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk4OWQyNTUtNmQxOGM2NzktN2IwNTc2NGQtMmE2NWU1ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.334493Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731293. Ctx: { TraceId: 01jr0vntth82d0b7s19cs7tcjz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI0OGIwZGMtNDU3MGQ0ODgtYWUzOTk1YjgtM2NmYjdjZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.335198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731295. Ctx: { TraceId: 01jr0vntth9mcxzy3dgp1neh3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI0MDFiOTMtMzJiNzU3ZGYtNjA2ZmY0YWQtMzllZWVjZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.336142Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731296. 
Ctx: { TraceId: 01jr0vnttn282nw9banyb2b3gm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE1NmMwYTctMjIwODA1ZmEtZmY5ZTcxYTctZTdhZTYzOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.340229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731297. Ctx: { TraceId: 01jr0vnttn6x1zhjfq17qe0sd6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJhN2Q4OTYtZGVlMzI0NTktYTkxMTgwNjktNDA5ODRjODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.340229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731298. Ctx: { TraceId: 01jr0vnttne73fw7ccyqscymqs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3ZmNlOTYtYmE1MDUxNjEtM2M2NzJhZDEtZTMwYTBmOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.343481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731299. Ctx: { TraceId: 01jr0vnttvb8sktxd9tg1w8gnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODcwN2RkZS1jYzY3ZjU3Mi00YTRjNWIwNC04ZjZmYWUxOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.344088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731300. Ctx: { TraceId: 01jr0vnttvf3c73kyd17w429jb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZhZWM2YTMtZmZjZjgwODctNTU2MTlkMTgtM2ZmNDMwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.347507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731302. Ctx: { TraceId: 01jr0vnttz2xxyd5ehfcfjqen9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk4OWQyNTUtNmQxOGM2NzktN2IwNTc2NGQtMmE2NWU1ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.347531Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731301. Ctx: { TraceId: 01jr0vnttzfnjh7g9bbnr0nbwd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFjNTQ0ZWMtYzU0OTYyY2QtYTVlNzk5MzUtNGUzZGJiOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.348687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731303. Ctx: { TraceId: 01jr0vnttz7dkyj56drhcmkpar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVlODE5ODItMWM4ZTJkYzEtOWQyMDliZTYtNGRjZGY5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.352539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731304. Ctx: { TraceId: 01jr0vntv46xpapfspdjr7qh9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE1NmMwYTctMjIwODA1ZmEtZmY5ZTcxYTctZTdhZTYzOWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-04T16:55:49.353260Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731305. Ctx: { TraceId: 01jr0vntv4ct750696ry554fec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmI0MDFiOTMtMzJiNzU3ZGYtNjA2ZmY0YWQtMzllZWVjZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.353364Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731306. 
Ctx: { TraceId: 01jr0vntv62fr9syekdzsgdwey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzI0OGIwZGMtNDU3MGQ0ODgtYWUzOTk1YjgtM2NmYjdjZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.353824Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731307. Ctx: { TraceId: 01jr0vntv5183pt6gkw3vvjxkm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3ZmNlOTYtYmE1MDUxNjEtM2M2NzJhZDEtZTMwYTBmOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.354361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731308. Ctx: { TraceId: 01jr0vntv4er0r7qe2q1y3agph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJhN2Q4OTYtZGVlMzI0NTktYTkxMTgwNjktNDA5ODRjODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785705676 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T16:55:49.359232Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731309. Ctx: { TraceId: 01jr0vntvb4jc2t7w8dh47n81w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODcwN2RkZS1jYzY3ZjU3Mi00YTRjNWIwNC04ZjZmYWUxOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.363960Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731310. Ctx: { TraceId: 01jr0vntve61actdynbxjw6ee6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZhZWM2YTMtZmZjZjgwODctNTU2MTlkMTgtM2ZmNDMwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.364194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731311. Ctx: { TraceId: 01jr0vntve6711wnwjvwzk0ndv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk4OWQyNTUtNmQxOGM2NzktN2IwNTc2NGQtMmE2NWU1ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.364915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731312. Ctx: { TraceId: 01jr0vntvharhtww845sysyva2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVlODE5ODItMWM4ZTJkYzEtOWQyMDliZTYtNGRjZGY5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:49.370061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731313. Ctx: { TraceId: 01jr0vntvkbg0kgpyy3fdx6rfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFjNTQ0ZWMtYzU0OTYyY2QtYTVlNzk5MzUtNGUzZGJiOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785705676 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 4 shards >> TColumnShardTestReadWrite::WriteRead [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |77.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 19313, MsgBus: 26796 2025-04-04T16:55:26.743587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502666610442393:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.743643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e1f/r3tmp/tmp2bQD8p/pdisk_1.dat 2025-04-04T16:55:27.152453Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19313, node 1 2025-04-04T16:55:27.183707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.183828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.191104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:27.294961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.294984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.295009Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.295131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26796 TClient is connected to server localhost:26796 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:27.865526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:30.332408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683790312243:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.332571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.669266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.803926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683790312359:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.804006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.805911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502683790312364:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.870548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.886224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502683790312366:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:30.983944Z node 1 :TX_PROXY ERROR: Actor# [1:7489502683790312444:2414] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:31.540324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.745732Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502666610442393:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.756853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:32.026145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:32.585772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.319932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.889646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T16:55:34.331595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:34.369997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.858232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.013924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.015946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-04T16:55:37.017426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } ... tion part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:42.383328Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:55:45.312054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502750022871113:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:45.312154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:45.328966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:45.411741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502750022871235:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:45.411819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:45.412073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502750022871240:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:45.415839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:45.427245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489502750022871242:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:55:45.524819Z node 2 :TX_PROXY ERROR: Actor# [2:7489502750022871282:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:46.189407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:46.495237Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502732843001439:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:46.495299Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:46.836309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:47.589876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:48.236806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-04T16:55:48.766184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-04T16:55:49.345009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:49.390335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:51.762732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715713:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> Normalizers::SchemaVersionsNormalizer [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-04-04T16:55:47.876197Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:48.037764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:48.060197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:48.060415Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:48.068627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:48.068858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:48.069113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:48.069214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:48.069298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:48.069395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:48.069508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:48.069636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:48.069758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:48.069875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:48.069985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2025-04-04T16:55:48.070085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:48.099630Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:48.100956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:48.101043Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:48.101241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:48.101443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:48.101520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:48.101595Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:48.101729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:48.101796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:48.101862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:48.101893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:48.102088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:48.102157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:48.102211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:48.102267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:48.102374Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:48.102444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:48.102500Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:48.102533Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:48.102637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:48.102691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:48.102743Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:48.102814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:48.102876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:48.102925Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:48.103364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-04T16:55:48.103478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-04T16:55:48.103597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-04T16:55:48.103697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2025-04-04T16:55:48.103892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:48.103953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:48.103989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:48.104210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:48.104260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:48.104303Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:48.104473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:48.104526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:48.104567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:48.104784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:48.104825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:48.104857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:48.104996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:48.105063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:48.105109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
d: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.522822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T16:55:53.523016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T16:55:53.523141Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T16:55:53.523375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-04T16:55:53.523650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.523857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.524087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.524422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:53.524650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.524839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.524917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:431:2446] finished for tablet 9437184 2025-04-04T16:55:53.525580Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:430:2445];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.018},{"events":["f_ack"],"t":0.019},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.021}],"full":{"a":1743785753503361,"name":"_full_task","f":1743785753503361,"d_finished":0,"c":0,"l":1743785753524992,"d":21631},"events":[{"name":"bootstrap","f":1743785753503645,"d_finished":4239,"c":1,"l":1743785753507884,"d":4239},{"a":1743785753524393,"name":"ack","f":1743785753522387,"d_finished":1747,"c":1,"l":1743785753524134,"d":2346},{"a":1743785753524367,"name":"processing","f":1743785753507997,"d_finished":8605,"c":10,"l":1743785753524138,"d":9230},{"name":"ProduceResults","f":1743785753506062,"d_finished":4998,"c":13,"l":1743785753524873,"d":4998},{"a":1743785753524901,"name":"Finish","f":1743785753524901,"d_finished":0,"c":0,"l":1743785753524992,"d":91},{"name":"task_result","f":1743785753508025,"d_finished":6647,"c":9,"l":1743785753522097,"d":6647}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.525710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:53.526397Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:430:2445];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.018},{"events":["f_ack"],"t":0.019},{"events":["l_ProduceResults","f_Finish"],"t":0.021},{"events":["l_ack","l_processing","l_Finish"],"t":0.022}],"full":{"a":1743785753503361,"name":"_full_task","f":1743785753503361,"d_finished":0,"c":0,"l":1743785753525770,"d":22409},"events":[{"name":"bootstrap","f":1743785753503645,"d_finished":4239,"c":1,"l":1743785753507884,"d":4239},{"a":1743785753524393,"name":"ack","f":1743785753522387,"d_finished":1747,"c":1,"l":1743785753524134,"d":3124},{"a":1743785753524367,"name":"processing","f":1743785753507997,"d_finished":8605,"c":10,"l":1743785753524138,"d":10008},{"name":"ProduceResults","f":1743785753506062,"d_finished":4998,"c":13,"l":1743785753524873,"d":4998},{"a":1743785753524901,"name":"Finish","f":1743785753524901,"d_finished":0,"c":0,"l":1743785753525770,"d":869},{"name":"task_result","f":1743785753508025,"d_finished":6647,"c":9,"l":1743785753522097,"d":6647}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T16:55:53.526509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:53.502594Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T16:55:53.526584Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:53.527116Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> Normalizers::PortionsNormalizer [GOOD] >> KqpRanges::ValidatePredicates [GOOD] >> KqpRanges::ValidatePredicatesDataQuery |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TS] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 19829, MsgBus: 17227 2025-04-04T16:55:26.741533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502664843068569:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.742578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e2c/r3tmp/tmpgCEp1d/pdisk_1.dat 2025-04-04T16:55:27.279333Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:27.284066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.284179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.289916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19829, node 1 2025-04-04T16:55:27.428968Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.428990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.428998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.429145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17227 TClient is connected to server localhost:17227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:28.082385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:30.543389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682022938416:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.543510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.779331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.908316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682022938539:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.908451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.909022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682022938544:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.913280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.922583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502682022938546:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:30.992736Z node 1 :TX_PROXY ERROR: Actor# [1:7489502682022938586:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:31.613967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.742448Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502664843068569:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.742563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:32.120109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:32.636738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.254126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.842481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T16:55:34.326922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:34.364538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:38.460350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710721:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 15640, MsgBus: 19340 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e2c/r3tmp/tmpQTpKaJ/pdisk_1.dat 2025-04-04T16:55:40.672490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:40.727149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:40.727292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:40.729153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:40.731272Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15640, node 2 2025-04-04T16:55:40.846010Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:40.846045Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:40.846060Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:40.846231Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19340 TClient is connected to server localhost:19340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:41.350057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:41.371109Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:44.306213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502744654869371:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:44.306362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:44.330210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:44.385333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502744654869490:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:44.385464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:44.385569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502744654869495:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:44.388889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:44.399618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489502744654869497:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:44.462546Z node 2 :TX_PROXY ERROR: Actor# [2:7489502744654869537:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:45.027298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:45.634094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:46.352215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:46.966103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:47.558229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T16:55:48.126014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:48.172625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:51.885285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710719:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-04-04T16:55:50.235273Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:50.349923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:50.396953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:50.397316Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:50.410373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-04-04T16:55:50.410691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.410904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:50.411100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:50.411257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:50.411379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:50.411493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:50.411586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:50.411728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:50.411855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:50.412001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:50.412131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2025-04-04T16:55:50.412232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:50.442519Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:50.443181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-04-04T16:55:50.443253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.443552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.443634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-04T16:55:50.443681Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-04T16:55:50.443832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:50.443920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:50.443974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:50.444034Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-04T16:55:50.444140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:50.444206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:50.444247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:50.444280Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:50.444463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:50.444543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:50.444596Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:50.444649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:50.444755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:50.444822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:50.444859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:50.444889Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:50.444954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:50.445012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:50.445049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:50.445102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:50.445180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:50.445213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:50.445604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-04T16:55:50.445706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-04T16:55:50.445781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-04T16:55:50.445871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-04T16:55:50.446029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:50.446083Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:50.446131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:50.452097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:50.452199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:50.452269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:50.452496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:50.452567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:50.452613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:50.452814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;pr ... SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:54.213626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T16:55:54.213691Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T16:55:54.213750Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-04T16:55:54.213817Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-04T16:55:54.213867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T16:55:54.214036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.214088Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-04-04T16:55:54.214131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:55:54.214434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:54.214645Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.214701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T16:55:54.214869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-04T16:55:54.214959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-04T16:55:54.215109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:454:2461];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-04T16:55:54.215280Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.215419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.215559Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.216508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:54.216674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.216851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.216914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:456:2462] finished for tablet 9437184 2025-04-04T16:55:54.217475Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:454:2461];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.306},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.309}],"full":{"a":1743785753907860,"name":"_full_task","f":1743785753907860,"d_finished":0,"c":0,"l":1743785754216991,"d":309131},"events":[{"name":"bootstrap","f":1743785753908120,"d_finished":2722,"c":1,"l":1743785753910842,"d":2722},{"a":1743785754216471,"name":"ack","f":1743785754214402,"d_finished":1202,"c":1,"l":1743785754215604,"d":1722},{"a":1743785754216449,"name":"processing","f":1743785753910949,"d_finished":184230,"c":9,"l":1743785754215607,"d":184772},{"name":"ProduceResults","f":1743785753909701,"d_finished":3196,"c":12,"l":1743785754216887,"d":3196},{"a":1743785754216891,"name":"Finish","f":1743785754216891,"d_finished":0,"c":0,"l":1743785754216991,"d":100},{"name":"task_result","f":1743785753910967,"d_finished":182834,"c":8,"l":1743785754214194,"d":182834}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.217592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:454:2461];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:54.218102Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:454:2461];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.306},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.309}],"full":{"a":1743785753907860,"name":"_full_task","f":1743785753907860,"d_finished":0,"c":0,"l":1743785754217647,"d":309787},"events":[{"name":"bootstrap","f":1743785753908120,"d_finished":2722,"c":1,"l":1743785753910842,"d":2722},{"a":1743785754216471,"name":"ack","f":1743785754214402,"d_finished":1202,"c":1,"l":1743785754215604,"d":2378},{"a":1743785754216449,"name":"processing","f":1743785753910949,"d_finished":184230,"c":9,"l":1743785754215607,"d":185428},{"name":"ProduceResults","f":1743785753909701,"d_finished":3196,"c":12,"l":1743785754216887,"d":3196},{"a":1743785754216891,"name":"Finish","f":1743785754216891,"d_finished":0,"c":0,"l":1743785754217647,"d":756},{"name":"task_result","f":1743785753910967,"d_finished":182834,"c":8,"l":1743785754214194,"d":182834}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 
2025-04-04T16:55:54.218225Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:53.907194Z;index_granules=0;index_portions=1;index_batches=953;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589608;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589608;selected_rows=0; 2025-04-04T16:55:54.218307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:54.218698Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:456:2462];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |77.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD] Test command err: 2025-04-04T16:55:50.547771Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:50.648482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:50.673780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:50.674064Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:50.681247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-04-04T16:55:50.681493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-04-04T16:55:50.681562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.681685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:50.682711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:50.682964Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:50.683100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:50.683211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:50.683359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:50.683526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:50.683668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:50.683788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:50.683907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:50.684037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:50.709571Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:50.710264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-04-04T16:55:50.710356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.710651Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-04-04T16:55:50.710813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.710895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.710940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.711428Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-04T16:55:50.711565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=69; 2025-04-04T16:55:50.711654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T16:55:50.711724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-04T16:55:50.711805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-04-04T16:55:50.711858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-04-04T16:55:50.711886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-04T16:55:50.711983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:50.712040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:50.712082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:50.712114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-04T16:55:50.712185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:50.712226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:50.712268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:50.712301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:50.712435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:50.712475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:50.712498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:50.712522Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:50.712579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:50.712618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:50.712642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:50.712662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:50.712712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:50.712740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:50.712772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:50.712824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:50.712848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:50.712866Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:50.713132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=26; 2025-04-04T16:55:50.713188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=20; 2025-04-04T16:55:50.713251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-04-04T16:55:50.713303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=21; 2025-04-04T16:55:50.713419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:50.713449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=a ... 
09Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:54.220698Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T16:55:54.224373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T16:55:54.224472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:54.224537Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:54.224659Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T16:55:54.243621Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:54.243966Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T16:55:54.244085Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T16:55:54.244125Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-04T16:55:54.244153Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T16:55:54.244203Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T16:55:54.244275Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:55:54.244345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T16:55:54.244416Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T16:55:54.244476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:54.244527Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T16:55:54.244618Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-04T16:55:54.244672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T16:55:54.399176Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-04-04T16:55:54.399399Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-04-04T16:55:54.399588Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-04T16:55:54.399659Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-04-04T16:55:54.399906Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1,2,3"},"fetch":"1,2,3"}]}; 2025-04-04T16:55:54.400016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T16:55:54.400531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:395:2409];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:468:2474];trace_detailed=; 2025-04-04T16:55:54.401252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:85;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-04-04T16:55:54.401444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-04-04T16:55:54.401796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:55:54.401928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.402037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.402102Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:468:2474] finished for tablet 9437184 2025-04-04T16:55:54.402517Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:466:2473];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743785754400449,"name":"_full_task","f":1743785754400449,"d_finished":0,"c":0,"l":1743785754402165,"d":1716},"events":[{"name":"bootstrap","f":1743785754400675,"d_finished":919,"c":1,"l":1743785754401594,"d":919},{"a":1743785754401774,"name":"ack","f":1743785754401774,"d_finished":0,"c":0,"l":1743785754402165,"d":391},{"a":1743785754401761,"name":"processing","f":1743785754401761,"d_finished":0,"c":0,"l":1743785754402165,"d":404},{"name":"ProduceResults","f":1743785754401582,"d_finished":267,"c":2,"l":1743785754402079,"d":267},{"a":1743785754402082,"name":"Finish","f":1743785754402082,"d_finished":0,"c":0,"l":1743785754402165,"d":83}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.402599Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:466:2473];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:55:54.402927Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:466:2473];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743785754400449,"name":"_full_task","f":1743785754400449,"d_finished":0,"c":0,"l":1743785754402653,"d":2204},"events":[{"name":"bootstrap","f":1743785754400675,"d_finished":919,"c":1,"l":1743785754401594,"d":919},{"a":1743785754401774,"name":"ack","f":1743785754401774,"d_finished":0,"c":0,"l":1743785754402653,"d":879},{"a":1743785754401761,"name":"processing","f":1743785754401761,"d_finished":0,"c":0,"l":1743785754402653,"d":892},{"name":"ProduceResults","f":1743785754401582,"d_finished":267,"c":2,"l":1743785754402079,"d":267},{"a":1743785754402082,"name":"Finish","f":1743785754402082,"d_finished":0,"c":0,"l":1743785754402653,"d":571}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T16:55:54.403011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:55:54.399978Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T16:55:54.403054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:55:54.403212Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:468:2474];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2025-04-04T16:52:47.888771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:47.890042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:47.890634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c3/r3tmp/tmp6gUkq1/pdisk_1.dat 2025-04-04T16:52:48.419251Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26372, node 1 2025-04-04T16:52:48.901555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:48.901617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:48.901663Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:48.902234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:48.924478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:49.070794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:49.070952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:49.095723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19530 2025-04-04T16:52:49.725633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:57.981562Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T16:52:58.093148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:58.093260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:58.132065Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:58.134882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:58.457800Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.458449Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.458960Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.459117Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.459326Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.459442Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.459517Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.459619Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.459692Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:58.661378Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:58.661495Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:58.680080Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:58.920277Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:59.022622Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:52:59.022777Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:52:59.069517Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:52:59.069939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:52:59.070185Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T16:52:59.070274Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:52:59.070337Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T16:52:59.070384Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T16:52:59.070444Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T16:52:59.070494Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:52:59.071321Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:52:59.113666Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:59.113781Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:59.125377Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2603] 2025-04-04T16:52:59.133158Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1898:2613] 2025-04-04T16:52:59.133389Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1898:2613], schemeshard id = 72075186224037897 2025-04-04T16:52:59.155716Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T16:52:59.220754Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T16:52:59.220819Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T16:52:59.220904Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T16:52:59.252877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T16:52:59.272427Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T16:52:59.272584Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T16:53:00.282974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T16:53:01.212609Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T16:53:01.348464Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:53:04.602991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3075], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:04.604187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:53:04.926691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T16:53:07.781761Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:53:07.781968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:53:07.782197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:53:07.798515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:53:07.798726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:53:07.798853Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:53:07.798950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:53:07.799043Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:53:07.799147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:53:07.799239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:53:07.799367Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:53:07.799458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2885];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:53:07.856611Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2396:2886];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:53:07.856688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2396:2886];tablet_id=72075186224037900;process= ... ode 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T16:55:49.623804Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8481:6447], schemeshard count = 1 2025-04-04T16:55:51.519828Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T16:55:51.519904Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T16:55:51.519977Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T16:55:51.520051Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:51.528109Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:51.545679Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:51.546342Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:51.546476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:51.547475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2025-04-04T16:55:51.547550Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-04-04T16:55:51.547616Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:52.731889Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T16:55:52.745484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:52.745752Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T16:55:52.746481Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8641:6531], server id = [2:8646:6536], tablet id = 72075186224037899, status = OK 2025-04-04T16:55:52.747008Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8641:6531], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.747402Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8642:6532], server id = [2:8647:6537], tablet id = 72075186224037900, status = OK 2025-04-04T16:55:52.747461Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8642:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.749049Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8643:6533], server id = [2:8648:6538], tablet id = 72075186224037901, status = OK 2025-04-04T16:55:52.749109Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8643:6533], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.749684Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8644:6534], server id = [2:8649:6539], tablet id = 72075186224037902, status = OK 2025-04-04T16:55:52.749737Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8644:6534], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.750778Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8645:6535], server id = [2:8652:6542], tablet id = 72075186224037903, status = OK 2025-04-04T16:55:52.750822Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8645:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.754954Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T16:55:52.755585Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8641:6531], server id = [2:8646:6536], tablet id = 72075186224037899 2025-04-04T16:55:52.755638Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.756968Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T16:55:52.757274Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8642:6532], server id = [2:8647:6537], tablet id = 72075186224037900 2025-04-04T16:55:52.757316Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.757966Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8664:6551], server id = [2:8666:6552], tablet id = 72075186224037904, status = OK 2025-04-04T16:55:52.758043Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8664:6551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.759838Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T16:55:52.760278Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8667:6553], server id = [2:8669:6554], tablet id = 72075186224037905, status = OK 2025-04-04T16:55:52.760354Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8667:6553], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.761009Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:8643:6533], server id = [2:8648:6538], tablet id = 72075186224037901 2025-04-04T16:55:52.761037Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.761788Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T16:55:52.762529Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8644:6534], server id = [2:8649:6539], tablet id = 72075186224037902 2025-04-04T16:55:52.762561Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.762963Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T16:55:52.763440Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8671:6556], server id = [2:8673:6558], tablet id = 72075186224037906, status = OK 2025-04-04T16:55:52.763516Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8671:6556], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.764017Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8645:6535], server id = [2:8652:6542], tablet id = 72075186224037903 2025-04-04T16:55:52.764044Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.764289Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8675:6560], server id = [2:8679:6563], tablet id = 72075186224037907, status = OK 2025-04-04T16:55:52.764356Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8675:6560], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.765166Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8677:6562], server id = [2:8680:6564], tablet id = 72075186224037908, status = OK 2025-04-04T16:55:52.765241Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8677:6562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T16:55:52.768021Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T16:55:52.768632Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8664:6551], server id = [2:8666:6552], tablet id = 72075186224037904 2025-04-04T16:55:52.768664Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.770399Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T16:55:52.771108Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8667:6553], server id = [2:8669:6554], tablet id = 72075186224037905 2025-04-04T16:55:52.771140Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.771774Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T16:55:52.772294Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8671:6556], server id = [2:8673:6558], tablet id = 72075186224037906 2025-04-04T16:55:52.772321Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.773250Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T16:55:52.773641Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8675:6560], server id = [2:8679:6563], tablet id = 72075186224037907 2025-04-04T16:55:52.773667Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.774117Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T16:55:52.774181Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
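The traversal trace above is a plain scatter/gather exchange: the aggregator opens a pipe per column-shard tablet (72075186224037899 through 72075186224037908 here), sends one TEvStatisticsRequest each, drops tablets from its pending set as each TEvStatisticsResponse arrives, and only then sends the single aggregate response to node 2 (subsequent EvClientDestroyed events are skipped as harmless). A toy model of that bookkeeping, illustrative only and not YDB's actual classes — the real actor lives under ydb/core/statistics/aggregator and is event-driven:

    # Hypothetical stand-in for the aggregator's per-round state.
    class TraversalRound:
        def __init__(self, tablet_ids):
            self.pending = set(tablet_ids)   # tablets still owing a response
            self.collected = []              # per-tablet statistics payloads

        def on_response(self, tablet_id, payload):
            self.pending.discard(tablet_id)  # late/duplicate replies are harmless
            self.collected.append(payload)
            return not self.pending          # True -> time to aggregate and reply

    round_ = TraversalRound([72075186224037899, 72075186224037900])
    assert not round_.on_response(72075186224037899, {"rows": 10})
    assert round_.on_response(72075186224037900, {"rows": 20})
    print("aggregate over", len(round_.collected), "tablets")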
2025-04-04T16:55:52.774395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:52.774595Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:52.774967Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:52.776814Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8677:6562], server id = [2:8680:6564], tablet id = 72075186224037908 2025-04-04T16:55:52.776845Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:52.777620Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:52.797245Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8707:6587]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:52.797523Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:55:52.797591Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8707:6587], StatRequests.size() = 1 2025-04-04T16:55:53.001675Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2VmMzk5MDItNjQ1MjM0NTYtOWZhYjM3Zi1mYTVlOGUwZA==, TxId: 2025-04-04T16:55:53.001777Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2VmMzk5MDItNjQ1MjM0NTYtOWZhYjM3Zi1mYTVlOGUwZA==, TxId: ...
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T16:55:53.002938Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8715:6593]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:53.003269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:53.003956Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T16:55:53.004058Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T16:55:53.008390Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T16:55:53.008494Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T16:55:53.008569Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T16:55:53.017532Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Cdc::Alter [GOOD] >> Cdc::AddColumn |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning >> TPQCDTest::TestUnavailableWithoutClustersList ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 3400, MsgBus: 15879 2025-04-04T16:55:26.910321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502665048430661:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:26.911189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d84/r3tmp/tmps4ph6O/pdisk_1.dat 2025-04-04T16:55:27.345830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:27.348390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:27.348451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:27.352686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3400, node 1 2025-04-04T16:55:27.422073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:27.422095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:27.422109Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:27.422284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:15879 TClient is connected to server localhost:15879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:28.034298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:28.055076Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:30.248950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682228300510:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.249148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.519677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.646223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682228300631:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.646323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.646587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502682228300636:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.649952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:30.661031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502682228300638:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:30.736062Z node 1 :TX_PROXY ERROR: Actor# [1:7489502682228300678:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:31.283843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:31.772117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:31.913207Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502665048430661:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.913345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:32.403976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.027277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.652560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T16:55:34.175869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:34.258661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:38.522517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710731:0, at schemeshard: 72057594046644480 2025-04-04T16:55:38.552774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710732:0, at schemeshard: 72057594046644480 2025-04-04T16:55:38.555184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710734:0, at schemeshard: 72057594046644480 2025-04-04T16:55:38.556788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710733:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { ... 594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:43.510949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:43.517702Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:46.425019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502752171640908:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:46.425130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:46.452357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T16:55:46.536684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502752171641030:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:46.536814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:46.537203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502752171641035:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:46.543798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T16:55:46.558802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489502752171641037:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:46.650235Z node 2 :TX_PROXY ERROR: Actor# [2:7489502752171641103:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:47.194879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:47.536629Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502734991771216:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:47.536713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:47.807178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-04-04T16:55:48.419448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:55:49.090754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T16:55:49.722119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T16:55:50.264303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T16:55:50.308534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T16:55:54.720138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710737:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |77.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |77.5%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TxUsage::WriteToTopic_Demo_20_RestartNo |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> BasicUsage::ConnectToYDB |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2025-04-04T16:52:40.269409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:52:40.269639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:52:40.269741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c7/r3tmp/tmpFOHWgv/pdisk_1.dat 2025-04-04T16:52:43.869857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:44.658125Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.128305s 2025-04-04T16:52:44.659048Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.129251s TServer::EnableGrpc on GrpcPort 1392, node 1 2025-04-04T16:52:46.804251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:52:46.804319Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:52:46.804350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:52:46.804884Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:52:46.816553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:52:46.926794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:46.926973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:46.945331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7290 2025-04-04T16:52:47.677120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:52:52.209308Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T16:52:52.301727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:52.301860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:52.342702Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:52:52.345193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:52.685555Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.686528Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.687183Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.687573Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-04-04T16:52:52.687718Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.688044Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.688158Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.688280Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.688371Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T16:52:52.933942Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:52:52.934083Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:52:52.957661Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:52:53.244735Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:52:53.295879Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T16:52:53.295989Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T16:52:53.338866Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T16:52:53.339106Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T16:52:53.339575Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T16:52:53.339673Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T16:52:53.339744Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T16:52:53.339808Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T16:52:53.339878Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T16:52:53.339938Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T16:52:53.342015Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T16:52:53.375533Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:53.375659Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T16:52:53.381977Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T16:52:53.387330Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T16:52:53.387552Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T16:52:53.401706Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T16:52:53.434966Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T16:52:53.435201Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T16:52:53.435311Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database/.metadata/_statistics 2025-04-04T16:52:53.447973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T16:52:53.466208Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T16:52:53.466408Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T16:52:53.743283Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T16:52:54.059261Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T16:52:54.179884Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T16:52:55.965287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:55.965413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:52:56.002496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T16:52:56.654931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:52:56.656086Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:52:56.662471Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:52:56.662984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:52:56.663102Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:52:56.663228Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:52:56.663325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:52:56.663415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:52:56.663827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:52:56.664483Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:52:56.665389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:52:56.665488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:52:56.780784Z node 2 :TX_COLUM ... 
= 72075186224037899 2025-04-04T16:55:50.715875Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:50.716154Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:50.718830Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:50.741028Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7576:5556]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T16:55:50.741276Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T16:55:50.741335Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7576:5556], StatRequests.size() = 1 2025-04-04T16:55:50.876735Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDgzYWYxNDctZmVjMWIxNzUtMzlmMWFmNTktYjI3ODY2Yjk=, TxId: 2025-04-04T16:55:50.876821Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDgzYWYxNDctZmVjMWIxNzUtMzlmMWFmNTktYjI3ODY2Yjk=, TxId: 2025-04-04T16:55:50.877385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:50.892972Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T16:55:50.893042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3265:3366] 2025-04-04T16:55:51.436746Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T16:55:51.436856Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T16:55:52.219454Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T16:55:52.219545Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-04T16:55:52.224014Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:52.241878Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:52.242464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:52.242523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 5], AnalyzedShards 1 2025-04-04T16:55:52.269888Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:52.281063Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T16:55:52.281917Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T16:55:52.281999Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T16:55:52.303929Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-04-04T16:55:53.616089Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T16:55:53.616166Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-04T16:55:53.616207Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T16:55:53.616804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:53.638367Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:53.638808Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:53.638877Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:53.639260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T16:55:53.669029Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:53.669198Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T16:55:53.669665Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7693:5617], server id = [2:7694:5618], tablet id = 72075186224037900, status = OK 2025-04-04T16:55:53.669746Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7693:5617], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-04-04T16:55:53.672137Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T16:55:53.672232Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:53.672418Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:53.672631Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:53.672856Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T16:55:53.674871Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7693:5617], server id = [2:7694:5618], tablet id = 72075186224037900 2025-04-04T16:55:53.674915Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:53.675674Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:53.699890Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDZhYjcxNjItMmUwNWEwNDgtNzk1NjQ2MmQtZWMyZWRiM2I=, TxId: 2025-04-04T16:55:53.699966Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDZhYjcxNjItMmUwNWEwNDgtNzk1NjQ2MmQtZWMyZWRiM2I=, TxId: 2025-04-04T16:55:53.700646Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:53.730390Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T16:55:53.730447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T16:55:54.262470Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-04T16:55:54.262541Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T16:55:55.029042Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T16:55:55.029280Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T16:55:55.040108Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T16:55:55.040166Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T16:55:55.040204Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T16:55:56.256745Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T16:55:56.256866Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-04-04T16:55:56.256916Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T16:55:56.257476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T16:55:56.271971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T16:55:56.272316Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T16:55:56.272371Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T16:55:56.272740Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T16:55:56.291391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T16:55:56.291645Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-04T16:55:56.292520Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7789:5675], server id = [2:7790:5676], tablet id = 72075186224037900, status = OK 2025-04-04T16:55:56.292623Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7789:5675], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-04-04T16:55:56.294127Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T16:55:56.294232Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T16:55:56.294443Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T16:55:56.294650Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T16:55:56.294949Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T16:55:56.297895Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7789:5675], server id = [2:7790:5676], tablet id = 72075186224037900 2025-04-04T16:55:56.297944Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T16:55:56.298732Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T16:55:56.333769Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGZhZjJkNWItMzUwZDE2OTAtNmJiOTY1MTgtZTMzMWM0YTc=, TxId: 2025-04-04T16:55:56.333838Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGZhZjJkNWItMzUwZDE2OTAtNmJiOTY1MTgtZTMzMWM0YTc=, TxId: 2025-04-04T16:55:56.334398Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:07.000000Z, event interval end# 2025-04-04T16:55:54.000000Z 2025-04-04T16:55:56.334824Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T16:55:56.355338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T16:55:56.355428Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3265:3366] |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId >> TMonitoringTests::InvalidActorId [GOOD] |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> TMonitoringTests::ValidActorId >> TMonitoringTests::ValidActorId [GOOD] |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> Cdc::DropIndex [GOOD] >> Cdc::DisableStream >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |77.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |77.5%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-04-04T16:55:40.433563Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:40.532531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:40.560419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:40.560714Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:40.569846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:40.570115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:40.570378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:40.570524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:40.570661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:40.570790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:40.570893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:40.571044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:40.571177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:40.571319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.571432Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:40.571526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:40.600357Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:40.600657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:40.600727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:40.600933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:40.601172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:40.601265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:40.601332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:40.601432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:40.601500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:40.601544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:40.601594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:40.601787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:40.601850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:40.601888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:40.601916Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:40.602010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:40.602094Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:40.602145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:40.602191Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:40.602294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:40.602332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:40.602384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:40.602437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:40.602498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:40.602542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:40.602992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=59; 2025-04-04T16:55:40.603105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T16:55:40.603220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2025-04-04T16:55:40.603319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-04T16:55:40.603506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:40.603568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:40.603604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:40.603829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:40.603892Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.603932Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:40.604135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:40.604193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:40.604227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:40.604433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:40.604481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:40.604510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:40.604622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:40.604660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:40.604708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
OLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=49; 2025-04-04T16:56:02.119402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T16:56:02.119502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.119541Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-04T16:56:02.119586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T16:56:02.120913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:56:02.121084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.121144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T16:56:02.121261Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=10; 2025-04-04T16:56:02.121322Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=80;num_rows=10;batch_columns=timestamp; 2025-04-04T16:56:02.121447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:4118:6130];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-04T16:56:02.121587Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.121711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.121818Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.122503Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T16:56:02.122653Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.122766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.122808Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:4123:6135] finished for tablet 9437184 2025-04-04T16:56:02.123369Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:4118:6130];stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.022},{"events":["f_ack"],"t":0.023},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.025}],"full":{"a":1743785762097426,"name":"_full_task","f":1743785762097426,"d_finished":0,"c":0,"l":1743785762122868,"d":25442},"events":[{"name":"bootstrap","f":1743785762099067,"d_finished":2744,"c":1,"l":1743785762101811,"d":2744},{"a":1743785762122476,"name":"ack","f":1743785762120880,"d_finished":962,"c":1,"l":1743785762121842,"d":1354},{"a":1743785762122456,"name":"processing","f":1743785762102594,"d_finished":4067,"c":8,"l":1743785762121845,"d":4479},{"name":"ProduceResults","f":1743785762100587,"d_finished":2621,"c":11,"l":1743785762122790,"d":2621},{"a":1743785762122793,"name":"Finish","f":1743785762122793,"d_finished":0,"c":0,"l":1743785762122868,"d":75},{"name":"task_result","f":1743785762102637,"d_finished":2873,"c":7,"l":1743785762119640,"d":2873}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.123453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:4118:6130];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T16:56:02.123951Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:4118:6130];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.022},{"events":["f_ack"],"t":0.023},{"events":["l_ProduceResults","f_Finish"],"t":0.025},{"events":["l_ack","l_processing","l_Finish"],"t":0.026}],"full":{"a":1743785762097426,"name":"_full_task","f":1743785762097426,"d_finished":0,"c":0,"l":1743785762123501,"d":26075},"events":[{"name":"bootstrap","f":1743785762099067,"d_finished":2744,"c":1,"l":1743785762101811,"d":2744},{"a":1743785762122476,"name":"ack","f":1743785762120880,"d_finished":962,"c":1,"l":1743785762121842,"d":1987},{"a":1743785762122456,"name":"processing","f":1743785762102594,"d_finished":4067,"c":8,"l":1743785762121845,"d":5112},{"name":"ProduceResults","f":1743785762100587,"d_finished":2621,"c":11,"l":1743785762122790,"d":2621},{"a":1743785762122793,"name":"Finish","f":1743785762122793,"d_finished":0,"c":0,"l":1743785762123501,"d":708},{"name":"task_result","f":1743785762102637,"d_finished":2873,"c":7,"l":1743785762119640,"d":2873}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T16:56:02.124035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T16:56:02.096832Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2812;selected_rows=0; 2025-04-04T16:56:02.124082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T16:56:02.124278Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:4123:6135];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-04-04T16:55:59.390550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:59.391163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:59.391224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cf0/r3tmp/tmpTk5O40/pdisk_1.dat 2025-04-04T16:55:59.805212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:59.855865Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:59.900409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:59.900550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:59.913965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:00.007960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:00.397831Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-04-04T16:56:00.398009Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-04-04T16:56:00.405553Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} started# 5 actors each with inflight# 4 2025-04-04T16:56:00.405660Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-04T16:56:00.406671Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-04T16:56:00.406753Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-04T16:56:00.406792Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-04T16:56:00.406830Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-04-04T16:56:00.414047Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} session: ydb://session/3?node_id=1&id=ZjhjMDRlNjctN2IxN2NhNDAtZWI5ZGRhM2ItZWY1Njc2MjU= 2025-04-04T16:56:00.414158Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} session: ydb://session/3?node_id=1&id=N2VmMjdhYTYtZGE0NDcxOTMtNGZiNzcxZTctOGY0ZjFlNDE= 2025-04-04T16:56:00.416163Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} session: ydb://session/3?node_id=1&id=NTgzNzJlMDQtNjViZjM5ZjYtZmQ5MzNjNzEtMmQ0NGQ0MjU= 2025-04-04T16:56:00.417911Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} session: ydb://session/3?node_id=1&id=N2M5YmM3YWQtNjdkZGFjNWMtOGY2MjhiMjgtNTA5Yjc1N2Q= 2025-04-04T16:56:00.419993Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} session: ydb://session/3?node_id=1&id=ZGQwMjFjYjgtNzEzNDc3OS1lMWE1ZjM3NS0yNTI4NWM1ZA== 2025-04-04T16:56:00.424469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.424626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.424700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.424769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.424843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.424905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.424987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.436715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:00.513396Z node 1 :TX_PROXY ERROR: Actor# [1:797:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:00.514816Z node 1 :TX_PROXY ERROR: Actor# [1:798:2675] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:00.515520Z node 1 :TX_PROXY ERROR: Actor# [1:799:2676] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:00.516176Z node 1 :TX_PROXY ERROR: Actor# [1:803:2677] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:00.691540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:788:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:00.691661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:789:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:00.691725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:00.691772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:00.691843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:00.732871Z node 1 :TX_PROXY ERROR: Actor# [1:898:2740] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:01.224663Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} finished in 1743785761.224598s, errors=0 2025-04-04T16:56:01.225000Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743785761224 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:01.239434Z node 1 :TX_PROXY ERROR: Actor# [1:951:2778] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:01.313935Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} finished in 1743785761.313872s, errors=0 2025-04-04T16:56:01.314298Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743785761313 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:01.329242Z node 1 :TX_PROXY ERROR: Actor# [1:982:2800] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:01.401304Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} finished in 1743785761.401255s, errors=0 2025-04-04T16:56:01.401603Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743785761401 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:01.415732Z node 1 :TX_PROXY ERROR: Actor# [1:1013:2822] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:01.496300Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} finished in 1743785761.496238s, errors=0 2025-04-04T16:56:01.496611Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743785761496 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:01.512695Z node 1 :TX_PROXY ERROR: Actor# [1:1044:2844] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:01.587919Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} finished in 1743785761.587873s, errors=0 2025-04-04T16:56:01.588265Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743785761587 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:01.588341Z node 
1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} finished in 1.183096s, oks# 20, errors# 0 2025-04-04T16:56:01.588466Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:737:2620] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-04-04T16:55:57.671268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502798762596514:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:57.671587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001492/r3tmp/tmprU80yD/pdisk_1.dat 2025-04-04T16:55:58.212272Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:58.217502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:58.217640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:58.220615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11252, node 1 2025-04-04T16:55:58.351308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001492/r3tmp/yandexPHdjdW.tmp 2025-04-04T16:55:58.351346Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001492/r3tmp/yandexPHdjdW.tmp 2025-04-04T16:55:58.351547Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001492/r3tmp/yandexPHdjdW.tmp 2025-04-04T16:55:58.351699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:56:01.320550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502815942466254:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.320940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.321448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502815942466289:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.329466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-04T16:56:01.355930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502815942466291:2385], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-04T16:56:01.496864Z node 1 :TX_PROXY ERROR: Actor# [1:7489502815942466352:2375] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:01.794915Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502815942466361:2391], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:56:01.795575Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Q5ZjA0YzMtMWY2YjA5YjktZjU2MTRmMjgtOTg3OGNhYjc=, ActorId: [1:7489502815942466251:2379], ActorState: ExecuteState, TraceId: 01jr0vp6h49chgqjd3j0dg95at, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:56:01.818570Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> StreamCreator::Basic |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |77.6%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-04-04T16:56:00.166045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:00.166682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:00.166750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ce6/r3tmp/tmpkxldSK/pdisk_1.dat 2025-04-04T16:56:00.664884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:00.736307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:00.807238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:00.807410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:00.819758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:00.911684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.295151Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-04T16:56:01.295326Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-04T16:56:01.299532Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} started# 5 actors each with inflight# 4 2025-04-04T16:56:01.299642Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T16:56:01.299738Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T16:56:01.299788Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T16:56:01.299844Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T16:56:01.299901Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T16:56:01.306144Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} session: ydb://session/3?node_id=1&id=YzZjNzkwMTgtYTE4ZTRmMzItMTRkNTI0NC1mMDFiN2NmMg== 2025-04-04T16:56:01.306231Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} session: ydb://session/3?node_id=1&id=NjhlMzAxZjEtNzhlMTMyZDItM2FhMWU3NDQtOWVlMDU1OGM= 2025-04-04T16:56:01.308130Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} session: 
ydb://session/3?node_id=1&id=ZWNhMTczZjUtZTQ1YTQ5MmItMmNjOTUyY2ItZDU0ZjYxN2E= 2025-04-04T16:56:01.310417Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} session: ydb://session/3?node_id=1&id=M2IwZjQzZGQtYWE4NzQ3OTgtNzUxYzRiNmUtZjk5ODNjMA== 2025-04-04T16:56:01.312462Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} session: ydb://session/3?node_id=1&id=YTFlNDI3ZjktMjAxMzY5NzktODEzNmQ1MGUtNjFkMmMwZWM= 2025-04-04T16:56:01.317390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.317749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.317845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.317897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.317978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.318041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.318136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.326447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:01.381045Z node 1 :TX_PROXY ERROR: Actor# [1:797:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:01.382141Z node 1 :TX_PROXY ERROR: Actor# [1:798:2675] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:01.382803Z node 1 :TX_PROXY ERROR: Actor# [1:799:2676] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:01.383411Z node 1 :TX_PROXY ERROR: Actor# [1:803:2677] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T16:56:01.556247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:788:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:01.556393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:789:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:01.556450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:01.556507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:01.556590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:01.593328Z node 1 :TX_PROXY ERROR: Actor# [1:898:2740] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.111663Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} finished in 1743785762.111589s, errors=0 2025-04-04T16:56:02.112090Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743785762111 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:02.129880Z node 1 :TX_PROXY ERROR: Actor# [1:951:2778] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.195902Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} finished in 1743785762.195855s, errors=0 2025-04-04T16:56:02.196255Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743785762195 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:02.211210Z node 1 :TX_PROXY ERROR: Actor# [1:982:2800] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.279626Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} finished in 1743785762.279581s, errors=0 2025-04-04T16:56:02.279901Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743785762279 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:02.296932Z node 1 :TX_PROXY ERROR: Actor# [1:1013:2822] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.372460Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} finished in 1743785762.372409s, errors=0 2025-04-04T16:56:02.372833Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743785762372 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:02.392014Z node 1 :TX_PROXY ERROR: Actor# [1:1044:2844] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.465753Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} finished in 1743785762.465702s, errors=0 2025-04-04T16:56:02.466049Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743785762465 OperationsOK: 4 OperationsError: 0 } 2025-04-04T16:56:02.466124Z node 
1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} finished in 1.166867s, oks# 20, errors# 0 2025-04-04T16:56:02.466267Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:737:2620] with tag# 2 >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2025-04-04T16:55:57.489108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502799481355044:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:57.513671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014a6/r3tmp/tmpjHSG4W/pdisk_1.dat 2025-04-04T16:55:58.039914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:58.043689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:58.043817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:58.047769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2912, node 1 2025-04-04T16:55:58.151576Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0014a6/r3tmp/yandexcivdNw.tmp 2025-04-04T16:55:58.151602Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0014a6/r3tmp/yandexcivdNw.tmp 2025-04-04T16:55:58.175213Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0014a6/r3tmp/yandexcivdNw.tmp 2025-04-04T16:55:58.175546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23336 PQClient connected to localhost:2912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:55:58.514990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:58.533527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:58.557081Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:01.336275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502816661224799:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.336379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502816661224835:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.336442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.342566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:56:01.350102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502816661224867:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.350306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.359869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502816661224837:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:56:01.546457Z node 1 :TX_PROXY ERROR: Actor# [1:7489502816661224893:2390] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:01.581243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.617939Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502816661224904:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:56:01.618977Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzgwMzg5N2MtOWI1YWJiMjAtZTA2Mzk3ZjItMTMxYzBiZjc=, ActorId: [1:7489502816661224796:2329], ActorState: ExecuteState, TraceId: 01jr0vp6h9afmdy8vxc296pp85, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:56:01.621770Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:56:01.728453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.870230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T16:56:02.243610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0vp766bef5qc391czpdkx8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU1Nzk2OGMtZTk5YzdhMDEtNDQ4YTU2MWItOTRkNWVhNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:56:02.482363Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502799481355044:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:02.482444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2025-04-04T16:55:57.994150Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502799354623499:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:57.994206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00148e/r3tmp/tmpey4hwR/pdisk_1.dat 2025-04-04T16:55:58.470016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:58.478450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:58.478566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:58.482872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62163, node 1 2025-04-04T16:55:58.608870Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/00148e/r3tmp/yandex84lL90.tmp 2025-04-04T16:55:58.608891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/00148e/r3tmp/yandex84lL90.tmp 2025-04-04T16:55:58.609068Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/00148e/r3tmp/yandex84lL90.tmp 2025-04-04T16:55:58.609219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6666 PQClient connected to localhost:62163 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:58.976511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:59.005422Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:59.016919Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:56:01.669625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502816534493267:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.669919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.670275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502816534493300:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.675584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:56:01.695392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502816534493305:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:56:01.994483Z node 1 :TX_PROXY ERROR: Actor# [1:7489502816534493370:2391] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.053657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:56:02.091936Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502820829460675:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:56:02.094147Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmU2M2Y5ZjYtYWIwMmRhZmEtNzc5NDNiNmUtOTkzYWYzM2Q=, ActorId: [1:7489502816534493264:2331], ActorState: ExecuteState, TraceId: 01jr0vp6vf1nwyayf0brvmaear, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:56:02.097794Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:56:02.213491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:56:02.335879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T16:56:02.655780Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0vp7kk9ndkhkx4avhyy884, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQyMzY2MmUtZjE4MzEzNDktMjRjZWYzYzItZjRhZmUyYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:56:02.994331Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502799354623499:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:02.994411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |77.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> UpsertLoad::ShouldDropCreateTable [GOOD] |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> TCmsTest::StateRequestUnknownNode >> TCmsTest::StateRequest >> TCmsTest::TestForceRestartMode >> TCmsTest::RequestRestartServicesRejectSecond >> TCmsTest::WalleTasks >> TCmsTest::TestKeepAvailableMode >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |77.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> BSCStopPDisk::PDiskStop [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 9072774894486649785 >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb >> 
TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-04-04T16:55:59.250011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:55:59.250496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:59.250551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d03/r3tmp/tmpK4SBrH/pdisk_1.dat 2025-04-04T16:55:59.651717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:55:59.693931Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:59.742662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:59.742823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:59.755862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:59.852748Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-04-04T16:56:00.042943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:644:2551], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.043081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:00.062761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:00.428883Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-04-04T16:56:00.431424Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:640:2548], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-04T16:56:00.459387Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:640:2548], subTag: 1} TUpsertActor finished in 0.027494s, errors=0 2025-04-04T16:56:00.459744Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-04T16:56:00.459894Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:640:2548], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-04-04T16:56:00.527710Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:640:2548], subTag: 3} TUpsertActor finished in 0.067459s, errors=0 2025-04-04T16:56:00.527826Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:753:2629] with tag# 3 2025-04-04T16:56:04.527822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:04.527978Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:04.528256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d03/r3tmp/tmp0zRO4U/pdisk_1.dat 2025-04-04T16:56:04.840450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:04.877439Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:04.915806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:04.915966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:04.931118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:05.017870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:05.307094Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-04-04T16:56:05.307265Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-04-04T16:56:05.777751Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor finished in 0.470028s, errors=0 2025-04-04T16:56:05.777851Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:738:2620] with tag# 2 2025-04-04T16:56:05.782714Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-04-04T16:56:05.793206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:780:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:05.793319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:06.040281Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-04-04T16:56:06.054746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:846:2707], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:06.054884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:06.073643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T16:56:06.130970Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-04T16:56:06.382315Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-04-04T16:56:06.382688Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:776:2658], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-04T16:56:06.395078Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:776:2658], subTag: 1} TUpsertActor finished in 0.012038s, errors=0 2025-04-04T16:56:06.395424Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-04T16:56:06.395573Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:776:2658], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-04-04T16:56:06.459128Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:776:2658], subTag: 3} TUpsertActor finished in 0.063262s, errors=0 2025-04-04T16:56:06.459242Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:939:2781] with tag# 3 >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb >> StreamCreator::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions |77.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems >> BasicUsage::ConnectToYDB [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> BasicUsage::WriteRead >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> TCmsTest::StateStorageTwoRings >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-04-04T16:56:03.885930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502826198317323:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:03.886455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00190a/r3tmp/tmpiX2cpT/pdisk_1.dat 2025-04-04T16:56:04.477226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:04.480481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:04.480580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:04.483612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9476 TServer::EnableGrpc on GrpcPort 9472, node 1 2025-04-04T16:56:04.886974Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:04.886998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:04.887007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:04.887126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:05.297959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:05.311179Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:56:05.314778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785765421 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743785765351 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785765421 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-04T16:56:05.495988Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:56:05.496253Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:56:05.496272Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T16:56:05.497079Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T16:56:08.264234Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743785765421, tx_id: 281474976710658 } } } 2025-04-04T16:56:08.264631Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T16:56:08.266383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:56:08.268992Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T16:56:08.269015Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-04T16:56:08.306387Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T16:56:08.306421Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-04T16:56:08.455650Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-04T16:56:08.540374Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7489502847673154740:2347] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T16:56:08.583297Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-04T16:56:08.583345Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-04T16:56:08.609558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:56:08.646588Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-04T16:56:08.646643Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785765421 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... 
(TRUNCATED) 2025-04-04T16:56:08.891200Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502826198317323:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:08.891296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartNo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> Cdc::DisableStream [GOOD] >> Cdc::InitialScan >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T16:55:22.285862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:22.285948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.286062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:22.286100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:22.286134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:22.286165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:22.286213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.286288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:22.286599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:22.367377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:22.367430Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:22.382253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:22.382827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:22.383009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:22.393332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:22.393582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:22.394346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.395240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:22.399149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.400586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:22.400683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.400935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:22.400998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:22.401047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:22.401215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: 
[1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:22.408674Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:22.543085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:22.543332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.543571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:22.543809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:22.543883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.546540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.546724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:22.546894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.546947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:22.546979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:22.547021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:22.548881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.548933Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:22.548963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:22.550767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.550816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.550866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.550931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.562360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:22.564606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:22.564806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:22.565786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.565944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:22.566001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.566310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:22.566381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.566567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:22.566662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:22.569682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:22.569740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:22.569946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.569994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:22.570531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.570587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:22.570732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:22.570802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.570853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:22.570887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.570926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, 
ready parts: 1/1, is published: false 2025-04-04T16:55:22.570965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.571003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... ame: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:10.349598Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:56:10.349931Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 374us result status StatusSuccess 2025-04-04T16:56:10.351378Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |77.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.8%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |77.8%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow >> TCmsTest::TestKeepAvailableModeScheduled >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success >> 
TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestPureProgram >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> 
TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TCmsTest::SysTabletsNode [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. 
Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::StateStorageNodesFromOneRing >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> TCmsTenatsTest::TestNoneTenantPolicy >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TCmsTest::StateStorageLockedNodes [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet |77.8%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalDataSourceTest::CreateExternalDataSource >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageTwoBrokenRings |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |77.8%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> TCmsTest::SamePriorityRequest [GOOD] >> TExternalDataSourceTest::SchemeErrors >> TExternalDataSourceTest::DropTableTwice >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TExternalDataSourceTest::ReadOnlyMode >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> Yq_1::DeleteConnections >> TExternalDataSourceTest::SchemeErrors [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:17.997800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:17.997908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:17.997949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:17.997994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:17.998046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:17.998072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:17.998130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:17.998194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:17.998538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:18.065191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:18.065252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:18.084777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:18.085068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:18.085232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:18.099118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:18.099490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:18.100281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.103917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:18.118799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.120211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.120290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.120512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:18.120567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.120611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:18.120749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.136778Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:18.307377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.307682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.307937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:18.308204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:18.308275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.311583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.311827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:18.312059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.312151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:18.312223Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:18.312266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:18.319395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.319496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:18.319545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:18.327453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.327547Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.327619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.327676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.331943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:18.339192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:18.339440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:18.340652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.340829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.340886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.341234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:18.341309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.341499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:18.341600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:18.355567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.355642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.355854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.355903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:18.356281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.356349Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:18.356477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:18.356515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.356571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:18.356608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.356647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:18.356713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.356752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:18.356787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:18.356871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:18.356911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:18.356953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:18.359166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:18.359312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:18.359349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T16:56:18.359391Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T16:56:18.359434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:18.359556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T16:56:18.379347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T16:56:18.379990Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T16:56:18.381041Z node 1 
:TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T16:56:18.405003Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T16:56:18.407888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.408296Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-04T16:56:18.408370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-04T16:56:18.408418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-04T16:56:18.409351Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:56:18.431095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.431356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-04-04T16:56:18.432069Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T16:56:18.432317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T16:56:18.432361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T16:56:18.432852Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T16:56:18.432988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:56:18.433054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 101 2025-04-04T16:56:18.433606Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:18.433867Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 267us result status StatusPathDoesNotExist 2025-04-04T16:56:18.434081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |77.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:18.463984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:18.464119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.464197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:18.464240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:18.464286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:18.464318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:18.464378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.464473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:18.464895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:18.550012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:18.550080Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:18.565577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:18.565919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:18.566123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:18.579830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:18.580194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:18.580915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.582410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:18.588268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.589783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.589877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.590099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:18.590155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.590201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:18.590382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.599368Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:18.742803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.743052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.743494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:18.743737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:18.743793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.746194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.746412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:18.746655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.746721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:18.746769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:18.746808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:18.748951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.749010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:18.749046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:18.751307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.751361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.751410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.751484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.755175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:18.757383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:18.757580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:18.758567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.758743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.758799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-04T16:56:18.759128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:18.759186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.759343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:18.759450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:18.761770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.761819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.761986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.762028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:18.762335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.762378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:18.762481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:18.762515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.762573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:18.762611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.762666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:18.762707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.762757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:18.762790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:18.762864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:18.762900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:18.762939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:18.764750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:18.764849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:18.764882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e DataStream was not found" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.824114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: (NKikimr::NExternalSource::TExternalSourceException) External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2025-04-04T16:56:18.826900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.827271Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-04-04T16:56:18.827365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-04T16:56:18.827539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-04-04T16:56:18.830519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.830771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-04-04T16:56:18.834100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.834536Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-04T16:56:18.834682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-04T16:56:18.834871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-04T16:56:18.837719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.837908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got 
TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-04T16:56:18.840655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.841007Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-04-04T16:56:18.841090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-04-04T16:56:18.841257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-04-04T16:56:18.843998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.844223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-04-04T16:56:18.847029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.847273Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-04-04T16:56:18.847354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-04-04T16:56:18.847465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-04-04T16:56:18.850052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.850279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:18.855254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:18.855395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.855439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:18.855472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:18.855511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:18.855555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:18.855614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.855684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:18.856031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:18.924837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:18.924908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:18.937874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:18.938166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:18.938355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:18.949383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:18.949728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:18.950428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.955117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:18.961368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.962976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.963052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.963258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:18.963312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.963352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:18.963506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.969066Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:19.082262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-04-04T16:56:19.082477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.082696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:19.082879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:19.082918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.084817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.084975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:19.085142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.085193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:19.085250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:19.085312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:19.088424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.088501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:19.088543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:19.090384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.090454Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.090511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.090570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.112087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:19.114796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:19.115012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:19.116093Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.116234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:19.116284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.116616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:19.116698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.116866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:19.116955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:19.119361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:19.119402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:19.119600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.119645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:19.119972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.120049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:19.120154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:19.120182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.120244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:19.120278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.120311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:19.120353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.120393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:19.120422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:19.120503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:19.120539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:19.120567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:19.128657Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:19.128798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:19.128841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 19.165494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:19.165532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-04T16:56:19.165655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T16:56:19.165799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:19.165841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:19.168835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:19.168879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:19.169071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:56:19.169157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.169190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T16:56:19.169237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T16:56:19.169498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.169540Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T16:56:19.169623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T16:56:19.169769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T16:56:19.169797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T16:56:19.169821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T16:56:19.169847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T16:56:19.169878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T16:56:19.169902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
101:0 2025-04-04T16:56:19.169924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T16:56:19.169981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:19.170006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T16:56:19.170049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-04T16:56:19.170086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T16:56:19.170744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.170817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.170851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T16:56:19.170885Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T16:56:19.170924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:19.171541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.171594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.171622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T16:56:19.171644Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T16:56:19.171693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:19.171748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T16:56:19.179746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:56:19.179898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T16:56:19.180108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T16:56:19.180147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T16:56:19.180598Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, 
unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T16:56:19.180707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:56:19.180734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:295:2286] TestWaitNotification: OK eventTxId 101 2025-04-04T16:56:19.181211Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:19.181407Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 240us result status StatusSuccess 2025-04-04T16:56:19.181776Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-04T16:56:19.185311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:19.185671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-04-04T16:56:19.185757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-04-04T16:56:19.185916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 
2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2025-04-04T16:56:19.188628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-04T16:56:19.188807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T16:56:19.189111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T16:56:19.189152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T16:56:19.189611Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:56:19.189709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:56:19.189744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:303:2294] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:18.082971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:18.083074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.083137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:18.083173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:18.083220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:18.083248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:18.083320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.083386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:18.083726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:18.153030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:18.153087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:18.172443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:18.172714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:18.172842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:18.196035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:18.196420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:18.197198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.198568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:18.220141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.221895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.222002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.222219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:18.222306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.222358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:18.222531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.232510Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:18.390102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:18.390446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.390733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:18.391006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:18.391069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.407367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.407539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:18.407771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.407844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:18.407887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:18.407963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:18.411513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.411615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:18.411662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:18.419367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.419470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.419535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.419619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.447854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:18.455118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:18.455344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:18.456508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.456668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:18.456723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.457073Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:18.457137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:18.457331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:18.457465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:18.467976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.468052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.468299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.468363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:18.468760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.468831Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:18.468992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:18.469054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.469095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:18.469128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.469166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:18.469214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:18.469248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:18.469277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:18.469360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:18.469406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:18.469442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:18.472987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:18.473132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:18.473181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
opId# 101:0 ProgressState 2025-04-04T16:56:19.298968Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T16:56:19.299000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T16:56:19.299034Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T16:56:19.299063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T16:56:19.299096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T16:56:19.299190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T16:56:19.299230Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T16:56:19.299266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T16:56:19.299337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:56:19.299367Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T16:56:19.299405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T16:56:19.299434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T16:56:19.300270Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.300366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.300404Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T16:56:19.300447Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T16:56:19.300489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:19.301844Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.301931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T16:56:19.301958Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T16:56:19.301989Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T16:56:19.302021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:19.302101Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T16:56:19.304915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T16:56:19.306789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T16:56:19.307069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T16:56:19.307118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T16:56:19.307572Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T16:56:19.307685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:56:19.307725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:303:2294] TestWaitNotification: OK eventTxId 101 2025-04-04T16:56:19.308307Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:19.308555Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 309us result status StatusSuccess 2025-04-04T16:56:19.308892Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-04T16:56:19.312048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" 
SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:19.312389Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-04-04T16:56:19.312486Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-04-04T16:56:19.312671Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T16:56:19.315859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-04T16:56:19.316046Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T16:56:19.316364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T16:56:19.316413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T16:56:19.316846Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:56:19.316949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:56:19.316991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:311:2302] TestWaitNotification: OK eventTxId 102 2025-04-04T16:56:19.317552Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:19.317783Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 279us result status StatusSuccess 2025-04-04T16:56:19.318119Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TDynamicNameserverTest::TestCacheUsage >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:19.135613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:19.135728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:19.135804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:19.135847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:19.135891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:19.135923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:19.135981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:19.136055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:19.136438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-04T16:56:19.231111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:19.231188Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:19.251927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:19.252228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:19.252430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:19.265877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:19.266280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:19.267075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.269724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:19.279354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.280842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:19.280939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.281166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:19.281222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:19.281267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:19.281413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.291870Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:19.439206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:19.439531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.439843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:19.440114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:19.440183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.443908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.444123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:19.444402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.444478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:19.444533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:19.444582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:19.450129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.450258Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:19.450314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:19.455878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.455970Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.456035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.456097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.460667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:19.464452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:19.464706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:19.465871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.466061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:19.466127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.466489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:19.466577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.466784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-04T16:56:19.466931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:19.471702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:19.471769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:19.472004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.472056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:19.472459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.472513Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:19.472622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:19.472680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.472757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:19.472798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.472839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:19.472891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.472942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:19.472981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:19.473068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:19.473117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:19.473158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:19.483604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:19.483776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:19.483819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:19.912186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:56:19.912281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T16:56:19.918787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2025-04-04T16:56:19.918992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-04T16:56:19.919298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:19.919353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:19.919676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T16:56:19.919812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.919884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:448:2406], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-04T16:56:19.919942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:448:2406], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-04-04T16:56:19.920108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.920168Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:19.920233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2025-04-04T16:56:19.920721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:19.922130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.922295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.922344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-04T16:56:19.922391Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-04T16:56:19.922439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T16:56:19.923909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.924034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.924076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-04-04T16:56:19.924123Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-04-04T16:56:19.924165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T16:56:19.924273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2025-04-04T16:56:19.926540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2025-04-04T16:56:19.926806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2025-04-04T16:56:19.929312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-04-04T16:56:19.931106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.931288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:19.931375Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-04-04T16:56:19.931553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2025-04-04T16:56:19.931774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:56:19.931875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T16:56:19.932368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-04-04T16:56:19.936489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:19.936554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:19.936726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T16:56:19.936829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.936872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:448:2406], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-04-04T16:56:19.936926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:448:2406], at schemeshard: 72057594046678944, txId: 128, path id: 4 FAKE_COORDINATOR: Erasing txId 128 2025-04-04T16:56:19.937413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.937466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2025-04-04T16:56:19.937598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-04-04T16:56:19.937639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-04T16:56:19.937681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-04-04T16:56:19.937780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-04T16:56:19.937827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2025-04-04T16:56:19.937894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-04-04T16:56:19.937935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2025-04-04T16:56:19.937999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2025-04-04T16:56:19.938096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T16:56:19.938143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2025-04-04T16:56:19.938181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-04T16:56:19.938214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-04-04T16:56:19.938985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.939095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.939132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2025-04-04T16:56:19.939182Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T16:56:19.939227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T16:56:19.940187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 
LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.940269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-04-04T16:56:19.940317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2025-04-04T16:56:19.940358Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-04T16:56:19.940404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T16:56:19.940493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2025-04-04T16:56:19.955976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-04-04T16:56:19.959550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 TestModificationResult got TxId: 128, wait until txId: 128 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:18.693495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:18.693618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.693683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:18.693727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:18.693773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:18.693805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:18.693896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:18.693969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:18.694390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:18.781206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:18.781270Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T16:56:18.800775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:18.801048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:18.801221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:18.813135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:18.813518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:18.814156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:18.815149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:18.821427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.822759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:18.822842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:18.823030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:18.823076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:18.823110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:18.823212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:18.829934Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:19.005696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:19.005955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.006200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:19.006457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:19.006519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.009309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.009493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:19.009721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.009780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:19.009820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:19.009867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:19.012315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.012389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:19.012430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:19.014656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.014715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.014781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.014852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.018596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:19.021040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:19.021232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:19.022375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:19.022535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:19.022669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.022998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:19.023054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:19.023277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:19.023371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:19.026263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:19.026321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:19.026525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:19.026578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:19.026953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:19.027008Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:19.027105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:19.027160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.027217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:19.027255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.027310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:19.027356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:19.027396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:19.027430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:19.027517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:19.027563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:19.027598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:19.029697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:19.029824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:19.029861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
end EvNotifyTxCompletion 2025-04-04T16:56:20.062973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2025-04-04T16:56:20.063439Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-04-04T16:56:20.063577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-04-04T16:56:20.063632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:338:2329] 2025-04-04T16:56:20.063796Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-04T16:56:20.063861Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-04T16:56:20.063916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-04T16:56:20.063940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:338:2329] 2025-04-04T16:56:20.064021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-04T16:56:20.064043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:338:2329] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2025-04-04T16:56:20.064480Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:20.064660Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 221us result status StatusSuccess 2025-04-04T16:56:20.064990Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 
PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:20.065642Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:20.065793Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 165us result status StatusSuccess 2025-04-04T16:56:20.066047Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:20.067212Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:20.067350Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 173us result status StatusSuccess 2025-04-04T16:56:20.067799Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: 
true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:20.068283Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:20.068437Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 172us result status StatusSuccess 2025-04-04T16:56:20.068660Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:20.069104Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:20.069239Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 165us result status StatusSuccess 2025-04-04T16:56:20.069479Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: 
"MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::WriteOverload [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |77.9%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> Cdc::InitialScan [GOOD] >> Cdc::InitialScanDebezium >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD] >> TTxDataShardUploadRows::TestUploadRows >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload [GOOD] Test command err: 2025-04-04T16:55:47.289359Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:47.376526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:47.401057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:47.401325Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:47.410054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:47.410326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:47.410584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:47.410735Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:47.410864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:47.410990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:47.411109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:47.411250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:47.411391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:47.411527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:47.411636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:47.411742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:47.438088Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:47.438350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:47.438412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:47.438594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:47.438844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:47.438913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:47.438965Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:47.439031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-04-04T16:55:47.439097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:47.439164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:47.439201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:47.439353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:47.439409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:47.439443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:47.439486Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:47.439577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:47.439655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:47.439709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:47.439734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:47.439806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:47.439855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:47.439893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:47.439940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:47.439978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:47.440010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:47.440465Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-04T16:55:47.440608Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T16:55:47.440689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-04T16:55:47.440792Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T16:55:47.440982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:47.441041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:47.441076Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:47.441316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:47.441379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:47.441417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:47.441595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:47.441654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:47.441695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:47.441891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:47.441932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:47.441965Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:47.442087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:47.442119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:47.442169Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 2 2025-04-04T16:56:16.770162Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:16.779013Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-04-04T16:56:16.838905Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-04-04T16:56:16.879346Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-04-04T16:56:16.879520Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:16.917504Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-04T16:56:16.985152Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-04-04T16:56:17.022389Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-04-04T16:56:17.022572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.049513Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-04T16:56:17.112621Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-04-04T16:56:17.143419Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-04-04T16:56:17.143597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.145832Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-04T16:56:17.222202Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-04-04T16:56:17.271480Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-04-04T16:56:17.271649Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.273859Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-04T16:56:17.325250Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-04-04T16:56:17.356599Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-04-04T16:56:17.356770Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.383370Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-04T16:56:17.444911Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-04-04T16:56:17.599367Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-04-04T16:56:17.599552Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.732523Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-04T16:56:17.793959Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-04-04T16:56:17.828983Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-04-04T16:56:17.829151Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.840584Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-04T16:56:17.910200Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-04-04T16:56:17.958428Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-04-04T16:56:17.958647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.961071Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-04T16:56:18.028476Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-04-04T16:56:18.083829Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-04-04T16:56:18.083999Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.086075Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-04T16:56:18.136457Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-04-04T16:56:18.172769Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-04-04T16:56:18.172965Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.175198Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-04T16:56:18.224123Z 
node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-04-04T16:56:18.246038Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-04-04T16:56:18.246216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.259112Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-04T16:56:18.309990Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-04-04T16:56:18.342452Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-04-04T16:56:18.342690Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.345618Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-04T16:56:18.433292Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-04-04T16:56:18.495947Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-04-04T16:56:18.496158Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.501479Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-04T16:56:18.564478Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-04-04T16:56:18.595825Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-04-04T16:56:18.596009Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.598351Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-04T16:56:18.653486Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-04-04T16:56:18.674399Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-04-04T16:56:18.674565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.691859Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-04T16:56:18.746569Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-04-04T16:56:18.931645Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-04-04T16:56:18.931844Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:19.008068Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-04T16:56:19.053238Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-04-04T16:56:19.079632Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-04-04T16:56:19.079781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:56:20.294130Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-04-04T16:56:20.413573Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-04T16:56:20.489549Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-04-04T16:56:20.530495Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-04-04T16:56:20.530743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; >> TTxDataShardUploadRows::TestUploadShadowRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T16:54:47.886852Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:47.886961Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:47.950424Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:47.995725Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: 
"/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T16:54:48.000980Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T16:54:48.003760Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T16:54:48.006025Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-04T16:54:48.016153Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:48.049694Z node 1 :PERSQUEUE INFO: new Cookie default|b6966b26-3ca2b08e-eb9aa2bb-fae3afd8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:48.080968Z node 1 :PERSQUEUE INFO: new Cookie default|90a8e583-908b9810-1ea4e80f-b2f18d08_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:48.173373Z node 1 :PERSQUEUE INFO: new Cookie default|75ac3f3a-2b684e6c-49cd2994-6a4cb478_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:48.201785Z node 1 :PERSQUEUE INFO: new Cookie default|cb7f55d3-626c2a2c-cf97ec04-a49f97cd_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:48.231280Z node 1 :PERSQUEUE INFO: new Cookie default|abb74ff4-9ff25c28-ae476b4d-fea6e060_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:48.259793Z node 1 :PERSQUEUE INFO: new Cookie default|30744905-1497f3f8-8d2755f9-54aefb6e_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:107:2057] recipient: [2:100:2134] 2025-04-04T16:54:49.475543Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2025-04-04T16:54:49.475647Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:153:2057] recipient: [2:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:178:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:106:2138]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:180:2057] recipient: [2:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:184:2057] recipient: [2:182:2194] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:185:2195] sender: [2:186:2057] recipient: [2:182:2194] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_TREE Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:54:49.566167Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:49.566297Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:106:2138]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:106:2138]) tablet resolver refreshed! 
new actor is[2:185:2195] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_TREE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:185:2195] sender: [2:262:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 
2025-04-04T16:54:51.264978Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:54:51.266126Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T16:54:51.267270Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2260] 2025-04-04T16:54:51.269929 ... ured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_TREE Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [47:288:2281] sender: [47:386:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:102:2057] recipient: [48:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:102:2057] recipient: [48:100:2134] Leader for 
TabletID 72057594037927937 is [48:106:2138] sender: [48:107:2057] recipient: [48:100:2134] 2025-04-04T16:56:20.526927Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:56:20.527006Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:148:2057] recipient: [48:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:148:2057] recipient: [48:146:2169] Leader for TabletID 72057594037927938 is [48:152:2173] sender: [48:153:2057] recipient: [48:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [48:106:2138] sender: [48:178:2057] recipient: [48:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:20.549615Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:56:20.550539Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2025-04-04T16:56:20.551566Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:184:2197] 2025-04-04T16:56:20.554462Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:184:2197] 2025-04-04T16:56:20.556338Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:185:2198] 2025-04-04T16:56:20.558595Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:20.568207Z node 48 :PERSQUEUE INFO: new Cookie default|7b2627b-51975543-2271e56d-21dcae71_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:20.574865Z node 48 :PERSQUEUE INFO: new Cookie default|bc8e4fb0-2f4eae64-cf8d8bfc-7bf4eb94_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:20.607085Z node 48 :PERSQUEUE INFO: new Cookie 
default|1d4909db-123e24ee-7d5e3eb1-fb2ddba2_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:20.617760Z node 48 :PERSQUEUE INFO: new Cookie default|f6f3d31d-7a5a8549-1a943b4e-6edb27b7_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:20.626572Z node 48 :PERSQUEUE INFO: new Cookie default|b1d7c26a-c853b17d-a963baa-a1829d18_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:20.638965Z node 48 :PERSQUEUE INFO: new Cookie default|16b93a91-5aafde68-b0b49ed6-36931741_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:102:2057] recipient: [49:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:102:2057] recipient: [49:100:2134] Leader for TabletID 72057594037927937 is [49:106:2138] sender: [49:107:2057] recipient: [49:100:2134] 2025-04-04T16:56:21.286638Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:56:21.286721Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:148:2057] recipient: [49:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:148:2057] recipient: [49:146:2169] Leader for TabletID 72057594037927938 is [49:152:2173] sender: [49:153:2057] recipient: [49:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [49:106:2138] sender: [49:176:2057] recipient: [49:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:21.306464Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:56:21.307307Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:174:2189] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2025-04-04T16:56:21.308040Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:182:2195] 2025-04-04T16:56:21.310501Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:182:2195] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 
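Each "new Cookie default|..._N generated for partition 0 ... owner default" record above marks a new write session: the cookie concatenates the owner name, a random hexadecimal identifier, and a per-owner sequence number that increments 0, 1, 2, ... across successive sessions. The sketch below reproduces the shape of these cookies; the format is read off the log lines and the meaning of the hex groups is an assumption (the real generator lives in the PersQueue tablet code).

    #include <cstdint>
    #include <iostream>
    #include <random>
    #include <sstream>
    #include <string>

    // Builds a write cookie shaped like the log lines above:
    //   <owner>|<four unpadded hex groups separated by '-'>_<seq>
    // The semantics of the hex groups are an assumption here.
    std::string MakeCookie(const std::string& owner, uint64_t seq,
                           std::mt19937& rng) {
        std::ostringstream out;
        out << owner << '|' << std::hex;
        for (int i = 0; i < 4; ++i) {
            if (i) out << '-';
            out << rng();  // one random 32-bit group, unpadded hex
        }
        out << std::dec << '_' << seq;
        return out.str();
    }

    int main() {
        std::mt19937 rng(42);
        for (uint64_t seq = 0; seq < 3; ++seq)
            std::cout << "new Cookie " << MakeCookie("default", seq, rng)
                      << " generated for partition 0\n";
        return 0;
    }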
2025-04-04T16:56:21.312073Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:183:2196] 2025-04-04T16:56:21.313764Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:183:2196] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:21.322144Z node 49 :PERSQUEUE INFO: new Cookie default|ee75aeed-59cc3540-a5f4e920-69f25345_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:21.328624Z node 49 :PERSQUEUE INFO: new Cookie default|b92ffbb3-467dd708-c83ac761-63993e74_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:21.391032Z node 49 :PERSQUEUE INFO: new Cookie default|264cf86e-d5f6e7ab-3fded84f-7429943_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:21.404917Z node 49 :PERSQUEUE INFO: new Cookie default|ba05b9d4-dde5295b-6a8add24-ed1749f0_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:21.417878Z node 49 :PERSQUEUE INFO: new Cookie default|dcdd6282-4495b9b1-314a449-c4d5a391_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T16:56:21.427655Z node 49 :PERSQUEUE INFO: new Cookie default|a55504de-a95d25ac-95358a69-c7f5ce79_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2025-04-04T16:56:21.161315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:21.161405Z node 1 :IMPORT WARN: Table profiles were not loaded >> GracefulShutdown::TTxGracefulShutdown >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD] Test command err: 2025-04-04T16:55:47.538715Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:47.649727Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:47.673692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:47.673966Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:47.682323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:47.682563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:47.682814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:47.682934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:47.683049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:47.683162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:47.683264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:47.683389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:47.683528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:47.683653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:47.683777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:47.683882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:47.720918Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:47.721196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:47.721257Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:47.721453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:47.721619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:47.721696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:47.721745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:47.721827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:47.721887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:47.721931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:47.721963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:47.722157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:47.722262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:47.722311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:47.722344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:47.722426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:47.722519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:47.722579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:47.722607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:47.722719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:47.722757Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:47.722789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:47.722834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:47.722881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:47.722923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:47.723394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-04-04T16:55:47.723522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=60; 2025-04-04T16:55:47.723628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=57; 2025-04-04T16:55:47.723721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-04T16:55:47.723914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:47.723977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:47.724012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:47.724222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:47.724280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:47.724333Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:47.724539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:47.724597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:47.724647Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:47.724892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:47.724962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:47.725005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:47.725131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:47.725167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:47.725213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 2 2025-04-04T16:56:17.126543Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.144588Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-04-04T16:56:17.215761Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-04-04T16:56:17.244034Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-04-04T16:56:17.244242Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.246826Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-04-04T16:56:17.304627Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-04-04T16:56:17.377596Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-04-04T16:56:17.377801Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.380310Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-04-04T16:56:17.474113Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-04-04T16:56:17.555217Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-04-04T16:56:17.555427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 
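The WriteStandaloneOverload trace above shows the shard working through resent writes: the test deliberately overloads the tablet, each rejected TEvWrite is resent ("RESEND TEvWrite"), and every accepted write is assigned the next writeId and persisted as a blob batch under "Save Batch GenStep: <generation>:<step>". The generation stays at 2 (the tablet's current generation, bumped only on restart) while the step advances with each batch. A small sketch of that counter pair, assuming this gen:step reading of the log; the names are illustrative, not the columnshard's actual types.

    #include <cstdint>
    #include <iostream>

    // GenStep as printed in "Save Batch GenStep: 2:15": the tablet
    // generation plus a step counter advanced per persisted batch.
    struct GenStep {
        uint32_t Gen;
        uint64_t Step;
    };

    int main() {
        GenStep last{2, 0};  // generation 2, as in the log above
        for (uint64_t writeId = 1; writeId <= 5; ++writeId) {
            // Each accepted write persists one batch and advances the step;
            // the generation only changes when the tablet restarts.
            ++last.Step;
            std::cout << "Save Batch GenStep: " << last.Gen << ':' << last.Step
                      << " writeId " << writeId << "\n";
        }
        return 0;
    }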
2025-04-04T16:56:17.557722Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-04-04T16:56:17.616530Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-04-04T16:56:17.666364Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-04-04T16:56:17.666567Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.712882Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-04-04T16:56:17.815025Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-04-04T16:56:17.839479Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-04-04T16:56:17.839707Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:17.876253Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-04-04T16:56:17.940647Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-04-04T16:56:18.121694Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-04-04T16:56:18.121893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.305621Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-04-04T16:56:18.377859Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-04-04T16:56:18.405625Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-04-04T16:56:18.405838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.408446Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-04-04T16:56:18.492674Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-04-04T16:56:18.526131Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-04-04T16:56:18.526379Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.528866Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-04-04T16:56:18.579375Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-04-04T16:56:18.625285Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch 
GenStep: 2:13 Blob count: 2 2025-04-04T16:56:18.625508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.636080Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-04-04T16:56:18.699384Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-04-04T16:56:18.729126Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-04-04T16:56:18.729399Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.742646Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-04-04T16:56:18.791940Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-04-04T16:56:18.814216Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-04-04T16:56:18.814431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.816869Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-04-04T16:56:18.863515Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-04-04T16:56:18.896464Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-04-04T16:56:18.896645Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.898810Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-04-04T16:56:18.949992Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-04-04T16:56:18.976404Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-04-04T16:56:18.976611Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:18.978331Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-04-04T16:56:19.021051Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-04-04T16:56:19.058074Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-04-04T16:56:19.058284Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:19.069207Z node 1 :TX_COLUMNSHARD DEBUG: 
Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-04-04T16:56:19.123386Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-04-04T16:56:19.150923Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-04-04T16:56:19.151149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:19.153531Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-04-04T16:56:19.226431Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-04-04T16:56:19.466546Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-04-04T16:56:19.466732Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-04-04T16:56:19.587496Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-04-04T16:56:19.640732Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-04-04T16:56:19.671464Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-04-04T16:56:19.671644Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:56:20.931617Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-04-04T16:56:21.000711Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-04-04T16:56:21.063823Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-04-04T16:56:21.102691Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-04-04T16:56:21.102925Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: 
[1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:129:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:132:2155] sender: [1:134:2058] recipient: [1:108:2140] 2025-04-04T16:55:20.221071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:20.221165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:20.221208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:20.221246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:20.221289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:20.221324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:20.221383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:20.221464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:20.221800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:20.356351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:20.356415Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:173:2058] recipient: [1:15:2062] 2025-04-04T16:55:20.403843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:20.404273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:20.404492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:20.427484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:20.427862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:20.428680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.428914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:20.446997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.448726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:20.448809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.448910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-04T16:55:20.448965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:20.449013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:20.449279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208] Leader for TabletID 72057594037968897 is [1:213:2212] sender: [1:214:2058] recipient: [1:207:2208] 2025-04-04T16:55:20.458262Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-04T16:55:20.621066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:20.621367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.621621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:20.621880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:20.621969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.628005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.628227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:20.628497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.628590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:20.628660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:20.628704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:20.633993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.634102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:20.634150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:20.643042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
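The schemeshard trace here walks one sub-operation through its state machine, recording the numeric transitions "Change state for txid 1:0 2 -> 3", "3 -> 128" and, just below, "128 -> 240". A sketch of that progression follows; the numeric codes are taken directly from the log, while the state names are inferred from the surrounding TCreateParts/TConfigureParts/TPropose/TDone messages and are otherwise an assumption.

    #include <iostream>

    // Sub-operation phases as they appear in the log.  Values match the
    // logged transitions; names follow the nearby progress messages.
    enum class ETxState {
        CreateParts    = 2,    // TCreateParts: create shards (none needed here)
        ConfigureParts = 3,    // TConfigureParts: configure the parts
        Propose        = 128,  // TPropose: plan the step via the coordinator
        Done           = 240,  // TDone: operation finished, ready to notify
    };

    ETxState Next(ETxState s) {
        switch (s) {
            case ETxState::CreateParts:    return ETxState::ConfigureParts;
            case ETxState::ConfigureParts: return ETxState::Propose;
            case ETxState::Propose:        return ETxState::Done;
            case ETxState::Done:           return ETxState::Done;
        }
        return ETxState::Done;  // unreachable; silences compiler warnings
    }

    int main() {
        ETxState s = ETxState::CreateParts;
        while (s != ETxState::Done) {
            ETxState n = Next(s);
            std::cout << "Change state for txid 1:0 "
                      << static_cast<int>(s) << " -> "
                      << static_cast<int>(n) << "\n";
            s = n;
        }
        return 0;
    }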
2025-04-04T16:55:20.643137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.643193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.643283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.655293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:20.657894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:20.658179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:132:2155] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:20.659458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.659645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:20.659707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.660063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:20.660161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.660366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:20.660461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:20.663296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:20.663378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:20.663609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.663657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2203], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:20.664102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.664183Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:20.664318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:20.664380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.664427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:20.664469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.664513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:20.664562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.664601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... ARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 456us result status StatusSuccess 2025-04-04T16:56:23.228233Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:23.235415Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:56:23.235764Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 402us result status StatusSuccess 2025-04-04T16:56:23.236808Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardUserAttrsTest::SpecialAttributes |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TCmsTest::VDisksEviction [GOOD] >> TSchemeShardUserAttrsTest::SetAttrs >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> GracefulShutdown::TTxGracefulShutdown [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-04-04T16:56:22.843826Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-04T16:56:22.843945Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-04T16:56:22.844111Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-04T16:56:22.846528Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 
120027512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 24 
InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120027512 } } 2025-04-04T16:56:22.847519Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: 
"pdisk-22-22" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120027512 } 2025-04-04T16:56:22.847841Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003512s 2025-04-04T16:56:22.847937Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T16:56:22.848169Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-04T16:56:22.848266Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-04T16:56:22.848364Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-04T16:56:22.848559Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-04T16:56:22.848830Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T16:56:22.848903Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-04T16:56:22.849209Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-04T16:56:22.849269Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: 
nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-04T16:56:22.849303Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-04T16:56:22.849335Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-04T16:56:22.849364Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-0 ... ices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540560 } } 2025-04-04T16:56:23.241616Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 
120540560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540560 } 2025-04-04T16:56:23.241934Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-04T16:56:23.242031Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-04-04T16:56:23.242098Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-04-04T16:56:23.242326Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-04-04T16:56:23.242593Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T16:56:23.242668Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-04-04T16:56:23.243006Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-04-04T16:56:23.243077Z node 18 
:CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T16:56:23.243186Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-04-04T16:56:23.243264Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-04-04T16:56:23.243307Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-04-04T16:56:23.243337Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-04-04T16:56:23.243392Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-04-04T16:56:23.243423Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-04-04T16:56:23.243450Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-04-04T16:56:23.243483Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-04T16:56:23.243680Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-04T16:56:23.244541Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-04T16:56:23.244677Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-04T16:56:23.244783Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-04T16:56:23.244855Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-04T16:56:23.244936Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-04T16:56:23.245001Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
ResponseTime: 120540 2025-04-04T16:56:23.245068Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-04-04T16:56:23.245126Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-04T16:56:23.259937Z node 18 :CMS DEBUG: TTxStorePermissions complete 2025-04-04T16:56:23.260174Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2025-04-04T16:56:23.260799Z node 18 :CMS INFO: User user removes request user-r-3 2025-04-04T16:56:23.260844Z node 18 :CMS DEBUG: Resulting status: OK 2025-04-04T16:56:23.260910Z node 18 :CMS DEBUG: TTxRemoveRequest Execute 2025-04-04T16:56:23.260949Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18 2025-04-04T16:56:23.261078Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2025-04-04T16:56:23.275436Z node 18 :CMS DEBUG: TTxRemoveRequest Complete 2025-04-04T16:56:23.275670Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TCmsTest::StateStorageRollingRestart [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TSchemeShardUserAttrsTest::MkDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2025-04-04T16:56:23.765651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:23.765728Z node 1 :IMPORT WARN: Table profiles were not loaded >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageRollingRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:25.373559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:25.373662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:25.373734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:25.373771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:25.373812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:25.373843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:25.373897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:25.373969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:25.374337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:25.461543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:25.461609Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:25.477730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:25.478017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:25.478203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:25.489834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:25.490210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:25.491012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:25.492281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:25.498854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:25.500406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:25.500504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:25.500730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:25.500790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:25.500840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:25.500998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.509516Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:25.665305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:25.665608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.665874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:25.666116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:25.666183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.668951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:25.669102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:25.669351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.669416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:25.669471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:25.669511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:25.671834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.671905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:25.671950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:25.674493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.674551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.674600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:25.674673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:25.678556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:25.680796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:25.681041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:25.682121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:25.682324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:25.682388Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:25.682730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:25.682804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:25.682963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:25.683062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:25.685465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:25.685521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:25.685722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:25.685774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:25.686098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.686147Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:25.686271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:25.686310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:25.686385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:25.686424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:25.686463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:25.686519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:25.686564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:25.686599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:25.686688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:25.686728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:25.686761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:25.688782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:25.688948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:25.688991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4-04T16:56:25.735144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T16:56:25.735191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T16:56:25.735434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.735487Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:25.735542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T16:56:25.735674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:25.736741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.736859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.736900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:25.736946Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T16:56:25.737009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:25.737739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.737830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.737866Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:25.737906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T16:56:25.737936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:25.738026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T16:56:25.740732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T16:56:25.740906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2025-04-04T16:56:25.741704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:25.741836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:25.741892Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-04-04T16:56:25.742069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-04T16:56:25.742284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:25.742386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:25.743847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:56:25.744441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T16:56:25.746523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:25.746574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:25.746786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:56:25.746904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:25.746942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T16:56:25.746983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T16:56:25.747049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.747111Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T16:56:25.747222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:56:25.747290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:56:25.747340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:56:25.747377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:56:25.747417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T16:56:25.747462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:56:25.747501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T16:56:25.747542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T16:56:25.747632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:25.747677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T16:56:25.747733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T16:56:25.747780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T16:56:25.749171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.749269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.749304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:25.749361Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T16:56:25.749422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:25.750706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.750791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:25.750822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:25.750852Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T16:56:25.750882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:25.750960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T16:56:25.753626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:56:25.754812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-04T16:56:25.757622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:25.757888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.757999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2025-04-04T16:56:25.763199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:25.763416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit [GOOD] >> KqpRanges::ValidatePredicatesDataQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:25.849029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:25.849151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-04-04T16:56:25.849201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:25.849234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:25.849266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:25.849311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:25.849365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:25.849432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:25.849728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:25.934033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:25.934105Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:25.946024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:25.946307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:25.946458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:25.960034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:25.960261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:25.960739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:25.961455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:25.965619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:25.966603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:25.966685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:25.966852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:25.966890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:25.966924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:25.967012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:25.972107Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:26.104598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" 
Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:26.104877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.105157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:26.105391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:26.105441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.107401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:26.107551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:26.107770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.107814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:26.107851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:26.107881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:26.110742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.110822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:26.110868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:26.113487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.113548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.113581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:26.113626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.123308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:26.129140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:26.129405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:26.130581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:26.130803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:26.130873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:26.131180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:26.131256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:26.131447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:26.131556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:26.135588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:26.135656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:26.135859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:26.135905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:26.136286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.136351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:26.136474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:26.136512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.136598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:26.136642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.136687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:26.136730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.136771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:26.136806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:26.136907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:26.136965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:26.137001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:26.139317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:26.139503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:26.139550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ationSubscriber for txId 102: satisfy waiter [1:320:2311] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-04T16:56:26.247297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:26.247492Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 225us result status StatusSuccess 2025-04-04T16:56:26.247927Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-04T16:56:26.250884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:26.251070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.251179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:26.251335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:26.251387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at 
schemeshard: 72057594046678944 2025-04-04T16:56:26.253836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:26.253974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-04-04T16:56:26.254145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.254207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.254299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-04T16:56:26.254442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:26.256513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-04T16:56:26.256637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-04T16:56:26.256972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:26.257105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:26.257162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-04T16:56:26.257372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:56:26.257413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:56:26.257459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:56:26.257491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:56:26.257545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:26.257632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-04T16:56:26.257687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:26.257726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:56:26.257768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 
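An aside for readers following the trace: each suboperation above advances through the numeric states reported by the "Change state for txid" messages, namely 2 (TCreateParts), then 3 (TConfigureParts), then 128 (TPropose, which waits for the coordinator's plan step), and finally 240 (TDone, after which the result is published and the tx state removed). Below is a minimal sketch of that progression, reconstructed from these log lines alone; the enum and function are an illustrative model, not YDB's actual TTxState API.

#include <cstdio>

// Illustrative model of the operation states visible in the log above.
// Values mirror the "Change state for txid N:0 A -> B" messages; the
// real schemeshard state machine has many more states than these four.
enum class EOpState : int {
    CreateParts    = 2,   // "TCreateParts ProgressState"
    ConfigureParts = 3,   // "TConfigureParts ProgressState"
    Propose        = 128, // "TPropose ProgressState", waits for coordinator plan
    Done           = 240, // "TDone ProgressState", then publish & RemoveTx
};

// Advance one step, printing the same transition the log records.
EOpState Progress(EOpState s) {
    switch (s) {
        case EOpState::CreateParts:    // "no shards to create, do next state"
            std::printf("Change state 2 -> 3\n");
            return EOpState::ConfigureParts;
        case EOpState::ConfigureParts:
            std::printf("Change state 3 -> 128\n");
            return EOpState::Propose;
        case EOpState::Propose:        // "HandleReply TEvOperationPlan"
            std::printf("Change state 128 -> 240\n");
            return EOpState::Done;
        case EOpState::Done:
            return EOpState::Done;     // operation complete
    }
    return s; // unreachable
}

int main() {
    EOpState s = EOpState::CreateParts;
    while (s != EOpState::Done) {
        s = Progress(s);
    }
}

In the real schemeshard these transitions are driven by the TTxOperationProgress executions that bracket each state change in the surrounding DEBUG lines.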
2025-04-04T16:56:26.257799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T16:56:26.257848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:56:26.257885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-04T16:56:26.257918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-04-04T16:56:26.260113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:26.260192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:26.260391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:26.260452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2025-04-04T16:56:26.260961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:56:26.261050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:56:26.261096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T16:56:26.261148Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-04T16:56:26.261191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:26.261296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T16:56:26.263489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T16:56:26.263788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T16:56:26.263851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T16:56:26.264311Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T16:56:26.264414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:56:26.264455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 103 2025-04-04T16:56:26.265047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:26.265300Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 235us result status StatusSuccess 2025-04-04T16:56:26.265791Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardUserAttrsTest::MkDir [GOOD]
>> TSchemeShardUserAttrsTest::Boot
>> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD]
>> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD]
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD]
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD]
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish
>> TInterconnectTest::TestConnectAndDisconnect
>> TTxDataShardUploadRows::TestUploadShadowRows [GOOD]
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowData
>> TestProtocols::TestResolveProtocol
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD]
>> TTxDataShardUploadRows::UploadRowsToReplicatedTable
>> Cdc::AddIndex [GOOD]
>> Cdc::AddStream
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:26.654188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds#
600.000000s, MaxRate# 1 2025-04-04T16:56:26.654305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:26.654367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:26.654424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:26.654469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:26.654495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:26.654560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:26.654656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:26.654994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:26.756957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:26.757041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:26.770132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:26.770422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:26.770585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:26.781306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:26.781586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:26.782203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:26.785025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:26.789240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:26.790448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:26.790535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:26.790775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:26.790831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:26.790880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:26.791036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.797328Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:26.935412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:26.935689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.935885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:26.936124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:26.936185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.939292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:26.939439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:26.939668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.939721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:26.939764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:26.939796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:26.943843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.943898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:26.943935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:26.946546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.946607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.946661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:26.946727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.950351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:26.952285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:26.952486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:26.953293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:26.953419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:26.953479Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:26.953681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:26.953724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:26.953850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:26.953927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:26.956125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:26.956160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:26.956281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:26.956309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:26.956549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.956579Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:26.956652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:26.956678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.956724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:26.956749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.956777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:26.956808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:26.956839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:26.956864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:26.956915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:26.956943Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:26.956969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:26.958909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:26.959029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:26.959061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... .107563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T16:56:27.108205Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T16:56:27.108339Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T16:56:27.108390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T16:56:27.108449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:378:2369] 2025-04-04T16:56:27.108620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:56:27.108710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T16:56:27.108734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2369] 2025-04-04T16:56:27.108849Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T16:56:27.108902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:56:27.108936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:378:2369] 2025-04-04T16:56:27.109054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:56:27.109074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:378:2369] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-04T16:56:27.109635Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.109820Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 220us result status StatusSuccess 2025-04-04T16:56:27.110481Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.111121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.111395Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 246us result status StatusSuccess 2025-04-04T16:56:27.111753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.112373Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.112596Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 186us result status StatusSuccess 2025-04-04T16:56:27.112953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.113897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.114076Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 194us result status StatusSuccess 2025-04-04T16:56:27.114474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.115144Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.115335Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 205us result status StatusSuccess 2025-04-04T16:56:27.115640Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TActorActivity::Basic [GOOD]
>> ActorBootstrapped::TestBootstrapped [GOOD]
>> ActorBootstrapped::TestBootstrappedParent [GOOD]
>> TActorTracker::Basic
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:26.843420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:26.843541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:26.843619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:26.843661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
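One orientation note before this test's boot log continues: in every Version { ... } block printed by TTxDescribeScheme in this log, GeneralVersion equals the sum of the component counters shown (ACLVersion, EffectiveACLVersion, UserAttrsVersion, ChildrenVersion, SubDomainVersion, SecurityStateVersion), and a successful ALTER USER ATTRIBUTES bumps UserAttrsVersion by one (compare UserAttrsVersion: 1 on freshly created dirs with 2 on /MyRoot after txId 103). A rough sketch of that bookkeeping, using hypothetical types inferred from the output rather than schemeshard's real ones:

#include <cassert>
#include <cstdint>
#include <map>
#include <string>

// Hypothetical mirror of the Version block in the DescribeScheme output
// above; field names follow the log, but these are not schemeshard's types.
struct TPathVersion {
    uint64_t ACLVersion = 0;
    uint64_t EffectiveACLVersion = 0;
    uint64_t UserAttrsVersion = 1;  // 1 on a freshly created path in this log
    uint64_t ChildrenVersion = 0;
    uint64_t SubDomainVersion = 0;
    uint64_t SecurityStateVersion = 0;

    // In every sample above, GeneralVersion is the sum of the components.
    uint64_t General() const {
        return ACLVersion + EffectiveACLVersion + UserAttrsVersion +
               ChildrenVersion + SubDomainVersion + SecurityStateVersion;
    }
};

struct TPathEntry {
    TPathVersion Version;
    std::map<std::string, std::string> UserAttrs;
};

// What a successful ALTER USER ATTRIBUTES does to the counters,
// per txId 103 above: UserAttrsVersion 1 -> 2, GeneralVersion follows.
void AlterUserAttrs(TPathEntry& path, const std::string& key, const std::string& value) {
    path.UserAttrs[key] = value;
    ++path.Version.UserAttrsVersion;
}

int main() {
    TPathEntry root;
    root.Version = {0, 0, 1, 3, 1, 0};    // /MyRoot before txId 103 (inferred)
    AlterUserAttrs(root, "AttrRoot", "ValRoot");
    assert(root.Version.General() == 6);  // matches "GeneralVersion: 6" in the log
}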
2025-04-04T16:56:26.843712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:26.843741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:26.843801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:26.843891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:26.844305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:26.933974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:26.934043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:26.952336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:26.952663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:26.952845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:26.972187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:26.972550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:26.973207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:26.979949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:26.986707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:26.988146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:26.988248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:26.988463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:26.988515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:26.988552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:26.988698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:26.996536Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:27.126693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:27.127011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.127290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:27.127545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:27.127608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.131620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:27.131821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:27.132096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.132165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:27.132217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:27.132261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:27.135067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.135147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:27.135195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:27.137325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.137417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.137462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:27.137541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.142159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:27.144575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:27.144837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:27.145993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:27.146200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.146295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:27.146685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:27.146765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:27.146953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:27.147069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:27.149923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:27.150000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:27.150194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:27.150263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:27.150664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.150725Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:27.150838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:27.150878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.150947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:27.150988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.151030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:27.151089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.151131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:27.151165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:27.151246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:27.151287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:27.151323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:27.153489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:27.153618Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:27.153660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 5 msg type: 269090816 2025-04-04T16:56:27.313955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2025-04-04T16:56:27.315191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:27.315345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.315433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2025-04-04T16:56:27.315568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.315640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T16:56:27.315702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T16:56:27.315751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T16:56:27.315793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T16:56:27.315865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:56:27.315976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T16:56:27.316023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-04T16:56:27.316089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T16:56:27.316133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T16:56:27.316176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T16:56:27.316245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T16:56:27.316302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-04-04T16:56:27.316372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-04T16:56:27.316414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-04T16:56:27.317562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T16:56:27.317694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 105 2025-04-04T16:56:27.319889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:27.319940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:27.320211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T16:56:27.320386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:27.320426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-04T16:56:27.320471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-04-04T16:56:27.321236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T16:56:27.321371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T16:56:27.321410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-04T16:56:27.321458Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-04T16:56:27.321505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T16:56:27.322175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T16:56:27.322302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T16:56:27.322350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T16:56:27.322379Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T16:56:27.322413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T16:56:27.322505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-04T16:56:27.322781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:56:27.322839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T16:56:27.322941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:56:27.326456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T16:56:27.327194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T16:56:27.327326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T16:56:27.327740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T16:56:27.327804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T16:56:27.328379Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T16:56:27.328500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T16:56:27.328547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:397:2388] TestWaitNotification: OK eventTxId 105 2025-04-04T16:56:27.329354Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.329583Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 280us result status StatusPathDoesNotExist 2025-04-04T16:56:27.329770Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T16:56:27.330423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.330684Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 225us result status StatusSuccess 2025-04-04T16:56:27.331205Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:26.928069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:26.928194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:26.928283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:26.928325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:26.928370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:26.928400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:26.928459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:26.928536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-04-04T16:56:26.928943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:27.018396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:27.018454Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:27.031119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:27.031389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:27.031573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:27.049619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:27.049932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:27.050657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:27.051661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:27.057166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:27.058585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:27.058695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:27.058896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:27.058949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:27.058992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:27.059127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.065914Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:27.222648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:27.223014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.223295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:27.223528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:27.223584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.227527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-04T16:56:27.227708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:27.227940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.227994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:27.228038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:27.228079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:27.230728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.230797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:27.230836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:27.232914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.232974Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.233015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:27.233068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.236718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:27.239126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:27.239359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:27.240465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:27.240639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.240697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:27.241006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:27.241073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:27.241239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:27.241330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:27.244036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:27.244084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:27.244271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:27.244310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:27.244645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.244692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:27.244792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:27.244824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.244902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:27.244936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.244971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:27.245013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:27.245065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:27.245096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:27.245164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:27.245218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:27.245253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:27.247333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:27.247482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:27.247526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T16:56:27.327647Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:56:27.327743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:56:27.327800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:330:2321] TestWaitNotification: OK eventTxId 102 2025-04-04T16:56:27.328423Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.328605Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 197us result status StatusSuccess 2025-04-04T16:56:27.328936Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-04T16:56:27.331754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:27.331984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.332130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T16:56:27.332278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:27.332327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.343303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-04-04T16:56:27.343538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-04-04T16:56:27.343788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.343828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-04-04T16:56:27.343880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-04T16:56:27.344036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:27.347294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-04T16:56:27.347452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-04T16:56:27.347821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:27.347990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:27.348038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-04-04T16:56:27.348269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:56:27.348306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:56:27.348349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:56:27.348378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:56:27.348439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:27.348493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-04T16:56:27.348545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T16:56:27.348578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:56:27.348643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T16:56:27.348678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T16:56:27.348731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:27.348767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-04T16:56:27.348816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T16:56:27.351110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:27.351160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:56:27.351295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:27.351328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-04-04T16:56:27.351749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:56:27.351821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:56:27.351855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T16:56:27.351885Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T16:56:27.351917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:27.352005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T16:56:27.353587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T16:56:27.353823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T16:56:27.353866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T16:56:27.354185Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T16:56:27.354279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:56:27.354306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:347:2338] TestWaitNotification: OK eventTxId 103 2025-04-04T16:56:27.354858Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:27.355023Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
path "/MyRoot/DirA" took 170us result status StatusSuccess 2025-04-04T16:56:27.355566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TInterconnectTest::TestBlobEvent >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> TActorTracker::Basic [GOOD] >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> TInterconnectTest::TestManyEvents >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> TInterconnectTest::TestSimplePingPong >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ValidatePredicatesDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 6627, MsgBus: 16676 2025-04-04T16:53:28.671851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502161210217511:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:28.766953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00150a/r3tmp/tmp4ELsYK/pdisk_1.dat 2025-04-04T16:53:32.562355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:33.674627Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502161210217511:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:33.674704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:33.860700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:33.860806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-04T16:53:33.891041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6627, node 1 2025-04-04T16:53:34.067265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:34.284513Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:53:34.284860Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:53:34.375283Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:36.395052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:36.395173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:36.395179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:36.395277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16676 TClient is connected to server localhost:16676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:53:46.543980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:47.015148Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:47.021165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:53:49.383351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:53:49.383718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:49.673184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:53:51.164766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T16:53:52.253980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:54:00.429771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502298649172860:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:00.429867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:01.096895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:54:01.144409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:54:01.194650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:54:01.238392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:54:01.278373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:54:01.363041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:54:01.503242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502302944140695:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:01.503306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:01.503561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502302944140700:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:54:01.507905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:54:01.530305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502302944140702:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:54:01.622982Z node 1 :TX_PROXY ERROR: Actor# [1:7489502302944140758:3519] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:54:03.502162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:54:03.884201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:54:04.222485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T16:54:04.604894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:54:05.073779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
<main>: Warning: Type annotation, code: 1030
<main>:1:44: Warning: At function: Coalesce
<main>:1:58: Warning: At function: SqlIn <main>
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 22073, MsgBus: 62075 2025-04-04T16:54:07.585277Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502326192145126:2154];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00150a/r3tmp/tmpki2WCk/pdisk_1.dat 2025-04-04T16:54:07.766417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:54:07.896488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:07.896600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:07.897588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:07.906634Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22073, node 2 2025-04-04T16:54:08.182763Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:08.182786Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:08.182795Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:08.182913Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62075 TClient is connected to server ... 776Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785750742, txId: 281474976710785] shutting down 2025-04-04T16:55:51.051878Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785751078, txId: 281474976710787] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NULL ORDER BY `Value`; 2025-04-04T16:55:51.393815Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785751414, txId: 281474976710789] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Value = 20 ORDER BY `Value`; 2025-04-04T16:55:51.692445Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785751722, txId: 281474976710791] shutting down 2025-04-04T16:55:51.943102Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785751974, txId: 281474976710793] shutting down EXPECTED: [[[20u]]] RECEIVED: [[[20u]]] 2025-04-04T16:55:52.209997Z node 6 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785752240, txId: 281474976710795] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE (Key1 <= 1000) OR (Key1 > 2000 AND Key1 < 5000) OR (Key1 >= 8000) ORDER BY `Value`; 2025-04-04T16:55:52.721853Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785752751, txId: 281474976710797] shutting down 2025-04-04T16:55:53.234540Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785753262, txId: 281474976710799] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < NULL ORDER BY `Value`; 2025-04-04T16:55:53.435837Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785753430, txId: 281474976710801] shutting down 2025-04-04T16:55:53.571633Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785753564, txId: 281474976710803] shutting down EXPECTED: [] RECEIVED: [] 2025-04-04T16:55:53.660870Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037920 not found 2025-04-04T16:55:53.660927Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037921 not found 2025-04-04T16:55:53.661179Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037919 not found Trying to start YDB, gRPC: 29528, MsgBus: 61102 2025-04-04T16:55:55.167468Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489502792745767374:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:55.167543Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00150a/r3tmp/tmpt72VXv/pdisk_1.dat 2025-04-04T16:55:55.363322Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:55.402176Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:55.402429Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:55.405924Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29528, node 7 2025-04-04T16:55:55.498653Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:55.498682Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:55.498696Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:55.498882Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61102 TClient is connected to server localhost:61102 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:56.381367Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:56.405877Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:56.513876Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:56.775883Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T16:55:56.893535Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:00.168266Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489502792745767374:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:00.168389Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:56:01.129996Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502818515572934:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.130119Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.202984Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.343186Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.397124Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.479128Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.590430Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.725528Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:56:01.877808Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502818515573475:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.877935Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.878491Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489502818515573480:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:01.885486Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:56:01.904811Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489502818515573482:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:56:01.975314Z node 7 :TX_PROXY ERROR: Actor# [7:7489502818515573535:3465] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:04.054519Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T16:56:10.362369Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:56:10.362416Z node 7 :IMPORT WARN: Table profiles were not loaded >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:28.154389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:28.154521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:28.154595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:28.154662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:28.154713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:28.154765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:28.154838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:28.154927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:28.155355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:28.237944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:28.238011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:28.251696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:28.252008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute
2025-04-04T16:56:28.252211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T16:56:28.262881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T16:56:28.263217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T16:56:28.263939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T16:56:28.264909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T16:56:28.270929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:56:28.272416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:56:28.272506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:56:28.272744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T16:56:28.272812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:56:28.272857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T16:56:28.273006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.281953Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T16:56:28.416015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T16:56:28.416307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.416577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T16:56:28.416838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T16:56:28.416897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.423908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T16:56:28.424073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T16:56:28.424303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.424368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T16:56:28.424419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T16:56:28.424456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T16:56:28.427209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.427281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T16:56:28.427310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T16:56:28.429424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.429496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.429540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:56:28.429600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T16:56:28.441926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T16:56:28.444452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T16:56:28.444686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T16:56:28.445935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T16:56:28.446093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T16:56:28.446151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:56:28.446511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T16:56:28.446582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:56:28.446843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T16:56:28.446999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T16:56:28.449568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:56:28.449637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:56:28.449821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:56:28.449868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T16:56:28.450257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:28.450311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T16:56:28.450420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:56:28.450454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:56:28.450531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:56:28.450571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:56:28.450606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T16:56:28.450663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:56:28.450698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T16:56:28.450729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T16:56:28.450814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T16:56:28.450859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T16:56:28.450891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T16:56:28.452981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T16:56:28.453120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T16:56:28.453164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-04T16:56:28.453201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-04T16:56:28.453244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T16:56:28.453373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-04T16:56:28.457462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-04T16:56:28.458072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
>> TInterconnectTest::TestBlobEvent220Bytes [GOOD]
>> TInterconnectTest::TestAddressResolve
>> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD]
>> TInterconnectTest::TestSimplePingPong [GOOD]
>> TInterconnectTest::TestSubscribeByFlag
>> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit
>> TInterconnectTest::TestBlobEventDifferentSizes [GOOD]
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized
>> TInterconnectTest::TestNotifyUndelivered
>> TInterconnectTest::OldFormat
>> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD]
>> TInterconnectTest::TestAddressResolve [GOOD]
>> TInterconnectTest::OldNbs
>> TInterconnectTest::TestSubscribeByFlag [GOOD]
>> TInterconnectTest::TestReconnect
>> TestProtocols::TestConnectProtocol
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD]
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw
>> TInterconnectTest::TestNotifyUndelivered [GOOD]
>> TInterconnectTest::TestNotifyUndeliveredOnMissedActor
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:108:2139]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:109:2140]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:109:2140]
Leader for TabletID 72057594046316545 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:108:2139]
Leader for TabletID 72057594046447617 is [1:131:2154] sender: [1:134:2058] recipient: [1:109:2140]
2025-04-04T16:55:25.252433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:55:25.252520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:55:25.252557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:55:25.252597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:55:25.252661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:55:25.252692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:55:25.252762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:55:25.252856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T16:55:25.253188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:55:25.333957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T16:55:25.334016Z node 1 :IMPORT WARN: Table profiles were not loaded
Leader for TabletID 72057594046447617 is [1:131:2154] sender: [1:177:2058] recipient: [1:15:2062]
2025-04-04T16:55:25.347662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:55:25.347770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T16:55:25.347940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T16:55:25.352897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T16:55:25.353058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T16:55:25.353662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T16:55:25.354071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T16:55:25.360017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:55:25.361159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:55:25.361221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:55:25.361284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T16:55:25.361327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:55:25.361364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T16:55:25.361497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208]
IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208]
Leader for TabletID 72057594037968897 is [1:213:2212] sender: [1:214:2058] recipient: [1:207:2208]
2025-04-04T16:55:25.368048Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:234:2058] recipient: [1:15:2062]
2025-04-04T16:55:25.481339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T16:55:25.481550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:55:25.481758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T16:55:25.481982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T16:55:25.482042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:55:25.484517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T16:55:25.484663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T16:55:25.484875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:55:25.484936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T16:55:25.484985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T16:55:25.485036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T16:55:25.486673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:55:25.486712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T16:55:25.486744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T16:55:25.488246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:55:25.488328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:55:25.488360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:55:25.488413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T16:55:25.496932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T16:55:25.498627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T16:55:25.498805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
Leader for TabletID 72057594046316545 is [1:121:2147] sender: [1:249:2058] recipient: [1:15:2062]
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T16:55:25.499792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T16:55:25.499928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 121 RawX2: 4294969443 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T16:55:25.499972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:55:25.500188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T16:55:25.500222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:55:25.500385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T16:55:25.500453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T16:55:25.502297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:55:25.502336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:55:25.502497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:55:25.502527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2203], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T16:55:25.502742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:55:25.502793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T16:55:25.502868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:55:25.502909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:55:25.502940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:55:25.502965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:55:25.502990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T16:55:25.503018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:55:25.503041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ...
pPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T16:56:28.972240Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-04-04T16:56:28.972442Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 222us result status StatusSuccess
2025-04-04T16:56:28.973003Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T16:56:28.983931Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:843:2673] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0
2025-04-04T16:56:28.984071Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:782:2673] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
2025-04-04T16:56:28.984233Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:843:2673] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743785788963151 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743785788963151 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743785788963151 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] }
2025-04-04T16:56:28.986390Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:843:2673] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3
2025-04-04T16:56:28.986488Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:782:2673] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
>> TInterconnectTest::OldFormat [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheckOnNew
>> TInterconnectTest::TestReconnect [GOOD]
>> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent
>> TestProtocols::TestConnectProtocol [GOOD]
>> TestProtocols::TestHTTPCollected
|78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD]
>> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD]
>> TInterconnectTest::TestManyEvents [GOOD]
>> TInterconnectTest::TestCrossConnect
>> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD]
>> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes
>> TestProtocols::TestHTTPCollected [GOOD]
>> TInterconnectTest::TestTraceIdPassThrough
>> TInterconnectTest::OldNbs [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheckOnOld
|78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TestProtocols::TestHTTPCollectedVerySlow [GOOD]
>> TestProtocols::TestHTTPRequest
>> TSlotIndexesPoolTest::Basic [GOOD]
>> TSchemeShardUserAttrsTest::VariousUse
>> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD]
>> Cdc::InitialScanDebezium [GOOD]
>> Cdc::InitialScanRacyCompleteAndRequest
>> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD]
>> TInterconnectTest::TestPingPongThroughSubChannel
|78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD]
>> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD]
>> TTxDataShardUploadRows::TestUploadRowsLocks
>> TestProtocols::TestHTTPRequest [GOOD]
>> TInterconnectTest::TestTraceIdPassThrough [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheck
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD]
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD]
Test command err:
2025-04-04T16:56:30.085176Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation
2025-04-04T16:56:30.085295Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work
2025-04-04T16:56:30.085486Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) -> PendingActivation
2025-04-04T16:56:30.085535Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work
2025-04-04T16:56:30.094671Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo
2025-04-04T16:56:30.096938Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:11985
2025-04-04T16:56:30.097242Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @483 PendingNodeInfo -> PendingConnection
2025-04-04T16:56:30.097918Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake
2025-04-04T16:56:30.098159Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address
2025-04-04T16:56:30.100083Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer
2025-04-04T16:56:30.102725Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:55536
2025-04-04T16:56:30.103378Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake
2025-04-04T16:56:30.104630Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 181898 ProgramStartTime: 21822682943876 Serial: 1321750833 ReceiverNodeId: 6 SenderActorId: "[5:1321750833:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 181898" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 181898" AcceptUUID: "Cluster for process with id: 181898" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: false HandshakeId: "\234\231\254\272s\\\262\324\343\204\262\031\273\231)@:g\317\237\324\212R\224Q\241\246\247uQ4\320" RequestXxhash: true RequestXdcShuffle: true
2025-04-04T16:56:30.105284Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 181898 ProgramStartTime: 21822682943876 Serial: 1321750833 ReceiverNodeId: 6 SenderActorId: "[5:1321750833:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 181898" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 181898" AcceptUUID: "Cluster for process with id: 181898" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: false HandshakeId: "\234\231\254\272s\\\262\324\343\204\262\031\273\231)@:g\317\237\324\212R\224Q\241\246\247uQ4\320" RequestXxhash: true RequestXdcShuffle: true
2025-04-04T16:56:30.105382Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-04T16:56:30.105803Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo
2025-04-04T16:56:30.107131Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host ::1:30311
2025-04-04T16:56:30.107189Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057])
2025-04-04T16:56:30.107245Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @483 PendingNodeInfo -> PendingConnection
2025-04-04T16:56:30.107308Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply
2025-04-04T16:56:30.107360Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection
2025-04-04T16:56:30.107406Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @220 PendingConnection -> PendingConnection
2025-04-04T16:56:30.107783Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 181898 ProgramStartTime: 21822702141496 Serial: 1868990494 SenderActorId: "[6:1868990494:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 181898" AcceptUUID: "Cluster for process with id: 181898" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: false UseXxhash: true UseXdcShuffle: true }
2025-04-04T16:56:30.107926Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded
2025-04-04T16:56:30.109351Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded
2025-04-04T16:56:30.109416Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false
2025-04-04T16:56:30.109463Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 PendingConnection -> StateWork
2025-04-04T16:56:30.109617Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:22:2048]
2025-04-04T16:56:30.109705Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:1868990494:0] peer: [5:1321750833:0] socket: 25
2025-04-04T16:56:30.109755Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS10 traffic start
2025-04-04T16:56:30.109844Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS11 registering socket in PollerActor
2025-04-04T16:56:30.109905Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.109945Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1
2025-04-04T16:56:30.109990Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.110507Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 181898 ProgramStartTime: 21822702141496 Serial: 1868990494 SenderActorId: "[6:1868990494:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 181898" AcceptUUID: "Cluster for process with id: 181898" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: false UseXxhash: true UseXdcShuffle: true }
2025-04-04T16:56:30.110574Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-04T16:56:30.110650Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded
2025-04-04T16:56:30.111005Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS01 InputSession created
2025-04-04T16:56:30.111064Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded
2025-04-04T16:56:30.111110Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false
2025-04-04T16:56:30.111165Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 PendingConnection -> StateWork
2025-04-04T16:56:30.111262Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:24:2048]
2025-04-04T16:56:30.111300Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:1321750833:0] peer: [6:1868990494:0] socket: 24
2025-04-04T16:56:30.111333Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start
2025-04-04T16:56:30.111382Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor
2025-04-04T16:56:30.111414Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-04T16:56:30.111442Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1
2025-04-04T16:56:30.111472Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-04T16:56:30.111514Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS04 subscribe for session state for [5:17:2056]
2025-04-04T16:56:30.111645Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called
2025-04-04T16:56:30.111774Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.111862Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS01 InputSession created
2025-04-04T16:56:30.111906Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called
2025-04-04T16:56:30.111963Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.112031Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called
2025-04-04T16:56:30.112123Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.112192Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.112238Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.112286Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called
2025-04-04T16:56:30.112319Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.112366Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-04T16:56:30.112409Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-04T16:56:30.112466Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.112495Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.112548Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-04T16:56:30.112577Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-04T16:56:30.112717Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056]
2025-04-04T16:56:30.112938Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 1 Confirm# 0 DataSize# 84 InflightDataAmount# 84
2025-04-04T16:56:30.113063Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0
2025-04-04T16:56:30.114992Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called
2025-04-04T16:56:30.115103Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err#
2025-04-04T16:56:30.115225Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.115389Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.115458Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.115568Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056]
2025-04-04T16:56:30.115764Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS22 outgoing packet Serial# 1 Confirm# 1 DataSize# 84 InflightDataAmount# 84
2025-04-04T16:56:30.115869Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0
2025-04-04T16:56:30.118447Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called
2025-04-04T16:56:30.118555Z nod ... lightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets
2025-04-04T16:56:30.119123Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-04T16:56:30.119371Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056]
2025-04-04T16:56:30.119506Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 2 Confirm# 1 DataSize# 84 InflightDataAmount# 84
2025-04-04T16:56:30.119635Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS13 reestablish connection
2025-04-04T16:56:30.119711Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# EPIPE
2025-04-04T16:56:30.119817Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-04T16:56:30.119985Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called
2025-04-04T16:56:30.120104Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# 0 num# 1 err#
2025-04-04T16:56:30.120440Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS09 ReestablishConnection, reason# EndOfStream
2025-04-04T16:56:30.120558Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS07 socket disconnect 25 reason# EndOfStream
2025-04-04T16:56:30.120634Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS25 shutdown socket, reason# EndOfStream
2025-04-04T16:56:30.120761Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS15 start handshake
2025-04-04T16:56:30.121406Z node 6 :INTERCONNECT DEBUG: Handshake [6:26:2058] [node 5] ICH01 starting outgoing handshake
2025-04-04T16:56:30.121618Z node 6 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address
2025-04-04T16:56:30.122545Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called
2025-04-04T16:56:30.122652Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# 0 num# 1 err#
2025-04-04T16:56:30.122864Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS09 ReestablishConnection, reason# EndOfStream
2025-04-04T16:56:30.123015Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:45530
2025-04-04T16:56:30.125476Z node 6 :INTERCONNECT DEBUG: Handshake [6:26:2058] [node 5] ICH05 connected to peer
2025-04-04T16:56:30.125914Z node 5 :INTERCONNECT DEBUG: Handshake [5:28:2058] [node 0] ICH02 starting incoming handshake
2025-04-04T16:56:30.126047Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS07 socket disconnect -1 reason# EndOfStream
2025-04-04T16:56:30.126103Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS15 start handshake
2025-04-04T16:56:30.126947Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2059] [node 6] ICH01 starting outgoing handshake
2025-04-04T16:56:30.127084Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:28:2058]) from: [6:1868990494:0] for: [5:1321750833:0]
2025-04-04T16:56:30.127162Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS08 incoming handshake Self# [6:1868990494:0] Peer# [5:1321750833:0] Counter# 1 LastInputSerial# 1
2025-04-04T16:56:30.127223Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:28:2058]) is held
2025-04-04T16:56:30.127373Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address
2025-04-04T16:56:30.127892Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:55552
2025-04-04T16:56:30.128365Z node 6 :INTERCONNECT DEBUG: Handshake [6:31:2059] [node 0] ICH02 starting incoming handshake
2025-04-04T16:56:30.128750Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2059] [node 6] ICH05 connected to peer
2025-04-04T16:56:30.129191Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:31:2059]) from: [5:1321750833:0] for: [6:1868990494:0]
2025-04-04T16:56:30.129250Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS08 incoming handshake Self# [5:1321750833:0] Peer# [6:1868990494:0] Counter# 1 LastInputSerial# 1
2025-04-04T16:56:30.129301Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply
2025-04-04T16:56:30.129672Z node 6 :INTERCONNECT INFO: Handshake [6:31:2059] [node 5] ICH04 handshake succeeded
2025-04-04T16:56:30.129929Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded
2025-04-04T16:56:30.129990Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:31:2059] poison: false
2025-04-04T16:56:30.130047Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:26:2058] poison: true
2025-04-04T16:56:30.130114Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 StateWork -> StateWork
2025-04-04T16:56:30.130187Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS09 handshake done sender: [6:31:2059] self: [6:1868990494:0] peer: [5:1321750833:0] socket: 29
2025-04-04T16:56:30.130282Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS10 traffic start
2025-04-04T16:56:30.130386Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS11 registering socket in PollerActor
2025-04-04T16:56:30.130456Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.130510Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages
2025-04-04T16:56:30.130598Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets
2025-04-04T16:56:30.130691Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2
2025-04-04T16:56:30.130746Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.131537Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS01 InputSession created
2025-04-04T16:56:30.131590Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS02 ReceiveData called
2025-04-04T16:56:30.131683Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.131842Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS02 ReceiveData called
2025-04-04T16:56:30.131888Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.131929Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.131970Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.132015Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.132033Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.132713Z node 5 :INTERCONNECT INFO: Handshake [5:29:2059] [node 6] ICH04 handshake succeeded
2025-04-04T16:56:30.137414Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded
2025-04-04T16:56:30.137526Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:28:2058] poison: true
2025-04-04T16:56:30.137603Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:29:2059] poison: false
2025-04-04T16:56:30.137647Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 StateWork -> StateWork
2025-04-04T16:56:30.137703Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:29:2059] self: [5:1321750833:0] peer: [6:1868990494:0] socket: 28
2025-04-04T16:56:30.137849Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start
2025-04-04T16:56:30.137963Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor
2025-04-04T16:56:30.138018Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-04T16:56:30.138061Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2
2025-04-04T16:56:30.138217Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-04T16:56:30.140296Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS02 ReceiveData called
2025-04-04T16:56:30.140407Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err#
2025-04-04T16:56:30.140553Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:32:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.140672Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS01 InputSession created
2025-04-04T16:56:30.140755Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.140803Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.140853Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS02 ReceiveData called
2025-04-04T16:56:30.140923Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.140990Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056]
2025-04-04T16:56:30.142326Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84
2025-04-04T16:56:30.142486Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1
2025-04-04T16:56:30.142557Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS02 ReceiveData called
2025-04-04T16:56:30.142620Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err#
2025-04-04T16:56:30.142740Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:33:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err#
2025-04-04T16:56:30.142834Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-04T16:56:30.142900Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1
2025-04-04T16:56:30.142980Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2
2025-04-04T16:56:30.143020Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages
2025-04-04T16:56:30.143088Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets
2025-04-04T16:56:30.143181Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2
2025-04-04T16:56:30.143291Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS01 socket: 28 reason#
2025-04-04T16:56:30.143385Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:24:2048] VirtualId# [5:1321750833:0]
2025-04-04T16:56:30.143461Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation
2025-04-04T16:56:30.143511Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason#
2025-04-04T16:56:30.143624Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD]
>> TInterconnectTest::TestPingPongThroughSubChannel [GOOD]
>> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD]
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD]
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD]
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD]
>> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption
>> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD]
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD]
Test command err:
2025-04-04T16:56:30.471862Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-04T16:56:30.965144Z node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-04T16:56:31.433999Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
2025-04-04T16:56:31.436944Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default
>> TSchemeShardUserAttrsTest::VariousUse [GOOD]
>> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD]
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T16:56:31.626491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:56:31.626618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:56:31.626704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:56:31.626748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:56:31.626826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:56:31.626856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:56:31.626921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:56:31.626996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T16:56:31.627395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:56:31.720537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T16:56:31.720620Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:56:31.732381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:56:31.732709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T16:56:31.732893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T16:56:31.747139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T16:56:31.747485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T16:56:31.748246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T16:56:31.749266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T16:56:31.755678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:56:31.757099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:56:31.757185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:56:31.757425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T16:56:31.757479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:56:31.757525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T16:56:31.757691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.765352Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T16:56:31.878525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T16:56:31.878767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.878995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T16:56:31.879227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T16:56:31.879281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.885936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T16:56:31.886069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T16:56:31.886294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.886377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T16:56:31.886439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T16:56:31.886489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T16:56:31.889291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.889401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T16:56:31.889453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T16:56:31.893604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.893701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.893777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:56:31.893846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T16:56:31.905486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T16:56:31.907899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T16:56:31.908099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T16:56:31.909012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T16:56:31.909162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T16:56:31.909223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:56:31.909578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T16:56:31.909645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:56:31.909813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T16:56:31.909898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T16:56:31.912334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:56:31.912394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:56:31.912763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:56:31.912856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T16:56:31.913292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:56:31.913351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T16:56:31.913498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:56:31.913542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:56:31.913623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:56:31.913663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:56:31.913704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T16:56:31.913753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:56:31.913796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T16:56:31.913850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T16:56:31.913931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T16:56:31.913976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T16:56:31.914022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T16:56:31.928091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T16:56:31.928301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T16:56:31.928352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1/1
2025-04-04T16:56:32.330622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1
2025-04-04T16:56:32.330700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T16:56:32.330792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-04-04T16:56:32.330852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-04-04T16:56:32.330886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false
2025-04-04T16:56:32.330934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1
2025-04-04T16:56:32.330969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0
2025-04-04T16:56:32.331005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0
2025-04-04T16:56:32.331069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2
2025-04-04T16:56:32.331118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 3, subscribers: 0
2025-04-04T16:56:32.331151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 9
2025-04-04T16:56:32.331182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7
2025-04-04T16:56:32.331205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615
2025-04-04T16:56:32.331892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112
2025-04-04T16:56:32.332058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112
2025-04-04T16:56:32.334011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:56:32.334104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:56:32.334282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-04T16:56:32.334354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4]
2025-04-04T16:56:32.334526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:56:32.334601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1
2025-04-04T16:56:32.334666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 3
2025-04-04T16:56:32.334694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 4
FAKE_COORDINATOR: Erasing txId 112
2025-04-04T16:56:32.335499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112
2025-04-04T16:56:32.335600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112
2025-04-04T16:56:32.335693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112
2025-04-04T16:56:32.335737Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9
2025-04-04T16:56:32.335783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-04-04T16:56:32.336385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112
2025-04-04T16:56:32.336478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112
2025-04-04T16:56:32.336515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112
2025-04-04T16:56:32.336561Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7
2025-04-04T16:56:32.336596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-04T16:56:32.337501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112
2025-04-04T16:56:32.337588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112
2025-04-04T16:56:32.337633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112
2025-04-04T16:56:32.337665Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615
2025-04-04T16:56:32.337712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-04-04T16:56:32.337799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0
2025-04-04T16:56:32.339005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-04T16:56:32.339060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-04-04T16:56:32.339195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-04-04T16:56:32.341283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112
2025-04-04T16:56:32.342041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112
2025-04-04T16:56:32.343628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112
2025-04-04T16:56:32.343733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 112, wait until txId: 112
TestWaitNotification wait txId: 112
2025-04-04T16:56:32.344165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion
2025-04-04T16:56:32.344209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112
2025-04-04T16:56:32.344881Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944
2025-04-04T16:56:32.344973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult
2025-04-04T16:56:32.345011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:497:2488]
TestWaitNotification: OK eventTxId 112
2025-04-04T16:56:32.345818Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T16:56:32.346121Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 306us result status StatusSuccess
2025-04-04T16:56:32.346728Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
TestModificationResults wait txId: 113
2025-04-04T16:56:32.349896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T16:56:32.350087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944
2025-04-04T16:56:32.350223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-04-04T16:56:32.352927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T16:56:32.353090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB
TestModificationResult got TxId: 113, wait until txId: 113
>> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD]
Test command err:
2025-04-04T16:56:24.794684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:56:24.795338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:56:24.795428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001181/r3tmp/tmplvuUeA/pdisk_1.dat
2025-04-04T16:56:25.224838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T16:56:25.272344Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:56:25.315832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:56:25.316022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:56:25.329958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:56:25.420019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T16:56:25.460857Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot
2025-04-04T16:56:25.461825Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored
2025-04-04T16:56:25.462374Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569]
2025-04-04T16:56:25.462647Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:56:25.473602Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-04T16:56:25.510778Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:56:25.510920Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-04T16:56:25.512850Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-04T16:56:25.512956Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-04T16:56:25.513035Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-04T16:56:25.513453Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-04T16:56:25.513630Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-04T16:56:25.513725Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1
2025-04-04T16:56:25.524669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-04T16:56:25.563632Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-04T16:56:25.563846Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-04T16:56:25.563967Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579]
2025-04-04T16:56:25.564024Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:56:25.564075Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-04T16:56:25.564113Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:25.564350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-04T16:56:25.564398Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-04T16:56:25.564687Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-04T16:56:25.564777Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-04T16:56:25.564899Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:25.564964Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:56:25.565016Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-04-04T16:56:25.565057Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-04T16:56:25.565090Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-04T16:56:25.565125Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-04T16:56:25.565173Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:56:25.565272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T16:56:25.565330Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T16:56:25.565368Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0]
2025-04-04T16:56:25.565733Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573]
2025-04-04T16:56:25.565775Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-04T16:56:25.565866Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:56:25.566090Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-04-04T16:56:25.566148Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-04T16:56:25.566229Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-04T16:56:25.566307Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-04-04T16:56:25.566372Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-04-04T16:56:25.566421Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-04-04T16:56:25.566465Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-04T16:56:25.566771Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-04-04T16:56:25.566810Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-04-04T16:56:25.566844Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-04-04T16:56:25.566873Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-04T16:56:25.566926Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-04-04T16:56:25.566972Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-04-04T16:56:25.567004Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-04-04T16:56:25.567036Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-04T16:56:25.567058Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-04-04T16:56:25.568526Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-04-04T16:56:25.568591Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:56:25.579315Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T16:56:25.579407Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-04T16:56:25.579469Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-04T16:56:25.579533Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-04-04T16:56:25.579597Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-04T16:56:25.720931Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T16:56:25.720999Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T16:56:25.721074Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0]
2025-04-04T16:56:25.721396Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-04-04T16:56:25.721451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-04T16:56:25.721553Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-04T16:56:25.721594Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-04-04T16:56:25.721650Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-04-04T16:56:25.721680Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-04-04T16:56:25.731716Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-04T16:56:25.731826Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:25.732824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-04T16:56:25.732907Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-04T16:56:25.732962Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:2 ... t 72075186224037890 to execution unit CompleteOperation
2025-04-04T16:56:27.579531Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation
2025-04-04T16:56:27.579714Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete
2025-04-04T16:56:27.579741Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation
2025-04-04T16:56:27.579767Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations
2025-04-04T16:56:27.579792Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations
2025-04-04T16:56:27.579820Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed
2025-04-04T16:56:27.579841Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations
2025-04-04T16:56:27.579879Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished
2025-04-04T16:56:27.579921Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:56:27.579947Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890
2025-04-04T16:56:27.579972Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations
2025-04-04T16:56:27.579997Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890
2025-04-04T16:56:27.591009Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-04T16:56:27.591086Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890
2025-04-04T16:56:27.591124Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation
2025-04-04T16:56:27.591187Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 72075186224037890 at tablet 72075186224037890 send result to client [1:1114:2909], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T16:56:27.591268Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
2025-04-04T16:56:31.149075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:56:31.149220Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:56:31.149446Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001181/r3tmp/tmpeH1OHb/pdisk_1.dat
2025-04-04T16:56:31.411761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T16:56:31.443641Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:56:31.479578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:56:31.479688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:56:31.490977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:56:31.572448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T16:56:31.595252Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:665:2569]
2025-04-04T16:56:31.595510Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:56:31.642427Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:56:31.642577Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-04T16:56:31.644320Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-04T16:56:31.644414Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-04T16:56:31.644476Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-04T16:56:31.644798Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-04T16:56:31.644941Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-04T16:56:31.645018Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:681:2569] in generation 1
2025-04-04T16:56:31.656146Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-04T16:56:31.656234Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-04T16:56:31.656354Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-04T16:56:31.656439Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:683:2579]
2025-04-04T16:56:31.656481Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:56:31.656519Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-04T16:56:31.656560Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:31.656983Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-04T16:56:31.657086Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-04T16:56:31.657176Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:31.657227Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:56:31.657272Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-04T16:56:31.657318Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:56:31.657384Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:661:2566], serverId# [2:670:2571], sessionId# [0:0:0]
2025-04-04T16:56:31.657817Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:56:31.658052Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-04T16:56:31.658133Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-04T16:56:31.659882Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:56:31.670969Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T16:56:31.671095Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-04T16:56:31.821129Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2592], serverId# [2:704:2594], sessionId# [0:0:0]
2025-04-04T16:56:31.821874Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-04T16:56:31.821936Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:31.822788Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:31.822848Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-04-04T16:56:31.822902Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-04-04T16:56:31.823188Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-04-04T16:56:31.823328Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-04T16:56:31.824192Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:31.824271Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-04-04T16:56:31.824754Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-04-04T16:56:31.825179Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:56:31.827377Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-04-04T16:56:31.827439Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:31.828123Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-04-04T16:56:31.828204Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:56:31.829205Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:56:31.829270Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:56:31.829325Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-04-04T16:56:31.829394Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms
2025-04-04T16:56:31.829455Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-04-04T16:56:31.829550Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:31.830673Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:56:31.833222Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-04-04T16:56:31.833293Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-04T16:56:31.833533Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-04-04T16:56:31.839749Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:738:2620], serverId# [2:739:2621], sessionId# [0:0:0]
2025-04-04T16:56:31.839883Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TTenantPoolTests::TestSensorsConfigForStaticSlot
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD]
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD]
Test command err:
2025-04-04T16:56:25.758442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:56:25.759007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:56:25.759088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001134/r3tmp/tmp3g4CwJ/pdisk_1.dat
2025-04-04T16:56:26.137413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T16:56:26.181565Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:56:26.221743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:56:26.221872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:56:26.234918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:56:26.333909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T16:56:26.380500Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569]
2025-04-04T16:56:26.380824Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:56:26.435276Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:56:26.435461Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-04T16:56:26.437544Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-04T16:56:26.437644Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-04T16:56:26.437726Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-04T16:56:26.438207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-04T16:56:26.438433Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-04T16:56:26.438563Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1
2025-04-04T16:56:26.452507Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-04T16:56:26.500814Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-04T16:56:26.501021Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-04T16:56:26.501133Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579]
2025-04-04T16:56:26.501193Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:56:26.501238Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-04T16:56:26.501277Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:26.501740Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-04T16:56:26.501876Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-04T16:56:26.502029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:26.502074Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:56:26.502117Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-04T16:56:26.502164Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:56:26.502233Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0]
2025-04-04T16:56:26.507122Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:56:26.507380Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-04T16:56:26.507457Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-04T16:56:26.509139Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:56:26.522866Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T16:56:26.522984Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-04T16:56:26.675085Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0]
2025-04-04T16:56:26.679564Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-04T16:56:26.679644Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:26.680438Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:26.680488Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-04-04T16:56:26.680539Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-04-04T16:56:26.680780Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-04-04T16:56:26.680924Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-04T16:56:26.681464Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:56:26.681536Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-04-04T16:56:26.682120Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-04-04T16:56:26.682489Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:56:26.684118Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-04-04T16:56:26.684176Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:26.685297Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-04-04T16:56:26.685440Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:56:26.686515Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:56:26.686886Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:56:26.686936Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:56:26.686978Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-04-04T16:56:26.687033Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms
2025-04-04T16:56:26.687086Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-04-04T16:56:26.687187Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:56:26.692233Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-04-04T16:56:26.692409Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-04-04T16:56:26.692474Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-04T16:56:26.708697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:56:26.708821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:56:26.708900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:56:26.715201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-04T16:56:26.724456Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:56:26.886827Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:56:26.890327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-04T16:56:26.977668Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T16:56:27.412156Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vpzaj9mnn7r6hy0gc78p4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlmMDQ0NjEtZGM5ZmU0ZTgtOGZlZThiNTgtOTY1N2ZmM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T16:56:27.419492Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0]
2025-04-04T16:56:27.419851Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:56:27.432839Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T16 ... aChangedResult
2025-04-04T16:56:32.566800Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready
2025-04-04T16:56:32.566860Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-04T16:56:32.568786Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:940:2766], Recipient [2:665:2569]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 940 RawX2: 8589937358 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\254\003\000\000\000\000\000\000\021\316\n\000\000\002\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8
2025-04-04T16:56:32.568879Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-04T16:56:32.568986Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:56:32.569202Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0]
2025-04-04T16:56:32.569297Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx
2025-04-04T16:56:32.569354Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed
2025-04-04T16:56:32.569400Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx
2025-04-04T16:56:32.569443Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies
2025-04-04T16:56:32.569479Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies
2025-04-04T16:56:32.569521Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615
2025-04-04T16:56:32.569578Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888
2025-04-04T16:56:32.569625Z node 2
:TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T16:56:32.569648Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T16:56:32.569670Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot 2025-04-04T16:56:32.569696Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot 2025-04-04T16:56:32.569722Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T16:56:32.569761Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-04T16:56:32.569788Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-04T16:56:32.569812Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-04-04T16:56:32.569861Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:940:2766] for [0:281474976715665] at 72075186224037888 2025-04-04T16:56:32.569894Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-04-04T16:56:32.570098Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:940:2766], Recipient [2:665:2569]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665 2025-04-04T16:56:32.570138Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-04T16:56:32.570275Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:940:2766], Recipient [2:665:2569]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true 2025-04-04T16:56:32.570312Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-04T16:56:32.570394Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:665:2569], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:32.570434Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:32.570507Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:32.570551Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:56:32.570601Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-04-04T16:56:32.570658Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-04-04T16:56:32.570707Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888 2025-04-04T16:56:32.570751Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T16:56:32.570822Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-04T16:56:32.570859Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan 2025-04-04T16:56:32.570896Z node 2 :TX_DATASHARD TRACE: Trying to 
execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-04-04T16:56:32.571156Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-04-04T16:56:32.571191Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:56:32.571230Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T16:56:32.571272Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:56:32.571311Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:56:32.571892Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:946:2771], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T16:56:32.571935Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T16:56:32.572200Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-04-04T16:56:32.572648Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:56:32.572705Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:32.573130Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T16:56:32.573354Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-04-04T16:56:32.573406Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-04-04T16:56:32.589975Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:933:2759], Recipient [2:665:2569]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:933:2759] ServerId: [2:935:2761] } 2025-04-04T16:56:32.590065Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T16:56:32.590182Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T16:56:32.590267Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-04-04T16:56:32.590462Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:665:2569], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:32.590500Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:32.590572Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:32.590611Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:56:32.590678Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-04-04T16:56:32.590713Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-04-04T16:56:32.590754Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0 2025-04-04T16:56:32.590814Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 
72075186224037888 is Executed 2025-04-04T16:56:32.590849Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan 2025-04-04T16:56:32.590882Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:56:32.590912Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-04T16:56:32.590964Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T16:56:32.591033Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-04-04T16:56:32.591074Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:56:32.591117Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T16:56:32.591156Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-04T16:56:32.591207Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T16:56:32.591232Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T16:56:32.591260Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-04T16:56:32.591305Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:32.591333Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T16:56:32.591364Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:56:32.591397Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:56:32.591464Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:32.591514Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-04T16:56:32.591565Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|78.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log}
>> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD]
>> YdbTableSplit::RenameTablesAndSplit [GOOD]
>> TLocalTests::TestAddTenantWhileResolving
>> BasicUsage::WriteRead [GOOD]
>> BasicUsage::ReadWithoutConsumerWithRestarts
>> Yq_1::DeleteConnections [GOOD]
>> Yq_1::Create_And_Modify_The_Same_Connection
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD]
Test command err:
2025-04-04T16:56:34.052884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:34.052958Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done)
>> TNodeBrokerTest::TestListNodes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
Test command err:
2025-04-04T16:56:24.710910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:24.711341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:24.711393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011ac/r3tmp/tmpzGPJYF/pdisk_1.dat 2025-04-04T16:56:25.084612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:25.124925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:25.165063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:25.165197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:25.176750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:25.265071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:25.308070Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:56:25.308881Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:56:25.309255Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:56:25.309504Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:25.323452Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:56:25.358011Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:25.358144Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:56:25.359776Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:56:25.359901Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:56:25.359962Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:56:25.360320Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:56:25.360486Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:56:25.360569Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:56:25.371379Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:56:25.393535Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:56:25.393697Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:25.393783Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:56:25.393820Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:56:25.393858Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:56:25.393885Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:25.394057Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:25.394112Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:25.394364Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:56:25.394451Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:56:25.394561Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:25.394598Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:25.394652Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:56:25.394681Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:56:25.394710Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:56:25.394731Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:56:25.394760Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:25.394825Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:56:25.394860Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:56:25.394892Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:56:25.395196Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:56:25.395232Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:56:25.395317Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:56:25.395547Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:56:25.395587Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:56:25.395645Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:56:25.395684Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:56:25.395746Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:56:25.395780Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T16:56:25.395803Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:56:25.395981Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:56:25.396002Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:56:25.396030Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:56:25.396051Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:56:25.396083Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:56:25.396123Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:56:25.396154Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:56:25.396177Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:56:25.396193Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:56:25.397404Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T16:56:25.397456Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:56:25.408766Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:56:25.408863Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:56:25.408923Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:56:25.408977Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T16:56:25.409046Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:56:25.550757Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:56:25.550826Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:56:25.550917Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T16:56:25.551320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T16:56:25.551380Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T16:56:25.551497Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:56:25.551551Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T16:56:25.551613Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T16:56:25.551657Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T16:56:25.555810Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:56:25.555880Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:25.556706Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:25.556747Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:25.556875Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:2 ... 33.377365Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715668, MessageQuota: 0 2025-04-04T16:56:33.435474Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-04T16:56:33.435559Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2025-04-04T16:56:33.436186Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:957:2779], Recipient [2:957:2779]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:33.436235Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:33.436299Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:56:33.436370Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:56:33.436413Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2025-04-04T16:56:33.436448Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2025-04-04T16:56:33.436495Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2025-04-04T16:56:33.436554Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-04-04T16:56:33.436588Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2025-04-04T16:56:33.436620Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2025-04-04T16:56:33.436650Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-04-04T16:56:33.436870Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2025-04-04T16:56:33.436909Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2025-04-04T16:56:33.436949Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit 
CompletedOperations 2025-04-04T16:56:33.436994Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompletedOperations 2025-04-04T16:56:33.437028Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-04-04T16:56:33.437054Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T16:56:33.437087Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2025-04-04T16:56:33.437123Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:33.437152Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T16:56:33.437187Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T16:56:33.437229Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T16:56:33.448298Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:56:33.448397Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:56:33.448443Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-04-04T16:56:33.448525Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1149:2942], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:56:33.448592Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:56:33.448945Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2025-04-04T16:56:33.449012Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:56:33.449057Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:56:33.449371Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1149:2942], Recipient [2:962:2781]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2025-04-04T16:56:33.449413Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-04T16:56:33.449890Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:962:2781], Recipient [2:962:2781]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:33.449933Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:33.449991Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T16:56:33.450039Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:56:33.450079Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-04-04T16:56:33.450114Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2025-04-04T16:56:33.450153Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2025-04-04T16:56:33.450192Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-04T16:56:33.450226Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-04T16:56:33.450301Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2025-04-04T16:56:33.450336Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-04-04T16:56:33.450579Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2025-04-04T16:56:33.450614Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:56:33.450665Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-04T16:56:33.450696Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2025-04-04T16:56:33.450748Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T16:56:33.451560Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1171:2962], Recipient [2:962:2781]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T16:56:33.451604Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T16:56:33.451883Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-04-04T16:56:33.452680Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T16:56:33.454590Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-04-04T16:56:33.454662Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-04-04T16:56:33.522737Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-04T16:56:33.522806Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-04-04T16:56:33.523369Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:962:2781], Recipient [2:962:2781]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:33.523425Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:56:33.523493Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T16:56:33.523533Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:56:33.523575Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2025-04-04T16:56:33.523609Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-04-04T16:56:33.523648Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2025-04-04T16:56:33.523692Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-04T16:56:33.523734Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2025-04-04T16:56:33.523769Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2025-04-04T16:56:33.523800Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-04-04T16:56:33.524019Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2025-04-04T16:56:33.524056Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2025-04-04T16:56:33.524089Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T16:56:33.524120Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2025-04-04T16:56:33.524156Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-04-04T16:56:33.524183Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T16:56:33.524210Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2025-04-04T16:56:33.524241Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:33.524271Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-04T16:56:33.524301Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-04T16:56:33.524330Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T16:56:33.535305Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:56:33.535375Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:56:33.535409Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-04-04T16:56:33.535472Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1149:2942], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T16:56:33.535517Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
|78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD]
>> TDynamicNameserverTest::CacheMissSameDeadline
|78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD]
Test command err:
2025-04-04T16:55:01.314749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502559133724623:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:01.314820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath #
/home/runner/.ya/build/build_root/i5tq/001c10/r3tmp/tmp48yVyZ/pdisk_1.dat 2025-04-04T16:55:02.340386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:02.362740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:02.408041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:02.408134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:02.414719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6361, node 1 2025-04-04T16:55:02.806896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:02.806919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:02.806928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:02.807069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:03.307580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:06.340071Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502559133724623:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:06.340270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:06.375059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502580608562167:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.375220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.602791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.604095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:06.604126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.615401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-04-04T16:55:06.727758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743785706775, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:55:06.806562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-04T16:55:06.839515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502580608562398:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.839580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:06.856742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.857256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:06.857286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:55:06.859263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2025-04-04T16:55:06.878154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743785706922, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:55:06.893184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-04-04T16:55:16.817369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:55:16.817937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:16.918640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-04T16:55:16.929085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T16:55:16.929128Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T16:55:17.323791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:55:17.323849Z node 1 :IMPORT WARN: Table profiles were not loaded partitions 1 2025-04-04T16:55:18.971145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:55:18.971373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T16:55:18.974086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2025-04-04T16:55:18.991349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786199036, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T16:55:19.000990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710660, done: 0, blocked: 1 2025-04-04T16:55:19.018150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-04-04T16:55:19.021446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Fast forward 1m partitions 1 Fast forward 1m 2025-04-04T16:55:26.870987Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.523 2025-04-04T16:55:26.971964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T16:55:26.972096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-04-04T16:55:26.972141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0 2025-04-04T16:55:26.972173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 3: RowCount 50000, DataSize 6841088 2025-04-04T16:55:26.972251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Requesting full stats from datashard 72075186224037890 2025-04-04T16:55:26.972359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T16:55:26.972519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037890 state: 'Ready' data size: 6841088 row count: 50000 2025-04-04T16:55:26.972575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPartitionHistogram::Execute partition histogram at tablet 72057594046644480 from datashard 72075186224037890 for pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 dataSizeHistogram buckets 0 2025-04-04T16:55:26.972608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Failed to find proper split key (initially) for 'Split by size' of datashard 72075186224037890 partitions 1 Fast forward 1m 2025-04-04T16:55:31.872352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.1309 2025-04-04T16:55:31.973784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T16:55:31.973906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-04-04T16:55:31.973968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0 2025-0 ... 
37891 2025-04-04T16:56:33.288794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-04-04T16:56:33.288805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-04T16:56:33.288824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-04-04T16:56:33.288835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-04T16:56:33.288848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710664, ready parts: 1/1, is published: true 2025-04-04T16:56:33.288901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7489502954270721325:2818] message: TxId: 281474976710664 2025-04-04T16:56:33.288920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-04-04T16:56:33.288932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2025-04-04T16:56:33.288969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710664:0 2025-04-04T16:56:33.289050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T16:56:33.290389Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037890:1:119:1:12288:10894:0] ] return ack processed 2025-04-04T16:56:33.290490Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T16:56:33.290554Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7489502954270721383:2821], serverId# [1:7489502954270721390:4880], sessionId# [0:0:0] 2025-04-04T16:56:33.290621Z node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-04-04T16:56:33.290892Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T16:56:33.290977Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037890:1:119:1:12288:10894:0] ] return ack processed 2025-04-04T16:56:33.291058Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T16:56:33.291120Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-04T16:56:33.292671Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:56:33.292743Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:56:33.292846Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7489502954270721386:2823], serverId# [1:7489502954270721391:4881], sessionId# [0:0:0] 2025-04-04T16:56:33.293472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489502623558235782 RawX2: 4503603922340192 } 
TabletId: 72075186224037890 State: 4 2025-04-04T16:56:33.293578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T16:56:33.293811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489502623558235782 RawX2: 4503603922340192 } TabletId: 72075186224037890 State: 4 2025-04-04T16:56:33.293845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T16:56:33.294568Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:56:33.294614Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:56:33.294637Z node 1 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T16:56:33.294964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489502949975753666 RawX2: 4503603922340582 } TabletId: 72075186224037891 State: 4 2025-04-04T16:56:33.295013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-04T16:56:33.295161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489502949975753666 RawX2: 4503603922340582 } TabletId: 72075186224037891 State: 4 2025-04-04T16:56:33.295191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-04T16:56:33.295350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489502949975753667 RawX2: 4503603922340583 } TabletId: 72075186224037892 State: 4 2025-04-04T16:56:33.295377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-04T16:56:33.295773Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-04T16:56:33.295805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T16:56:33.296462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T16:56:33.296465Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-04T16:56:33.297959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-04T16:56:33.297959Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-04T16:56:33.298085Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-04T16:56:33.298090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-04T16:56:33.298159Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-04T16:56:33.298160Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-04T16:56:33.299325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T16:56:33.299506Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-04T16:56:33.299605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T16:56:33.299811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T16:56:33.299895Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T16:56:33.299977Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-04T16:56:33.300472Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T16:56:33.302167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T16:56:33.302292Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-04T16:56:33.302351Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-04T16:56:33.302466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T16:56:33.302733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T16:56:33.302997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-04T16:56:33.303043Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T16:56:33.303115Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-04T16:56:33.303103Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-04T16:56:33.303196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T16:56:33.303198Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-04T16:56:33.303322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T16:56:33.303351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T16:56:33.303389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T16:56:33.303450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T16:56:33.303473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 
72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T16:56:33.303529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T16:56:33.303768Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T16:56:33.303799Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-04T16:56:33.307795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T16:56:33.307854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T16:56:33.307919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T16:56:33.307958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-04T16:56:33.307974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-04T16:56:33.308012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] >> TNodeBrokerTest::SingleDomainModeBannedIds >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-04-04T16:56:34.271612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:34.271702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:34.322003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/i5tq/000bd5/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 9215, MsgBus: 14174 2025-04-04T16:55:31.295599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502686470526913:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.295645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bd5/r3tmp/tmpoWmFxF/pdisk_1.dat 2025-04-04T16:55:31.721369Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:31.762315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-04T16:55:31.762454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9215, node 1 2025-04-04T16:55:31.764710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:31.839642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:31.839678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:31.839685Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:31.839806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14174 TClient is connected to server localhost:14174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:32.798069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:32.825953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:32.981564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:33.186514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:33.272175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:35.099016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502703650397748:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:35.099193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:35.433865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.494481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.570728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.614295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.670726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.706828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:35.770727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502703650398265:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:35.770850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:35.771145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502703650398270:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:35.775515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:35.786979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502703650398272:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:35.886560Z node 1 :TX_PROXY ERROR: Actor# [1:7489502703650398328:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:36.298475Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502686470526913:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:36.298545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:46.713639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:55:46.713686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:15.492309Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037911, for tableId 6 don't have enough keys: RowCount: 168 DataSize: 33623952 IndexSize: 4329 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 3 2025-04-04T16:56:25.495844Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037911, for tableId 6 don't have enough keys: RowCount: 230 DataSize: 46040013 IndexSize: 5842 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 4 ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '777) '('"_id" '"f34454a4-4b8e5d72-d28af9e4-95bca8f8") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '675) '('"_id" '"eab6f23b-e6d36ae2-2fb22db2-f9f5a715") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '687) '('"_id" '"fad57e91-465ea192-9536f3c-23c6edc3")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2025-04-04T16:56:34.624591Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7489502957053474665:6294], blobId: 0, bytes: 1401088 2025-04-04T16:56:34.624752Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710971. 
Error: [TEvError] File size limit exceeded: 1/0Mb 2025-04-04T16:56:34.635180Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7489502957053474665:6294], blobId: 1, bytes: 84 2025-04-04T16:56:34.658140Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489502957053474656:4505], TxId: 281474976710971, task: 2. Ctx: { TraceId : 01jr0vq67v0r6k7qpm42a3dkta. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTNiZjU1ZGMtNGRiMjNhNzQtZTM1MjdiODEtNjc5YWYyZmQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] File size limit exceeded: 1/0Mb }. 2025-04-04T16:56:34.658689Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489502957053474657:4506], TxId: 281474976710971, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTNiZjU1ZGMtNGRiMjNhNzQtZTM1MjdiODEtNjc5YWYyZmQ=. TraceId : 01jr0vq67v0r6k7qpm42a3dkta. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-04T16:56:34.672676Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTNiZjU1ZGMtNGRiMjNhNzQtZTM1MjdiODEtNjc5YWYyZmQ=, ActorId: [1:7489502952758507345:4500], ActorState: ExecuteState, TraceId: 01jr0vq67v0r6k7qpm42a3dkta, Create QueryResponse for error on request, msg: |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD] Test command err: 2025-04-04T16:56:35.763653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:35.763741Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/i5tq/000bf7/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 22324, MsgBus: 10653 2025-04-04T16:55:29.760054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502678366477820:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:29.760995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bf7/r3tmp/tmpk4Tyj8/pdisk_1.dat 2025-04-04T16:55:30.158051Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22324, node 1 2025-04-04T16:55:30.166001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:30.166116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:30.171830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:30.244811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:30.244840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:30.244868Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:30.244987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10653 TClient is connected to server localhost:10653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:30.963696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:30.978766Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:30.990970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:31.166893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:31.352588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:31.438369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:33.340413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502695546348766:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.340559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.595678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.639261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.681414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.727861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.768284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.839333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.935752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502695546349291:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.935833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.936150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502695546349296:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:33.941003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:33.953732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502695546349298:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:34.025071Z node 1 :TX_PROXY ERROR: Actor# [1:7489502699841316649:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:34.760163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502678366477820:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:34.760241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:45.146521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:55:45.146557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:08.636245Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037911, for tableId 6 don't have enough keys: RowCount: 168 DataSize: 33624016 IndexSize: 4329 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 3 2025-04-04T16:56:18.638049Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037911, for tableId 6 don't have enough keys: RowCount: 207 DataSize: 41433642 IndexSize: 5267 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 3 ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '777) '('"_id" '"72875293-c2bddcce-272b454a-ca703103") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '675) '('"_id" '"36a19b55-ece5047f-165b32e-e291924a") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '687) '('"_id" '"839fd82d-1d27bd51-6b801fa9-bdaa39c3")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TSlotIndexesPoolTest::Init [GOOD] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> 
TDynamicNameserverTest::CacheMissNoDeadline >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-04-04T16:56:35.975587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:35.975650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:36.092498Z node 1 :NODE_BROKER ERROR: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2025-04-04T16:56:36.122077Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-04T16:56:36.136311Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2025-04-04T16:56:36.904996Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-04T16:56:36.928669Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs >> Cdc::AddStream [GOOD] >> Cdc::AwsRegion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] Test command err: 2025-04-04T16:56:25.246177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:25.246794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:25.246870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00115b/r3tmp/tmphWJes4/pdisk_1.dat 2025-04-04T16:56:25.669752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:25.724310Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:25.764186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:25.764385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:25.776091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:25.860429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:25.910824Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:683:2578] 2025-04-04T16:56:25.911142Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:25.960823Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:685:2580] 2025-04-04T16:56:25.961079Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:25.971333Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:25.971555Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:56:25.973385Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:56:25.973483Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:56:25.973547Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:56:25.973905Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:56:25.974138Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:56:25.974221Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2578] in generation 1 2025-04-04T16:56:25.974773Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:25.974979Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:56:25.976397Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:56:25.976479Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:56:25.976537Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:56:25.976825Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:56:25.976947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:56:25.977001Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:722:2580] in generation 1 2025-04-04T16:56:25.980032Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:689:2582] 2025-04-04T16:56:25.980262Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:25.991327Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:691:2584] 2025-04-04T16:56:25.991501Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:26.001394Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:26.001559Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:56:26.003060Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-04T16:56:26.003147Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-04T16:56:26.003200Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-04T16:56:26.003518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:56:26.003680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:56:26.003737Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:750:2582] in generation 1 2025-04-04T16:56:26.003968Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:26.004051Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:56:26.005255Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-04T16:56:26.005335Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-04T16:56:26.005408Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-04T16:56:26.005654Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:56:26.005742Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:56:26.005798Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:751:2584] in generation 1 2025-04-04T16:56:26.017850Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:56:26.047160Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:56:26.047400Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:26.047564Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:756:2619] 2025-04-04T16:56:26.047629Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:56:26.047671Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:56:26.047709Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:26.048106Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:56:26.048151Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:56:26.048204Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:26.048248Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:757:2620] 
2025-04-04T16:56:26.048267Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:56:26.048284Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:56:26.048306Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:56:26.048694Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:56:26.048732Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-04T16:56:26.048804Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:26.048872Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:758:2621] 2025-04-04T16:56:26.048904Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-04T16:56:26.048937Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-04T16:56:26.048956Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T16:56:26.049191Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:56:26.049310Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:56:26.049426Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:56:26.049471Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-04T16:56:26.049526Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:26.049578Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:759:2622] 2025-04-04T16:56:26.049601Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T16:56:26.049624Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-04T16:56:26.049648Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T16:56:26.049862Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:26.049925Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:26.049970Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:56:26.050018Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:26.050123Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:56:26.050201Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:56:26.051036Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2572], serverId# [1:714:2597], sessionId# [0:0:0] 2025-04-04T16:56:26.051106Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:56:26.051140Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:26.051171Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:56:26.051208Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:56:26.051258Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-04T16:56:26.051325Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-04T16:56:26.051499Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:56:26.051827Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:56:26.051935Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:56:26.052572Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2573], serverId# [1:717:2600], sessionId# [0:0:0] 2025-04-04T16:56:26.052644Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T16:56:26.052679Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active ... node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:35.347549Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:683:2579] 2025-04-04T16:56:35.347587Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:56:35.347624Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:56:35.347671Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:35.348068Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:56:35.348191Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:56:35.348313Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:35.348363Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:35.348411Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:56:35.348465Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:35.348860Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:672:2573], sessionId# [0:0:0] 2025-04-04T16:56:35.349033Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:56:35.349220Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:56:35.349287Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:56:35.350887Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:56:35.361711Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:56:35.361862Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme 2025-04-04T16:56:35.512930Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:704:2594], sessionId# [0:0:0] 2025-04-04T16:56:35.514580Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:56:35.514681Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:35.515483Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:35.515543Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:56:35.515602Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:56:35.515894Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:56:35.516056Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:56:35.516430Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:35.516508Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:56:35.517026Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:56:35.517465Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:35.519296Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:56:35.519360Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:35.520216Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:56:35.520303Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:35.521701Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:35.521756Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:56:35.521825Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:56:35.521901Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:56:35.521973Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:56:35.522076Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:35.522867Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:56:35.525552Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2025-04-04T16:56:35.525630Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:56:35.526190Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:56:35.537311Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:35.537431Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:746:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:35.537528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:35.542894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:35.549325Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:56:35.719825Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:56:35.724141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:35.810199Z node 3 :TX_PROXY ERROR: Actor# [3:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:36.141573Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vq7yff5cqqygpmcdb70pa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmIxMGUyNDgtYTI3YjM4NmUtOTE4ZmJlZWItOTMxZTFmMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:36.147479Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:851:2687], serverId# [3:852:2688], sessionId# [0:0:0] 2025-04-04T16:56:36.147740Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:56:36.160129Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:56:36.160275Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:36.286078Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vq8jx36d1n9581qnb542q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGI4MzZhYzgtZmQwOTJhOTQtMjRkODNmYjUtZDQ4NmMwZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:36.288017Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2025-04-04T16:56:36.293806Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-04T16:56:36.304855Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-04T16:56:36.304935Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:36.304994Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-04T16:56:36.305620Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-04T16:56:36.305700Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:36.362810Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vq8pj8wqb44wykagz11ds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGI4MzZhYzgtZmQwOTJhOTQtMjRkODNmYjUtZDQ4NmMwZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:36.363284Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:56:36.374886Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:56:36.374994Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:36.383872Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGI4MzZhYzgtZmQwOTJhOTQtMjRkODNmYjUtZDQ4NmMwZmE=, ActorId: [3:858:2693], ActorState: ExecuteState, TraceId: 01jr0vq8pj8wqb44wykagz11ds, Create QueryResponse for error on request, msg: 2025-04-04T16:56:36.384699Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
Ctx: { TraceId: 01jr0vq8pj8wqb44wykagz11ds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGI4MzZhYzgtZmQwOTJhOTQtMjRkODNmYjUtZDQ4NmMwZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:36.384962Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:56:36.385242Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:56:36.385293Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2025-04-04T16:56:33.701029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:33.701109Z node 1 :IMPORT WARN: Table profiles were not loaded >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant >> TNodeBrokerTest::NodeNameExpiration >> TNodeBrokerTest::FixedNodeId >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TNodeBrokerTest::ResolveScopeIdForServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] Test command err: 2025-04-04T16:56:37.711631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:37.711712Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |78.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |78.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] Test command err: 2025-04-04T16:56:37.989722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:37.989786Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> Cdc::InitialScanAndLimits >> TLocalTests::TestRemoveTenantWhileResolving >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TDynamicNameserverTest::BasicFunctionality ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-04-04T16:56:37.901907Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:56:37.902523Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001b35/r3tmp/tmpkgBiFT/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:56:37.903198Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001b35/r3tmp/tmpkgBiFT/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001b35/r3tmp/tmpkgBiFT/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13123758155667917671 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T16:56:37.909526Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T16:56:37.909953Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001b35/r3tmp/tmpkgBiFT/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T16:56:37.910338Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001b35/r3tmp/tmpkgBiFT/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001b35/r3tmp/tmpkgBiFT/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 474895038740510951 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> TLocalTests::TestAddTenant [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |78.3%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-04-04T16:56:39.055957Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-04-04T16:56:39.056247Z node 1 :LOCAL ERROR: Unknown domain dc-3 |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2025-04-04T16:56:39.128168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:39.128269Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:39.208857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 
101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T16:56:39.258619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> TDynamicNameserverTest::CacheMissDifferentDeadline |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TTenantPoolTests::TestStateStatic [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2025-04-04T16:56:26.029109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:26.029631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:26.029698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00110a/r3tmp/tmpZ08Gof/pdisk_1.dat 2025-04-04T16:56:26.498498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:26.554756Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:26.601107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:26.601266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:26.615571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:26.712247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:26.765583Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-04-04T16:56:26.765907Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:26.814124Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:26.814423Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:56:26.816465Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:56:26.816569Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:56:26.816656Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:56:26.817628Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:56:26.817860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:56:26.818045Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:701:2577] in generation 1 2025-04-04T16:56:26.819797Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-04-04T16:56:26.820047Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:26.830518Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:26.830672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:56:26.832340Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:56:26.832438Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:56:26.832492Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:56:26.832852Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:56:26.832995Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:56:26.833081Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-04-04T16:56:26.845481Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:56:26.873552Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:56:26.873779Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:26.874173Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-04T16:56:26.874234Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:56:26.874522Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:56:26.874566Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:26.874906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:56:26.874949Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:56:26.875010Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:56:26.875077Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-04T16:56:26.875104Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:56:26.875126Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:56:26.875151Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:56:26.875583Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:56:26.875699Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:56:26.875831Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:26.875909Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:26.875967Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:56:26.876016Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:56:26.876076Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:56:26.876134Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:56:26.876652Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-04-04T16:56:26.876710Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:56:26.876743Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:56:26.876787Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:56:26.876826Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:56:26.876974Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-04T16:56:26.877244Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:56:26.877345Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:56:26.877827Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-04-04T16:56:26.878008Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:56:26.878160Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:56:26.878219Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T16:56:26.880264Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:56:26.880350Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:56:26.891498Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:56:26.891612Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:56:26.891704Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:56:26.891745Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T16:56:27.050050Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:738:2617], serverId# [1:741:2620], sessionId# [0:0:0] 2025-04-04T16:56:27.050437Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-04-04T16:56:27.054352Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:56:27.054436Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:56:27.054788Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T16:56:27.054833Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:56:27.054980Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:27.055013Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:56:27.055066Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:56:27.055354Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:56:27.055512Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-04T16:56:27.055635Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:56:27.055662Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:56:27.055705Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T16:56:27.055897Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:56:27.056003Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:56:27.057015Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:56:27.057105Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T16:56:27.059177Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T16:56:38.519654Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2025-04-04T16:56:38.519685Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2025-04-04T16:56:38.519714Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-04-04T16:56:38.519834Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:56:38.519860Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2025-04-04T16:56:38.519895Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T16:56:38.519921Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-04T16:56:38.519953Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T16:56:38.519969Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T16:56:38.519992Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-04T16:56:38.530926Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-04T16:56:38.530995Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2025-04-04T16:56:38.531040Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2025-04-04T16:56:38.664623Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T16:56:38.664694Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-04-04T16:56:38.665441Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jr0vqaw6c7jq2rtv5e5kybnt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjQ2N2ZmNzgtNzIzOWQwNTAtZTEwMjA4NmMtZGE4ODc0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:38.667700Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1109:2908], Recipient [3:669:2573]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T16:56:38.667799Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T16:56:38.667863Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2025-04-04T16:56:38.667902Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-04T16:56:38.667964Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2025-04-04T16:56:38.668051Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-04T16:56:38.668084Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2025-04-04T16:56:38.668179Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T16:56:38.668205Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T16:56:38.668240Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-04-04T16:56:38.668271Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-04T16:56:38.668289Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T16:56:38.668304Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T16:56:38.668319Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2025-04-04T16:56:38.668400Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T16:56:38.668573Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1109:2908], 0} after executionsCount# 1 2025-04-04T16:56:38.668619Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1109:2908], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T16:56:38.668736Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1109:2908], 0} finished in read 2025-04-04T16:56:38.668796Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-04T16:56:38.668813Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T16:56:38.668830Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2025-04-04T16:56:38.668844Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-04T16:56:38.668874Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-04T16:56:38.668889Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T16:56:38.668905Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-04T16:56:38.668932Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T16:56:38.669003Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T16:56:38.670217Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1109:2908], Recipient [3:669:2573]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T16:56:38.670313Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2025-04-04T16:56:38.809421Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T16:56:38.809484Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-04-04T16:56:38.810082Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vqb0k5hmybakw6s0y2df5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGRlODY1ZDItOWY4MzQxZTEtNWNkM2I4OTctODg1YjM1MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:38.812387Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1140:2933], Recipient [3:905:2738]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-04-04T16:56:38.812487Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-04T16:56:38.812530Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2025-04-04T16:56:38.812565Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-04-04T16:56:38.812618Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-04T16:56:38.812713Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-04T16:56:38.812756Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-04T16:56:38.812793Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T16:56:38.812840Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T16:56:38.812897Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-04T16:56:38.812945Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
2025-04-04T16:56:38.812972Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T16:56:38.812997Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-04T16:56:38.813023Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-04T16:56:38.813140Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-04T16:56:38.813497Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1140:2933], 0} after executionsCount# 1 2025-04-04T16:56:38.813565Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1140:2933], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T16:56:38.813721Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1140:2933], 0} finished in read 2025-04-04T16:56:38.813799Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-04T16:56:38.813834Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T16:56:38.813862Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T16:56:38.813892Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-04T16:56:38.813946Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-04T16:56:38.813972Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T16:56:38.814041Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-04T16:56:38.814093Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T16:56:38.814211Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T16:56:38.815916Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1140:2933], Recipient [3:905:2738]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T16:56:38.815985Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD] Test command err: 2025-04-04T16:56:39.704954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:39.705017Z node 1 :IMPORT WARN: Table profiles were not loaded >> TNodeBrokerTest::TestListNodes [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace >> TNodeBrokerTest::FixedNodeId [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2025-04-04T16:56:35.861089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:35.861175Z node 1 :IMPORT WARN: Table profiles were not loaded |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] Test command err: 2025-04-04T16:56:40.679537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:40.679647Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-04-04T16:56:39.215542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:39.215613Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction |78.4%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> TDynamicNameserverTest::BasicFunctionality [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple >> TableWriter::Backup [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-04-04T16:56:39.816087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:39.816164Z node 1 :IMPORT WARN: Table profiles were not loaded |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-04-04T16:56:39.179785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:39.179868Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:39.206042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2025-04-04T16:56:39.737355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:39.737420Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:39.828826Z node 1 :NODE_BROKER ERROR: Configured lease duration (10.000000s) is too small. Using min. 
value: 300.000000s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/i5tq/000bba/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 17228, MsgBus: 11709 2025-04-04T16:55:31.653870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502688652031228:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:31.654140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bba/r3tmp/tmp929DDl/pdisk_1.dat 2025-04-04T16:55:32.063708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17228, node 1 2025-04-04T16:55:32.121047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:32.121169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:32.148919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:32.198166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:32.198214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:32.198222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:32.198383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11709 TClient is connected to server localhost:11709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:33.109758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:33.135469Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:55:33.146791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:55:33.345862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:33.546789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:55:33.662861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:35.820706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502705831901976:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:35.820860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.133613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.185434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.228848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.275335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.344659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.389472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:55:36.460345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502710126869784:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.460403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.460525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502710126869789:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:36.465450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:55:36.483214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502710126869791:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:55:36.547652Z node 1 :TX_PROXY ERROR: Actor# [1:7489502710126869844:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:36.653494Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502688652031228:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:36.653568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:47.061954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:55:47.062006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:11.176582Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037911, for tableId 6 don't have enough keys: RowCount: 168 DataSize: 33623984 IndexSize: 4329 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 3 2025-04-04T16:56:31.179033Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037911, for tableId 6 don't have enough keys: RowCount: 219 DataSize: 43836900 IndexSize: 5567 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 4 ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '777) '('"_id" '"f49d8f9-4cece9aa-1a5f1074-bf50a374") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '675) '('"_id" '"cb7912a5-de151dd1-a116c10d-d4963e48") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '687) '('"_id" '"29789537-bfe6044f-c31552ba-4c770642")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> 
TxUsage::WriteToTopic_Demo_20_RestartAfterCommit [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |78.5%| [TA] $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log} |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TxUsage::WriteToTopic_Demo_21_RestartNo |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |78.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |78.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log} |78.5%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |78.5%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-04-04T16:56:42.273093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:42.273153Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:42.296928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T16:56:42.341264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> ExternalBlobsMultipleChannels::SingleChannel |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] >> ExternalBlobsMultipleChannels::WithCompaction >> TLocalTests::TestAlterTenant |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect >> TNodeBrokerTest::NodeNameReuseRestart |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> TLocalTests::TestAlterTenant [GOOD] 
>> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD] >> TNodeBrokerTest::TestListNodesEpochDeltas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2025-04-04T16:56:41.635805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:41.635890Z node 1 :IMPORT WARN: Table profiles were not loaded ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... sending extend lease request ... captured cache request ... waiting for response ... waiting for epoch update |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2025-04-04T16:56:42.653937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:42.654016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:42.680850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> ColumnShardTiers::DSConfigsStub [GOOD] >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD] Test command err: 2025-04-04T16:56:45.586205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:45.586298Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) >> TNodeBrokerTest::BasicFunctionality >> Yq_1::ListConnections |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |78.6%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |78.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> Yq_1::ModifyConnections |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] >> Yq_1::Basic >> Cdc::AwsRegion [GOOD] >> Yq_1::DescribeJob >> PrivateApi::PingTask >> Yq_1::CreateQuery_With_Idempotency >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> JsonProtoConversion::ProtoMapToJson [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: 2025-04-04T16:54:56.929232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:54:56.929950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:54:56.930034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011c4/r3tmp/tmpuvBH3A/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29739, node 1 TClient is connected to server localhost:63851 2025-04-04T16:54:57.826920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:54:57.884874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:54:57.888886Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:54:57.888969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:54:57.889026Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:54:57.889465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:54:57.932385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:54:57.932628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:54:57.947638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:54:58.084466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-04T16:54:58.248757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:54:58.249051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:54:58.249393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:54:58.249544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:54:58.249662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:54:58.249810Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:54:58.250072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:54:58.250200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:54:58.250396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:54:58.250524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:54:58.250653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:54:58.250767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:54:58.275574Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-04T16:54:58.278118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:54:58.278235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:54:58.278437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:54:58.278489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:54:58.278723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:54:58.278774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:54:58.278903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:54:58.278955Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:54:58.279051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:54:58.279091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:54:58.279163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:54:58.279209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:54:58.280056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:54:58.280124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:54:58.280373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:54:58.280429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:54:58.280642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:54:58.280693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:54:58.280921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:54:58.280978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:54:58.281114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:54:58.281161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:54:58.316392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:746:2626];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:54:58.316530Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:746:2626];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:54:58.316826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:746:2626];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:54:58.316990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:746:2626];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:54:58.317149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:746:2626];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:54:58.317303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:746:2626];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:54:58.317423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:746:2626];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=norm ... -04-04T16:55:58.705830Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-04T16:55:58.705889Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-04T16:55:58.705929Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-04-04T16:55:58.705981Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T16:55:58.706040Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-04T16:55:58.706093Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; Initialization finished REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 2025-04-04T16:56:10.468533Z node 1 :TX_PROXY ERROR: Actor# [1:3581:4775] txid# 281474976715753, issues: { message: "Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable" severity: 1 } REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:27: Error: Executing DROP OBJECT EXTERNAL_DATA_SOURCE
<main>: Error:
: Error: Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable, code: 2003 , code: 2003 ;EXPECTATION=0 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2025-04-04T16:56:22.182178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715762:0, at schemeshard: 72057594046644480 2025-04-04T16:56:22.838032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715762;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715762; 2025-04-04T16:56:22.838587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715762;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715762; 2025-04-04T16:56:22.838777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715762;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715762; REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
<main>: Info: Execution, code: 1060
<main>:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-04T16:56:33.381278Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T16:56:33.381356Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T16:56:33.381389Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T16:56:33.381469Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T16:56:33.381550Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T16:56:33.381632Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2025-04-04T16:56:33.381696Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037888 2025-04-04T16:56:33.381752Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2025-04-04T16:56:33.381797Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2025-04-04T16:56:33.381877Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037888 2025-04-04T16:56:33.381955Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:33.382011Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T16:56:33.382039Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2025-04-04T16:56:33.382069Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037889 2025-04-04T16:56:33.382099Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2025-04-04T16:56:33.382125Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-04-04T16:56:33.382160Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037889 2025-04-04T16:56:33.382196Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:33.382225Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T16:56:33.382271Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2025-04-04T16:56:33.382299Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037890 2025-04-04T16:56:33.382326Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2025-04-04T16:56:33.382348Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-04-04T16:56:33.382377Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037890 2025-04-04T16:56:33.382411Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:33.382520Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T16:56:33.382544Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-04T16:56:33.382572Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T16:56:33.382604Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-04-04T16:56:33.382646Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T16:56:33.382682Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-04T16:56:33.382717Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:33.382927Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:740:2624];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-04T16:56:33.383022Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:746:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-04T16:56:33.383087Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:753:2630];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-04T16:56:44.583559Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T16:56:44.584106Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T16:56:44.584158Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T16:56:44.584194Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T16:56:44.584318Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T16:56:44.584376Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2025-04-04T16:56:44.584438Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2025-04-04T16:56:44.584533Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:44.584582Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T16:56:44.584614Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2025-04-04T16:56:44.584645Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 
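The TX_TIERING records above repeat one teardown pattern at every tablet: when a tier's config object is deleted, the manager stops that tier, bounces each tier that remains, and then publishes a configs_updated snapshot (the TIERS= set shrinking toward empty). The sketch below is a minimal illustration of that stop/restart/notify loop only — it is not YDB's actual tiering manager, and every identifier in it (TTierRegistrySketch, OnTierDeleted, and so on) is invented for the example:

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <string>

// Illustrative sketch only -- not YDB's TTiersManager. It mimics the
// stop / restart / configs_updated sequence visible in the TX_TIERING
// records above; all names here are invented for the example.
class TTierRegistrySketch {
public:
    using TSnapshotFn = std::function<void(const std::map<std::string, bool>&)>;

    explicit TTierRegistrySketch(TSnapshotFn onConfigsUpdated)
        : OnConfigsUpdated(std::move(onConfigsUpdated)) {}

    void AddTier(const std::string& name) {
        Tiers[name] = true; // started
    }

    // Corresponds to "event=object_deleted;name=<tier>" in the log.
    void OnTierDeleted(const std::string& name) {
        Tiers.erase(name);
        std::cout << "Tier '" << name << "' stopped\n";

        // Every surviving tier is bounced so it picks up the reduced
        // config set ("Restarting tier '...' at tablet ...").
        for (auto& [other, started] : Tiers) {
            started = false; // stop
            started = true;  // start against the new snapshot
            std::cout << "Restarted tier '" << other << "'\n";
        }

        // Publish the snapshot ("event=configs_updated;configs=TIERS=...").
        OnConfigsUpdated(Tiers);
    }

private:
    std::map<std::string, bool> Tiers; // tier name -> started flag
    TSnapshotFn OnConfigsUpdated;
};

int main() {
    TTierRegistrySketch registry([](const auto& tiers) {
        std::cout << "configs_updated: " << tiers.size() << " tier(s) remain\n";
    });
    registry.AddTier("/Root/tier1");
    registry.AddTier("/Root/tier2");
    registry.OnTierDeleted("/Root/tier1"); // log shows tier2 being restarted here
    registry.OnTierDeleted("/Root/tier2"); // after this, TIERS= is empty
}
```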
2025-04-04T16:56:44.584690Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:44.584723Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T16:56:44.584750Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2025-04-04T16:56:44.584779Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-04-04T16:56:44.584820Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:44.584964Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T16:56:44.584993Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-04T16:56:44.585022Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T16:56:44.585064Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T16:56:44.585472Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:740:2624];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-04T16:56:44.585560Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:746:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-04T16:56:44.585619Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:753:2630];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2025-04-04T16:56:45.909311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:45.909406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:45.946671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 >> JsonProtoConversion::JsonToProtoArray [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |78.6%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |78.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-04-04T16:56:19.555482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502893641119974:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:19.555551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:19.880826577 178919 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:19.880987209 178919 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:20.557339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:20.994642Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27476: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:27476 2025-04-04T16:56:21.015933Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27476: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27476 } ] 2025-04-04T16:56:21.557596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:22.559006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:22.814428Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:27476: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:27476 } ] 2025-04-04T16:56:23.528307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489502910820989429:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:23.528483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ac9/r3tmp/tmpvDPqgz/pdisk_1.dat 2025-04-04T16:56:23.562468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:23.608474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489502910820989429:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:23.709754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489502910820989429:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 27476, node 1 2025-04-04T16:56:23.797251Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:23.797269Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:29771 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-04-04T16:56:24.068196Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:24.069809Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:24.069900Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:24.069914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:24.070088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:24.137669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:24.559050Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502893641119974:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:24.559148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 16:56:24.879065336 179093 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:24.879254740 179093 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:25.793705Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:56:25.801781Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:25.801793Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". 
Create session OK 2025-04-04T16:56:25.801821Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:25.801822Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:25.801831Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:25.801837Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:25.803910Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:25.803910Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:25.803942Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:25.803949Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:25.803958Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:25.803965Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:25.805358Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:25.805385Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:25.805391Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:25.805406Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:25.805428Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:25.805435Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:25.807336Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-04T16:56:25.807369Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:25.807376Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:25.807406Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:25.807426Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:25.807438Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:25.810039Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:25.810068Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:25.812432Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:25.820924Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:25.820956Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:25.820987Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:25.821000Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:25.821005Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:25.821768Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". 
Create session OK 2025-04-04T16:56:25.821790Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:25.821794Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:25.824084Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:25.824116Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:25.824124Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:25.834612Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:25.834679Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:25.834688Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:25.852883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:56:25.854786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:56:25.857083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:56:25.8595 ... x: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Start compute actor [4:7489503007367772342:2653], task: 2 2025-04-04T16:56:45.557951Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Set execution timeout 299.740297s 2025-04-04T16:56:45.558411Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-04T16:56:45.558535Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503007367772341 RawX2: 4503616807242332 } } DstEndpoint { ActorId { RawX1: 7489503007367772342 RawX2: 4503616807242333 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503007367772342 RawX2: 4503616807242333 } } DstEndpoint { ActorId { RawX1: 7489503007367772337 RawX2: 4503616807242222 } } InMemory: true } 2025-04-04T16:56:45.558548Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update input channelId: 1, peer: [4:7489503007367772341:2652] 2025-04-04T16:56:45.558582Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:56:45.559981Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715705 DataShard: 72075186224037897 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 12, BrokenTxLocks= 2025-04-04T16:56:45.560012Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. Taken 1 locks 2025-04-04T16:56:45.560029Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. new data for read #0 seqno = 1 finished = 1 2025-04-04T16:56:45.560053Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772341:2652], TxId: 281474976715713, task: 1. Ctx: { TraceId : 01jr0vqhffa14ew6ak784rymt5. SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-04T16:56:45.560074Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772341:2652], TxId: 281474976715713, task: 1. Ctx: { TraceId : 01jr0vqhffa14ew6ak784rymt5. SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:56:45.560094Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-04T16:56:45.560114Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:56:45.560139Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 1 freeSpace: 8388557 2025-04-04T16:56:45.560158Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. 
returned 1 rows; processed 1 rows 2025-04-04T16:56:45.560200Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. dropping batch for read #0 2025-04-04T16:56:45.560213Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. effective maxinflight 1024 sorted 0 2025-04-04T16:56:45.560227Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-04T16:56:45.560247Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1, CA Id [4:7489503007367772341:2652]. returned async data processed rows 1 left freeSpace 8388557 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-04T16:56:45.560491Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772341:2652], TxId: 281474976715713, task: 1. Ctx: { TraceId : 01jr0vqhffa14ew6ak784rymt5. SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:56:45.560506Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772341:2652], TxId: 281474976715713, task: 1. Ctx: { TraceId : 01jr0vqhffa14ew6ak784rymt5. SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:56:45.560543Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-04T16:56:45.560626Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-04-04T16:56:45.560656Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 2. Finish input channelId: 1, from: [4:7489503007367772341:2652] 2025-04-04T16:56:45.560692Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:56:45.560862Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:56:45.560879Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772341:2652], TxId: 281474976715713, task: 1. Ctx: { TraceId : 01jr0vqhffa14ew6ak784rymt5. SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:56:45.560908Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772341:2652], TxId: 281474976715713, task: 1. Ctx: { TraceId : 01jr0vqhffa14ew6ak784rymt5. SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:56:45.560932Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1. Tasks execution finished 2025-04-04T16:56:45.560945Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772341:2652], TxId: 281474976715713, task: 1. Ctx: { TraceId : 01jr0vqhffa14ew6ak784rymt5. SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:56:45.561059Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 1. pass away 2025-04-04T16:56:45.561151Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715713;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:56:45.561530Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:56:45.561567Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:56:45.561577Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 2. Tasks execution finished 2025-04-04T16:56:45.561586Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503007367772342:2653], TxId: 281474976715713, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==. CustomerSuppliedId : . TraceId : 01jr0vqhffa14ew6ak784rymt5. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T16:56:45.561634Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715713, task: 2. pass away 2025-04-04T16:56:45.561680Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715713;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:56:45.567097Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715714. Ctx: { TraceId: 01jr0vqhffa14ew6ak784rymt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MjdmZDQzMi00ZDgwMDgyMi03OTc1MWE4ZS05NzIxMjIwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:45.992084Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:25690: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:25690 >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-04-04T16:56:46.691028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:46.691129Z node 1 :IMPORT WARN: Table profiles were not loaded >> Yq_1::Basic_Null |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> Yq_1::CreateConnection_With_Existing_Name >> Yq_1::DescribeConnection >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> TNodeBrokerTest::BasicFunctionality [GOOD] |78.7%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalDataSourceTest::RemovingReferencesFromDataSources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-04-04T16:56:47.592541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:47.592616Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:48.753605Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001 2025-04-04T16:56:48.768619Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-04-04T16:56:48.769085Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-04-04T16:56:48.769470Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired >> KqpPg::InsertFromSelect_Simple+useSink >> KqpPg::TypeCoercionBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: 2025-04-04T16:53:16.223267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:53:16.223712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:16.223770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015a8/r3tmp/tmpZ5ZN9f/pdisk_1.dat 2025-04-04T16:53:16.660744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:53:16.728995Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:16.775909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:16.776051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:16.787457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:16.885866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:16.949749Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:53:16.950004Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:53:17.008300Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:53:17.008447Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:53:17.009976Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:53:17.010062Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:53:17.010128Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:53:17.010527Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:53:17.010685Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:53:17.010778Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:53:17.026941Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:53:17.055443Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:53:17.055650Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:53:17.055791Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:53:17.055831Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:53:17.055862Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:53:17.055898Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:17.056290Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:53:17.056405Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:53:17.056588Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:53:17.056649Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:53:17.056691Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:53:17.056733Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:53:17.056803Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:53:17.057206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:53:17.057447Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:53:17.057533Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:53:17.059248Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:53:17.070112Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:53:17.070212Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:53:17.230442Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T16:53:17.235268Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:53:17.235401Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:17.236154Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:53:17.236233Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:53:17.236280Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:53:17.236496Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:53:17.236625Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:53:17.237136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:53:17.237198Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:53:17.241184Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:53:17.241632Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
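The TX_DATASHARD lines above show the planning handshake behind these tests: a scheme transaction is proposed, a coordinator stamps it with plan step 1000, and execution later pulls it back out as the ready operation [1000:281474976715657] in the PlanQueue — planned operations drain in (step, txId) order. The toy queue below only illustrates that ordering rule; it is not YDB's datashard pipeline, and the type and member names are made up:

```cpp
#include <cstdint>
#include <iostream>
#include <queue>

// Illustrative only: planned operations drain in (step, txId) order,
// matching entries like "Found ready operation [1000:281474976715657]
// in PlanQueue". Names are invented; this is not YDB's datashard code.
struct TPlannedOp {
    uint64_t Step;  // plan step assigned by the coordinator
    uint64_t TxId;  // transaction id
    // std::priority_queue pops the *largest* element under operator<,
    // so invert the comparison to pop the smallest (step, txId) first.
    bool operator<(const TPlannedOp& rhs) const {
        if (Step != rhs.Step) return Step > rhs.Step;
        return TxId > rhs.TxId;
    }
};

int main() {
    std::priority_queue<TPlannedOp> planQueue;
    planQueue.push({1000, 281474976715657ULL});
    planQueue.push({1000, 281474976715658ULL});
    planQueue.push({500, 42});

    // Drains as [500:42], then [1000:...657], then [1000:...658].
    while (!planQueue.empty()) {
        const TPlannedOp op = planQueue.top();
        planQueue.pop();
        std::cout << "Found ready operation [" << op.Step << ":"
                  << op.TxId << "] in PlanQueue\n";
    }
}
```

Ordering by the pair keeps replay deterministic: two operations planned at the same step still execute in a fixed txId order everywhere.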
2025-04-04T16:53:17.243568Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:53:17.243613Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:17.244709Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:53:17.244800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:53:17.245882Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:53:17.246197Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:53:17.246231Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:53:17.246304Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:53:17.246384Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:53:17.246458Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:53:17.246533Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:17.247836Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:682:2579][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-04T16:53:17.252827Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:53:17.253031Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:53:17.253083Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:53:33.899946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:53:33.900082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:53:33.900316Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015a8/r3tmp/tmpjgbIlX/pdisk_1.dat 2025-04-04T16:53:34.443290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.485951Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:53:34.538151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:34.538296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:34.549760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:34.656583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:34.817513Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:676:2577] 2025-04-04T16:53:34.841436Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:53:35.505673Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:53:35.511437Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:53:35.581109Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:53:35.581531Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:53:35.582625Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:53:35.610714Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:53:35.612226Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075 ... 
' requestId: 2025-04-04T16:56:46.060451Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-04T16:56:46.060545Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream1/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 1 partNo : 0 messageNo: 1 size 324 offset: -1 2025-04-04T16:56:46.060952Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 1 partNo 0 2025-04-04T16:56:46.062212Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 439 count 1 nextOffset 1 batches 1 2025-04-04T16:56:46.063787Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream1/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 427 WTime 2535 2025-04-04T16:56:46.064246Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T16:56:46.064600Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 427 2025-04-04T16:56:46.065059Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T16:56:46.065109Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-04T16:56:46.065171Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message topic: Table/Stream2/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 2 partNo : 0 messageNo: 1 size 323 offset: -1 2025-04-04T16:56:46.065367Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 2 partNo 0 2025-04-04T16:56:46.066349Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 438 count 1 nextOffset 1 batches 1 2025-04-04T16:56:46.066863Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream2/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 426 WTime 2535 2025-04-04T16:56:46.067125Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T16:56:46.067208Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 426 2025-04-04T16:56:46.068791Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 427 actorID [21:795:2659] 2025-04-04T16:56:46.069121Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 size 427 2025-04-04T16:56:46.069453Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 426 actorID [21:971:2767] 2025-04-04T16:56:46.069556Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). 
Adding blob. Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 size 426 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-04T16:56:46.071166Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T16:56:46.071290Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-04T16:56:46.072291Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 0 max time lag 0ms effective offset 0 2025-04-04T16:56:46.072394Z node 21 :PERSQUEUE DEBUG: waiting read cookie 0 partition 0 user $without_consumer offset 0 count 10000 size 26214400 timeout 0 2025-04-04T16:56:46.072546Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T16:56:46.072672Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 0 partition 0 read timeout for $without_consumer offset 0 2025-04-04T16:56:46.072817Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T16:56:46.083821Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 342 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:56:46.084035Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T16:56:46.084250Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream1/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T16:56:46.084698Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-04-04T16:56:46.084829Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-04-04T16:56:46.085022Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 341 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:56:46.085078Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-04T16:56:46.085140Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream2/streamImpl', Partition: 0, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2025-04-04T16:56:46.085355Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T16:56:46.085618Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T16:56:46.085753Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-04T16:56:46.085878Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-04T16:56:46.086029Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-04-04T16:56:46.086118Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T16:56:46.086569Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][21:1167:2713] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 1 Offset: 0 WriteTimestampMS: 2535 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-04T16:56:46.086800Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037891][21:1168:2811] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 2 Offset: 0 WriteTimestampMS: 2535 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-04T16:56:46.086877Z node 21 :PERSQUEUE DEBUG: Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp done, result 2535 queuesize 0 startOffset 0 2025-04-04T16:56:46.087086Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:875:2713] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-04T16:56:46.087235Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:1029:2811] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-04T16:56:46.087392Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 2, at tablet# 72075186224037888 2025-04-04T16:56:46.087478Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037888 2025-04-04T16:56:46.087660Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-04-04T16:56:46.099296Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 2, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-04-04T16:56:46.383852Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T16:56:46.383941Z node 21 :PERSQUEUE DEBUG: 
[PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-04-04T16:56:46.384173Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-04T16:56:46.384296Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-04T16:56:46.384480Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-04T16:56:46.384607Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T16:56:46.385490Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2025-04-04T16:56:46.387723Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T16:56:46.387878Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-04-04T16:56:46.389080Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-04-04T16:56:46.389235Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-04T16:56:46.389437Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-04T16:56:46.389555Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T16:56:46.390416Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource >> KqpPg::EmptyQuery+useSink >> KqpPg::ReadPgArray >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> KqpPg::CreateTableBulkUpsertAndRead >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |78.7%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> TExternalDataSourceTest::DropExternalDataSource [GOOD] >> KqpPg::TypeCoercionInsert-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:52.475878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:52.475990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:52.476058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:52.476099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:52.476145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:52.476174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:52.476235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:52.476332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:52.476714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:52.561067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:52.561133Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:52.583140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:52.583429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:52.583596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:52.594689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:52.595022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:52.595762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:52.596856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:52.608223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:52.609401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-04T16:56:52.609467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:52.609626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:52.609673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:52.609710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:52.609835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.618375Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:52.782793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:52.783380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.783627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:52.783864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:52.783928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.786733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:52.786918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:52.787131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.787225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:52.787268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:52.787317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:52.789908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.789982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:52.790027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:52.793001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.793102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.793162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:52.793222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.797324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:56:52.803372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:52.803617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:52.804872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:52.805063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:52.805123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:52.805498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:52.805572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:52.805748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:52.805865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:52.813057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:52.813127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:52.813362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:52.813409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:52.813763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.813818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:52.813935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:52.813977Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.814048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:52.814095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.814138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:52.814185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.814222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:56:52.814286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:52.814376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:52.814415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:52.814453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:52.816647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:52.816787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:52.816848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... thId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:56:53.041713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T16:56:53.041753Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-04T16:56:53.041794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:53.043196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:56:53.043309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:56:53.043338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T16:56:53.043366Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T16:56:53.043399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:56:53.043475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T16:56:53.045585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-04T16:56:53.045727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-04-04T16:56:53.046382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:53.046501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:53.046552Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-04-04T16:56:53.046686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:53.046765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-04T16:56:53.046936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:53.047005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:53.047811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T16:56:53.049810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-04-04T16:56:53.051308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:53.051351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:53.051520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:56:53.051697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:53.051730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-04T16:56:53.051765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-04T16:56:53.052097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T16:56:53.052146Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T16:56:53.052265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T16:56:53.052331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
104 ready parts: 1/1 2025-04-04T16:56:53.052376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T16:56:53.052409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:56:53.052447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T16:56:53.052489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T16:56:53.052523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T16:56:53.052561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T16:56:53.052645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:53.052685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-04T16:56:53.052722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-04T16:56:53.052753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T16:56:53.053253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:56:53.053331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:56:53.053366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-04T16:56:53.053407Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T16:56:53.053446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:53.053853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:56:53.053975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T16:56:53.054048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:53.054400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:56:53.054468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T16:56:53.054494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T16:56:53.054524Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-04T16:56:53.054552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:53.054655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-04T16:56:53.058883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T16:56:53.059015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T16:56:53.059087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T16:56:53.059404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T16:56:53.059464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T16:56:53.060028Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T16:56:53.060122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T16:56:53.060156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:389:2380] TestWaitNotification: OK eventTxId 104 2025-04-04T16:56:53.060718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:53.060922Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 243us result status StatusPathDoesNotExist 2025-04-04T16:56:53.061081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is 
[1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:56:51.877212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:56:51.877321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:51.877385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:56:51.877422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:56:51.877465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:56:51.877498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:56:51.877559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:56:51.877643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:56:51.877989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:56:51.985772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:51.985840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:52.002340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:56:52.002696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:56:52.002886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:56:52.014868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:56:52.015212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:56:52.016016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:52.017004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:56:52.023269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:52.024778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:52.024872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:52.025101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:56:52.025159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:52.025208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:56:52.025374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, 
at schemeshard: 72057594046678944 2025-04-04T16:56:52.033476Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:56:52.181980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:56:52.182227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.182498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:56:52.182744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:56:52.182799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.189360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:52.189592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:56:52.189841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.189908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:56:52.189968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:56:52.190043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:56:52.192644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.192724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:56:52.192767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:56:52.200010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.200085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.200151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:52.200233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.204419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-04-04T16:56:52.207824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:56:52.208042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:56:52.209262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:56:52.209451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:52.209569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:52.209903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:56:52.209963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:56:52.210184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:52.210318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:56:52.219183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:52.219259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:52.219466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:52.219517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:56:52.219866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:56:52.219926Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:56:52.220034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:52.220091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.220152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:56:52.220210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.220254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:56:52.220300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:56:52.220341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation 
id: 1:0 2025-04-04T16:56:52.220378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:56:52.220471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:52.220517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:56:52.220557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:56:52.225291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:52.225464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:56:52.225510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Id: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:53.212399Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:53.212454Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-04T16:56:53.212509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:53.220815Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:53.220958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:53.220995Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:53.221037Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T16:56:53.221080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:56:53.221186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T16:56:53.223969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T16:56:53.224164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-04T16:56:53.224891Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-04-04T16:56:53.225037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:56:53.225135Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003 2025-04-04T16:56:53.225274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:53.225370Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-04T16:56:53.225579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:53.225663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:53.226939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:56:53.229091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T16:56:53.230658Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:56:53.230705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:56:53.230866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:56:53.231032Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:56:53.231069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T16:56:53.231109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T16:56:53.231298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:56:53.231359Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T16:56:53.231474Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:56:53.231518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:56:53.231563Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:56:53.231600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:56:53.231646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T16:56:53.231694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:56:53.231734Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T16:56:53.231774Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T16:56:53.231851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:56:53.231895Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T16:56:53.231933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T16:56:53.231982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T16:56:53.232481Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:53.232562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:53.232594Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:53.232643Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T16:56:53.232688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T16:56:53.233023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:56:53.233094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T16:56:53.233168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:56:53.233684Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:53.233758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:56:53.233789Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:56:53.233820Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T16:56:53.233855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:56:53.233952Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T16:56:53.238294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:56:53.238475Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T16:56:53.238563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T16:56:53.238840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T16:56:53.238891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T16:56:53.239371Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:56:53.239465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:56:53.239508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:331:2322] TestWaitNotification: OK eventTxId 102 2025-04-04T16:56:53.240048Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:56:53.240257Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 241us result status StatusPathDoesNotExist 2025-04-04T16:56:53.240471Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect >> KqpPg::InsertNoTargetColumns_Simple+useSink >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> ExternalBlobsMultipleChannels::Simple [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting 
iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-04-04T16:56:45.689798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:45.691840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:45.691937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d7/r3tmp/tmpbU6ZA3/pdisk_1.dat 2025-04-04T16:56:46.148957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:46.201003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:46.241091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:46.241230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:46.252922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:46.337795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:46.676790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.676935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.677028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.682921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:46.856739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:46.927988Z node 1 :TX_PROXY ERROR: Actor# [1:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:47.193560Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vqjtj96zbqmfj37ypvgk6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVmY2QwNWQtYmQ2OWRmODgtN2NjZTQ1MzgtN2E5MDk3NWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.273040Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vqkb675z8sknjd3mj0766, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY1YjMxNmEtNzQ0NTVhNTUtN2ZjMzJlOGYtZTEwMGQ0Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.342805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vqkdd3x8p2whsd9f9r1dr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVhMGU5Mi0zOGI5N2Q0Ni1hOWJmNmQ3Ni02ZDUyMjNkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.409034Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vqkfkd55pfcvhmnbs8erw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlNWQ0ODgtMjIzMDFjZTEtOWMwODcyYWUtYjI4YjI5Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.475757Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vqkhpc9s2z4384qf64xje, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc3YzMwZTktZDE0NTc0N2QtZjc4NDU4MjEtZTczYmVhZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.539290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vqkkrbmj4rxgsrabrh7v0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjczNTUyNTctNWI4YzZhOC04NTgzODc1Yi1lNzE3ODI3Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.603103Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0vqknq0hgkakkjcvjmkphp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDlmZWQ0OTUtMjBmNjkyOS1kYzE3ZWQwYy1kNGE4ZjE4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.654114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vqkqpfq4xncf5wyz4zva2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI5ODMzM2ItNzk4NGU4MzYtYTBlNzZkNGItMmIwZGRjMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.712380Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0vqks9847q0qqd041ra6jh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI2M2EyYWMtMzUzZTJlZWYtZjYxYjhhZjktZTNmZjkzNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.774739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jr0vqkv4asdgd22yswnb0e9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM3MGE4MmYtYThhYjUwNTctMzE1Y2JlMTgtYWZlOTExY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.828344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0vqkx27fnz74ste9crrwdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ0N2VlMjQtYThjMmNjMDAtMzFlNzM2N2ItZThlMGNmMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.880465Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0vqkyq9nsadwabxd7741vx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBiMmIxOWItOGRlZTIwZjMtMzA4ODYxMWQtOWFlM2QzMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.947754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0vqm0c616nygttvj0xfmx8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjEzN2I5YTItOWEyNjRhMWEtYTZlNjc4ZDMtOGNiYzM2Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.012392Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr0vqm2f6ht5wpy52jmxtz8d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBlMWJkMTItYTgwMDEzNWMtZmIxM2JmYi01YjVkMWJiZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.075882Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr0vqm4g232vt4pxks1rv25p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTUxOGEwYmYtZjQ0ZTVmNDItMzhjMGI1ZDEtMmNiMjM3ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.146870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr0vqm6f54ftbeqfzgxb5w7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDg3ZmMwMGYtYTM0MDc3YmQtYTE4NDNmNTItMjIzMGFlMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.266975Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr0vqm8x1da07vegge1hwm6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE0NTA0MjMtZTBlODk3ZGUtYmI0NWNiNmQtMWJlNmI0OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.400518Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr0vqmcqbhbx6ahh1wjsp127, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBmMGEyN2ItMWJkYzEwZS1mZDRjMzhjNy02YTk1MDczMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.467566Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0vqmgm3rs05g5trwmywgg7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTAzOGFhOC01MDMxMWE4MC1kNTE0ZWE5MC0yMmU5NTk1MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.539917Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jr0vqmjr4c4d7pwz2wcgee5c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTNmZDQ0ZS0yNmRhYmIyNC0xMDkxNDUxYy1hYzQzMzg5NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.612754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0vqmn1ck8zv4xbj4tt3cx2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFhZmI1My1hMTVmZjQ3My1kMzJiM2QwOS03NmY2NjNhNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.677383Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0vqmq9f0a82f27w26bgd8b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y1N2IyNmItZDdiYTM0MzUtYzk5NjFkNjAtOTVkZjI5YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.752384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr0vqms94knncsj9cexnt9en, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRhOTY3NTctZTNjYTgzOTItMmNjMWE0N2MtZDkzY2Q0OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.823525Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr0vqmvn4gk57drxq1ymnqft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI1YmE5ZmItYTA1ZGQxZDYtNThmN2Y5OTctYTNjMjkyOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.893065Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0vqmxxafafh6x5s80vhbn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmU4ZDYyMzctMzE5MmU2ZGYtMjlhMzZjZmUtN2M3ZWYzM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.966902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr0vqn03by5dvj6j3e95d9jq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI2MDMwYS1jNzI0NDYyZS1mMmRmN2YyZi01ZjRiNmQ4 ... 6:56:51.837662Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr0vqqsvdtb34dbp963exkdg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzlkN2NmNTAtYmUyYjg4Y2QtY2FjN2ZjMDYtOTY0OGY4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.908879Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr0vqqw20df920wx38qg093g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA5Mjg2ZDEtZjMyNjg5YjItN2I0YjM3OTUtODRlMDAwYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.979526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr0vqqya0ws383zxw22p9vnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJkZjliOWEtNjVhYmE1Y2ItZDU2M2Y1NGEtOTFiNmI5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.043364Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr0vqr0g22t5rxx70q0z1jzh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJiNjJlYmQtZGFlZTc1OTktNjBmYjExNmYtZmY1ZjFmNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:56:52.092871Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jr0vqr2f8kdp0mctazfpw773, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE1MjExYjYtYjY3ZjA2ZTEtYTM1NDkyNmEtZGQyNmJjOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.187043Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr0vqr41c8r9sfv0ezajsyrx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTUxNjFjODAtNTcwYTc5OWMtNTA1M2I0NTMtNGNhMGNmYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.257944Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr0vqr6zecdh1dj7wbthkh3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVhN2E4MzctMzUzNGFiNmItMzRkZjBkM2MtZjNjODE5MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.328902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr0vqr961gh8x6rnv30rf1hm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNiYjc0ZjAtZDAwMzVhMDItN2VjOWNmOTgtOTJkNzY4YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.400449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr0vqrbd08qgvp1cc76jnjag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFlNDk5My1iMjQ0ZGZhNC03NTg5MDE2MC04NTNmYWE5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.470053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr0vqrdne0avjrn4zr2tv6v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGNhNzEyZDUtNTk1Y2ZmZjMtZjJkNzM1ODMtMWYyZjk2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.539953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr0vqrft69xd1xvma6t2wxwv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmUxMmI3NGItNzYyNmJjYzItNzYwODFlYzYtNmYxNzBlNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.610778Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr0vqrj02ces7cgmg4hh0djd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI3ZTA2ZDctOGRmNGUzNDYtOWJjM2I2MDQtMmZmMWRiMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.675624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr0vqrm8egmfy6nz0qxqd6mr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI5OWQ0NmEtYzQ3MDM1OTMtODFmZWNiNTItNjA5ZTFhODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.745535Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr0vqrp8099qmkhab5avpn5r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODIyMDY2MTQtN2M1MzBjOWMtODEyNGQ2NjEtYzIzNzVkNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.835977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr0vqrre61h5yv6w82672kvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQzNTBiNjgtOTE3NjUwYTAtYjQxYTNkNzktYTYzODRmNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.921803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr0vqrvp8hdsf8hygxgchhgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTcxODAyNTItYzM0N2YyYTMtNjBhZjE5MWQtNzZiMDlmOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.994197Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr0vqrxz3wmcgdcbew2rm8hk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM1MzRkMTEtN2YzNjkyM2QtYmQ5MTk4MmEtODk0MDYyYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.066012Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr0vqs070cwqa0hygkzvx54b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzEwNTFmZTktZTIxZTY2ZDYtZTU2NWQzNWYtNTc2MTA2NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.135903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr0vqs2e1mptp760smy1jp6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFlY2EwMTAtYTBlZDcwM2YtY2YwNzM0ZTktYzYyZWRiODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.201895Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr0vqs4m198cvxxcf6sxrt32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI0MzA1MmQtZTFmZDkwNmItN2I4Njg0Ni1mMDVmMmFl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.271192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr0vqs6n4tq7qyvpe85tvh52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVlNWI4ODgtN2M0YzhmYTAtYjMzYmQyYjktODNjZDVhM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.354417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr0vqs8x8cesarvsa0gw4hzc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjlhM2E1NjMtMzdmMTgwMjgtNzZkOWRlODItZmVhYWE2YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.424536Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr0vqsbgc1a3tcn4rernqf5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ3MDZkZjItNjFmZDJlNzUtMzkxYjczYzgtN2ZmMjhmMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.490779Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr0vqsdn6pn8m8cnhmyntgc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDZmM2Q4ZDUtYjI1MTYwY2QtMzNlNjVlNWItOGQxYzIyMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.585018Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr0vqsfq2hd2hatq7wrmjr30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTAwOWRjOGYtZjQ4OGNjMDAtYTAwMjBjY2QtM2U4ZTg2YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.651174Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr0vqsjn3ewrbjsn4s0st4np, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNiNzk5NTItOWM1MmU2ZjEtNmRhZDIxMTctMmY1NGEzMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.715643Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr0vqsmq5260pq00mz6bphyf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI1MzQ2MTQtMTE3NWFlZjItMmQ0ZTJlNzMtNzUyMmVjZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.778785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr0vqspr6m67ftmf2jq5mvn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUxNGIzOTEtYjU4ZmM5ZDItYzcyNzFhMWUtNTM3MWI5MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.845166Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr0vqsrq0bbnjt4xte60dd0a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFiMDg0YjItYmI5MTIzODMtYmE0MmZjYzMtOGNlOTVhZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.915817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr0vqstt01pew4hbcy0s2dmj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDFkM2EwY2UtOGIyZWI1YTMtYjFmZjliNjYtNDIxM2U2YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.981164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr0vqsx11dfc88jb18x8egjt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2MzZTZkNTctYjdkMzgzOTYtNjMyODlmNmYtOTJmMmZmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.048553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr0vqsz1655qvhqkbjmxzwye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFjNmFjZTgtNGQ1NmVhZTktZmM4YTNiNDAtZTM4YTViZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.116227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr0vqt15c7tabs6t5qfysefq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM4ZmM5M2YtMzg4ZmVjOTMtZWU4Yzk2NjAtMjIxMGE5MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.213752Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr0vqt3954v32qx0mqsymzqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQzNDI4NzItZjNkODNjMC0zZTRlY2Y5Yi1jNjYwNjA4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.279485Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr0vqt6a1rt9qgkd0bh0a9kq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU2NzhiMzItYjcyMGE4NmUtMTVkZjE1MWMtYjNiMDU3MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.515109Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr0vqtbveaewnq2v03s694h0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdmOWVhZmMtNTNlYWNhZWQtMjY1YTFiZjEtYmRlNmNiYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpPg::NoTableQuery+useSink >> TxUsage::WriteToTopic_Demo_21_RestartNo [GOOD] |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> KqpPg::CreateTableSerialColumns+useSink >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink |78.8%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |78.8%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-04-04T16:56:45.353417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:45.353900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:45.353959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e7/r3tmp/tmpoUwhui/pdisk_1.dat 2025-04-04T16:56:45.745248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:45.786136Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:45.826397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:45.826556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:45.838950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:45.927549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:46.326982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-04T16:56:46.614273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:815:2674], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.614453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:825:2679], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.614910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.620396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:46.778022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:829:2682], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:46.832394Z node 1 :TX_PROXY ERROR: Actor# [1:888:2722] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:47.128830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vqjrk73jrp69ryxr3zpzc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRmY2UyYTItOTk2ZTY5ZmQtY2MxNzJmZjMtM2NhZjJlMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.214737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vqk975f19qe80tcd6ztj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWNlNDllMi01MjQxODRiYi1lNDcxODJlNS1hNGRkNGIzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.291268Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vqkbm3ytnmq2w6r7dhcf5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmFhMGQ2NzMtYTEwZTdjYmUtODYyMGFlOTktY2U5ZGVlMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.368101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vqke08wefn52t35mermwc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMzMTE1MjUtZmJjMTQwYzQtZTQzOWVkMDItMWYwMWFkNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.445314Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vqkgd1nfbfgv01f2d0mm3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIyZWIyZmItNDljNTZhNDUtOTdkNzhlZjUtNDU4ZjA4OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.518121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vqkjtdybe2z91hy64cnne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc3OGM5YzMtNTljZjdiYzktZWY3NTM1NjYtOGFiOGZmNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.591060Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0vqkn3fdc83q1nnanddgb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI0YWVhYzQtZWNiZmU4NDQtNWQwOTc0ZGEtYTBlMTE5NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.663586Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vqkqc539hwjqyv7kb7jfd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFmN2FjMWItYWVlODE3ZGUtNTQ5YzU4NWMtZDJiMzQwMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.736706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0vqksmbdryg64h0gcmpvp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDliOGEyYjQtMjJlYWUzNmEtZjAxN2FjZWQtN2QzMDIzYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.809755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jr0vqkvxa01gt23qbq930bba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNmMjMwNmEtNTdkMGI3ZDUtYmUxMTMyZmYtMmJiMWE5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.883068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0vqky71yqh48d66apcywmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmUyMmQwOGQtOWY3MTU3OTQtMzE0OTgxMGEtNThmYWY2ZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.956169Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0vqm0ff87kt02ecq1k4jjg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWViMmJiOWMtMjUwZjcxNjEtOThiYjQyMzYtZTA4YTlhYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.029200Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0vqm2s9x87rz043vhn4rmr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVmZWY5ZDctYTM3MzcyN2ItMWI1MDQzYTUtZjkzZTIyZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.103018Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr0vqm52eawmaayjfdc9rtrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3Nzk2OC1kNGZmZTY0NS1jYzJhOGQyOC03NjdiYzc4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.179575Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr0vqm7c7tes5ht1bt3rqg9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk1NTk5OS01YzhhMDg2Yy00NjEyYzJjNC1mNjZlZjNkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.259697Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr0vqm9r39jg78nbcp49czv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNlZTdmZTctNzZiZGU0ZmMtMTIyNWVjNGYtODA4Mjg2ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.344133Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr0vqmc8aqd5sscd260jstf0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQwZmIxN2EtZDU5NTkxNzktOGFmZjYzYzMtMmE1NmJmNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.421979Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr0vqmexa4qwrwtpe2v8x3e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc3YmQ2MDUtYTA3NGJkMjAtZGE1YWJmYjEtZWUxZjdhNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.502165Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0vqmhbeg335wd9w0m2yf9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ1NmI2YWItNzgyYmQxM2YtN2JiYTMxMGUtMjExNWU2N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.575522Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jr0vqmkv4vfddpsqmxdkz99t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE4ZGQ4OC1hZmMwM2Q4NS05OGRjNzZjMS05MTE4NTA1ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.643534Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0vqmp4epapv2hvp74e14r6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjVjYjc4NGMtNGY1YzFjNzEtMzdkNTkzZGYtNjNkMWYxZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.708145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0vqmr7dv6fy0v66tc2fj1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzAwYjI5NzUtNTFhMzg5YmMtODBhNmQ2NDMtMjU2OWEzYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.779359Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr0vqmt97nj0hz5vn3md2s93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzk0NzJjYjEtMjFiNTk2YTYtZjI4NmRhNjAtZDNhY2ZjMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.849807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr0vqmwgdavj8ffa28wmebc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY0NjNlNDktYmZlZjkyOTItNzI4ZTQ5OS00ZDI2ZmE4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.910889Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0vqmyn00ynvv758c5ejp02, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQyYjlhMDQtMjVhOGQ4ZjktN2FiYzc4MzYtOTRlN2EyM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.98328 ... 317954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr0vqr8kb60588ngcp2sge26, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2M5YzhjNzQtODAwNTMxOTgtZjRjZDVhMzQtN2VlYTk1YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.389346Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr0vqrb35z24qb11qb7nc6s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkwZDNjNGYtNmRmODE1OWItOTI1ZjVmOTctODg2ZDVlYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.462114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr0vqrd9f6eet90sdsqgenn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQwZjc0ZWItNTYxMjg5NzctZTk3ODFiNGMtOWQxYjg5M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.538785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr0vqrfk00t1dv0x9pjhd6pa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkyZDRjMzItNTM4ODg5ZTUtOWExNDFjMTUtMTg4YzFiYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.625090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jr0vqrhy4j72yfdkh7aet0zj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAzNmE5MDAtZmQ2ZjQwMTEtYTEzZTRjZTItYmY0ZWExYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.714950Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr0vqrmpfn58ydva0ad94se8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzExMTZjNWQtMmM0MzhkNWMtZmE1MTNhMDctZmNiZjQ5N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.790859Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr0vqrqg4z5sm5v6phtkkvtf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDZlODFkNmUtZDVlYjRmMWQtOTI1NzE5ZjctMTQ5MDYyOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.866709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr0vqrsw82jtp9jjvf6w9efn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU5Y2U2M2EtZjU2YWYxMWQtODA4ZmQ4MWYtNDlmNWNkMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.943854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr0vqrw86gj6kexevbhf045q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAxNWQyYjItMzYyNWEyZWEtN2EwYjI5ODctYjE5M2QzNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.018427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr0vqrynfc4pffyqkdpx9yy5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE3Yjk4ZDItYzc2NzRhMzUtZWVkN2E3NWQtMTI2NWM2MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.093711Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr0vqs0z9myx6hjv9mcz156c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhlZjhjMTctYmMzN2EzNmMtYThmODY2ZTYtZmRkMzJmNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.160786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr0vqs3bfkdt2pd7e0rr7h2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ5YzQ0MGItYjMxODQ3ZDQtNzk0Y2U5ZDktMjdlOTUzYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.222430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr0vqs5d8adyaa4c3ghzn5xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk3NjE3NmEtNTYwMDBkYjgtN2I5Yjc3ZWUtNzJjYmZkZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.299925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr0vqs7aa581j11mfhrmbn4h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM3MWZmYWQtZjIwNzA2ZDctZjY1ZTQ5ZGItM2ExMDMyZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.404781Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr0vqs9t7rzxyg367dzceka7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjhjZTJiMDAtZTBiMTM0ZjEtYzk0YmMxOTgtNTFkNWUxZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.477009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr0vqsd2ft7jdvbdhn2xsvpt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjExYjdmYTQtY2Q1NDRlYmMtZjcxNDI3OGUtYmFkZjhjN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.551240Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr0vqsf9e5yhw8bjx1pqrgxs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2YxZmFhZi1lMDcyZTI1NC0xMzgyNmI3MS1mZjBhODQyNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.625411Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr0vqshm6y1j4w6rc783cg8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQwZDk2ZjAtOTk1ZjVlMjYtYmRjN2ZiMmYtOTcwOWE3ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.699563Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr0vqskyac5p8tqn9hthydep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIyYjI1ZjQtMjJiMGMwYzgtM2NlMTdmMS04NDFiNGNiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.775785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr0vqsp9deb7rsqfh5k30kpb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg4NWU0ZS02OTdiMDYxZS04ODBhMzgyOS1lMTFlZjEwYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.852575Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr0vqsrm01fxgprvw1y83ed1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRlMjQ2ZGItYzVmOTVmZmUtNzQ2NmVkY2ItZDM3OTliYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.938056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr0vqsv21e400n68k5ra8v2z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNmNjVlZGYtZDQyM2ZjMjEtMjQxNTI5NjUtMmE3MjU5Zjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.016812Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr0vqsxq4b4kfda7a299s05k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE4OWJhMDYtOGIxYTY2ODktNmYzMmIyZDItNTBmN2Q0MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.085228Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr0vqt0617k73jew393tq618, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2FmODA1ZGUtZTA1MTNmNjQtMThmYjFlMGYtOTIzNDEwNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.193132Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr0vqt2a2mcyy8waje2nd6jc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQ5YzYxZmYtMjZkMDgxNmEtOWM0ZjU4OWQtMTYyN2RjNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.267791Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr0vqt5p533fn9eyy4qcvpyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ3MTE0N2EtMTJjZjY5YTctYmIwNzE3MzQtYjNkZjZhODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.343850Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr0vqt8056cc9tbeyk5mbwdw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDUyMDkzYzYtZGE3MTI1NDQtNDQ0MmM3Zi00NGRiNGMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.406531Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr0vqtac7kdc5ern48khzfjy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQ5Y2NjYTUtNGQxM2M3MzEtNGQ5ZjRiYmItY2U0MTgzM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.473985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr0vqtcb8988sfqssddkntbs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg5MGY4ZDktNWUxNWRjMjEtNWYyMDAwZDMtNjUzZjA1Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.547397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr0vqtee0zz4ama7xxvnpfjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM1YWExMTItMWE3NTgyNjgtMjI5ODRjMTktYzJhNWZiZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.621567Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr0vqtgr7x3dc4gb21fgpv2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDUxZjY0Ni00YzBkODA3YS00NGQ3MmQwZS00YTA5OTViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.695790Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr0vqtk383b27z44x1syea45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTBiNzhhMzEtODZiMDYzYmItNzFkMWNiZmEtZjZhY2JiZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.775554Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr0vqtnc32gwvt5p3w5spkps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc2Yjc4ODQtNzk3OWFkLTYxZDY1MmU1LWY5NTU5MDVi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.911594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr0vqtqx7e2dw2gkyes5641v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGMzYmQ1ZWYtNjJkYWM1NTctMWM2OTNkOGQtOTM1ODUzMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.014596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr0vqtw51znx3ekazprpa78y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JjOTMxMGQtNzVjMzEzMzYtYjFlMDcxZDEtZjA5MzQ3OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.278976Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr0vqv38e1yngjpxjmwm31c7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTVlZDQ3YWItZjVmYzczM2UtMzNjMTRjYTktYjJkYTk5Nzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-04-04T16:56:45.586092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:45.586702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:45.586785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016dc/r3tmp/tmp0zAiZW/pdisk_1.dat 2025-04-04T16:56:46.012889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:46.062620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:46.107153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:46.107326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:46.119377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:46.205377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:46.530154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.530336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.530425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:46.536544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:46.715014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:46.897109Z node 1 :TX_PROXY ERROR: Actor# [1:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:47.231980Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vqjnz0r41gsjbvxp36hm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFlMmU2ODAtZjQwNDc0Y2UtM2FlZmIwMy1hOTkwODc4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.316123Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vqkcd3467kke61cktqg9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRlY2U5NzItMjAzMmRmNTAtYWNlYTkzMzItZWZjMmU3ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.391586Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vqkesf2theentzp7nkj8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTYzNzkwZGMtZTBjMWIxMzctYmQ0OTAyMWEtOWUyYTIzMmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.467749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vqkh40wf538nbsqea4hxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQ2M2YyZDAtNDY0NzkzYjctOGZkNDgyMWUtNDQ4OTZmY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.540620Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vqkkg7zbdf8x2678wsxen, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE5YzMzNzEtMjc4N2RmOTEtNzY2MjY2MDQtYTA4ZTk2MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.610409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vqknscfsenh32nearn8cf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgwYjIwOTgtM2FiMGYwM2EtNWU5NmE3NDQtNjY5MjM2ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.681461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0vqkqzfs89wy46e2v2mwm3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4ODY5NS0yMDUyNGZmYS0xYmVhNzExNi00MzY1NjI4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.756197Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vqkt669tgm14600wwz437, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk3MGY3MmEtNmM2YTI1MGMtY2MzYWJmYTEtYTdlMzI5YjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.820318Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0vqkwh33fmmdnn9emh95bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMzMDk5YWItNjQ5YjdjYS00MDVhYTRlNC1jNTc4ODlmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.903764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jr0vqkyjewr56d5x394fep3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJmY2Y4NzMtMzc5MDg3NTctNmFlYTA4NzQtNTljZGU2ZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:47.981789Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0vqm1508pcqyc23jpzfqew, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVlZmMwNTItMzgyMTgzNTUtODBmYWQ4N2QtNmE0OTllMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.058201Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0vqm3jddkd5yqaehs29fbt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY3MjhjNzItMjc5MmY1MjctNmY2MWE1YWItNjM0N2E0ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.168448Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0vqm64c1xa18fe6tgyg1jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI1MzY4YTUtOTc5ZjNhYmEtNjg4NmZkNmMtZTFlNzA0NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.289862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr0vqm9e143s0kqb4nq60tjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjhiMGQ3MzctOWNlY2ZlZjEtNmRlOWUwMjYtYWIxNDc5NTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.383405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr0vqmd6cnrnxjesfbq4hwbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjM2ODJmNGEtYjM4MDQ4ZDktODZiYzBlNS04NjgwYjJiOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.464039Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr0vqmg45ee5t80qa9ewpbh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjlhMDgyZTgtZjE3ZmVhMzAtZTZkZjAxYWYtNmY4Zjg5MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.545646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr0vqmjn2x4nrbc3d7jn31a0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFhYmQ2MzEtYjUwMDJhMWItNjQzZGQ2OTgtOTMzM2IzMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.616498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr0vqmn69jmm4p661ds7d6sc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBkN2VhNy0xY2QzYmY3LTQ0YzdlYWVmLTJiY2YyODVm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.692093Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0vqmqd2f68j3ehspmmqc32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZiZmZjNTUtZDlmMTIzOGYtZmU4NzI1ZTQtZjNmNjdiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.771143Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jr0vqmss2b8w887hq8sbf0f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg0YmMyZGMtMmZlNjVlZmItNGM1ZWUwNzctMzM2NjRhN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.847284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0vqmw852pmtpg5rgybyav5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTEwZWJlZTctZjY5MjA4MzgtZjA3MGFhNmUtODJkMzZkYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.922513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0vqmym3qmqnvsq5twys816, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzc0OTg5YTEtNzBlNWRkMDMtNzQ3ZTc1N2EtNTk0Njc4YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:48.996595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr0vqn102gxk1gzyswz6mdkj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTJkNzk4Zi0xOWUyNzM0ZC00OGQyYzMyYi1kOGZjNDY3Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.075850Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr0vqn38691yqnyab7pzd8ew, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2VkMDYwODgtOTFmN2JkOTgtNzk5ZmE5MzItOTRiZjZiOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.151410Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0vqn5ra40vbnxye90njqgk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUyNDU5M2MtMjE4ZmE5MDQtOWI5ZDEyZDYtZWFhNDY2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.225359Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr0vqn830e7hmy9b797c547p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg5NGEyYzYtOWRiMWZmMmUtZGZkZTJlMGUtMjc3OWZlMjg= ... 488216Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr0vqre409b7wn04xse3bd0a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAwMTUwN2UtMWMwZTJhOTItYWNkYzY5OTAtZTc0OWM1YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.564486Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr0vqrgd05wz15s03f2sggtt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg1M2Q1NDUtZWQyMjNmNzAtMjJlNjk3MTAtMTdhYTM0MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.641804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr0vqrjs98ea46hfvc2nkybs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU5ZWE1NS1jNDQzYTI1My0xNzE0NTc2OS1lYTA2M2M3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.716994Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr0vqrn7613na9apr7c8akh0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmI2ZWQxOWMtMWI2OTkzODItZjNiNjNlODAtYzJiMDExYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:56:52.790883Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jr0vqrqj6ahckqyeyg4b4azm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzU5MzQxNGMtMWRkOGVmNTQtMWM2OTRkOTctYjc2NjRlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.888276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr0vqrsv09jvcfher97zvpfq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE2MWJmNDEtNmUwNDcwZjctNGQyMDdlN2QtZjQyNGY3OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.980516Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr0vqrwy1ex0f9es6nzrnne0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjlkNGZlMzQtYTFjYzVjZGQtYTg5OTdjMjAtYjBjNjAzMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.090544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr0vqrzt330jkrm9s2fstmyv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzAzNDAyMjEtNjRkN2E1NmUtYjM4MTdmNjUtMjYyMjY2Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.175489Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr0vqs393z7hpk2qew3cn7kr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY3YmZjOTEtODZkN2VmYWUtZDQyYjlmYTQtNjBiOTNlNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.256833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr0vqs5xezkgg5b8vh53yrn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjkxZTI2Yy00OGJkYTUyOC0xN2NjZTQ2Yy1hMjBhM2QxMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.359674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr0vqs8e7y4tqgphg3n51wgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRlNDczMGYtNWM0NzVhOGQtMWRlMDJhNjgtYWJiMjFmYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.442523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr0vqsbm8s19mvn1hx1s7h2y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViYjVjM2EtNWVlNmNhNS0xNzQ3MjFhYy1jYWQ3OGRjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.523595Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr0vqse70wf0b92tbfrrq4sx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGIyOWE3YS1iYjY3YjY4Yy1mMjQ3YmRjMS02MWViNWFjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.600666Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr0vqsgrejpgj1wycmzv04pk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI3OGM0NS1lYTJlZjU2My02Zjk3ZDYxYS03MzBjOTllYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.683612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr0vqsk70ypcqhwys2ft7vc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGVmNzU4Ni00ZGNhM2NmZi01NTc0ZjZmYS1hODk2MzVmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.760987Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr0vqsnr8me2wn3s368tca9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNhMmM5M2MtZmM3NTNmMTEtNmY1NGI0YzQtZDY0YjFlNTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.876659Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr0vqsr60xw67ysbwhrjefag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE3YTkxYzYtYWQ4ZWNjMGYtYWM1NjMzNGItOTBmMWZjYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:53.983465Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr0vqsw3fnt45jbeadg63q8q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjZmMGI0YjktYjI0NDRiOTgtZjNlYzJmZDktNmJiYWVlOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.060908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr0vqsz3dya4dqspme1r0vfk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE3OTYxMzUtOTIxMjgzMTgtMTEyZjVmNjgtMThlN2QzNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.164717Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr0vqt1j5cdvqr76g6v2e7at, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkyNzJlOGMtMzI0YjE0MDgtYjMxNTRlZGUtYWEzNjVlYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.234094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr0vqt4s88h3d375pa28ybe1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWUzZWI4NjItOGMxM2ZmMGEtNGVlZGYzYTItNGY4YmM1NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.298723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr0vqt6z03nk17hvrbgpgea6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ2ZmMyNDQtNjI1MmI4ZDktY2UzY2MyOTctODZhYWRjMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.359496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr0vqt8z6mwsr3jb6a4szr9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU5N2UxZTUtYjgzZDYzMTYtN2MwZDhlYzAtYzFkMjk0NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.432384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr0vqtawdqwp9j8c7fqxcnb9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVlYzkzMTMtZDA5MTExOTgtOWMwZWIyYWQtNDcwMDkxZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.503089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr0vqtd5bgcgz5b3sk4mqv4k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgzNmNkODItOTRjODA4NzktYTVlOGU3YTItYWUzMWNhZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.573371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr0vqtfcbejb6z54s4kq5jbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzRlZTM5YTgtMzhmOTk5NDMtZjE4YWJhMGMtOTNjNjBhMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.649523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr0vqthjazd1c6aehhynkgpv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZjNjk3OGYtNzQ5ZGNiYS1kNzA3M2I1MC04NDNiODVlYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.793090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr0vqtkz98kjd6mm3ph1e2jy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZjOWQ3ZS02N2UxZTRjMC1hZTU4NTRiMi0yNDMzOGI1NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.872092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr0vqtre0kzch1w3t2d17cfz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRjNjE1MzItZjk3YjRkNWEtYTVhYmNiY2EtNmVmM2UzOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:54.951309Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr0vqttx1m1erzb13e6tj423, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRiMDk3YmYtYTNhMWVkMjctZTJmN2UwNDgtZjBkMjQ0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.034471Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr0vqtxcbberwc1yxwmqc7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UzODMzM2EtMjRjMTIxYzgtNDdiNWViLTdkOTM4MWQx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.115195Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr0vqv001mqva6z5kjkn5278, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTFlMWUzYjItMTI1N2EyODUtNzFkZjJhYTctYzVkNDgwZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.192637Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr0vqv2g4bc9sxwzepdfb4nq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVlMDc5MjktZmZkNDhkMDktYTgzYzE3MmItZmQ2NTkzOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.269047Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr0vqv4yfasewaa3m8bte5wr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVhYWUzODktNmViNWMzODMtNzc2MTYyNmUtNDhkMTQyZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.347023Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr0vqv7a13hzrjh575rw75mf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE0ZGYxMTUtMjM1M2VkOTYtZGY0MzU5MTUtMTZhY2VjN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T16:56:55.495117Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr0vqva38qdx7ty01kt2bb8j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIwODNhNzctZmE4ZDAxMjctNTVhZWNkZWQtY2FmM2E1YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit
|78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator
|78.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator
|78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator
>> TestProgram::CountUIDByVAT
>> TestProgram::CountUIDByVAT [GOOD]
>> Cdc::InitialScanComplete [GOOD]
>> Cdc::InitialScanEnqueuesZeroRecords
|78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest
>> ExternalBlobsMultipleChannels::SingleChannel [GOOD]
>> BasicUsage::MaxByteSizeEqualZero [GOOD]
>> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Aggregation","input":"2,4","output":"10001"},"fetch":"2,4","drop":"2"},{"processor":{"internal":{},"type":"Projection","input":"10001,4"}}]};
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE;
>> TestProgram::CountWithNulls
>> TestProgram::CountWithNulls [GOOD]
>> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD]
Test command err: 2025-04-04T16:56:47.743350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:47.743760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:47.743828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d1/r3tmp/tmpMVoxnb/pdisk_1.dat 2025-04-04T16:56:48.118846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:48.162121Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:48.208820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:48.208997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:48.224306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:48.312475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:48.660143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:48.660249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:48.660321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:48.666603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:48.838352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:48.957774Z node 1 :TX_PROXY ERROR: Actor# [1:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:49.272842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vqmrje2xybpz292psvp0z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNhYjEyZGMtY2FmMzQxMDItN2Q0NjdlOS1lYjYyN2NhNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.362208Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vqnc75h7cz5rv4xcdxq11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4NTNkODQtNDg4ZTM2ZDItODA2NDM1ZGUtYmI3NjIyNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.445078Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vqneq8e4fx3wf8jn9rfqb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdmZjI4ZmQtZTg3NWIxZC1hYWQ4M2EyNi1lZTU1YzgwYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.523304Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vqnhabanj5aqqcqghqcv6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IwMmM4N2EtYmY4MGQ1YjktZTgyMDNiZGUtYjc3Y2M5NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.615096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vqnkr57591a16z4tftz67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2JlYTg5MC1iOTI5NmQ4NS1hNzViZTc4ZS05ZDIwODFjZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.688296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vqnpnfwrvhat78d0m86ct, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM5MzRjMDAtYWE5ZmNhOWMtMTFmZWY0NzctZjU3Y2E4MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.759299Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0vqnrx1kajdqfstaz9rbr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYwOTYwMjQtMWYwYzM4YzUtMzNhNWRiMWMtNDU4NTVjYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.827733Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vqnv33cwfgjzg7me0yb6m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWY4ZWRiNzAtNTllOTQ5ZWEtMjQ0ZDY2MTItMjZhYTA3MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.902289Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0vqnx97gz1fpqcvg5q8mbt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGMxMzAzYzQtMTM4NDkzZWQtZTJiNzJjOGItMzk2MTFmODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.977172Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jr0vqnzk7yem5wav60xhyqzy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNjMzUyZDQtZWEwYzdlMTItZjFhZWNjNDUtNDA0N2FjMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.053600Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0vqp1yaaydhvxn99vpyc1g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjkzODMwNDAtN2JhZTg1ZWUtZDNkNTU2ZmEtNDZmM2I5YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.127750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0vqp4a67tpyetm6640e7rj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM5NGY5NTUtNWE0OGFhZTgtOTdlYTkyZWYtMzY0ZDk1ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.197135Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0vqp6m50ew7qt3rywy9qkp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA5OWFiN2YtYzQ4YzlkNi0yOGFmOTk5ZS01OWMyMmEz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.277000Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr0vqp8tanrg08945fxyn5am, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMzM2Y0Y2ItZGVmNjkzZTItYWMyOWJlNDItNGUzMmNlODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.357313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr0vqpb9cerrk37ca227xcn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRjNmQyMDUtNjdiZGIxZTEtM2ExMjMyYTUtZWZiMmNhY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.434285Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr0vqpdt6nh2g94fegx69pt0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY2YmEwNWEtZGQwMDcwYmMtZjI1OTQ2YWItOGRmMGU5MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.513594Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr0vqpg83e8c06wc0hm7szgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk0MDgwYi1hYTY1ZTI4Zi1jNmZiOTZjOS00YjdkMzcxOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.599437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr0vqpjq7v8svetff0advc83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQxMjhiZGQtMWNiYWRiNjQtZGRhMjY3YWItZTMyMDAwNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.681666Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0vqpndbvpvq2mnx3p3jqgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWViMTBkNjMtYjNjNzk3NDctNjdhNTM4YWMtZGY0MjZkOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.775313Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jr0vqpqy3qz963a5q54z4cwz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRiZDAwZTktYzcxMWQxYzEtZTRjODRlMzEtOWRkMjIxOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.864923Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0vqptwfcmn3018cheq88zq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2VhNGJkZGItMjNjZjM1YjEtNTg0MTFkNmYtNjZjZDc5OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.936768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0vqpxp1haakppfpkhp8dcg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQzZjA3OGQtZGVmYTZlMDEtN2U0YzFkMTMtZmRjZTFjZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.047947Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr0vqpzy4vz5r97ks1cnfpwf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI4ZWMxZDItNTQxMTQ5ZjgtNmVkYjhmYS1lNTA5MGVlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.167752Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr0vqq3hfvrgmjm43qvypzft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RjODY5ZjQtYmEwM2IzZTUtYWVjYWVhZTktODRlZjVkNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.370930Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0vqq7939850k198c889hjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE1MzRiNTktNTJjMDA4ZDctODIzZDg0YjEtY2QzNTc4MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.484792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr0vqqdn1fz5t3q392zk0k67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU1ZjllMTMtYTM5NWI4ZGUtZDRjNTRhMDItM2ZjNDNh, Cu ... :55.357646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jr0vqv7b3jaxwk3p9xt9rbq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk5NjkyYmYtNjY2MTY2OC1iYTVhZDUwMS1jMzVlMzRjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.447128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr0vqva38qe30zsh5fwdprk3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY5OTVjOGEtMmFhNjA2MWUtNjY1NTM1M2QtMzcxYWY5NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.538923Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr0vqvcx552drq9ekmb1j2a3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWIzMzdjYWYtNDIyMmRkYzctNWJjMDVlODktY2Q4YWRlN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.631157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr0vqvfs47gmwqh5420dxazk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY4Y2JkNTEtZTM4MDBiODYtMWNjZGY4MmEtNTMyNjVmYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:56:55.749344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jr0vqvjp2hybv6xrffyx2cwz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc4YmQ2MjgtY2MxYjdjMWItY2Q2YjFhMDktYjM0NDZhYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.835391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr0vqvpbe8bp0rnadjw7zya8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjViMmE0MzAtNTc3M2QxYzMtMzQ5ZDkxNDEtMjU3ZDFhYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.921287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr0vqvs1bvnf1kjj6wte4b52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU2YzJmOTUtODgxMzE1ZS0xOWFiMDk5ZS03YmMzOWFlNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:55.989358Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr0vqvvp9t97eppsrf2wrjzs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBmOTdiNjEtZjU0M2Y1MC0xZmY1OTIxZS1jNGQ3MTYxMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.054076Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr0vqvxsa4hj0kqnna7mdfmj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2QwNjA1M2EtZmFjYmMzMGMtYjkxY2Q2M2MtOWUxZDg2MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.117131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr0vqvzt9ysqywcrqcxwc2aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg4MTA5ZDQtOTU4NDU2NTYtNmRlY2FlMDQtY2E5NDQzZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.189703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr0vqw1t02r7zqehk2189t4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE2NWVkMTYtNWE5ZGY0Y2MtODk0ZmNmNWItNjkxNmEwNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.263944Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr0vqw423fxf8y7n8cq84djq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzFjMGQyZDEtOGMxNmMwYjctZmE3OTg5ZmYtNDk3ZDkxODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.332613Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr0vqw6c615neehdkv5vj9ka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM5NmVjMy1mODI2MDQ0Zi0yM2JkMTE3Yi00Y2UyY2M2ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.403731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr0vqw8h1217yqcqdd4k50nn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAyOWQyYjQtNTI4ZTRiNWQtZmUyYTI2ZDQtNjVjNjBlOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.490305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr0vqwasekf2kmvnwjns4g26, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhiNjIzYmYtN2ZjZjcyYzgtNTM2MGYxMDItNzljYTMyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.565077Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr0vqwdf71852apj0aen9knb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg5Y2UxMGQtNmYyNjg5NmYtNDhkYjI4ZDgtODg3Nzg4ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.641544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr0vqwfvd84x53k99gx5ha0y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJmNTAzYmItMjAzYmM2MzMtNWI4NjEzOGEtYzg0YTNhYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.709799Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr0vqwj7bs5c16v7877p397w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmExZjU2MmUtZWNlN2EyMC1lNmU3YzVkMC1hNzBmMGExOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.781749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr0vqwma2fz9ed1tephgca24, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE3MzUxOGEtYmE0OTk2NmEtZmVmNjI3M2MtZTg2NjZjNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.879800Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr0vqwpja3k71e8fmfzbkfd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQyNzJhYzktMTA0NjljNzItYWQ1M2IwMzUtNmJlYzBjZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.956211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr0vqwsn7yyg9e5gd175mvye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNiZjQyZTItZjQ3MjY5YzktOTgzNmEwNy04Y2NhMjRhMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.021995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr0vqww09ty7vy5vyrbzq42s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUxOGU5ZmMtZjMyMDFhZjktZjczNGM2LTU3NDQ2Mzg4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.141243Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr0vqwy33pb5hmbmjp24407x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThhMmY0YjQtMzI0ZmY2YWUtM2IyNGUwZjMtMjFjMTk5NmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.213087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr0vqx1t0p1fvsenwcnyk3pw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMyMzdiYjktNzVjMDM2NWEtYzYwZTk4ZTMtN2U1ZmUwNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.287786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr0vqx4262b29cv1szsn1qa4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNjM2VhNWUtZTZlMTQzYjctN2U3ZGM1OGEtZjJhY2U4ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.361285Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr0vqx6d7nq55ss9jjtxhmsk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE4MmNkOTYtNGIzMmU0OTUtMzA3N2U2ZTAtZDIzNTkzYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.435454Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr0vqx8p1mt7ymfz76nn8car, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYyYWE3MzMtYmRjZTBhOTQtMmM3NTk2OGYtYjRhMzVlNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.522310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr0vqxb0ct2hcgbbx10hgdnf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY0M2I1ZS05MDJlN2NkNy02ZWY1YWRiMi00NTA3N2E1YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.581194Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr0vqxdqft2nhywjjqa4x13y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M0MmMxYjUtNGYzMGJjZS05ZThmMjc1Yi1jYWI0MDgwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.651285Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr0vqxfg4wh5rae9wpem4zby, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJhYmZhMDctMTZiNTYzNC05YzMxMWVmZi0xYzgwMTU1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.723402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr0vqxhr39ej3t8308x3fr83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRhYjA2M2EtNzFkMjU3M2UtYWU4YTE2YjgtODFlZWFmNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.813704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr0vqxm04ynpdrapj2g1jzbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI2MjBlZDUtYjM2Y2MxYzgtNDJhMmQyNDctOGFmYzRlMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.012099Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr0vqxpwestwmb0saz0ncrt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmEzYzA5ZDQtNGEyMjMyM2YtZTA4YTYwMDEtMTVkZDk3ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.130425Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr0vqxxp9nrderjxsfhe6rr5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZjNzAyMDItYTI1NjJmNmQtNDAzY2JlNjQtM2Q4ZGFmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.206680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr0vqy0rde5yjrwny6xj6bxy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDljYmJmZDctYmI2ODdjMDAtNmRkOWQzMzYtM2IxZjU5M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T16:56:58.377765Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr0vqy3zatpesjzpvakdjsh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQwOTZkNzgtMmJkZGQ4NDMtOTllOTg4YjktNjRmY2JjZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> ExternalBlobsMultipleChannels::WithCompaction [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"2","output":"10001"},"fetch":"2","drop":"2"},{"processor":{"internal":{},"type":"Projection","input":"10001"}}]};
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE;
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=2;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););
|78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest
|78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor
|78.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor
|78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor
|78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut
|78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut
|78.9%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut
>> PrivateApi::PingTask [GOOD]
>> PrivateApi::GetTask
>> Yq_1::ListConnections [GOOD]
>> Yq_1::ListConnectionsOnEmptyConnectionsTable
>> TColumnEngineTestLogs::IndexTtl
>> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD]
>> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_Simple-useSink
>> TColumnEngineTestLogs::IndexTtl [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD]
Test command err: 2025-04-04T16:56:48.020640Z node 1 :KQP_WORKLOAD_SERVICE WARN:
[WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:48.021251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:48.021327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c9/r3tmp/tmpEMGJuO/pdisk_1.dat 2025-04-04T16:56:48.489172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:56:48.545536Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:48.585946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:48.586093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:48.600191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:48.683439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:56:49.005557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:49.005722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:749:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:49.005831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:49.012124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:49.183710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:56:49.296339Z node 1 :TX_PROXY ERROR: Actor# [1:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:49.627676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vqn3aa9x2f65pt49jechc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY2MzRlNTAtOWI5ZTI3MWQtZjIwZDExOWEtNzRmOTUxMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.719203Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vqnqjb5h9at45ncfvg97t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE1ZjkwYjktZTA4MjI1ZTUtZjMxMjYzMTktMjY1OTZjNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.799775Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vqnt650d5qzm2mgcq3tw6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE5MDVjMDUtMmQ5NWQ0NjMtYmViYjE3NzQtNDdlNjljOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.906376Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vqnwr2t704dvh1pcptxwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTY5Yzk1Y2YtNTUwODRmYWMtOTJkODgyZWItZjI4NzI2MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:49.989908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0vqp024ntc3v9k1erjd466, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNhOTg4OGItYzUxNzQwNzctNWU4MTNhNzctYmZhMWQ4OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.086527Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vqp2n091y3jk04ak2qvr0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRmODk5MjUtYWNkY2ZiYmYtMTZiMmJjZDAtY2Q3MWZmMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.182125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0vqp5pbeqysdh0fa7mv69a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ1Njc2ZjQtYmQ1YTg2YWMtMmFlZDMyMjEtYzI0OGIzNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.281064Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vqp8p4th9x8cv21j8jvv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg1ZDAwOGUtNGRjMGZkMmMtYzQ4MTZiYjctZjZmYWExNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.367460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0vqpbs4st7b198rethm3c6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQ0YTc3ZTEtNzE1NDMyYTktZmMzZTRiYmYtYjMwNWVmMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.457010Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jr0vqpee4339ck9banvy6yar, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBmZTJkZjUtNGZiNTc1ZTgtN2YwM2IxMTUtZGRkMDRiMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.543506Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0vqph9cmcyq4v60yncdctb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJiOTM3OWItNWViMjhhZTItZjI2ZmM4ZDEtNjU4ODZkOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.632639Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0vqpkz3k2kng46dg4rm852, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjVjNTgyMjMtZDA2M2IxM2YtNmYzZDU5ZjgtYmVkNTU1YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.727122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0vqppse3s0hkq2ab9rt2ak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRiZjI0MDMtNTM4YzE2ZTAtYWNhNDNmOWMtN2MwZTQzMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.858427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr0vqpstef3grtbjtgx5g28j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMwZDVjMjctNWI3NDBmZjItNTUyNzI1MDQtNzU3Nzc4NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:50.941704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr0vqpxr1rmtd6y9h87ed17t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM5N2Q1MS02MjhkZjExNS1hYjA4ZWIwMS04ZTc5MTEwNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.054761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr0vqq0mc436d7waajbqnj27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBjYjBiZTUtZTFjZWQzYjgtZThhYzFhNWQtODk0MTU4MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.218118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jr0vqq42fdx5smzm72x2yq1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWZkY2RmMDAtOGViNjg3MTUtZWE5MjE4OWUtODU3YTQ3NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.388843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr0vqq93fv1k0sf3n8cxf3h3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ4OGUzNC01ZjQxMTE0Ni0yMDNlNGRiLTc0YmIzYmUz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.476722Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0vqqeebxbs04j1th2mh3sy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg5YTkxMGMtZDFjNDA3ODQtMTg0NjlkN2UtNzAzZGJjNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.562217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jr0vqqh5501rs9a115px78q4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY1MzZjODYtNTcxNzFmNzgtNDQxZDQ4ODgtYThiZTFhMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.648329Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0vqqks1y464ehxrfs4nskz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmZiNGU2ZjEtNzViNjAwZmYtNzM0NzAzNC1iODhkZjJmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.731354Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0vqqpgaednr8xzkskhkknt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJjY2ViMi0zNGIxZjY2ZS1iNTc1OWQ4Ny00MzE2NTZmZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.813298Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr0vqqs3bgkdpsexe9p310mf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiY2JlMzItYTQ3NDA0OTQtNDRmNWFiOTUtOGUzMDQ3NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.893969Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr0vqqvn29c2kbw4x2xt31m9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZiYWYxNTItYTc1MWQzZDAtNWI1M2U2OTYtNjc2NmQ4YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:51.980193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0vqqy6a7gk18f0hb5wazcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RlOGQ3ZDAtMTllZmI5NjUtZDRjMDRkMGItYTAzN2VjNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:52.076939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr0vqr0x169085csrtwh23x7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE3MzhiMGQtOWMwMGQ0N2MtNDhiNzE5NDctY2M3MmNjODA= ... xOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.105763Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jr0vqvyz3xqvbfp987n33gv1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQzODA4NDctNjEyMzlmZTUtMTBkZWI1ODctNzNmMmY5MDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.190864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jr0vqw1vbymzdexz0babjnmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk2ZmY3ODEtODZhNmE3Y2MtOGE2MDI1Ni04NmJiMGM2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.286418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jr0vqw4jftzyr56e6cayqp08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EwMTI0Yy03OWZkNTNlMi0zYWU3ZDdmYS0xMzYwMDdmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.388227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jr0vqw7efq2bhwmrcp0dzp1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODMzYTdlZjQtNzI1YWYyZTQtNzljYzA2MTItODQ4OWFiZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.468208Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jr0vqwam95s0477e4jz8sk3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNjMzdmMy1hMjQwYTVkYi02YjdjODA3Ny1iMGMyZTkyYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.547945Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jr0vqwd3ak2cm4rn0pkzmqp4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZkNjAzODktMTNjMzcwYTUtMjg4YWVjY2QtNWIxZGYyYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.622540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jr0vqwfk9wscgch0kb8np4z7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI0ZWQxMTYtYjI5OGJjZjQtNTBhNmFhMmQtN2M0ZjFiZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.711367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jr0vqwhzctbzn2ztbjrgm9s0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU1NGY5OS0xYjE2Nzk1Zi1hODFmNmU4Mi00ZWU3ZjY3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.808626Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jr0vqwms4cpsefptscvp9bc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q3YjczYjgtYmE2M2ZjOGQtYTE2NTkwMmUtZjEzZjNjYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:56.935226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jr0vqwqt1sj54cfe0x9nn2dp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE3ZmI1Yi01ODc0Yjg1NS01YTM0M2VhYS04NzNjODE4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.021969Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jr0vqwvr8tqfktpky1qjbzpm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkzYmNlNGEtM2M1OTk2NS01ZjE5NTAxNi01N2E2ODYwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.109538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jr0vqwye7cfntgeq5bxtxzh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzkxNjJlMjctMTBmOGFiMTYtMzlkMjFiMmYtMWEwODVlOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.222947Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jr0vqx163yve6hc3x255rtbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NjZGI3ODgtNDhhZTRjYjQtYzQ2NTI4ZTMtNzc5MDMyZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.343023Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jr0vqx4t96763kd5srvbgxcv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhlNTU0My0yMWRlNDYxMy1kMGI5MjFhZS0zM2E4YmI0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.443203Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jr0vqx8j52zk68cs61by2qm7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgxZTU2MjUtMWY1OGM0ZWUtYTkzNzRkMjUtYjllMDE4MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.542160Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jr0vqxbmcbrtqtc12aavjy50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU1MGI5YzAtYjhjYTg2NTgtMmVhNTVjYmUtNDlmOTRkYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.650908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jr0vqxet3f5wg2f5q3t0y9t6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ3MzEwZjQtNGNkZWRiNWEtODI2MGQwNDgtNjVhN2FhZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.738128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jr0vqxj30hhcar763823mxt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI4ZDI2MGUtNGY4ZDY4NmQtOWY3MjQxMTAtNTAzMzEyOTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.825993Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jr0vqxmv0mm00rnw33qc548r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTIyOWM0ZDMtNjZlZjk2MTUtN2ZkZjUzNzEtYjY3ZWVmZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.910574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jr0vqxqjdg0dhs3dnv8eejk1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFhNDhhMTgtMjlmOTY2ZjgtMzRjMDVhYmMtODhmMjdiYzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:57.995904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jr0vqxt66j8464ppdvrfnv64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkwMTdmYmQtOGEyNTE3NTktMWMwNTA0ZGItM2EzNTE0M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.139420Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jr0vqxww8y5qc6x2rmjkwakm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIxZjk1YjYtNTY0ZjBhMjQtYjYzMzdiZTctMzA5MjM2MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.226278Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jr0vqy1bc171br901xs96997, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRiNWIxNzAtZWY3ZWNhZjgtNzZiZWYxZjEtNTUwNDAxYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.319321Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jr0vqy433gnpx7zhwsqz7rhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRkYzM0YmMtOGZmNWE1NDItYmFmYTA1OTktYTFjNDUzZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.398466Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jr0vqy6zfz8g0jpyxwy12c1d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZkMjY5Y2MtZjdlNGI4MjMtNGU5MWU0NzYtMTk4NzU0Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.475372Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jr0vqy9e45n340f6p8z2etna, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjlkN2VjYTktYzk5ZTdhNzItYjlkZTk2MWEtYzU1NTFhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.554925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jr0vqybt8drbjnpeasj0bp0f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFkM2ViNWEtM2I3Y2ZkZGYtODkyZDBiNC0zMzg0NmViZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.640561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr0vqyeb6e4a727d9mstayqh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJmMWZkMDYtZDFiYjQ5MWMtOGY2MmQ1NTQtYTkzYTQzYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.729676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jr0vqyh2fq9y0wfv2hmd1yax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc2MWQxZDktMmM2MjA4YzMtYjMyODg5ZmItNjk5ZWMwYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.816878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr0vqykv0gb36qqska0epz08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWIwMWRlNjItZWQ4YTBkMjUtOTAwODRmMGEtZTA2ODhkMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:58.904045Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jr0vqyph2gfkgymybq4myk3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVhNGQ3N2QtNDQ0NzAzOGMtN2JlYjUxYmUtNDYwYWI3YmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:59.023223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jr0vqysbbxtcvrr7pqgxkkqx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2ZjNWM1NmMtNDMyNDQwZjEtYzFjMjEwODMtY2E1YTA5ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:59.115867Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jr0vqywzem0wy2h2hd40nbvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY5NGQ1ZDAtZTQzYmQ0MTItOTI5NDVjM2UtY2Y3NWMyMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:59.209272Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jr0vqyzy0143phntcvvjkxxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWE2ODU2NmMtMTEyMTdhOTQtYzY1MmMwNWUtMjkxZTBiNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:56:59.226755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.635954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jr0vqzbhdnmqmdc5v7pe7t93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmVhOTY4OGQtNGE2YjFhYTYtMTUyZmQ4NDEtMTRkNTE2Y2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink |78.9%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |78.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8400;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8400;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232 ... 
ALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38296;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); >> TestProgram::YqlKernelEndsWithScalar >> TestProgram::YqlKernelEndsWithScalar [GOOD] >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T16:55:23.857385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:23.857466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:23.857565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:23.857595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:23.857629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:23.857666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:23.857720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:23.857776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:23.858043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:23.943322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:23.943380Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:23.963453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:23.963923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:23.964119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:23.982568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:23.982807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:23.983553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.984290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:23.988804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.990208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.990305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.990528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:23.990580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.990642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:23.990771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: 
[1:209:2210] 2025-04-04T16:55:23.998656Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:24.145932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:24.146185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:24.146449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:24.146743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:24.146815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:24.149400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:24.149562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:24.149766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:24.149847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:24.149887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:24.149934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:24.152015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:24.152084Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:24.152121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:24.154030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:24.154079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:24.154127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:24.154197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:24.158215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:24.161094Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:24.161291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:24.162282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:24.162443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:24.162499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:24.162889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:24.162945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:24.163114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:24.163188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:24.165393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:24.165444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:24.165626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:24.165668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:24.166135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:24.166186Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:24.166349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:24.166457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:24.166501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:24.166535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:24.166572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:24.166628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:24.166667Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 4 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { 
EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:00.581761Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:57:00.582072Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 353us result status StatusSuccess 2025-04-04T16:57:00.583242Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:00.594605Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1082:2871] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T16:57:00.594755Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1041:2871] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T16:57:00.594948Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1082:2871] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 
1743785820537333 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743785820537333 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743785820537333 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:57:00.599630Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1082:2871] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-04T16:57:00.599765Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1041:2871] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"7,15","output":"16"},"fetch":"7","drop":"7,15"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; >> TVPatchTests::FindingPartsWithTimeout [GOOD] >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2025-04-04T16:57:03.220403Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T16:57:03.221876Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-04T16:57:03.221960Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-04T16:57:03.222229Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T16:57:03.222450Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2025-04-04T16:57:03.222527Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2025-04-04T16:57:03.222337Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T16:57:03.223276Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 
2025-04-04T16:57:03.223356Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T16:57:03.223574Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-04T16:57:03.223642Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-04T16:57:03.223726Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-04T16:57:03.565436Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2025-04-04T16:57:03.576924Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2025-04-04T16:57:03.577016Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-04T16:57:03.577131Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::FindingPartsWhenError [GOOD] |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TExternalTableTest::DropTableTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2025-04-04T16:57:04.517603Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T16:57:04.523296Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2025-04-04T16:57:04.523432Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-04T16:57:04.523589Z node 1 :BS_VDISK_PATCH DEBUG: 
{BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TExternalTableTest::CreateExternalTable >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PDisksFields [GOOD] Test command err: 2025-04-04T16:51:28.415386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501646272965397:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.415875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c0d/r3tmp/tmpgxFIRi/pdisk_1.dat 2025-04-04T16:51:29.027010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.027106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.031666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.048545Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7793, node 1 2025-04-04T16:51:29.092028Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:29.139225Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:29.139249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:29.139255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:29.139584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:29.658140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.700808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:51:29.743455Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489501649877804360:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.743497Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2025-04-04T16:51:29.757187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.763407Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489501648117364363:2094];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.763562Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:29.843810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.843894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.864801Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-04T16:51:29.881626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.893161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.893284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.900035Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T16:51:29.913155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:30.196677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:30.227927Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501652825986343:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:30.227997Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:51:30.252869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:51:30.347076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:30.352782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:30.366396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:30.368922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:30.387099Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T16:51:30.387884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:30.406668Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:30.407256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:33.418533Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489501646272965397:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:33.418586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:34.750423Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489501649877804360:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.755118Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:34.766796Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489501648117364363:2094];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:34.770503Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:35.235519Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489501652825986343:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:35.235630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:51:37.990862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:51:38.756209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501689222639819:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.756248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489501689222639831:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.756307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:51:38.768000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-04-04T16:51:38.818510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489501689222639833:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-04-04T16:51:38.930894Z node 1 :TX_PROXY ERROR: Actor# [1:7489501689222639909:3068] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:51:39.619374Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0ve63kd3zfm0crhhek2x45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgwMTlmMDEtYTVkNWJiN2MtYzQ4YTIyYTUtZGVkNGZiMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:39.658710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:51:40.642585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0ve7jnc56nkfzchgnxjrh6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgwMTlmMDEtYTVkNWJiN2MtYzQ4YTIyYTUtZGVkNGZiMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:51:40.774667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: ... e pool default not found or you don't have access permissions } 2025-04-04T16:55:21.119715Z node 21 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:21.129843Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:55:21.161827Z node 21 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [21:7489502646649669309:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:55:21.248345Z node 21 :TX_PROXY ERROR: Actor# [21:7489502646649669383:2716] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:21.380025Z node 21 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0vmz8w6qpvjdfhnf98hxkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=21&id=NDZiMjY0YzAtYTc1MDhhNS02MWZjYWQzMS0zMDE5ZDM1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.602916Z node 21 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0vmzhwfxe8rxtbsqj00ger, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=21&id=M2IzY2NkYjQtYzJiNDFlNmMtZmUzZjg5YmMtY2RjYmU3Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:21.610480Z node 21 :SYSTEM_VIEWS INFO: Scan started, actor: [21:7489502646649669465:2368], owner: [21:7489502646649669462:2366], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T16:55:21.613969Z node 21 :SYSTEM_VIEWS INFO: Scan prepared, actor: [21:7489502646649669465:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:21.614380Z node 21 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [21:7489502646649669465:2368], row count: 1, finished: 1 2025-04-04T16:55:21.614448Z node 21 :SYSTEM_VIEWS INFO: Scan finished, actor: [21:7489502646649669465:2368], owner: [21:7489502646649669462:2366], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T16:55:21.619164Z node 21 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785721601, txId: 281474976710662] shutting down 2025-04-04T16:55:21.894289Z node 21 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0vmzs6eczne2bxqb7yv9gn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=21&id=YjE4OGEyYjktNDFmNzA0NDUtY2ViYjhhMTEtMWMyYTNiM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:55:21.899545Z node 21 :SYSTEM_VIEWS INFO: Scan started, actor: [21:7489502646649669497:2377], owner: [21:7489502646649669494:2375], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T16:55:21.902195Z node 21 :SYSTEM_VIEWS INFO: Scan prepared, actor: [21:7489502646649669497:2377], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:21.902665Z node 21 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [21:7489502646649669497:2377], row count: 1, finished: 1 2025-04-04T16:55:21.902726Z node 21 :SYSTEM_VIEWS INFO: Scan finished, actor: [21:7489502646649669497:2377], owner: [21:7489502646649669494:2375], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T16:55:21.909433Z node 21 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785721890, txId: 281474976710664] shutting down
greater-or-equal assertion failed at ydb/core/sys_view/ut_kqp.cpp:228, void NKikimr::NSysView::(anonymous namespace)::TYsonFieldChecker::Uint64GreaterOrEquals(ui64): value.AsUint64() >= expected
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1AD19E90)
??+0 (0x1A35E16C)
NKikimr::NSysView::NTestSuiteSystemView::TTestCasePartitionStatsFields::Execute_(NUnitTest::TTestContext&)+8616 (0x1A3695F8)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1A46FED8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1AD4FB96)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1AD208B9)
NKikimr::NSysView::NTestSuiteSystemView::TCurrentTest::Execute()+1094 (0x1A46EEC6)
NUnitTest::TTestFactory::Execute()+2438 (0x1AD22186)
NUnitTest::RunMain(int, char**)+5213 (0x1AD4A4CD)
??+0 (0x7F41B6608D90)
__libc_start_main+128 (0x7F41B6608E40)
_start+41 (0x180BD029)
2025-04-04T16:55:24.975917Z node 26 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[26:7489502659414819697:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:24.976081Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c0d/r3tmp/tmpylxzGH/pdisk_1.dat 2025-04-04T16:55:25.217286Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:25.255465Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:25.255614Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:25.257689Z node 26 :HIVE WARN: HIVE#72057594037968897 Node(26, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23451, node 26 2025-04-04T16:55:25.351152Z node 26 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:55:25.351178Z node 26 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:55:25.351189Z node 26 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:55:25.351381Z node 26 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is
connected to server localhost:18326 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:25.750697Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:29.976323Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7489502659414819697:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:29.976418Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:30.532252Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7489502685184624124:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.532434Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.532982Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [26:7489502685184624150:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:30.538491Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:55:30.561861Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [26:7489502685184624153:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:55:30.660192Z node 26 :TX_PROXY ERROR: Actor# [26:7489502685184624204:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:30.974647Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vn3tm7ak4zdw7cx5c24s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=OWUzZGExZGQtY2RiODE4ZWItOGFkMzY5MGQtOTVjNzVmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:55:30.980047Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7489502685184624242:2344], owner: [26:7489502685184624239:2342], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-04T16:55:30.981379Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7489502685184624242:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T16:55:31.000209Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7489502685184624242:2344], row count: 1, finished: 1 2025-04-04T16:55:31.000363Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7489502685184624242:2344], owner: [26:7489502685184624239:2342], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-04-04T16:55:31.011745Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785730971, txId: 281474976715660] shutting down |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2025-04-04T16:57:05.853034Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T16:57:05.854521Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-04T16:57:05.854606Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-04T16:57:05.854888Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-04T16:57:05.854983Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T16:57:05.855156Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; 
OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |79.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir >> TExternalTableTest::DropTableTwice [GOOD] |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |79.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> TPopulatorTest::MakeDir [GOOD] >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:57:06.362225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:57:06.362371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:06.362447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:57:06.362494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:57:06.362543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:57:06.362601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:57:06.362693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:06.362768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:57:06.363147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:57:06.467890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:06.467960Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:06.492562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-04T16:57:06.492859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:57:06.493056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:57:06.508316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:57:06.508670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:57:06.509404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:06.510439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:57:06.516803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:06.518274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:06.518373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:06.518594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:57:06.518666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:06.518713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:57:06.518850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:57:06.526407Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:57:06.693666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:06.693931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:06.694166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:57:06.694518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:57:06.694583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:06.700971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:06.701172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:57:06.701417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T16:57:06.701477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:57:06.701516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:57:06.701551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:57:06.708552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:06.708684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:57:06.708736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:57:06.711296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:06.711370Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:06.711418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:06.711505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:57:06.722509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:57:06.725937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:57:06.726216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:57:06.727395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:06.727572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:06.727625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:06.727940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:57:06.728013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:06.728208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:57:06.728302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
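The "Change state for txid" entries above walk sub-operation 1:0 through the numeric states 2 -> 3 -> 128 -> 240 (create parts, configure parts, propose to the coordinator, done). The state numbers are read straight off this trace; the code below is only an illustrative compression of that progression, not the schemeshard implementation.

#include <cstdio>

// Sub-operation states exactly as they appear in the "Change state for txid" lines.
enum class ETxState : int {
    CreateParts    = 2,   // create shards; none are needed for this AlterSubDomain
    ConfigureParts = 3,   // push configuration to the created parts
    Propose        = 128, // send the plan step to coordinator 72057594046316545
    Done           = 240, // publish to the scheme board and finish
};

// One progress round: advance to the next state.
static ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done; // on TEvOperationPlan
        case ETxState::Done:           return ETxState::Done;
    }
    return ETxState::Done;
}

int main() {
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done; s = Next(s))
        std::printf("Change state %d -> %d\n", static_cast<int>(s), static_cast<int>(Next(s)));
}

Here CreateParts falls straight through because the log reports "no shards to create", and the 128 -> 240 hop happens on HandleReply TEvOperationPlan rather than on a plain ProgressState round.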
2025-04-04T16:57:06.730958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:06.731037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:06.731212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:06.731256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:57:06.731607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:06.731665Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:57:06.731767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:06.731800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:06.731860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:06.731894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:06.731930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:57:06.731974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:06.732017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:57:06.732047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:57:06.732131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:06.732175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:57:06.732210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:57:06.734754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:06.734913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:06.735011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-04T16:57:06.877529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:06.877621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:06.877671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-04T16:57:06.877809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T16:57:06.877863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-04T16:57:06.877982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:06.878038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T16:57:06.878074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:57:06.878768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T16:57:06.880376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2025-04-04T16:57:06.880616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:06.880640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:06.880752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T16:57:06.880832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:57:06.880906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:06.880934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-04T16:57:06.880974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-04T16:57:06.880994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T16:57:06.881210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at 
schemeshard: 72057594046678944 2025-04-04T16:57:06.881241Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T16:57:06.881311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:57:06.881333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:57:06.881357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T16:57:06.881378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:57:06.881410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-04T16:57:06.881457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T16:57:06.881485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T16:57:06.881505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T16:57:06.881553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T16:57:06.881581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:57:06.881603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-04-04T16:57:06.881625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-04T16:57:06.881651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T16:57:06.881673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-04T16:57:06.881955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:57:06.882003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:57:06.882042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-04-04T16:57:06.882080Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T16:57:06.882105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T16:57:06.882410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:57:06.882445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:57:06.882488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 
3 2025-04-04T16:57:06.882916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:57:06.882962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:57:06.882978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-04T16:57:06.882995Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T16:57:06.883014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:06.884140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:57:06.884229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T16:57:06.884254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T16:57:06.884297Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T16:57:06.884324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:57:06.884390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T16:57:06.888028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T16:57:06.888299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T16:57:06.889349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T16:57:06.890025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T16:57:06.890339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T16:57:06.890424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T16:57:06.890904Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T16:57:06.891006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:57:06.891042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:365:2356] 
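TxId 103 above completes only once every published path is acknowledged: "publications: 3" counts down through "Publication in-flight, count: 3/2/1" to "Publication complete, notify & remove", and the dropped path is published with version 18446744073709551615 (uint64 max), which serves as a deletion tombstone that unblocks TTxCleanDroppedPaths. A small sketch of that bookkeeping under assumed names, not the actual TTxAckPublishToSchemeBoard code:

#include <cstdint>
#include <limits>
#include <map>
#include <set>
#include <utility>

using TPathId = std::pair<uint64_t, uint64_t>; // (OwnerId, LocalPathId)

// uint64 max as a path version means "this path was deleted",
// as published for LocalPathId 3 above.
constexpr uint64_t TombstoneVersion = std::numeric_limits<uint64_t>::max();

struct TPublicationTracker {
    // txId -> paths still waiting for TEvUpdateAck
    std::map<uint64_t, std::set<TPathId>> InFlight;

    void Start(uint64_t txId, std::set<TPathId> paths) {
        InFlight[txId] = std::move(paths); // "publications: 3, subscribers: 0"
    }

    // Returns true when the last ack arrives and waiters can be notified.
    bool Ack(uint64_t txId, TPathId pathId, uint64_t version) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end())
            return false;                 // late or duplicate ack, nothing to do
        if (version == TombstoneVersion) {
            // path is gone; TTxCleanDroppedPaths may now PersistRemovePath
        }
        it->second.erase(pathId);
        if (!it->second.empty())
            return false;                 // "Publication in-flight, count: N"
        InFlight.erase(it);               // "Publication complete, notify & remove"
        return true;
    }
};

Keying by txId and erasing per-path entries reproduces the countdown seen in the trace; a duplicate or late ack simply finds nothing to erase.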
TestWaitNotification: OK eventTxId 103 2025-04-04T16:57:06.891601Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:57:06.891842Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 228us result status StatusPathDoesNotExist 2025-04-04T16:57:06.892045Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery >> TPopulatorTest::RemoveDir >> TPopulatorTestWithResets::UpdateAck >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-04-04T16:57:07.513468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:07.513558Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-04T16:57:07.628218Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 330, preserialized size# 51 2025-04-04T16:57:07.628340Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 
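In the populator trace that follows, [1:94:2120] fans each path description out to three replica populators ([1:95:2121] through [1:97:2123]) and acks the schemeshard as soon as a majority of replicas confirm; the third confirmation then arrives for a cookie that was already retired and is logged as "Ack for unknown update (already acked?)". A sketch of that majority counting, illustrative only and keyed by cookie alone where the real actor also tracks path and version:

#include <cstdint>
#include <map>
#include <set>

// Counts replica acks per update cookie; fires the upstream ack once,
// at majority, and treats anything after that as unknown.
struct TMajorityTracker {
    explicit TMajorityTracker(unsigned replicas) : Replicas(replicas) {}

    // Returns true exactly once per cookie, on ack number floor(N/2)+1.
    bool OnReplicaAck(uint64_t cookie) {
        if (Retired.count(cookie))
            return false;            // "Ack for unknown update (already acked?)"
        if (++Pending[cookie] < Replicas / 2 + 1)
            return false;            // keep waiting (1 of 3)
        Pending.erase(cookie);
        Retired.insert(cookie);      // ack to the schemeshard now (2 of 3)
        return true;
    }

    unsigned Replicas;
    std::map<uint64_t, unsigned> Pending; // cookie -> acks seen so far
    std::set<uint64_t> Retired;           // cookies already acked upstream
};

With three replicas, the second OnReplicaAck for a given cookie returns true (ack upstream) and the third returns false, matching the "already acked?" lines below.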
2025-04-04T16:57:07.629610Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.629724Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.629773Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.630121Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 220, preserialized size# 2 2025-04-04T16:57:07.630163Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-04T16:57:07.630393Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.630446Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.630480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.630872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-04T16:57:07.630956Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-04T16:57:07.630996Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-04T16:57:07.631058Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:07.631123Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-04T16:57:07.631260Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-04T16:57:07.631306Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-04T16:57:07.631446Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:07.631490Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-04T16:57:07.631551Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-04T16:57:07.632464Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:07.632525Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:07.632551Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-04T16:57:07.633076Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-04-04T16:57:07.633124Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 2025-04-04T16:57:07.636135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: 
true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 340, preserialized size# 56 2025-04-04T16:57:07.636219Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-04T16:57:07.636381Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.636433Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.636502Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T16:57:07.637314Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 225, preserialized size# 2 2025-04-04T16:57:07.637367Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-04T16:57:07.637480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-04-04T16:57:07.637530Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] 
Version: 4 }: sender# [1:15:2062], cookie# 100 2025-04-04T16:57:07.637612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-04-04T16:57:07.637675Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.637721Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.637754Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:07.637962Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:07.638007Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:07.638039Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-04T16:57:07.638085Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-04T16:57:07.638163Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-04T16:57:07.638225Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-04T16:57:07.638958Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-04-04T16:57:07.639016Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:07.639282Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:07.639332Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-04T16:57:07.639477Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# 
[1:97:2123], cookie# 100 2025-04-04T16:57:07.639527Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> TPopulatorTest::Boot >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TPopulatorTest::RemoveDir [GOOD] >> TPopulatorTest::Boot [GOOD] >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-04-04T16:57:08.863819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:08.863879Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-04T16:57:08.973293Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 330, preserialized size# 51 2025-04-04T16:57:08.973392Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-04T16:57:08.975168Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.975270Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.975318Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.975605Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 220, preserialized size# 2 2025-04-04T16:57:08.975643Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-04-04T16:57:08.975758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.975794Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.975820Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.976073Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-04-04T16:57:08.976135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-04-04T16:57:08.976170Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-04-04T16:57:08.976223Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:08.976275Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-04-04T16:57:08.976339Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-04-04T16:57:08.976378Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] 
Version: 2 }: sender# [1:18:2065], cookie# 100 2025-04-04T16:57:08.976488Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:08.976526Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-04T16:57:08.976592Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-04T16:57:08.977273Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:08.977325Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:08.977349Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-04T16:57:08.977807Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-04-04T16:57:08.977843Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 2025-04-04T16:57:08.980505Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 340, preserialized size# 56 2025-04-04T16:57:08.980614Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 
72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-04-04T16:57:08.980734Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.980782Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:08.980861Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T16:57:08.981513Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 225, preserialized size# 2 2025-04-04T16:57:08.981556Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-04-04T16:57:0 ... 
oard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:08.995752Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:08.995823Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2025-04-04T16:57:08.996274Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:97:2123], cookie# 101 2025-04-04T16:57:08.996357Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2025-04-04T16:57:08.996395Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2025-04-04T16:57:08.996458Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:08.996538Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:95:2121], cookie# 101 2025-04-04T16:57:08.996572Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-04-04T16:57:08.996621Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-04-04T16:57:08.996655Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-04-04T16:57:08.996698Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-04-04T16:57:08.997085Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:96:2122], cookie# 101 2025-04-04T16:57:08.997198Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2121], cookie# 101 2025-04-04T16:57:08.997487Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 101 2025-04-04T16:57:08.997520Z node 1 
:SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-04-04T16:57:08.997765Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 101 2025-04-04T16:57:08.997794Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-04T16:57:08.999614Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 101, event size# 232, preserialized size# 2 2025-04-04T16:57:08.999679Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-04-04T16:57:08.999825Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:08.999878Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:08.999938Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:09.000162Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 101, event size# 306, preserialized size# 0 2025-04-04T16:57:09.000193Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-04-04T16:57:09.000265Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-04-04T16:57:09.000322Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-04-04T16:57:09.000372Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T16:57:09.000462Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:95:2121], cookie# 101 2025-04-04T16:57:09.000501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:09.000541Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:09.000569Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 101 2025-04-04T16:57:09.000832Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:96:2122], cookie# 101 2025-04-04T16:57:09.000865Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-04-04T16:57:09.000906Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-04-04T16:57:09.000943Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-04-04T16:57:09.000977Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-04-04T16:57:09.001228Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 
2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:97:2123], cookie# 101 2025-04-04T16:57:09.001377Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:95:2121], cookie# 101 2025-04-04T16:57:09.001468Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:96:2122], cookie# 101 2025-04-04T16:57:09.001498Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-04-04T16:57:09.001770Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:97:2123], cookie# 101 2025-04-04T16:57:09.001800Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:57:06.736274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:57:06.736396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:06.736457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:57:06.736493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:57:06.736543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:57:06.736568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:57:06.736619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:06.736685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:57:06.737027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:57:06.945160Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:06.945229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:07.008036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:57:07.008372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:57:07.008570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:57:07.038881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:57:07.039273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:57:07.040075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:07.041555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:57:07.051144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:07.053137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:07.053257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:07.053545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:57:07.053615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:07.053677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:57:07.053865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.072983Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:57:07.319583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:07.319940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.320271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:57:07.320624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:57:07.320712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.331468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:07.331676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:57:07.331972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.332053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:57:07.332113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:57:07.332180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:57:07.335947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.336065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:57:07.336128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:57:07.339196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.339294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.339356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:07.339461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:57:07.352057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:57:07.354910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:57:07.355149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:57:07.356474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:07.356679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:07.356748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:07.357100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:57:07.357178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:07.357426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:57:07.357544Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:57:07.360784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:07.360846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:07.361088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:07.361152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:57:07.361570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:07.361641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:57:07.361761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:07.361813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:07.361872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:07.361909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:07.361953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:57:07.362022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:07.362067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:57:07.362104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:57:07.362186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:07.362276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:57:07.362315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:57:07.364496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:07.364610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:07.364654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T16:57:08.772599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T16:57:08.772624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T16:57:08.773940Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:08.774019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:08.774054Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:57:08.774089Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T16:57:08.774132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:57:08.775708Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:08.775784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:08.775810Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:57:08.775837Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T16:57:08.775866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T16:57:08.776681Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:08.776762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:08.776794Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:57:08.776819Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T16:57:08.776845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T16:57:08.776904Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T16:57:08.779116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:57:08.780169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T16:57:08.781354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T16:57:08.781553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T16:57:08.781592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T16:57:08.782040Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:57:08.782141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:57:08.782176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:335:2326] TestWaitNotification: OK eventTxId 102 2025-04-04T16:57:08.782688Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:57:08.782886Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 255us result status StatusSuccess 2025-04-04T16:57:08.783191Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-04T16:57:08.785992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: 
"/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:08.786382Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-04T16:57:08.786488Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-04-04T16:57:08.786653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T16:57:08.789258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-04-04T16:57:08.789420Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T16:57:08.789785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T16:57:08.789825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T16:57:08.790279Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T16:57:08.790407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:57:08.790444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:343:2334] TestWaitNotification: OK eventTxId 103 2025-04-04T16:57:08.791028Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:57:08.791224Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 230us result status StatusSuccess 2025-04-04T16:57:08.791504Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-04-04T16:57:08.900531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:08.900599Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-04-04T16:57:09.019915Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 330, preserialized size# 51 2025-04-04T16:57:09.020015Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-04-04T16:57:09.021463Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:09.021575Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:09.021622Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 100 2025-04-04T16:57:09.022466Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 220, preserialized size# 2 2025-04-04T16:57:09.022523Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 2025-04-04T16:57:09.025544Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 340, preserialized size# 56 2025-04-04T16:57:09.025596Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 
72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T16:57:09.026235Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:69:2108], cookie# 100, event size# 225, preserialized size# 2 2025-04-04T16:57:09.026353Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-04T16:57:09.053222Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-04T16:57:09.053347Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Successful handshake: replica# [1:18:2065] 2025-04-04T16:57:09.053403Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:57:09.053524Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:97:2123] 2025-04-04T16:57:09.053578Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-04-04T16:57:09.053602Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Successful handshake: replica# [1:12:2059] 2025-04-04T16:57:09.053622Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:57:09.053668Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-04T16:57:09.053698Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Successful handshake: replica# [1:15:2062] 2025-04-04T16:57:09.053724Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:57:09.053798Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: 
[OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:97:2123] 2025-04-04T16:57:09.053874Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:94:2120] 2025-04-04T16:57:09.053981Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:97:2123] 2025-04-04T16:57:09.054021Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 0 2025-04-04T16:57:09.054121Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:95:2121] 2025-04-04T16:57:09.054197Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:94:2120] 2025-04-04T16:57:09.054282Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 0 2025-04-04T16:57:09.054360Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:95:2121] 2025-04-04T16:57:09.054396Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 0 2025-04-04T16:57:09.054481Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-04-04T16:57:09.054575Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:95:2121] 2025-04-04T16:57:09.054617Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-04-04T16:57:09.054673Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-04-04T16:57:09.054727Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:96:2122] 2025-04-04T16:57:09.054784Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 0 2025-04-04T16:57:09.054839Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 
DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:94:2120] 2025-04-04T16:57:09.054925Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:96:2122] 2025-04-04T16:57:09.054965Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-04-04T16:57:09.055003Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 0 2025-04-04T16:57:09.055058Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:96:2122] 2025-04-04T16:57:09.055169Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-04-04T16:57:09.055233Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:97:2123] 2025-04-04T16:57:09.055285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:94:2120], cookie# 0 2025-04-04T16:57:09.055341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:94:2120] 2025-04-04T16:57:09.055436Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:95:2121] 2025-04-04T16:57:09.055506Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:94:2120] 2025-04-04T16:57:09.055546Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-04-04T16:57:09.055600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-04-04T16:57:09.055681Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 0 2025-04-04T16:57:09.055718Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-04-04T16:57:09.055779Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-04-04T16:57:09.055838Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { 
Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-04-04T16:57:09.055877Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2121], cookie# 0 2025-04-04T16:57:09.055895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:95:2121], cookie# 0 2025-04-04T16:57:09.055923Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:09.055949Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-04-04T16:57:09.056004Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-04-04T16:57:09.056334Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 0 2025-04-04T16:57:09.056366Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-04-04T16:57:09.056695Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-04-04T16:57:09.056865Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:96:2122] 2025-04-04T16:57:09.056926Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:94:2120] 2025-04-04T16:57:09.057049Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2121], cookie# 0 2025-04-04T16:57:09.057076Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:95:2121], cookie# 0 2025-04-04T16:57:09.057113Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-04-04T16:57:09.057256Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2121], cookie# 100 2025-04-04T16:57:09.057284Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-04-04T16:57:09.057317Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2120] Ack update: ack to# [1:69:2108], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 
2025-04-04T16:57:09.057557Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 0 2025-04-04T16:57:09.057589Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-04-04T16:57:09.057829Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:09.057864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 100 2025-04-04T16:57:09.058093Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 0 2025-04-04T16:57:09.058120Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-04-04T16:57:09.058184Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-04-04T16:57:09.058206Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2120] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 100 TestWaitNotification: OK eventTxId 100 >> TBackupCollectionTests::CreateAbsolutePath |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |79.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-04-04T16:57:09.481213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:09.481294Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink >> TBackupCollectionTests::HiddenByFeatureFlag >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::DisallowedPath >> TNebiusAccessServiceTest::Authenticate >> TNebiusAccessServiceTest::Authenticate [GOOD] >> AssignTxId::Basic >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |79.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, 
release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-04T16:57:12.646106Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Connect to grpc://localhost:9731 2025-04-04T16:57:12.696409Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2025-04-04T16:57:12.727185Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Status 7 Permission Denied 2025-04-04T16:57:12.727830Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2025-04-04T16:57:12.731495Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Response AuthenticateResponse { account { user_account { id: "1234" } } } |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit [GOOD] >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::ParallelCreate >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::Drop >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> DataShardOutOfOrder::TestOutOfOrderLockLost >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |79.1%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> TTxAllocatorClientTest::Boot |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge >> TTxAllocatorClientTest::Boot [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2025-04-04T16:56:47.196818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503012911960685:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:47.196883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:47.586577866 188065 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:47.586765691 188065 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:48.204331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:48.579944Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28007: Failed to connect to remote host: Connection refused } {
<main>: Error: Grpc error response on endpoint localhost:28007 } ] 2025-04-04T16:56:48.621119Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28007: Failed to connect to remote host: Connection refused
<main>: Error: Grpc error response on endpoint localhost:28007 2025-04-04T16:56:49.222443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.221845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.538472Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
<main>: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:28007: Failed to connect to remote host: Connection refused } {
<main>: Error: Grpc error response on endpoint localhost:28007 } ] 2025-04-04T16:56:51.230705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a9e/r3tmp/tmpbFWr7l/pdisk_1.dat 2025-04-04T16:56:51.406464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:51.407124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503030091830394:2315], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.513556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503030091830394:2315], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.603107Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28007, node 1 2025-04-04T16:56:51.649528Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:51.649553Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:51.681994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:51.682017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:51.682027Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:51.682258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:56:51.704093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:51.704228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:51.707525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:52.017852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:52.198092Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503012911960685:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:52.198173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 16:56:52.591313792 188240 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:52.591532979 188240 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:53.379563Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:53.379593Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.379602Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.388299Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries".
Create session OK 2025-04-04T16:56:53.388328Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.388334Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.388719Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:53.388738Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.388743Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.394945Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:53.394979Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.398090Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.399664Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:53.399684Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.399692Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.400756Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:53.400772Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.400777Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.401719Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:53.401730Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.401734Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.406603Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:53.406646Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.406653Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.409382Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-04T16:56:53.409403Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.409409Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.412153Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:53.412171Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:53.412175Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:53.414483Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:53.414526Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.414543Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.426449Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-04-04T16:56:53.426480Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.426486Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.431405Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:53.431436Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:53.431443Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:53.432859Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:53.432897Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:53.440220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:56:53.443930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:56:53.445503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:56:53.515544Z node 1 :KQP_WORKLOAD_SERVICE ... in flight: 1 shards. pending shards to read: 0, 2025-04-04T16:57:14.229043Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051278:2594], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. CustomerSuppliedId : . TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:14.229058Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-04T16:57:14.229078Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-04T16:57:14.229157Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Start compute actor [4:7489503130422051279:2595], task: 2 2025-04-04T16:57:14.229203Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.424635s 2025-04-04T16:57:14.229996Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . 
DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-04T16:57:14.230176Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503130422051278 RawX2: 4503616807242274 } } DstEndpoint { ActorId { RawX1: 7489503130422051279 RawX2: 4503616807242275 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503130422051279 RawX2: 4503616807242275 } } DstEndpoint { ActorId { RawX1: 7489503130422051272 RawX2: 4503616807242264 } } InMemory: true } 2025-04-04T16:57:14.230191Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7489503130422051278:2594] 2025-04-04T16:57:14.237806Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:14.246531Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-04T16:57:14.246570Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. Taken 0 locks 2025-04-04T16:57:14.246593Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. new data for read #0 seqno = 1 finished = 1 2025-04-04T16:57:14.246660Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051278:2594], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. CustomerSuppliedId : . TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-04T16:57:14.246700Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051278:2594], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. CustomerSuppliedId : . TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:14.246732Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-04T16:57:14.246756Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. 
enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:14.246774Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:14.246791Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. returned 0 rows; processed 0 rows 2025-04-04T16:57:14.246843Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. dropping batch for read #0 2025-04-04T16:57:14.246855Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. effective maxinflight 1 sorted 1 2025-04-04T16:57:14.246870Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-04T16:57:14.246893Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1, CA Id [4:7489503130422051278:2594]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-04T16:57:14.247034Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051278:2594], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. CustomerSuppliedId : . TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:14.247073Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:57:14.247099Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. Finish input channelId: 1, from: [4:7489503130422051278:2594] 2025-04-04T16:57:14.247155Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:14.247224Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:14.247240Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051278:2594], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. CustomerSuppliedId : . TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646927 2025-04-04T16:57:14.247261Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051278:2594], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. CustomerSuppliedId : . TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:14.247288Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1. Tasks execution finished 2025-04-04T16:57:14.247325Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051278:2594], TxId: 281474976715688, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. CustomerSuppliedId : . TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:14.247466Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 1. pass away 2025-04-04T16:57:14.247583Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715688;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:14.247986Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:14.248040Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:57:14.248051Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. Tasks execution finished 2025-04-04T16:57:14.248062Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503130422051279:2595], TxId: 281474976715688, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YTAyYWQyNWUtZjAwMzAxZTgtNDU3YzA2MGEtYWNmODQ1MDA=. TraceId : 01jr0vrd5har16nwzzyqch4vka. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:14.248122Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715688, task: 2. pass away 2025-04-04T16:57:14.248176Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715688;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:14.547486Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:29686: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:29686 >> AssignTxId::Basic [GOOD] >> TBackupCollectionTests::TableWithSystemColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-04-04T16:57:19.760266Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:57:19.760807Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:57:19.761667Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:57:19.763624Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.764131Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:57:19.778291Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.778440Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.778497Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.778563Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:57:19.778808Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.778916Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:57:19.779030Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:57:19.779813Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#5000 2025-04-04T16:57:19.780395Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.780474Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.780577Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-04T16:57:19.780628Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 0 to# 5000 2025-04-04T16:57:19.780872Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.781048Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.781208Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.781358Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.781498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#5000 2025-04-04T16:57:19.781963Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.782031Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.782121Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-04-04T16:57:19.782164Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 5000 to# 10000 2025-04-04T16:57:19.782628Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.782908Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.783080Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.783337Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-04-04T16:57:19.783498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#5000 2025-04-04T16:57:19.783923Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.784003Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.784091Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-04-04T16:57:19.784123Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 10000 to# 15000 2025-04-04T16:57:19.784304Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:57:10.748796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:57:10.748905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:10.748967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:57:10.749007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:57:10.749063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:57:10.749097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:57:10.749153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:10.749227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:57:10.749543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:57:10.841971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:10.842050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:10.855871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:57:10.856186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:57:10.856391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:57:10.868118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:57:10.868457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:57:10.869174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:10.871554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:57:10.878956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:10.880435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:10.880525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
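
The TTxAllocatorClientTest output above shows the txId allocation contract in action: the client pre-reserves ranges of RequestPerAllocator (5000) ids from the allocator tablet (TEvAllocateResult from# 0 to# 5000, then 5000 to# 10000, and so on), and any single AllocateTxIds call above BatchAllocationWarning (500) only logs a warning while still being served from the reserved ranges. The following is a minimal, self-contained C++ sketch of that client-side behaviour; the class and method names are hypothetical stand-ins for illustration and do not reproduce YDB's actual TTxAllocatorClient implementation.

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>
    #include <deque>
    #include <vector>

    // Hypothetical model (not YDB source): ids are handed out from pre-reserved
    // ranges; an oversized batch request warns but is still processed.
    struct TxIdRange { uint64_t Begin, End; };  // half-open [Begin, End)

    class TTxAllocatorClientModel {
        static constexpr uint64_t RequestPerAllocator = 5000;   // range size per reserve
        static constexpr uint64_t BatchAllocationWarning = 500; // warning threshold
        std::deque<TxIdRange> Reserved;
        uint64_t NextToReserve = 0;

        // Stands in for the TEvAllocate -> TEvAllocateResult round trip in the log.
        void ReserveRange() {
            Reserved.push_back({NextToReserve, NextToReserve + RequestPerAllocator});
            std::printf("TEvAllocateResult from# %" PRIu64 " to# %" PRIu64 "\n",
                        NextToReserve, NextToReserve + RequestPerAllocator);
            NextToReserve += RequestPerAllocator;
        }

    public:
        std::vector<uint64_t> AllocateTxIds(uint64_t count) {
            if (count > BatchAllocationWarning) {
                std::printf("WARN: AllocateTxIds: requested many txIds (%" PRIu64 "). "
                            "Just a warning, request is processed.\n", count);
            }
            std::vector<uint64_t> ids;
            while (ids.size() < count) {
                if (Reserved.empty()) ReserveRange();          // top up from the tablet
                TxIdRange& r = Reserved.front();
                while (r.Begin < r.End && ids.size() < count)  // drain the front range
                    ids.push_back(r.Begin++);
                if (r.Begin == r.End) Reserved.pop_front();
            }
            return ids;
        }
    };

    int main() {
        TTxAllocatorClientModel client;
        auto ids = client.AllocateTxIds(1000);  // exceeds 500: warns, still served
        std::printf("allocated %zu txIds starting at %" PRIu64 "\n",
                    ids.size(), ids.front());
        return 0;
    }

Compiled standalone, the sketch prints the same shape of output as the test log: one reservation line per 5000-id range, plus a warning for each oversized batch.
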
2025-04-04T16:57:10.880745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:57:10.880806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:10.880854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:57:10.881002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:57:10.888667Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:57:11.047770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:11.048093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.048420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:57:11.048721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:57:11.048789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.053566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:11.053760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:57:11.054013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.054085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:57:11.054146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:57:11.054200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:57:11.057497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.057586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:57:11.057652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:57:11.061380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.061456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.061517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:11.061608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:57:11.065501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:57:11.068269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:57:11.068472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:57:11.069702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:11.069861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:11.069931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:11.070318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:57:11.070390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:11.070567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:57:11.070685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:57:11.079730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:11.079795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:11.079990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:11.080031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:57:11.080400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.080451Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:57:11.080557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:11.080613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:11.080665Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:11.080697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:11.080733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:57:11.080771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:11.080803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:57:11.080836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:57:11.080918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:11.080954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:57:11.080993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:57:11.083462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:11.083593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:11.083630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Complete at tablet# 72057594046678944 2025-04-04T16:57:19.599805Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 105:1 2025-04-04T16:57:19.599967Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T16:57:19.600007Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T16:57:19.600103Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-04T16:57:19.600166Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-04T16:57:19.600306Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:57:19.600345Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-04-04T16:57:19.600390Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-04T16:57:19.600451Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-04-04T16:57:19.600506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-04T16:57:19.600574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/2, is published: true 2025-04-04T16:57:19.600679Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:304:2295] message: TxId: 105 2025-04-04T16:57:19.600738Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-04-04T16:57:19.600792Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T16:57:19.600834Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx 
for txid 105:0 2025-04-04T16:57:19.600926Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-04T16:57:19.600964Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-04T16:57:19.600985Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-04T16:57:19.601116Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T16:57:19.601174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T16:57:19.603639Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:19.603797Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:304:2295] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 105 at schemeshard: 72057594046678944 2025-04-04T16:57:19.604016Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T16:57:19.604087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:527:2488] 2025-04-04T16:57:19.604367Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:529:2490], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:57:19.604411Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:57:19.604441Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-04T16:57:19.605030Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:595:2554], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 106 TabletId# 72057594046678944} 2025-04-04T16:57:19.609826Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T16:57:19.612713Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:19.613348Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T16:57:19.613578Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 4], parent name: MyCollection1, child name: 19700101000000Z_incremental, child id: [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-04-04T16:57:19.613660Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-04-04T16:57:19.613759Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:57:19.613894Z node 7 :FLAT_TX_SCHEMESHARD 
NOTICE: TReject Propose, opId: 106:1, explain: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-04T16:57:19.613946Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:2, propose status:StatusInvalidParameter, reason: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-04-04T16:57:19.616705Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Abort operation: IgniteOperation fail to propose a part, opId: 106:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Incremental backup is disabled on this collection, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 2025-04-04T16:57:19.616873Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir AbortPropose, opId: 106:0, at schemeshard: 72057594046678944 2025-04-04T16:57:19.617154Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:57:19.620343Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Incremental backup is disabled on this collection" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:19.620541Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Incremental backup is disabled on this collection, operation: BACKUP INCREMENTAL, path: /MyRoot/.backups/collections/MyCollection1 2025-04-04T16:57:19.620599Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T16:57:19.620879Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T16:57:19.620917Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T16:57:19.621276Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:601:2560], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:19.621326Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:19.621358Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T16:57:19.621478Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:304:2295], Recipient [7:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-04T16:57:19.621504Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T16:57:19.621567Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T16:57:19.621649Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T16:57:19.621688Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:599:2558] 2025-04-04T16:57:19.621869Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:601:2560], Recipient [7:123:2149]: 
NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:57:19.621907Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:57:19.621943Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-04-04T16:57:19.622403Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:602:2561], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T16:57:19.622469Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T16:57:19.622594Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T16:57:19.622859Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 269us result status StatusSuccess 2025-04-04T16:57:19.623377Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 BackupCollectionVersion: 0 } ChildrenExist: true } Children { Name: "19700101000000Z_full" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-04-04T16:57:19.516965Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:57:19.517538Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:57:19.518596Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:57:19.520542Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.521141Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:57:19.534536Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.534790Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.534897Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.534975Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:57:19.535226Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:57:19.535363Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:57:19.535522Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> KqpQuery::UdfTerminate |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:57:11.751433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:57:11.751553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:11.751612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:57:11.751647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:57:11.751692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:57:11.751717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:57:11.751778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:11.751853Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:57:11.752153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:57:11.841501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:11.841563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:11.855565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:57:11.855876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:57:11.856040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:57:11.866195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:57:11.866510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:57:11.867148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:11.867965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:57:11.873082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:11.876041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:11.876138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:11.876349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:57:11.876405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:11.876450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:57:11.876585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:57:11.885480Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:57:12.025938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:12.026201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:12.026701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:57:12.026945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:57:12.027016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
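
The FLAT_TX_SCHEMESHARD traces on either side of this point repeat one pipeline per transaction: TCreateParts (state 2), then TConfigureParts (3), then TPropose (128), where the coordinator plans a step, and finally TDone (240), after which progress reaches 1/1 and the txstate is removed. A toy C++ rendering of that progression follows, as the trace below continues through the same transitions; the enum and function names here are hypothetical, chosen for readability, and are not taken from the actual NKikimr schemeshard sources.

    #include <cstdio>

    // Hypothetical model of the transitions visible in the trace:
    // "Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240".
    enum class EOpState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

    static EOpState Next(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts; // shards created (or none needed)
            case EOpState::ConfigureParts: return EOpState::Propose;        // parts acked configuration
            case EOpState::Propose:        return EOpState::Done;           // coordinator planned the step
            case EOpState::Done:           return EOpState::Done;           // terminal: txstate removed
        }
        return EOpState::Done;
    }

    int main() {
        EOpState s = EOpState::CreateParts;
        while (s != EOpState::Done) {
            EOpState n = Next(s);
            std::printf("Change state for txid 1:0 %d -> %d\n", (int)s, (int)n);
            s = n;
        }
        std::printf("Part operation is done, progress 1/1\n");
        return 0;
    }
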
2025-04-04T16:57:12.030069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:12.030283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:57:12.030524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:12.030594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:57:12.030656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:57:12.030715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:57:12.033481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:12.033558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:57:12.033596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:57:12.035880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:12.035965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:12.036016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:12.036096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:57:12.040380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:57:12.042826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:57:12.043015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:57:12.044046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:12.044193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:12.044243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:12.044537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:57:12.044602Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:12.044776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:57:12.044868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:57:12.051033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:12.051098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:12.051278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:12.051317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:57:12.051629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:12.051676Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:57:12.051771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:12.051828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:12.051897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:12.051933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:12.051985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:57:12.052035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:12.052071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:57:12.052100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:57:12.052174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:12.052214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:57:12.052247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:57:12.054223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:12.059244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:12.059315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RACE: StateWork, received event# 274137603, Sender [6:205:2207], Recipient [6:121:2147]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2025-04-04T16:57:20.129678Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-04T16:57:20.129724Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T16:57:20.129781Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T16:57:20.129806Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-04T16:57:20.129847Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2025-04-04T16:57:20.129885Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2025-04-04T16:57:20.129957Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2025-04-04T16:57:20.129998Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:57:20.131635Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [6:589:2546], Recipient [6:121:2147]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1965 } } 2025-04-04T16:57:20.131673Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-04-04T16:57:20.131772Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1965 } } 2025-04-04T16:57:20.131805Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-04-04T16:57:20.131916Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1965 } } 2025-04-04T16:57:20.132015Z node 6 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1965 } } 2025-04-04T16:57:20.132059Z 
node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:57:20.133460Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:653:2602], Recipient [6:121:2147]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:20.133497Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:20.133519Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T16:57:20.133744Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:589:2546], Recipient [6:121:2147]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-04T16:57:20.133778Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-04T16:57:20.133865Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-04T16:57:20.133908Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-04-04T16:57:20.134075Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-04T16:57:20.134141Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T16:57:20.134274Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 589 RawX2: 25769806322 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-04T16:57:20.134344Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:20.134389Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-04T16:57:20.134432Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-04T16:57:20.134470Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2025-04-04T16:57:20.134620Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:57:20.135286Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.135409Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.137615Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T16:57:20.137664Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.137768Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T16:57:20.137787Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.137861Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T16:57:20.137879Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.140497Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-04T16:57:20.140540Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.140733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-04T16:57:20.140792Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.140854Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2025-04-04T16:57:20.140962Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:589:2546] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2025-04-04T16:57:20.141295Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:121:2147], Recipient [6:121:2147]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T16:57:20.141341Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T16:57:20.141394Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2025-04-04T16:57:20.141439Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2025-04-04T16:57:20.141567Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T16:57:20.141604Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-04-04T16:57:20.141641Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-04T16:57:20.141684Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-04-04T16:57:20.141719Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-04T16:57:20.141757Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2025-04-04T16:57:20.141820Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:303:2294] message: TxId: 106 2025-04-04T16:57:20.141882Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-04-04T16:57:20.141939Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T16:57:20.141973Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-04T16:57:20.142049Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-04T16:57:20.142087Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1 2025-04-04T16:57:20.142105Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1 2025-04-04T16:57:20.142195Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-04T16:57:20.144595Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:57:20.144705Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:303:2294] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2025-04-04T16:57:20.144891Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T16:57:20.144934Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:618:2567] 2025-04-04T16:57:20.145185Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:620:2569], Recipient [6:121:2147]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:57:20.145235Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T16:57:20.145260Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-04-04T16:57:13.523109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503127990957590:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:13.535020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011f6/r3tmp/tmpx4Zw7k/pdisk_1.dat 2025-04-04T16:57:14.326920Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:14.360743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:14.360845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:14.372042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14695 TServer::EnableGrpc on GrpcPort 30293, node 1 2025-04-04T16:57:14.965891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:14.965917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:14.965925Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:14.966056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:15.580918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:15.591447Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:18.514428Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503127990957590:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:18.533800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:18.619431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503149465794610:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:18.619557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:19.371525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:57:19.403719Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] OnActivateExecutor 2025-04-04T16:57:19.403834Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Execute 2025-04-04T16:57:19.412851Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Complete 2025-04-04T16:57:19.412953Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Execute 2025-04-04T16:57:19.413199Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Complete 2025-04-04T16:57:19.413212Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] SwitchToWork 2025-04-04T16:57:19.428704Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:30293" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-04-04T16:57:19.429009Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:30293" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-04-04T16:57:19.429111Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T16:57:19.430085Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Complete 2025-04-04T16:57:19.446595Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:2:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:57:19.448244Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:57:19.448606Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveTenantResult { ReplicationId: 1 Tenant: /Root Sucess: 1 } 2025-04-04T16:57:19.448622Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888] Tenant resolved: rid# 1, tenant# /Root 2025-04-04T16:57:19.448633Z node 1 :REPLICATION_CONTROLLER INFO: [controller 72075186224037888] Discover tenant nodes: tenant# /Root 2025-04-04T16:57:19.454888Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-04-04T16:57:19.454937Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888] Create session: nodeId# 1 TClient::Ls request: /Root/replication TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785839481 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsIns... 
(TRUNCATED) 2025-04-04T16:57:19.474295Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-04-04T16:57:19.474389Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-04-04T16:57:19.474443Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-04-04T16:57:19.474532Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T16:57:19.474580Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-04-04T16:57:19.475721Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-04T16:57:19.476400Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-04-04T16:57:19.476445Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-04T16:57:19.476499Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-04T16:57:19.476895Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-04-04T16:57:19.476926Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-04T16:57:19.476952Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-04-04T16:57:19.477324Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-04-04T16:57:19.477351Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-04-04T16:57:19.477976Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-04-04T16:57:19.481656Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-04-04T16:57:19.481751Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-04-04T16:57:19.481814Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-04-04T16:57:19.482420Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-04-04T16:57:19.482459Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-04-04T16:57:19.483107Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, 
exhausted# 0 2025-04-04T16:57:19.483212Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T16:57:19.483233Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2025-04-04T16:57:19.483262Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-04-04T16:57:19.484030Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2025-04-04T16:57:19.484072Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2025-04-04T16:57:19.484347Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-04-04T16:57:19.484373Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found } 2025-04-04T16:57:19.484503Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-04-04T16:57:19.484616Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-04-04T16:57:19.484651Z node 1 :REPLICATION_CONTROLLER ERROR: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2025-04-04T16:57:19.485390Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-04-04T16:57:19.485443Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete |79.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |79.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::InitialScanRacyProgressAndDrop [GOOD] |79.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> Cdc::EnqueueRequestProcessSend >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpImmediateEffects::ImmediateUpdate >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink >> KqpImmediateEffects::Upsert >> KqpImmediateEffects::UpdateAfterInsert >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KqpImmediateEffects::Insert >> KqpWrite::Insert >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |79.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail >> LabeledDbCounters::OneTabletRestart [GOOD] >> LabeledDbCounters::TwoTablets |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |79.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:129:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:132:2155] sender: [1:134:2058] 
recipient: [1:108:2140] 2025-04-04T16:55:20.233891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:20.233998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:20.234048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:20.234089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:20.234148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:20.234190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:20.234349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:20.234435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:20.234823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:20.332478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:20.332540Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:173:2058] recipient: [1:15:2062] 2025-04-04T16:55:20.353917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:20.354394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:20.354593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:20.360367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:20.360644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:20.361385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.361585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:20.364714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.366114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:20.366178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.366287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:20.366337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:20.366385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:20.366653Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208] Leader for TabletID 72057594037968897 is [1:213:2212] sender: [1:214:2058] recipient: [1:207:2208] 2025-04-04T16:55:20.374234Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-04T16:55:20.525387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:20.525638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.525856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:20.526089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:20.526145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.529897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.530100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:20.530334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.530410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:20.530467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:20.530504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:20.537175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.537348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:20.537395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:20.539926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.540006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.540057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.540128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-04-04T16:55:20.544574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:20.547143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:20.547342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:132:2155] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:20.548508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.548681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:20.548731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.549006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:20.549075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.549253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:20.549342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:20.552131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:20.552218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:20.552434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.552483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2203], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:20.552886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.552961Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:20.553087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:20.553128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.553176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:20.553210Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.553255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:20.553307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.553347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... titionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:25.676536Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][44:1011:2709] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_REJECT Reason: REASON_WRONG_STATE 2025-04-04T16:57:25.676621Z node 44 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186233409548:2][72075186233409546][44:1011:2709] Handshake status: status# 2, reason# 7 2025-04-04T16:57:25.676892Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvGone { PartitionId: 72075186233409546 HardError: 0 } 2025-04-04T16:57:25.677480Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] HandleIndex TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: MyRoot/Table/UserDefinedIndex TableId: [72057594046678944:4:1] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindIndex DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [indexImplTable] }] } 2025-04-04T16:57:25.677925Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: MyRoot/Table/UserDefinedIndex/indexImplTable TableId: [72057594046678944:5:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:57:25.678489Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: 
[OwnerId: 72057594046678944, LocalPathId: 5] Access: 0 SyncVersion: false Status: OkData Kind: KindAsyncIndexTable PartitionsCount: 2 DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T16:57:25.679446Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743785845635840 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743785845635840 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743785845635840 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:57:25.679825Z node 44 :TX_PROXY DEBUG: actor# [44:265:2256] Handle TEvGetProxyServicesRequest 2025-04-04T16:57:25.679995Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][44:1019:2709] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:57:25.680081Z node 44 :TX_PROXY DEBUG: actor# [44:265:2256] Handle TEvGetProxyServicesRequest 2025-04-04T16:57:25.680211Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][44:1020:2709] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T16:57:25.691975Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][44:1020:2709] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T16:57:25.692201Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-04T16:57:25.692296Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][44:1019:2709] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T16:57:25.692409Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][44:1020:2709] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1743785845635840 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:57:25.692990Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-04T16:57:25.693143Z node 44 
:CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][44:1019:2709] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743785845635840 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743785845635840 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:57:25.698571Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][44:1020:2709] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-04T16:57:25.700503Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-04T16:57:25.701251Z node 44 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][44:1019:2709] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-04-04T16:57:25.701493Z node 44 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][44:838:2709] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } >> PrivateApi::Nodes [GOOD] >> KqpPg::TableArrayInsert+useSink [GOOD] >> KqpPg::TableArrayInsert-useSink >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::CopyTableSerialColumns+useSink >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2025-04-04T16:56:47.690313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503016077203643:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:47.690372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:48.037131484 188517 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:48.037333107 188517 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:48.691505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:49.064292Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9186: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:9186 2025-04-04T16:56:49.064537Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9186: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:9186 } ] 2025-04-04T16:56:49.697940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.703283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.755239Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:9186: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:9186 } ] 2025-04-04T16:56:51.709708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:51.834427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:51.836235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503033257073218:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00090e/r3tmp/tmpSK3R59/pdisk_1.dat 2025-04-04T16:56:51.896700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503033257073218:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:52.051486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503033257073218:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:52.103081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:52.103190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:52.109843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9186, node 1 TClient is connected to server localhost:8175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:52.605470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:52.690695Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503016077203643:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:52.690760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 16:56:53.027127591 188779 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:53.027320591 188779 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:53.501333Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:56:53.501985Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:53.502011Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:53.502020Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:53.503314Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:53.503346Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.503355Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.521459Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-04-04T16:56:53.521495Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.521503Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.523714Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:53.523736Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.523743Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.524655Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:53.524690Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:53.527108Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:53.527135Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.527144Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.573048Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:53.573076Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.573084Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.614926Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:53.614947Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.614954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.631130Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:53.631155Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:53.631162Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:53.670606Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:53.670654Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.670661Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.673862Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:53.673883Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.673889Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.675047Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:53.675065Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.675070Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.687581Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-04T16:56:53.687604Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.687611Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.690429Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". 
Create session OK 2025-04-04T16:56:53.690466Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.690474Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.770921Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:53.776640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503041847008426:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:53.777278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503041847008446:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:53.777358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:53.781355Z node 1 :NET_CLASSIFIER WARN: dist ... 022\005scope\030\201 (\000(\0000\000@\201 @\201 @\201 H\001X\204\200\204\200\200\200\204\200\001`\000z\000z\000z\000\210\001\001\240\001\000" } WatermarksMode: WATERMARKS_MODE_DISABLED } } 2025-04-04T16:57:27.350297Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-04T16:57:27.350394Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503185984639008 RawX2: 4503629692144121 } } DstEndpoint { ActorId { RawX1: 7489503185984639009 RawX2: 4503629692144122 } } InMemory: true DstStageId: 1 } 2025-04-04T16:57:27.350410Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update output channelId: 1, peer: [7:7489503185984639009:2554] 2025-04-04T16:57:27.350554Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. Shards State: TShardState{ TabletId: 72075186224037892, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 } 2025-04-04T16:57:27.350570Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. effective maxinflight 1024 sorted 0 2025-04-04T16:57:27.350581Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. BEFORE: 1.0 2025-04-04T16:57:27.350718Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. Send EvRead to shardId: 72075186224037892, tablePath: Root/yq/pending_small, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-04-04T16:57:27.350761Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. AFTER: 0.1 2025-04-04T16:57:27.350772Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-04-04T16:57:27.350852Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2025-04-04T16:57:27.350867Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-04T16:57:27.350884Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-04T16:57:27.351705Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-04T16:57:27.351722Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. Taken 0 locks 2025-04-04T16:57:27.351736Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. new data for read #0 seqno = 1 finished = 1 2025-04-04T16:57:27.351758Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-04T16:57:27.351789Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:27.351809Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-04T16:57:27.351824Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:27.351837Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:27.351847Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. returned 0 rows; processed 0 rows 2025-04-04T16:57:27.351884Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. dropping batch for read #0 2025-04-04T16:57:27.351895Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. effective maxinflight 1024 sorted 0 2025-04-04T16:57:27.351906Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-04T16:57:27.351919Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1, CA Id [7:7489503185984639008:2553]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-04T16:57:27.351990Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:27.352009Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639009:2554], TxId: 281474976715706, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:57:27.352027Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. Finish input channelId: 1, from: [7:7489503185984639008:2553] 2025-04-04T16:57:27.352050Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639009:2554], TxId: 281474976715706, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:27.352123Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639009:2554], TxId: 281474976715706, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:27.352169Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:57:27.352189Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:27.352212Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1. Tasks execution finished 2025-04-04T16:57:27.352223Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639008:2553], TxId: 281474976715706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. CustomerSuppliedId : . TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:27.352343Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 1. pass away 2025-04-04T16:57:27.352461Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715706;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:27.352860Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639009:2554], TxId: 281474976715706, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:27.352905Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:57:27.352915Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. Tasks execution finished 2025-04-04T16:57:27.352929Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7489503185984639009:2554], TxId: 281474976715706, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MTNhMzU0NWYtYjkyZmEyZDItOGJhY2ZiZC1kM2VjMGVmOA==. TraceId : 01jr0vrsxrb3sje19jw490xtmd. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:27.352985Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976715706, task: 2. pass away 2025-04-04T16:57:27.353037Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715706;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:28.285991Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:11823: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:11823 >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsert >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2025-04-04T16:56:51.862754Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503032869004524:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:51.881527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:52.302297525 190155 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:52.309118796 190155 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:52.887912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:53.302066Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:21045: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21045 } ] 2025-04-04T16:56:53.358594Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:21045: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21045 2025-04-04T16:56:53.892858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:54.785416Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:21045: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21045 } ] 2025-04-04T16:56:54.925839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:55.930722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:56.605417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:56.636404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503054343841376:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:56.714579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503054343841376:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:56.866381Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503032869004524:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:56.866453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000758/r3tmp/tmpYSj34X/pdisk_1.dat 2025-04-04T16:56:56.909619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503054343841376:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:56.980961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:57.274465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503054343841376:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 21045, node 1 E0404 16:56:57.291541065 190326 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:57.291775978 190326 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:57.293915Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:57.294095Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:3002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:57.717822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:57.786425Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:57.786458Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:57.786465Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:57.787856Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:57.787872Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:57.787878Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:57.788748Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:57.788762Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:57.788769Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:57.789600Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:57.789628Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:57.789637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:57.790607Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:57.790626Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:57.790649Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:57.803729Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". 
Create session OK 2025-04-04T16:56:57.803763Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:57.803770Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:57.804873Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:57.804888Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:57.804895Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:57.816873Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:57.816900Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.816909Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.831676Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:57.831706Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:57.831713Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:57.846721Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:57.846754Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:57.846762Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:57.851037Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-04T16:56:57.851065Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:57.851072Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:57.867348Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:57.867374Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:57.867380Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:57.885553Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:57.885584Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:57.885592Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:57.944990Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:57.945021Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:58.021468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503062933776589:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:58.021508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503062933776575:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:58.021611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503062933776587:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you d ... ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.768762Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.768836Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.768906Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769034Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769120Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769203Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769281Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769361Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769439Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769515Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769588Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769671Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769796Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769873Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.769948Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.770064Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.770147Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.770229Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771105Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771202Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771278Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771352Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771428Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771506Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771586Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771669Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771787Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771886Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.771969Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772084Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772173Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772255Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772330Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772410Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772487Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772591Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772693Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772772Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-04-04T16:57:27.772868Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.772964Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773039Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773140Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773226Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773309Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773388Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773484Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773570Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773647Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773729Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773808Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773897Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.773981Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.774058Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.774145Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.774225Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.774789Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.774896Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.774979Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775082Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775188Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775271Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775350Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775435Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775519Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775606Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775683Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775760Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775837Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775917Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.775994Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776071Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776150Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776232Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776313Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776391Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776466Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776545Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-04-04T16:57:27.776623Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776700Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776788Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776870Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.776950Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777068Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777165Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777245Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777327Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777412Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777491Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777567Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777675Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777755Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777831Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777909Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.777988Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.778084Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.778185Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779011Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779134Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779221Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779310Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779389Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779481Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779568Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779668Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779774Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779884Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.779991Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780096Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780197Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780305Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780478Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780568Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780670Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780771Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780855Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.780941Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-04-04T16:57:27.781030Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781127Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781239Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781325Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781454Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781556Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781645Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781730Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781821Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.781918Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.782010Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.782092Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.782210Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:27.782359Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> TxUsage::WriteToTopic_Demo_22_RestartNo >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> KqpEffects::InsertRevert_Literal_Duplicates >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps >> TKesusTest::TestKesusConfig >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert >> Yq_1::ModifyQuery [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> KqpPg::CopyTableSerialColumns-useSink >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> CompressExecutor::TestExecutorMemUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2025-04-04T16:56:47.397391Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503012420596620:2174];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:47.398338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:47.689872651 188166 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:47.690040559 188166 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:47.722711Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11343: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11343 } ] 2025-04-04T16:56:48.406388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:48.760871Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11343: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11343 } ] 2025-04-04T16:56:48.765472Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11343: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:11343 2025-04-04T16:56:49.408977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.411049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.457522Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:11343: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:11343 } ] 2025-04-04T16:56:51.092684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503029600466076:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.092800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:51.150651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503029600466076:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.310874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503029600466076:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.412314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a64/r3tmp/tmpN0xjrA/pdisk_1.dat 2025-04-04T16:56:51.624314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503029600466076:2310], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.675987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:51.676108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:51.684744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11343, node 1 2025-04-04T16:56:51.744943Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:51.745038Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:62776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:52.108446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:52.396415Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503012420596620:2174];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:52.396519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 16:56:52.683396598 188421 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:52.683756360 188421 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:52.951060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:52.952261Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:52.952275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:52.952282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:52.952397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:56:53.202626Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:53.202671Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.202684Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.214432Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-04-04T16:56:53.214490Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.214507Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.215699Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:53.215732Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.215738Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.217208Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:53.217228Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.217233Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.218324Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:53.218353Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.218386Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.218395Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:53.218419Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.218433Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.234316Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:56:53.235043Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:53.235064Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.235071Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.273087Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:53.273124Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.273129Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.274120Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:53.274137Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.274143Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.280229Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-04T16:56:53.280265Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.280271Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.289476Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:53.289497Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:53.289518Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2025-04-04T16:56:53.289527Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.289531Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.293593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:56:53.308221Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:53.30826 ... seId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-04T16:57:34.256465Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503216706319418 RawX2: 4503616807242847 } } DstEndpoint { ActorId { RawX1: 7489503216706319419 RawX2: 4503616807242848 } } InMemory: true DstStageId: 1 } 2025-04-04T16:57:34.256483Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update output channelId: 1, peer: [4:7489503216706319419:3168] 2025-04-04T16:57:34.256585Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. Shards State: TShardState{ TabletId: 72075186224037892, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : utquee14sbbardqnqu5l)], RetryAttempt: 0, ResolveAttempt: 0 } 2025-04-04T16:57:34.256603Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. effective maxinflight 1024 sorted 0 2025-04-04T16:57:34.256617Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. BEFORE: 1.0 2025-04-04T16:57:34.256668Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. Send EvRead to shardId: 72075186224037892, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-04-04T16:57:34.256712Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. AFTER: 0.1 2025-04-04T16:57:34.256726Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-04-04T16:57:34.256818Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:34.256835Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. 
enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-04T16:57:34.256854Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-04T16:57:34.258297Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-04-04T16:57:34.258324Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. Taken 0 locks 2025-04-04T16:57:34.258346Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. new data for read #0 seqno = 1 finished = 1 2025-04-04T16:57:34.258379Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-04-04T16:57:34.258404Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:34.258425Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-04T16:57:34.258446Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:34.258474Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 1 freeSpace: 8387507 2025-04-04T16:57:34.258500Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. returned 1 rows; processed 1 rows 2025-04-04T16:57:34.258545Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. dropping batch for read #0 2025-04-04T16:57:34.258560Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. effective maxinflight 1024 sorted 0 2025-04-04T16:57:34.258575Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-04T16:57:34.258592Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1, CA Id [4:7489503216706319418:3167]. returned async data processed rows 1 left freeSpace 8387507 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-04T16:57:34.258877Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:34.258900Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:34.258948Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-04T16:57:34.258970Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319419:3168], TxId: 281474976715843, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-04-04T16:57:34.259000Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 2. Finish input channelId: 1, from: [4:7489503216706319418:3167] 2025-04-04T16:57:34.259048Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319419:3168], TxId: 281474976715843, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:34.259260Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319419:3168], TxId: 281474976715843, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:34.259280Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-04-04T16:57:34.259316Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:34.259335Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1. Tasks execution finished 2025-04-04T16:57:34.259350Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319418:3167], TxId: 281474976715843, task: 1. Ctx: { TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:34.259489Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 1. 
pass away 2025-04-04T16:57:34.259609Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715843;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:34.260087Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319419:3168], TxId: 281474976715843, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:34.260128Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:57:34.260138Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 2. Tasks execution finished 2025-04-04T16:57:34.260152Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503216706319419:3168], TxId: 281474976715843, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs0t16p88bktxxkkr90tw. SessionId : ydb://session/3?node_id=4&id=OGM0NDYyMWYtYzNkOWVjOGItZTA4OGFkZTMtYzdhYzY0MjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:34.260218Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715843, task: 2. pass away 2025-04-04T16:57:34.260280Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715843;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:34.613051Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:9136: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:9136 >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: 2025-04-04T16:57:21.743805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:57:21.744310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:57:21.744391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f9e/r3tmp/tmpHKiUDI/pdisk_1.dat 2025-04-04T16:57:22.330462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:57:22.388069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:22.440788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:22.440976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:22.455673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:57:22.572928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:57:22.711560Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:57:22.712676Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:57:22.713149Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:57:22.718525Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:57:22.741043Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:57:22.816820Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:57:22.816978Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:57:22.825351Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:57:22.825470Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:57:22.825528Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:57:22.825948Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:57:22.826118Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:57:22.826208Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:57:22.837151Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:57:22.877840Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:57:22.878059Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:57:22.878177Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:57:22.878214Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:57:22.878511Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:57:22.878559Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:57:22.878828Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:57:22.878901Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:57:22.879225Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:57:22.879319Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:57:22.879467Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:57:22.879535Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:57:22.879582Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:57:22.879618Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:57:22.879666Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:57:22.879709Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:57:22.879749Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:57:22.879846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:22.879879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:22.879918Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:57:22.880293Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:57:22.880338Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:57:22.880477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:57:22.880690Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:57:22.880741Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:57:22.880843Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:57:22.880888Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:57:22.880927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:57:22.880971Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
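
For orientation, the trace above walks one scheme transaction through the datashard's execution units in order (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), parking whenever a unit reports it is not ready. Below is a minimal C++ sketch of that pipeline shape; TUnit, EStatus, and the per-unit results are hypothetical stand-ins chosen to mirror the statuses printed in the trace, not YDB's actual datashard types.

#include <cstdio>
#include <string>
#include <vector>

// Illustrative model only; the real units live in ydb/core/tx/datashard.
enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    EStatus (*Run)();  // captureless lambdas convert to this pointer type
};

int main() {
    // Unit order mirrors the scheme-tx trace above.
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    };
    for (const auto& unit : plan) {
        EStatus st = unit.Run();
        std::printf("execute on unit %s -> %s\n", unit.Name.c_str(),
                    st == EStatus::Executed      ? "Executed" :
                    st == EStatus::DelayComplete ? "DelayComplete" : "NotReady");
        if (st == EStatus::NotReady) {
            break;  // operation waits here until the plan step arrives
        }
    }
    return 0;
}

The DelayComplete results explain the later "Complete execution for ... on unit StoreSchemeTx/FinishPropose" lines: those units finish their side effects only when the transaction completes.
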
2025-04-04T16:57:22.881004Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:57:22.881378Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:57:22.881418Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:57:22.881459Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:57:22.881495Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:57:22.881568Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:57:22.881608Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:57:22.881650Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:57:22.881695Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:57:22.881738Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:57:22.883734Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T16:57:22.883799Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:57:22.894577Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:57:22.894692Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:57:22.894756Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:57:22.894804Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T16:57:22.894872Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:57:23.052579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:23.052639Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:57:23.052864Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T16:57:23.053269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T16:57:23.053330Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T16:57:23.053437Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:57:23.053477Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T16:57:23.053518Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T16:57:23.053557Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T16:57:23.079081Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:57:23.079183Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:57:23.080142Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:57:23.080188Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:57:23.080242Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:57:2 ... d key sets: 1 2025-04-04T16:57:39.247120Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T16:57:39.247233Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-04T16:57:39.247474Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-04T16:57:39.247678Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715671. Shard resolve complete, resolved shards: 1 2025-04-04T16:57:39.247773Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-04T16:57:39.247843Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2025-04-04T16:57:39.247911Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T16:57:39.247969Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T16:57:39.248330Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T16:57:39.248410Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1247:2997], 2025-04-04T16:57:39.248490Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1247:2997], 2025-04-04T16:57:39.248546Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-04T16:57:39.249251Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1247:2997], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-04-04T16:57:39.249328Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1247:2997], 2025-04-04T16:57:39.249404Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1247:2997], 2025-04-04T16:57:39.249904Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1249:2997], Recipient [2:1168:2948]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-04T16:57:39.250027Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T16:57:39.250100Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4010/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2025-04-04T16:57:39.250154Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-04-04T16:57:39.250315Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-04T16:57:39.250440Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T16:57:39.250496Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-04T16:57:39.250544Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T16:57:39.250586Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T16:57:39.250667Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-04T16:57:39.250718Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T16:57:39.250749Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T16:57:39.250776Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T16:57:39.250800Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-04T16:57:39.250943Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-04T16:57:39.251215Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1249:2997], 0} after executionsCount# 1 2025-04-04T16:57:39.251288Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1249:2997], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T16:57:39.251385Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1249:2997], 0} finished in read 2025-04-04T16:57:39.251463Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T16:57:39.251494Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T16:57:39.251524Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T16:57:39.251554Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2025-04-04T16:57:39.251610Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T16:57:39.251635Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T16:57:39.251663Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-04T16:57:39.251717Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T16:57:39.252525Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1249:2997], Recipient [2:1168:2948]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T16:57:39.252593Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-04T16:57:39.253470Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1247:2997], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1087 Tasks { TaskId: 1 CpuTimeUs: 255 FinishTimeMs: 1743785859252 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 120 BuildCpuTimeUs: 135 HostName: "ghrun-mup5cka3jq" NodeId: 2 StartTimeMs: 1743785859252 CreateTimeMs: 1743785859248 } MaxMemoryUsage: 1048576 } 2025-04-04T16:57:39.253634Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1247:2997] 2025-04-04T16:57:39.253846Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T16:57:39.253923Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1243:2997] TxId: 281474976715671. Ctx: { TraceId: 01jr0vs621bafhmp622pbfpxmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2NhMzIyNmUtODhkZDM4MGItMWVlNGViZGItNDAzNGFlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.001087s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } >> Yq_1::DeleteQuery [GOOD] >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::DeletePkPrefixWithIndex |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-04T16:55:02.069575Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1743785702069542 2025-04-04T16:55:02.803753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502562815029356:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:02.803821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:55:02.916877Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489502561750129895:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:02.916925Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:55:03.466657Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:55:03.491427Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001742/r3tmp/tmpC7kQtB/pdisk_1.dat 2025-04-04T16:55:03.866593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:03.939019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:55:04.286081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:04.286207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:04.289489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:04.289560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:04.295067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:04.306624Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:04.325985Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:55:04.327562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18188, node 1 2025-04-04T16:55:04.702915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001742/r3tmp/yandexBmJsJP.tmp 2025-04-04T16:55:04.702939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001742/r3tmp/yandexBmJsJP.tmp 2025-04-04T16:55:04.703081Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001742/r3tmp/yandexBmJsJP.tmp 2025-04-04T16:55:04.703205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:55:04.863454Z INFO: TTestServer started on Port 8904 GrpcPort 18188 TClient is connected to server localhost:8904 PQClient connected to localhost:18188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:05.617820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T16:55:07.806417Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502562815029356:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:07.806516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:07.919515Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489502561750129895:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:07.919586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:55:09.470932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502591814901086:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:09.471022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:09.471110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489502591814901094:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:55:09.485403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T16:55:09.522331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489502591814901100:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T16:55:09.881728Z node 2 :TX_PROXY ERROR: Actor# [2:7489502591814901128:2133] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:55:09.923081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:55:09.932460Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502592879801324:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:55:09.934120Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODk1MDFlYjgtYWY1YTFhMDYtMzE3ZGM3ZTctNDYyMmJmNzg=, ActorId: [1:7489502592879801273:2338], ActorState: ExecuteState, TraceId: 01jr0vmkyv335pn7j2c9v4ze2f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:55:09.932129Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489502591814901143:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:55:09.934159Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzRlMzQzNDEtMjE1MmQxZTUtNTljNWY0MDQtYWVjMjZiNWM=, ActorId: [2:7489502591814901084:2312], ActorState: ExecuteState, TraceId: 01jr0vmkww5cwpg0fyvn9d2ksd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:55:09.936478Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:55:09.936225Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:55:10.128429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:55:10.359361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:18188", true, true, 1000); 2025-04-04T16:55:10.797860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0vmmxm7vpsyz06c3rw1t9b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE5OWFiNjItMTBlMmI3MjItZDkxZWE4ZTEtNWRjMTg1ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489502597174769021:2973] === CheckClustersList. Ok 2025-04-04T16:55:16.899566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:18188 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T16:55: ... 
[test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-04T16:57:35.577659Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T16:57:35.577707Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-04T16:57:35.578465Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-04T16:57:35.578711Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:43144 2025-04-04T16:57:35.578739Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:43144 proto=v1 topic=test-topic durationSec=0 2025-04-04T16:57:35.578756Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T16:57:35.588192Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-04T16:57:35.588427Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T16:57:35.588448Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T16:57:35.588467Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T16:57:35.588496Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489503221559070758:2591] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T16:57:35.593035Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489503221559070758:2591] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-04T16:57:36.114525Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715701. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T16:57:36.114716Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7489503221559070772:2593] TxId: 281474976715701. Ctx: { TraceId: 01jr0vs2k9crdrpmtszmkh0mer, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YmIzMGI2NzEtYWEwZmViMDUtMzI2NGUzNjktYzQ1MmJhYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T16:57:36.114972Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=YmIzMGI2NzEtYWEwZmViMDUtMzI2NGUzNjktYzQ1MmJhYTQ=, ActorId: [15:7489503221559070759:2593], ActorState: ExecuteState, TraceId: 01jr0vs2k9crdrpmtszmkh0mer, Create QueryResponse for error on request, msg: 2025-04-04T16:57:36.135448Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7489503221559070758:2591] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=YmIzMGI2NzEtYWEwZmViMDUtMzI2NGUzNjktYzQ1MmJhYTQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0vs2kb5bjxpe5kw2dmdx2t" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-04T16:57:36.135659Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=YmIzMGI2NzEtYWEwZmViMDUtMzI2NGUzNjktYzQ1MmJhYTQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0vs2kb5bjxpe5kw2dmdx2t" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-04T16:57:36.136126Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-04-04T16:57:36.140023Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=YmIzMGI2NzEtYWEwZmViMDUtMzI2NGUzNjktYzQ1MmJhYTQ=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0vs2kb5bjxpe5kw2dmdx2t" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-04T16:57:36.140071Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session will restart in 2.000000s 2025-04-04T16:57:36.140248Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session: Do CDS request 2025-04-04T16:57:36.140292Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Do schedule cds request after 2000 ms 2025-04-04T16:57:36.570379Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session: close. Timeout = 0 ms 2025-04-04T16:57:36.570470Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session will now close 2025-04-04T16:57:36.570573Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session: aborting 2025-04-04T16:57:36.571520Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-04T16:57:36.571601Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|c1a65135-2173adae-acbb2ebd-8e80bdf4_0] Write session: destroy 2025-04-04T16:57:36.642401Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715703. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:36.642568Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7489503225854038132:2597] TxId: 281474976715703. Ctx: { TraceId: 01jr0vs3dg26cr1s9tvhtz311p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=Y2NiNDY4OC0zMzcxZDkwNi1jZmNmZTg1Zi01ZjljNWRhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:36.642832Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=Y2NiNDY4OC0zMzcxZDkwNi1jZmNmZTg1Zi01ZjljNWRhNA==, ActorId: [15:7489503225854038129:2597], ActorState: ExecuteState, TraceId: 01jr0vs3dg26cr1s9tvhtz311p, Create QueryResponse for error on request, msg: 2025-04-04T16:57:36.643888Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0vs3dp0wmpr2qgen39cmw2" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-04T16:57:36.808166Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715704. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:36.808325Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7489503225854038165:2582] TxId: 281474976715704. Ctx: { TraceId: 01jr0vs234c8aw3rqmys1atv0c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZTAyYTBjYzYtOTJhZmRjYzQtZGMzZGMxMmUtMjVjOGZhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:36.808636Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=ZTAyYTBjYzYtOTJhZmRjYzQtZGMzZGMxMmUtMjVjOGZhODM=, ActorId: [15:7489503221559070734:2582], ActorState: ExecuteState, TraceId: 01jr0vs234c8aw3rqmys1atv0c, Create QueryResponse for error on request, msg: 2025-04-04T16:57:36.810954Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0vs3jffecpk7qdvgz9p60h" } } YdbStatus: UNAVAILABLE ConsumedRu: 997 } 2025-04-04T16:57:38.832264Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715707. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:38.832418Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7489503234443972888:2608] TxId: 281474976715707. Ctx: { TraceId: 01jr0vs5jwftwpevf7g6p9fs6b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=N2E5MDU1YjktMjA1MDY4MGQtYmRhOTkyMzEtMjc1MzczOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:38.832686Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=N2E5MDU1YjktMjA1MDY4MGQtYmRhOTkyMzEtMjc1MzczOWI=, ActorId: [15:7489503234443972885:2608], ActorState: ExecuteState, TraceId: 01jr0vs5jwftwpevf7g6p9fs6b, Create QueryResponse for error on request, msg: 2025-04-04T16:57:38.834656Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0vs5k3b0p9tye192rtb58r" } } YdbStatus: UNAVAILABLE ConsumedRu: 4 } 2025-04-04T16:57:39.031415Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715709. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:39.031589Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7489503234443972931:2600] TxId: 281474976715709. Ctx: { TraceId: 01jr0vs4rrcqqz6xjhph6n6675, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NzdjYTI2OTMtZTcwNjkwNzEtNmU2NWUxY2QtMTYxNjIxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T16:57:39.031901Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NzdjYTI2OTMtZTcwNjkwNzEtNmU2NWUxY2QtMTYxNjIxN2I=, ActorId: [15:7489503230149005563:2600], ActorState: ExecuteState, TraceId: 01jr0vs4rrcqqz6xjhph6n6675, Create QueryResponse for error on request, msg: 2025-04-04T16:57:39.033464Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0vs5se88xk00313q8km84w" } } YdbStatus: UNAVAILABLE ConsumedRu: 686 } >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink >> Yq_1::DescribeQuery [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> Yq_1::CreateQuery_Without_Connection [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-04-04T16:56:51.943755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503031609642160:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:51.943825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:52.300487273 190184 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:52.300731696 190184 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:52.342481Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22113: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22113 } ] 2025-04-04T16:56:52.951398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:53.365553Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22113: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22113 2025-04-04T16:56:53.371543Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22113: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22113 } ] 2025-04-04T16:56:53.950484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:54.954717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:55.158443Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:22113: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22113 } ] 2025-04-04T16:56:55.964979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:56.619817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503053084479031:2309], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:56.620042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007b4/r3tmp/tmpfALbmF/pdisk_1.dat 2025-04-04T16:56:56.702472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503053084479031:2309], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:56.935109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503053084479031:2309], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:56.944006Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503031609642160:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:56.944074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:56:56.974572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:56.979776Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22113, node 1 2025-04-04T16:56:57.062832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:57.062861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:57.062869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:57.063009Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:56:57.295052Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:57.295085Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.295093Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.296883Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:57.296902Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:57.296907Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:57.297934Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-04T16:56:57.297992Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:57.297998Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:57.298019Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:57.298034Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:57.298039Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:57.299479Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:57.299494Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:57.299500Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" E0404 16:56:57.303515511 190370 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:57.303731222 190370 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:57.315644Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". 
Create session OK 2025-04-04T16:56:57.315681Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:57.315688Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:57.315988Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:57.316005Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:57.316012Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:57.317116Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:57.317137Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:57.317142Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:57.317404Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:57.317418Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:57.317423Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:57.344937Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:57.344970Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:57.344977Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:57.363137Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:57.363169Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:57.363175Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:57.386011Z node 1 :TX_PROXY ERROR: Actor# [1:7489503057379446817:2488] txid# 281474976710660, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.388790Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:57.388816Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:57.388821Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:57.391946Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create table "Root/yq/connections" error: BAD_REQUEST {
: Error: database doesn't have storage pools at all to create tablet channels to storage pool binding by profile id } 2025-04-04T16:56:57.394738Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:57.394773Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:57.394802Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:57.410333Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.410356Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.412996Z node 1 :TX_PROXY ERROR: Actor# [1:7489503057379446815:2486] txid# 281474976710658, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.413861Z node 1 :TX_PROXY ERROR: Actor# [1:7489503057379446816:2487] txid# 281474976710659, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.417486Z node 1 :TX_PROXY ERROR: Actor# [1:7489503057379446840:2501] txid# 281474976710661, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.422516Z node 1 :TX_PROXY ERROR: Actor# [1:7489503057379446845:2505] txid# 281474976710662, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.424123Z node 1 :TX_PROXY ERROR: Actor# [1:7489503057379446854:2510] txid# 281474976710663, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.424273Z node 1 :TX_PROXY ERROR: Actor# [1:748950305737944685 ... 4037897, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-04-04T16:57:38.014901Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. AFTER: 0.1 2025-04-04T16:57:38.014919Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-04-04T16:57:38.014924Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-04-04T16:57:38.015021Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883826:3132], TxId: 281474976715839, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. CustomerSuppliedId : . TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:38.015044Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. 
enter getasyncinputdata results size 0, freeSpace 8388608 2025-04-04T16:57:38.015067Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-04T16:57:38.015099Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503234688883826 RawX2: 4503616807242812 } } DstEndpoint { ActorId { RawX1: 7489503234688883827 RawX2: 4503616807242813 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503234688883827 RawX2: 4503616807242813 } } DstEndpoint { ActorId { RawX1: 7489503234688883822 RawX2: 4503616807242678 } } InMemory: true } 2025-04-04T16:57:38.015129Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update input channelId: 1, peer: [4:7489503234688883826:3132] 2025-04-04T16:57:38.015198Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:38.023438Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. Recv TEvReadResult from ShardID=72075186224037897, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-04-04T16:57:38.023478Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. Taken 0 locks 2025-04-04T16:57:38.023516Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. new data for read #0 seqno = 1 finished = 1 2025-04-04T16:57:38.023553Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883826:3132], TxId: 281474976715839, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. CustomerSuppliedId : . TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-04-04T16:57:38.023586Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883826:3132], TxId: 281474976715839, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. CustomerSuppliedId : . TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:38.023618Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-04T16:57:38.023642Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. enter pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:38.023659Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. exit pack cells method shardId: 72075186224037897 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:38.023675Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. returned 0 rows; processed 0 rows 2025-04-04T16:57:38.023728Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. dropping batch for read #0 2025-04-04T16:57:38.023744Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. effective maxinflight 1024 sorted 0 2025-04-04T16:57:38.023762Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-04T16:57:38.023784Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1, CA Id [4:7489503234688883826:3132]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-04T16:57:38.023915Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883826:3132], TxId: 281474976715839, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. CustomerSuppliedId : . TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:38.023975Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-04-04T16:57:38.024012Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 2. Finish input channelId: 1, from: [4:7489503234688883826:3132] 2025-04-04T16:57:38.024059Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:38.024164Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:38.024183Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883826:3132], TxId: 281474976715839, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. 
CustomerSuppliedId : . TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-04-04T16:57:38.024208Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883826:3132], TxId: 281474976715839, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. CustomerSuppliedId : . TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:38.024241Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1. Tasks execution finished 2025-04-04T16:57:38.024259Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883826:3132], TxId: 281474976715839, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. CustomerSuppliedId : . TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:38.024409Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 1. pass away 2025-04-04T16:57:38.024541Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715839;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:38.024790Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:38.024842Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:57:38.024856Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 2. Tasks execution finished 2025-04-04T16:57:38.024868Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503234688883827:3133], TxId: 281474976715839, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=MjYxNzY2MzctMjlmZTJlZjAtYWQwZGEwODItMTAyMDEyNDI=. TraceId : 01jr0vs4ehcysg2xje9bfvqycy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:38.024921Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715839, task: 2. pass away 2025-04-04T16:57:38.024969Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715839;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:38.034997Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utquee14s4rq5fc4t7st" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:563: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } 2025-04-04T16:57:38.960160Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:65252: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:65252 >> TKesusTest::TestGetQuoterResourceCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 1800, MsgBus: 18453 2025-04-04T16:57:24.816054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503172505955380:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:24.816602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d94/r3tmp/tmpJXBs41/pdisk_1.dat 2025-04-04T16:57:25.319077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:25.319276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:25.322858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:57:25.356520Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1800, node 1 2025-04-04T16:57:25.552496Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:25.552528Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:25.552535Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:25.552651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18453 TClient is connected to server localhost:18453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:26.655065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.682557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.832251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:27.027040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.180542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:29.208423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503193980793499:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.208542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.569134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.617265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.695322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.741571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.792872Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503172505955380:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:29.793174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:29.821458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.921071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.046394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503198275761313:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.046514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.047087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503198275761319:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.052816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:30.071674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:57:30.072932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503198275761321:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:30.155071Z node 1 :TX_PROXY ERROR: Actor# [1:7489503198275761380:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:31.518163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27844, MsgBus: 24943 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d94/r3tmp/tmpBSu2ki/pdisk_1.dat 2025-04-04T16:57:34.050521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:57:34.080085Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:34.093534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:34.093620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:34.099546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27844, node 2 2025-04-04T16:57:34.195785Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:34.195811Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:34.195829Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:34.195971Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24943 TClient is connected to server localhost:24943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:34.740701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:34.747836Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:57:34.772551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:34.907335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.109817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.205874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:37.656370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503231300747591:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:37.656476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:37.700432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:37.746283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:37.799269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:37.845154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:37.931449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.040925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.143868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503235595715405:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.143980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.144478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503235595715410:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.150022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:38.187653Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T16:57:38.187943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503235595715412:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:57:38.285346Z node 2 :TX_PROXY ERROR: Actor# [2:7489503235595715470:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:40.115560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 27123, MsgBus: 5434 2025-04-04T16:57:23.769483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503167519163904:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:23.769948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d99/r3tmp/tmpcptdkU/pdisk_1.dat 2025-04-04T16:57:24.894449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:57:24.993123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:25.005801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:25.021426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:25.031755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27123, node 1 2025-04-04T16:57:25.301140Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:25.301159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:25.301167Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:25.301288Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5434 TClient is connected to server localhost:5434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:26.373673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.438200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.638830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.117607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.249654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:28.762721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503167519163904:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:28.762821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:29.678118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503193288969316:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.678231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.084679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.145400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.193869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.273505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.352215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.430871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.543249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503197583937149:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.543347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.543574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503197583937154:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.548204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:30.568043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503197583937156:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:30.632269Z node 1 :TX_PROXY ERROR: Actor# [1:7489503197583937212:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:31.739607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8767, MsgBus: 7456 2025-04-04T16:57:33.490377Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503212066062882:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:33.542699Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d99/r3tmp/tmpYT4aSB/pdisk_1.dat 2025-04-04T16:57:33.774362Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:33.873429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:33.873549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:33.887654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8767, node 2 2025-04-04T16:57:34.118879Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:34.118906Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:34.118912Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:34.119031Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7456 TClient is connected to server localhost:7456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:35.136024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:35.143827Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:35.154123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.339282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.729704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.938626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:38.352934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503233540900987:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.353052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.448003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.478187Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503212066062882:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:38.478275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:38.569281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.680335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.724467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.834735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.923858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.015372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503237835868802:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.015461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.015708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503237835868807:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.019802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:39.041005Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:57:39.041740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503237835868809:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:39.133188Z node 2 :TX_PROXY ERROR: Actor# [2:7489503237835868864:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:40.868064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 64117, MsgBus: 28638 2025-04-04T16:57:24.905079Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503175160327932:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:24.905475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d8b/r3tmp/tmpvWn0Dh/pdisk_1.dat 2025-04-04T16:57:25.524845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:25.529577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:25.529679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:25.532304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64117, node 1 2025-04-04T16:57:25.674522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:25.674548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:25.674558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:25.674703Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28638 TClient is connected to server localhost:28638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:57:26.308489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.347263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:26.364953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:57:26.608621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.798874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.874586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:57:28.880913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503192340198903:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:28.881071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.216021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.262259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.348798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.435815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.537968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.614698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.724526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503196635166727:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.724621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.724899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503196635166732:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.731137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:29.752444Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T16:57:29.754326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503196635166734:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:29.853188Z node 1 :TX_PROXY ERROR: Actor# [1:7489503196635166789:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:29.906529Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503175160327932:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:29.906674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:31.121824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17241, MsgBus: 15199 2025-04-04T16:57:33.802205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503212314054575:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:33.802276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d8b/r3tmp/tmpCkc5K6/pdisk_1.dat 2025-04-04T16:57:34.157578Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17241, node 2 2025-04-04T16:57:34.188678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:34.188757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:34.206755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:57:34.298894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:34.298925Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:34.298934Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:34.299065Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15199 TClient is connected to server localhost:15199 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:35.000520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.010971Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:35.020958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:57:35.109049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:57:35.326053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.444906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:37.830407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503229493925497:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:37.830508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:37.868370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:37.968023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.015480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.061514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.119342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.217264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.296411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503233788893310:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.296551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.302510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503233788893315:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.308947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:38.343293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503233788893317:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:38.411423Z node 2 :TX_PROXY ERROR: Actor# [2:7489503233788893372:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:38.803593Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503212314054575:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:38.803657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:40.779713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:57:41.697778Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjUxZDU0NDYtZjkzYzM1YjgtYWY4YjU5M2UtYzI1YTQwZDM=, ActorId: [2:7489503246673795850:2530], ActorState: ExecuteState, TraceId: 01jr0vs8h8018stg222gjjs50a, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-04-04T16:57:38.500891Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T16:57:38.501032Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T16:57:38.520417Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T16:57:38.520546Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T16:57:38.544323Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T16:57:38.544631Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:129:2155], cookie=15090156204814679301, path="/foo/bar/baz") 2025-04-04T16:57:38.568802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:129:2155], cookie=15090156204814679301, status=SUCCESS) 2025-04-04T16:57:38.569383Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:138:2162], cookie=785028329594028346) 2025-04-04T16:57:38.581716Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:138:2162], cookie=785028329594028346) 2025-04-04T16:57:38.582330Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:143:2167], cookie=11184629356456477046, path="/foo/bar/baz") 2025-04-04T16:57:38.596916Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:143:2167], cookie=11184629356456477046, status=SUCCESS) 2025-04-04T16:57:38.597563Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:148:2172], cookie=3247389225081722714) 2025-04-04T16:57:38.615852Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:148:2172], cookie=3247389225081722714) 2025-04-04T16:57:38.645742Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T16:57:38.645847Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T16:57:38.646487Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2025-04-04T16:57:38.647180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T16:57:38.696398Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T16:57:38.696751Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:190:2204], cookie=11050864851220436292) 2025-04-04T16:57:38.722475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:190:2204], cookie=11050864851220436292) 2025-04-04T16:57:38.723155Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:198:2211], cookie=6151148713898542995, path="/foo/bar/baz") 2025-04-04T16:57:38.737985Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:198:2211], cookie=6151148713898542995, status=SUCCESS) 2025-04-04T16:57:38.738726Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:203:2216], cookie=17204041814573456566, path="/foo/bar/baz") 2025-04-04T16:57:38.738814Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:203:2216], cookie=17204041814573456566, status=PRECONDITION_FAILED) 2025-04-04T16:57:39.445307Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T16:57:39.445402Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T16:57:39.463424Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T16:57:39.463649Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T16:57:39.490944Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T16:57:39.491295Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:131:2157], cookie=7304523795297586041, name="Lock1") 2025-04-04T16:57:39.491398Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:131:2157], cookie=7304523795297586041) 2025-04-04T16:57:39.998609Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T16:57:39.998735Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T16:57:40.017345Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T16:57:40.017467Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T16:57:40.044400Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T16:57:40.044888Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=8173953201008383102, session=0, seqNo=0) 2025-04-04T16:57:40.045088Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T16:57:40.059768Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=8173953201008383102, session=1) 2025-04-04T16:57:40.060158Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-04T16:57:40.060325Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-04T16:57:40.060408Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-04T16:57:40.083160Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:131:2157], cookie=111) 2025-04-04T16:57:40.083767Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDelete::Execute (sender=[3:142:2166], cookie=12659497660777169792, name="Lock1", force=0) 2025-04-04T16:57:40.099776Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:142:2166], cookie=12659497660777169792) 2025-04-04T16:57:40.100308Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:147:2171], cookie=17577099939963810228, name="Sem1", force=0) 2025-04-04T16:57:40.116193Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:147:2171], cookie=17577099939963810228) 2025-04-04T16:57:40.116748Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:152:2176], cookie=324095372163704080, name="Sem1", limit=42) 2025-04-04T16:57:40.116897Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-04T16:57:40.134102Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:152:2176], cookie=324095372163704080) 2025-04-04T16:57:40.134735Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:157:2181], cookie=18324892567339582742, name="Sem1", force=0) 2025-04-04T16:57:40.134834Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-04-04T16:57:40.149887Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:157:2181], cookie=18324892567339582742) 2025-04-04T16:57:40.150531Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:162:2186], cookie=11157258772774068693, name="Sem1", force=0) 2025-04-04T16:57:40.169914Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:162:2186], cookie=11157258772774068693) 2025-04-04T16:57:41.025064Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T16:57:41.025173Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T16:57:41.048824Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T16:57:41.048949Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T16:57:41.073654Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T16:57:41.074143Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:131:2157], cookie=5612991642541693537, session=0, seqNo=0) 2025-04-04T16:57:41.074304Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T16:57:41.097438Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:131:2157], cookie=5612991642541693537, session=1) 2025-04-04T16:57:41.097783Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:131:2157], cookie=9926242233255533390, session=0, seqNo=0) 2025-04-04T16:57:41.097968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-04T16:57:41.116246Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:131:2157], cookie=9926242233255533390, session=2) 2025-04-04T16:57:41.116578Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:131:2157], cookie=9556245349178666178 2025-04-04T16:57:41.117122Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:143:2167], cookie=11231174304262905843, name="Sem1", limit=3) 2025-04-04T16:57:41.117265Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
Created new semaphore 1 "Sem1" 2025-04-04T16:57:41.132419Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:143:2167], cookie=11231174304262905843) 2025-04-04T16:57:41.132722Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=112, name="Sem1") 2025-04-04T16:57:41.132798Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=112) 2025-04-04T16:57:41.132993Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=113, name="Sem1") 2025-04-04T16:57:41.133052Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=113) 2025-04-04T16:57:41.133224Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:131:2157], cookie=15681510557877922057, session=2, seqNo=0) 2025-04-04T16:57:41.148607Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:131:2157], cookie=15681510557877922057, session=2) 2025-04-04T16:57:41.148931Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=114, name="Sem1") 2025-04-04T16:57:41.149022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=114) 2025-04-04T16:57:41.149248Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=115, name="Sem1") 2025-04-04T16:57:41.149307Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=115) 2025-04-04T16:57:41.149803Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:150:2174], cookie=16317012125009837740, name="Sem1") 2025-04-04T16:57:41.164127Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:150:2174], cookie=16317012125009837740) 2025-04-04T16:57:41.164471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=116, session=1, semaphore="Sem1" count=1) 2025-04-04T16:57:41.164627Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-04T16:57:41.183001Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=116) 2025-04-04T16:57:41.183485Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=117, session=2, semaphore="Sem1" count=2) 2025-04-04T16:57:41.183628Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-04T16:57:41.196116Z node 4 :KESUS_TABLET DEBUG: [72057594 ... 
node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-04-04T16:57:42.045610Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:201:2219], cookie=11826100211860322042) 2025-04-04T16:57:42.046093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=126, session=1, semaphore="Sem2" count=3) 2025-04-04T16:57:42.046285Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-04-04T16:57:42.058607Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=126) 2025-04-04T16:57:42.059035Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=127, name="Sem2") 2025-04-04T16:57:42.059111Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=127) 2025-04-04T16:57:42.059412Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=128, session=1, semaphore="Sem2" count=3) 2025-04-04T16:57:42.075283Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=128) 2025-04-04T16:57:42.362002Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T16:57:42.385542Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T16:57:42.396257Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=129, session=1, semaphore="Sem2" count=2) 2025-04-04T16:57:42.415371Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=129) 2025-04-04T16:57:42.416040Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=130, name="Sem2") 2025-04-04T16:57:42.416159Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=130) 2025-04-04T16:57:42.416532Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=131, session=1, semaphore="Sem2" count=1) 2025-04-04T16:57:42.439359Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=131) 2025-04-04T16:57:42.439991Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=132, name="Sem2") 2025-04-04T16:57:42.440104Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=132) 2025-04-04T16:57:42.440449Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:131:2157], cookie=133, name="Sem2") 2025-04-04T16:57:42.440555Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:131:2157], cookie=133) 2025-04-04T16:57:42.954030Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T16:57:42.954135Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T16:57:42.973349Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T16:57:42.973486Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T16:57:43.003023Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T16:57:43.019421Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2157], cookie=5153083421147702920, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-04-04T16:57:43.019703Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-04-04T16:57:43.037695Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2157], cookie=5153083421147702920) 2025-04-04T16:57:43.038395Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2164], cookie=14332039863920760804, path="/Root1/Res", config={ }) 2025-04-04T16:57:43.038674Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-04-04T16:57:43.054234Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2164], cookie=14332039863920760804) 2025-04-04T16:57:43.054972Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:145:2169], cookie=5053574402330762433, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-04-04T16:57:43.055146Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-04-04T16:57:43.078875Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:145:2169], cookie=5053574402330762433) 2025-04-04T16:57:43.079509Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:150:2174], cookie=17975991611564687796, path="/Root2/Res", config={ }) 2025-04-04T16:57:43.079740Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-04-04T16:57:43.092129Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:150:2174], cookie=17975991611564687796) 2025-04-04T16:57:43.092744Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:155:2179], cookie=12540228091660188818, path="/Root2/Res/Subres", config={ }) 2025-04-04T16:57:43.092967Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-04-04T16:57:43.108117Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:155:2179], cookie=12540228091660188818) 2025-04-04T16:57:43.109451Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:160:2184]. Cookie: 15933924318643322773. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T16:57:43.109522Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:160:2184], cookie=15933924318643322773) 2025-04-04T16:57:43.158635Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:160:2184]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-04T16:57:43.206589Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:160:2184]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-04T16:57:43.240946Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:160:2184]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-04T16:57:43.241636Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:167:2188]. Cookie: 3913088615227053344. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-04T16:57:43.242517Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:170:2191]. Cookie: 1127680704162817996. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T16:57:43.242577Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:170:2191], cookie=1127680704162817996) 2025-04-04T16:57:43.280162Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:170:2191]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-04T16:57:43.322683Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:170:2191]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-04-04T16:57:43.323495Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:175:2195]. Cookie: 8087139619749953084. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-04-04T16:57:43.324399Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:160:2184]. Cookie: 11114407854071873147. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T16:57:43.324468Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:160:2184], cookie=11114407854071873147) 2025-04-04T16:57:43.325282Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:170:2191]. Cookie: 15844149423826907110. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T16:57:43.325341Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:170:2191], cookie=15844149423826907110) 2025-04-04T16:57:43.362726Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:160:2184]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-04-04T16:57:43.362843Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:170:2191]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-04-04T16:57:43.363569Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:182:2202]. Cookie: 10168658687319352857. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 13479, MsgBus: 7568 2025-04-04T16:57:25.900727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503178326234187:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:25.900916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d89/r3tmp/tmpNV65h0/pdisk_1.dat 2025-04-04T16:57:26.440676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:26.455146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:26.455283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:26.471639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13479, node 1 2025-04-04T16:57:26.602124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:26.602154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:26.602162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:26.602303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7568 TClient is connected to server localhost:7568 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:27.332846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.362755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:27.381197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.594549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.822918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.912519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:29.963282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503195506105041:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.963513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.368944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.455322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.554199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.620886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.724170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.814769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:30.898826Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503178326234187:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:30.898892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:30.971232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503199801072867:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.971388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.972190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503199801072873:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.981341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:31.034143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503199801072876:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:31.137698Z node 1 :TX_PROXY ERROR: Actor# [1:7489503204096040228:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:32.313062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19695, MsgBus: 2031 2025-04-04T16:57:35.660766Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503222546712225:2156];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d89/r3tmp/tmpZZWeiL/pdisk_1.dat 2025-04-04T16:57:35.737425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:57:35.829255Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:35.843425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:35.843522Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:35.845529Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19695, node 2 2025-04-04T16:57:35.950617Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:35.950660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:35.950668Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:35.950801Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2031 TClient is connected to server localhost:2031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:36.574734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:36.613200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:36.725508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:36.929874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:37.022317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:39.542411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503239726583057:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.542523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.583240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.626787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.709340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.768239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.842411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.942863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:40.044199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503244021550872:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:40.044288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:40.044489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503244021550877:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:40.048441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:40.078081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503244021550879:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:40.133760Z node 2 :TX_PROXY ERROR: Actor# [2:7489503244021550935:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:40.676706Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503222546712225:2156];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:40.676780Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:41.460789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:57:42.436893Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489503252611486187:2529], TxId: 281474976710677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGUzMTUyZWQtMjRlMTI5Ni1iZTJlMDAwLTI2ZWI3OTM0. CustomerSuppliedId : . TraceId : 01jr0vs8sfbqkjeah9xn8xh9qs. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T16:57:42.437495Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489503252611486188:2530], TxId: 281474976710677, task: 2. Ctx: { TraceId : 01jr0vs8sfbqkjeah9xn8xh9qs. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGUzMTUyZWQtMjRlMTI5Ni1iZTJlMDAwLTI2ZWI3OTM0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489503252611486184:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T16:57:42.438321Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGUzMTUyZWQtMjRlMTI5Ni1iZTJlMDAwLTI2ZWI3OTM0, ActorId: [2:7489503248316518488:2489], ActorState: ExecuteState, TraceId: 01jr0vs8sfbqkjeah9xn8xh9qs, Create QueryResponse for error on request, msg: >> BSCReadOnlyPDisk::ReadOnlyOneByOne ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-04-04T16:56:47.620628Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503014584229369:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:47.620697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:48.005248643 188423 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:48.005366962 188423 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:48.620119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:48.974981Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2332: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2332 } ] 2025-04-04T16:56:49.043066Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2332: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2332 2025-04-04T16:56:49.621443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.623781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.842410Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2332: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2332 } ] 2025-04-04T16:56:51.629372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0009bc/r3tmp/tmp0z7QHB/pdisk_1.dat 2025-04-04T16:56:51.790284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503031764099044:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.792918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:51.883506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503031764099044:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:52.002458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503031764099044:2315], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 2332, node 1 2025-04-04T16:56:52.043265Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:52.043311Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:31311 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:52.394565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:52.506674Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:52.508642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:52.508664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:52.508675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:52.508810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:56:52.621109Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503014584229369:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:52.621208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 16:56:53.006036417 188716 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:53.006214120 188716 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:53.709537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:56:53.711427Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:53.711469Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.711479Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:53.712977Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". 
Create session OK 2025-04-04T16:56:53.712995Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.713002Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:53.714880Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:53.714907Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.714914Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:53.716183Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:53.716210Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.716216Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:53.717560Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:53.717577Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:53.717583Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:53.722914Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:53.722921Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:53.722942Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:53.722942Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.722949Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:53.722956Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:53.746526Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:53.746556Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:53.746590Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:53.746604Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.746611Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:53.747772Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:53.747791Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.747798Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:53.752895Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:53.752923Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.752935Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:53.757214Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". 
Create session OK 2025-04-04T16:56:53.757241Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.757249Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:53.758808Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:53.758837Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.758844Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:53.759382Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:53.759396Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.759402Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:53.807857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:56:53.822080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:56:53.823746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:56:53.825426Z nod ... hMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Start compute actor [4:7489503241846781544:3042], task: 2 2025-04-04T16:57:40.061139Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.604793s 2025-04-04T16:57:40.061812Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-04T16:57:40.062295Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503241846781543 RawX2: 4503616807242721 } } DstEndpoint { ActorId { RawX1: 7489503241846781544 RawX2: 4503616807242722 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503241846781544 RawX2: 4503616807242722 } } DstEndpoint { ActorId { RawX1: 7489503241846781539 RawX2: 4503616807242087 } } InMemory: true } 2025-04-04T16:57:40.062316Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. 
Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7489503241846781543:3041] 2025-04-04T16:57:40.062387Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:40.063172Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. Recv TEvReadResult from ShardID=72075186224037890, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-04-04T16:57:40.063209Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. Taken 0 locks 2025-04-04T16:57:40.063228Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. new data for read #0 seqno = 1 finished = 1 2025-04-04T16:57:40.063255Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781543:3041], TxId: 281474976710811, task: 1. Ctx: { TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-04-04T16:57:40.063276Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781543:3041], TxId: 281474976710811, task: 1. Ctx: { TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:40.063298Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-04T16:57:40.063318Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. enter pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:40.063347Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. exit pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 1 freeSpace: 8386370 2025-04-04T16:57:40.063368Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. returned 1 rows; processed 1 rows 2025-04-04T16:57:40.063411Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. dropping batch for read #0 2025-04-04T16:57:40.063422Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. effective maxinflight 1024 sorted 0 2025-04-04T16:57:40.063435Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-04T16:57:40.063454Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1, CA Id [4:7489503241846781543:3041]. 
returned async data processed rows 1 left freeSpace 8386370 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-04T16:57:40.063674Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781543:3041], TxId: 281474976710811, task: 1. Ctx: { TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:40.063694Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781543:3041], TxId: 281474976710811, task: 1. Ctx: { TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:40.063739Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-04T16:57:40.063763Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:57:40.063796Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 2. Finish input channelId: 1, from: [4:7489503241846781543:3041] 2025-04-04T16:57:40.063841Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:40.064041Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:40.064062Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781543:3041], TxId: 281474976710811, task: 1. Ctx: { TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-04-04T16:57:40.064099Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781543:3041], TxId: 281474976710811, task: 1. Ctx: { TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T16:57:40.064119Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1. 
Tasks execution finished 2025-04-04T16:57:40.064137Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781543:3041], TxId: 281474976710811, task: 1. Ctx: { TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:40.064257Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 1. pass away 2025-04-04T16:57:40.064359Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710811;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:40.064741Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:40.064777Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:57:40.064785Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 2. Tasks execution finished 2025-04-04T16:57:40.064796Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503241846781544:3042], TxId: 281474976710811, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vs6jd0qdcx48m1tb4smzz. SessionId : ydb://session/3?node_id=4&id=OTk4ZWQzZmYtYWRlMGJkNDYtOGIyYmJiNjEtYzcyM2JhMWY=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:40.064848Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710811, task: 2. pass away 2025-04-04T16:57:40.064894Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710811;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; E0404 16:57:40.849522699 197840 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:57:40.849710735 197840 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:57:40.869004Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:23018: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:23018 2025-04-04T16:57:40.915562Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:23018: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:23018 >> BSCReadOnlyPDisk::ReadOnlySlay ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2025-04-04T16:56:47.957884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503015969266642:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:47.957985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:48.377046788 188802 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:48.377279499 188802 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:48.959994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:49.362652Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:61303: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61303 } ] 2025-04-04T16:56:49.458673Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:61303: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:61303 2025-04-04T16:56:49.964847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.984790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:51.286974Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:61303: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:61303 } ] 2025-04-04T16:56:51.986966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:52.251175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503037444103501:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:52.251299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:52.327629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503037444103501:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00088d/r3tmp/tmpof2CQq/pdisk_1.dat 2025-04-04T16:56:52.513027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503037444103501:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 61303, node 1 2025-04-04T16:56:52.566688Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:52.566716Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:13306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:52.943094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:52.957749Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503015969266642:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:52.957832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 16:56:53.378076810 188978 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:53.378268705 188978 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:53.743847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:53.743883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:53.743892Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:53.744073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:56:53.744449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:54.208417Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:54.208457Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:54.208466Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:54.214538Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". 
Create session OK 2025-04-04T16:56:54.214567Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:54.214574Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:54.218564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:56:54.223365Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:54.223391Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:54.223400Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:54.243167Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:54.243209Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:54.243537Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:54.243553Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:54.243560Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:54.259998Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:54.260029Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:54.260035Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:54.275043Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:54.275075Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:54.275081Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:54.276064Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:54.276077Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:54.276429Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:54.288562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:56:54.294311Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:54.294347Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:54.294355Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:54.304575Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:54.304597Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:54.304604Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:54.358768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:56:54.359807Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". 
Create session OK 2025-04-04T16:56:54.359824Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:54.359830Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:54.361796Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:54.361811Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:54.361817Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:54.379090Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:54.379113Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:54.379123Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:54.394129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:56:54.394736Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:54.394757Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:54.394763Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:54.4673 ... .972823Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973122Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973161Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973190Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973471Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973508Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973539Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973807Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.973855Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974039Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974100Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974138Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974489Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974531Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974744Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974782Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.974896Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.975013Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.975136Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.975430Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.975493Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.975617Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.975742Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished 
with error: 2025-04-04T16:57:39.975857Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.975992Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.976129Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.976243Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.976362Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.976481Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.976610Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.978628Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.978696Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.978725Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.978903Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.979182Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.979231Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.979412Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.979472Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.979501Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.979619Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.979735Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980010Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980046Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980076Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980384Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980422Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980621Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980764Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980804Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980855Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.980982Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.981128Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.981335Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.981546Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.981580Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.981622Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.981749Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982021Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982062Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982092Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982397Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982437Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with 
error: 2025-04-04T16:57:39.982531Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982618Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982720Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982808Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982895Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.982975Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983134Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983154Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983243Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983326Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983410Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983495Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983580Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983660Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983740Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983826Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.983903Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.985490Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.985556Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.985588Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.985868Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.985904Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.985931Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.986286Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.986320Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.986348Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.986463Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.986675Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.986883Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.986926Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987135Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987208Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987246Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987366Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987494Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987613Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987902Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987943Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.987979Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with 
error: 2025-04-04T16:57:39.988266Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.988304Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.988341Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.988682Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.988733Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.988767Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.988903Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.989026Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.989236Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.989502Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.989553Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.989688Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.989858Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.989983Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.990198Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.990236Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.990449Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.990481Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992074Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992133Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992266Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992393Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992516Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992642Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992777Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.992893Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.993012Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.993121Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:57:39.993227Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [good] Yq_1::CreateQuery_Without_Connection >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink |79.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |79.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 29312, MsgBus: 18337 
2025-04-04T16:57:25.417775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503175852160959:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:25.423824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d96/r3tmp/tmpjqOQZF/pdisk_1.dat 2025-04-04T16:57:26.243408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:26.250543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:26.250704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:26.255143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29312, node 1 2025-04-04T16:57:26.526819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:26.526842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:26.526856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:26.527001Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18337 TClient is connected to server localhost:18337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:27.679071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.710449Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:27.737373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:27.957921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:28.195222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:28.319783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:30.396711Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503175852160959:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:30.396812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:30.896033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503197326999083:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:30.896165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:31.455998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:31.543630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:31.609909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:31.649482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:31.745271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:31.826261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:32.008249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503205916934207:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.008343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.008730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503205916934212:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.013033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:32.026234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503205916934214:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:32.128099Z node 1 :TX_PROXY ERROR: Actor# [1:7489503205916934267:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:33.738043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:57:34.874676Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDllOTkzMzktZTJhOWFmNGItOGMxYjMyZS0zZmU2ODNhMQ==, ActorId: [1:7489503214506869465:2535], ActorState: ExecuteState, TraceId: 01jr0vs1t1a7ympr8st4bewzb7, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T16:57:34.892293Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDllOTkzMzktZTJhOWFmNGItOGMxYjMyZS0zZmU2ODNhMQ==, ActorId: [1:7489503214506869465:2535], ActorState: ReadyState, TraceId: 01jr0vs1xbfb940d382g7t7nm7, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 25948, MsgBus: 2450 2025-04-04T16:57:37.274018Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503230781666090:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:37.274065Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d96/r3tmp/tmpwQzFjM/pdisk_1.dat 2025-04-04T16:57:37.549432Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:37.579632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:37.579715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:37.583415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25948, node 2 2025-04-04T16:57:37.766866Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:37.766887Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:37.766900Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:37.767018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2450 TClient is connected to server localhost:2450 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:38.647728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:38.663382Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:38.675988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:38.782992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:39.120434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:39.229056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:42.166437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503252256504327:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:42.166573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:42.227178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:42.274840Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503230781666090:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:42.275074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:42.284862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:42.353388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:42.408133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:42.470265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:42.559500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:42.672218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503252256504853:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:42.672334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:42.672656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503252256504858:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:42.677188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:42.691516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503252256504860:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:42.759023Z node 2 :TX_PROXY ERROR: Actor# [2:7489503252256504915:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:44.132544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpWrite::InsertRevert [GOOD] >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-04-04T16:56:51.443326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503033484576688:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:51.443377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:52.154526416 189890 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:52.154771497 189890 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:52.444931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:53.071978Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3041: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3041 } ] 2025-04-04T16:56:53.156043Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3041: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3041 2025-04-04T16:56:53.445813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:54.452487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:54.494504Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3041: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3041 } ] 2025-04-04T16:56:55.452469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:56.454491Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503033484576688:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:56.454579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:56:56.455045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:56.685281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503054959413344:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:56.685515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:56.765681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503054959413344:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007f4/r3tmp/tmp8HdcBe/pdisk_1.dat 2025-04-04T16:56:56.982437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503054959413344:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } E0404 16:56:57.122382726 190120 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:57.122543478 190120 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// TServer::EnableGrpc on GrpcPort 3041, node 1 2025-04-04T16:56:57.297358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:57.315231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:57.315256Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:57.315262Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:57.315428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:56:57.527206Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:57.527246Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:57.527253Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:57.528866Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:57.528880Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:57.528885Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:57.529810Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-04-04T16:56:57.529826Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:57.529831Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:57.532760Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:57.532782Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:57.532788Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:57.534002Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:57.534013Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:57.534017Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:57.535123Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:56:57.535204Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:57.535213Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:57.535217Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:57.536121Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:57.536142Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:57.536146Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:57.536792Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". 
Create session OK 2025-04-04T16:56:57.536810Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:57.536815Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:57.537042Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:57.537057Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.537062Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:57.552662Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:57.552702Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:57.552712Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:57.563287Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:57.563317Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:57.563351Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:57.563365Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:57.563373Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:57.568759Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:57.568772Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:57.568787Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:57.568793Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:57.568802Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:57.568817Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:57.618835Z node 1 :TX_PROXY ERROR: Actor# [1:7489503059254381187:2527] txid# 281474976715661, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.618842Z node 1 :TX_PROXY ERROR: Actor# [1:7489503059254381170:2520] txid# 281474976715658, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.619075Z node 1 :TX_PROXY ERROR: Actor# [1:7489503059254381171:2521] txid# 281474976715659, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.619219Z node 1 :TX_PROXY ERROR: Actor# [1:7489503059254381183:2523] txid# 281474976715660, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.619882Z node 1 :TX_PROXY ERROR: Actor# [1:7489503059254381188:2528] txid# 281474976715662, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.626547Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create table "Root/yq/compute_databases" error: BAD_REQUEST {
: Error: database doesn't have storage pools at all to create tablet channels to storage pool binding by profile id } 2025-04-04T16:56:57.627451Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create table "Root/yq/tenant_acks" error: BAD_REQUEST {
: Error: database doesn't have storage pools at all to create tablet channels to storage pool binding by profile id } 2025-04-04T16:56:57.627663Z node 1 :TX_PROXY ERROR: Actor# [1:7489503059254381207:2540] txid# 281474976715663, issues: { message: "database doesn\'t have storage pools at all to create tablet channels to storage pool binding by profile id" severity: 1 } 2025-04-04T16:56:57.627886Z node 1 :T ... nning reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-04-04T16:57:42.949124Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-04-04T16:57:42.949142Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. Taken 0 locks 2025-04-04T16:57:42.949156Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. new data for read #0 seqno = 1 finished = 1 2025-04-04T16:57:42.949182Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562807:3124], TxId: 281474976715827, task: 1. Ctx: { TraceId : 01jr0vs9rv61at7qxh1fd18art. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-04-04T16:57:42.949204Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562807:3124], TxId: 281474976715827, task: 1. Ctx: { TraceId : 01jr0vs9rv61at7qxh1fd18art. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:42.949227Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-04-04T16:57:42.949247Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-04-04T16:57:42.949275Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 1 freeSpace: 8388572 2025-04-04T16:57:42.949297Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. returned 1 rows; processed 1 rows 2025-04-04T16:57:42.949344Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. dropping batch for read #0 2025-04-04T16:57:42.949357Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. effective maxinflight 1024 sorted 0 2025-04-04T16:57:42.949371Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-04-04T16:57:42.949389Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7489503250913562807:3124]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-04-04T16:57:42.949816Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562807:3124], TxId: 281474976715827, task: 1. Ctx: { TraceId : 01jr0vs9rv61at7qxh1fd18art. 
CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:42.949844Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562807:3124], TxId: 281474976715827, task: 1. Ctx: { TraceId : 01jr0vs9rv61at7qxh1fd18art. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:42.949887Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-04T16:57:42.949961Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Start compute actor [4:7489503250913562808:3125], task: 2 2025-04-04T16:57:42.949996Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Set execution timeout 299.993404s 2025-04-04T16:57:42.950291Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-04-04T16:57:42.950486Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503250913562807 RawX2: 4503616807242804 } } DstEndpoint { ActorId { RawX1: 7489503250913562808 RawX2: 4503616807242805 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7489503250913562808 RawX2: 4503616807242805 } } DstEndpoint { ActorId { RawX1: 7489503250913562795 RawX2: 4503616807242162 } } InMemory: true } 2025-04-04T16:57:42.950501Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7489503250913562807:3124] 2025-04-04T16:57:42.950563Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. 
Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-04-04T16:57:42.950594Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. Finish input channelId: 1, from: [4:7489503250913562807:3124] 2025-04-04T16:57:42.950664Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:42.958884Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562807:3124], TxId: 281474976715827, task: 1. Ctx: { TraceId : 01jr0vs9rv61at7qxh1fd18art. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-04-04T16:57:42.958996Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562807:3124], TxId: 281474976715827, task: 1. Ctx: { TraceId : 01jr0vs9rv61at7qxh1fd18art. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:42.959052Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1. Tasks execution finished 2025-04-04T16:57:42.959073Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562807:3124], TxId: 281474976715827, task: 1. Ctx: { TraceId : 01jr0vs9rv61at7qxh1fd18art. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:42.959076Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T16:57:42.959224Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1. pass away 2025-04-04T16:57:42.959341Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715827;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:42.959877Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-04-04T16:57:42.959924Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T16:57:42.959935Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. Tasks execution finished 2025-04-04T16:57:42.959947Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7489503250913562808:3125], TxId: 281474976715827, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjBhMDRiMTItZWE1ZGNjMWYtZDRkMzhjNWMtZjQzMmE3Yzg=. TraceId : 01jr0vs9rv61at7qxh1fd18art. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T16:57:42.960008Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. pass away 2025-04-04T16:57:42.960068Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715827;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T16:57:42.961100Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715830. Ctx: { TraceId: 01jr0vs9s2arych2ny5m9hjgre, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODY3NmRkZTAtMTY1NTFjN2YtYWFiNzNkMzEtMmIzMmZkYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:57:43.008400Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:11319: Failed to connect to remote host: Connection reset by peer
: Error: Grpc error response on endpoint [::]:11319 2025-04-04T16:57:43.993926Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:11319: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:11319 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 28608, MsgBus: 8401 2025-04-04T16:57:26.344683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503183665428701:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:26.344740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d86/r3tmp/tmpYKvhuv/pdisk_1.dat 2025-04-04T16:57:27.200388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:27.223478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:27.223943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:27.228169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28608, node 1 2025-04-04T16:57:27.542933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:27.542973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:27.542983Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:27.543095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8401 TClient is connected to server localhost:8401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:28.613786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:28.695016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:28.925269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:29.176355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:29.301837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:31.345516Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503183665428701:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:31.345632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:32.167549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503209435234269:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.167670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.501442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:32.569432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:32.624393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:32.667450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:32.703719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:32.789343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:32.868647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503209435234791:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.868713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.868867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503209435234796:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:32.872959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:32.896532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503209435234798:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:32.975130Z node 1 :TX_PROXY ERROR: Actor# [1:7489503209435234853:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:34.553516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:57:35.503608Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503222320137254:2519], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTE0NTk1MTMtMmYyYTY3MDktZjI2MzFmYjUtOTU1MGViODI=. TraceId : 01jr0vs1pb1n528nk34dsvbctx. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T16:57:35.504923Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503222320137255:2520], TxId: 281474976710673, task: 2. Ctx: { TraceId : 01jr0vs1pb1n528nk34dsvbctx. SessionId : ydb://session/3?node_id=1&id=NTE0NTk1MTMtMmYyYTY3MDktZjI2MzFmYjUtOTU1MGViODI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489503222320137251:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T16:57:35.505466Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTE0NTk1MTMtMmYyYTY3MDktZjI2MzFmYjUtOTU1MGViODI=, ActorId: [1:7489503218025169720:2496], ActorState: ExecuteState, TraceId: 01jr0vs1pb1n528nk34dsvbctx, Create QueryResponse for error on request, msg:
: Error: Conflict with existing key., code: 2012 2025-04-04T16:57:36.120659Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503226615104604:2533], TxId: 281474976710676, task: 1. Ctx: { TraceId : 01jr0vs2hqb4ge5yn91hj2pj7r. SessionId : ydb://session/3?node_id=1&id=NTE0NTk1MTMtMmYyYTY3MDktZjI2MzFmYjUtOTU1MGViODI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-04T16:57:36.121053Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503226615104605:2534], TxId: 281474976710676, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTE0NTk1MTMtMmYyYTY3MDktZjI2MzFmYjUtOTU1MGViODI=. TraceId : 01jr0vs2hqb4ge5yn91hj2pj7r. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489503226615104601:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T16:57:36.121343Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTE0NTk1MTMtMmYyYTY3MDktZjI2MzFmYjUtOTU1MGViODI=, ActorId: [1:7489503218025169720:2496], ActorState: ExecuteState, TraceId: 01jr0vs2hqb4ge5yn91hj2pj7r, Create QueryResponse for error on request, msg:
: Error: Duplicated keys found., code: 2012 Trying to start YDB, gRPC: 28244, MsgBus: 19708 2025-04-04T16:57:38.494676Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503235175138188:2204];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d86/r3tmp/tmpZelCg4/pdisk_1.dat 2025-04-04T16:57:38.691574Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:57:38.853124Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:38.855532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:38.855616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:38.868954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28244, node 2 2025-04-04T16:57:39.050781Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:39.050805Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:39.050813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:39.050935Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19708 TClient is connected to server localhost:19708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T16:57:39.859727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.866977Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:39.871950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:39.970391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
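The two KIKIMR_CONSTRAINT_VIOLATION errors above (code 2012) are expected by KqpWrite::InsertRevert, which still reports [GOOD]: YQL INSERT INTO is a strict insert, failing with "Conflict with existing key." when the primary key already exists and with "Duplicated keys found." when one statement supplies the same key twice, whereas UPSERT INTO writes blindly and succeeds in both cases. A minimal YQL sketch of the distinction; the table and values are illustrative, not the test's schema:

    -- Strict INSERT vs. blind UPSERT (illustrative table).
    CREATE TABLE demo (Key Uint64, Value String, PRIMARY KEY (Key));

    INSERT INTO demo (Key, Value) VALUES (1ul, "a");             -- ok: key is new
    INSERT INTO demo (Key, Value) VALUES (1ul, "b");             -- fails: Conflict with existing key., code: 2012
    INSERT INTO demo (Key, Value) VALUES (2ul, "x"), (2ul, "y"); -- fails: Duplicated keys found., code: 2012
    UPSERT INTO demo (Key, Value) VALUES (1ul, "b");             -- ok: overwrites without a conflict check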
2025-04-04T16:57:40.388719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:40.631396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:43.353503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503256649976282:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.353626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.402723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.465588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.467653Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503235175138188:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:43.467712Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:43.563488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.609649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.680047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.759815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.865771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503256649976802:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.865867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.866284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503256649976807:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.872190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:43.889077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503256649976809:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:43.968480Z node 2 :TX_PROXY ERROR: Actor# [2:7489503256649976868:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:45.130845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::ReadWithRestarts >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> TBackupTests::ShouldSucceedOnLargeData[Raw] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 26163, MsgBus: 24579 2025-04-04T16:57:32.144725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503207932072067:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:32.145178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d81/r3tmp/tmpXtcV1e/pdisk_1.dat 2025-04-04T16:57:32.988076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:33.012541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:33.012634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:33.035777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26163, node 1 2025-04-04T16:57:33.326974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:33.327005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:33.327013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:33.327158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24579 TClient is connected to server localhost:24579 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:34.391145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:34.428029Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:34.437479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:34.662889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.039746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:35.221607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:37.133011Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503207932072067:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:37.133114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:38.872983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503233701877494:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.873144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.137319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.178621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.244867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.313763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.365290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.437522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:39.530662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503237996845315:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.530803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.531163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503237996845321:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:39.536112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:39.552372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503237996845323:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:39.649620Z node 1 :TX_PROXY ERROR: Actor# [1:7489503237996845378:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:40.929949Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-04T16:57:40.944027Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T16:57:40.944224Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T16:57:40.944514Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503242291813013:2505], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7489503242291812991:2505]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7489503242291813013:2505].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T16:57:40.945030Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503242291813006:2505], SessionActorId: [1:7489503242291812991:2505], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489503242291812991:2505]. isRollback=0 2025-04-04T16:57:40.945261Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjAwODVkZGYtYmRjMGVhMS03NzM5YThhZC0xNTcwMDFhYw==, ActorId: [1:7489503242291812991:2505], ActorState: ExecuteState, TraceId: 01jr0vs7q5c08sk0pc3228t5hy, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489503242291813007:2505] from: [1:7489503242291813006:2505] 2025-04-04T16:57:40.945339Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489503242291813007:2505] TxId: 281474976710671. Ctx: { TraceId: 01jr0vs7q5c08sk0pc3228t5hy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjAwODVkZGYtYmRjMGVhMS03NzM5YThhZC0xNTcwMDFhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T16:57:40.946959Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjAwODVkZGYtYmRjMGVhMS03NzM5YThhZC0xNTcwMDFhYw==, ActorId: [1:7489503242291812991:2505], ActorState: ExecuteState, TraceId: 01jr0vs7q5c08sk0pc3228t5hy, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5559, MsgBus: 7184 2025-04-04T16:57:42.414701Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503251886275316:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:42.414768Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d81/r3tmp/tmpmcUR2u/pdisk_1.dat 2025-04-04T16:57:42.649220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:42.649303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:42.652045Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:42.668988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5559, node 2 2025-04-04T16:57:42.819001Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:42.819023Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:42.819034Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:42.819144Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7184 TClient is connected to server localhost:7184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:43.565679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:43.575692Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:43.581602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.723622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:44.078294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:44.245600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:46.306774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503269066146277:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.306862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.347579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.382970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.453381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.521405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.577151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.617728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.697740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503269066146799:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.697830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.698043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503269066146804:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.701125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:46.712371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503269066146806:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:46.778120Z node 2 :TX_PROXY ERROR: Actor# [2:7489503269066146859:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:47.417841Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503251886275316:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:47.417946Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:47.786310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
>> Cdc::InitialScanAndResolvedTimestamps [GOOD]
>> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit
>> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultBool+useSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TryToUpdateNonExistentColumn [GOOD]
Test command err: Trying to start YDB, gRPC: 24523, MsgBus: 17776 2025-04-04T16:57:21.711189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503161062760485:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:21.711644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163d/r3tmp/tmpLPONPO/pdisk_1.dat 2025-04-04T16:57:22.748853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:22.771841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:22.771965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:22.774370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:57:22.779743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24523, node 1 2025-04-04T16:57:23.106792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:23.106829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:23.106837Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:23.106932Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17776 TClient is connected to server localhost:17776 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:24.832942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:24.890853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:25.265032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:25.729729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:25.883675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.684348Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503161062760485:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:26.684465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:28.488667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503191127533203:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:28.488782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:28.953548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.007633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.062139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.120864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.185389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.261484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:29.305745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503195422501022:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.305837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.306253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503195422501027:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.312250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:29.327069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503195422501029:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:29.420450Z node 1 :TX_PROXY ERROR: Actor# [1:7489503195422501083:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:31.209185Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503204012436023:2507], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jr0vry07few9t31zbcj8c2x1. SessionId : ydb://session/3?node_id=1&id=NWEyOGNjM2ItNjkwMDA2ZDktZGM4NDlhNzYtYjk5YzA1ODE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-04-04T16:57:31.213333Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503204012436024:2508], TxId: 281474976710671, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWEyOGNjM2ItNjkwMDA2ZDktZGM4NDlhNzYtYjk5YzA1ODE=. TraceId : 01jr0vry07few9t31zbcj8c2x1. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489503204012436019:2498], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T16:57:31.215427Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWEyOGNjM2ItNjkwMDA2ZDktZGM4NDlhNzYtYjk5YzA1ODE=, ActorId: [1:7489503199717468657:2498], ActorState: ExecuteState, TraceId: 01jr0vry07few9t31zbcj8c2x1, Create QueryResponse for error on request, msg:
: Error: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 26820, MsgBus: 3101 2025-04-04T16:57:32.232180Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503207525780643:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163d/r3tmp/tmpIrB67F/pdisk_1.dat 2025-04-04T16:57:32.338033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:57:32.646515Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:32.648009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:32.648084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:32.654448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26820, node 2 2025-04-04T16:57:32.779008Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:32.779029Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:32.779036Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:32.779150Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3101 TClient is connected to server localhost:3101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 720575940466444 ... 
Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.049725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.129974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.190272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.233485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.279956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.367526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:38.447053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503233295586554:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.447146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.447564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503233295586559:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:38.452796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:38.473479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503233295586561:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:38.549423Z node 2 :TX_PROXY ERROR: Actor# [2:7489503233295586616:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13394, MsgBus: 11985 2025-04-04T16:57:42.139009Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503251506272746:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:42.139088Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163d/r3tmp/tmprbtczp/pdisk_1.dat 2025-04-04T16:57:42.264102Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:42.293721Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:42.293824Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:42.297268Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13394, node 3 2025-04-04T16:57:42.492160Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:42.492192Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:42.492201Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:42.492368Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11985 TClient is connected to server localhost:11985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:43.147605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:43.155959Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:57:43.171397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:43.260606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:43.555926Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:43.680646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:46.484544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489503268686143697:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.484642Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.526840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.575301Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.611185Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.656783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.699553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.750361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.842833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489503268686144213:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.844926Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.845150Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489503268686144218:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:46.849316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:46.863366Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489503268686144220:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:57:46.940735Z node 3 :TX_PROXY ERROR: Actor# [3:7489503268686144275:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:47.139993Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489503251506272746:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:47.140083Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:48.311990Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489503277276079141:2496], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2025-04-04T16:57:48.318907Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTUzMGZlY2YtOTI0N2I1MzAtNDQzYzkzNWItODFlMDZiMDM=, ActorId: [3:7489503277276079133:2491], ActorState: ExecuteState, TraceId: 01jr0vseza0ja90gxy0vx4hbc1, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id:
>> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD]
>> KqpEffects::InsertRevert_Literal_Conflict [GOOD]
|79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD]
Test command err: 2025-04-04T16:53:05.803663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502059077918972:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:05.899131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00159e/r3tmp/tmpfcCjX1/pdisk_1.dat 2025-04-04T16:53:10.482472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:10.786480Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502059077918972:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:53:10.786537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:53:12.656429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:53:12.656513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:53:12.664741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:53:12.708021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:12.708052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:53:13.358200Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26579, node 1 2025-04-04T16:53:14.340800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:53:14.340823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:53:14.340830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:53:14.341101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:53:14.867700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 
2025-04-04T16:53:15.015748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:53:15.227054Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7489502102027592556:2313] 2025-04-04T16:53:15.235710Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:53:15.442585Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:53:15.443239Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:53:15.586925Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:53:15.586989Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:53:15.587293Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:53:15.595951Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:53:16.173849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:53:16.174037Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7489502106322559872:2313] in generation 1 2025-04-04T16:53:16.202690Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:53:16.856932Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:53:16.857211Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:53:16.857281Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7489502106322559876:2315] 2025-04-04T16:53:16.857290Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:53:16.857779Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:53:16.857953Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:16.869825Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:53:16.870004Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:53:16.870415Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489502102027592553:2310], serverId# [1:7489502102027592569:2316], sessionId# [0:0:0] 2025-04-04T16:53:16.870885Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:53:16.871192Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:53:16.871212Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:53:16.871227Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:53:16.871391Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:53:16.872109Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:53:16.872304Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-04T16:53:16.886422Z node 1 :TX_DATASHARD 
DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:53:16.887167Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:53:16.887224Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:53:16.889455Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489502106322559891:2333], serverId# [1:7489502106322559892:2334], sessionId# [0:0:0] 2025-04-04T16:53:16.893994Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743785596938 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743785596938 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:53:16.894031Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:16.894192Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:53:16.894228Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:53:16.906041Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743785596938:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-04T16:53:16.907174Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743785596938:281474976710657 keys extracted: 0 2025-04-04T16:53:16.907605Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:53:16.914388Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:53:16.914468Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:53:16.914582Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:53:16.916922Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:53:16.917333Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:53:16.927883Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743785596938} 2025-04-04T16:53:16.928017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:53:16.928253Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743785596937 2025-04-04T16:53:16.928488Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:16.928838Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743785596945 2025-04-04T16:53:16.932954Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:53:16.932984Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:53:16.933014Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 
72075186224037888 2025-04-04T16:53:16.933056Z node 1 :TX_DATASHARD DEBUG: Complete [1743785596938 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7489502080552755878:2198], exec latency: 9 ms, propose latency: 25 ms 2025-04-04T16:53:16.933089Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-04T16:53:16.933140Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:53:16.934506Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7489502106322559876:2315][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-04T16:53:16.951675Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-04T16:53:16.951725Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:53:16.969830Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:53:16.969946Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:53:16.970051Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:53:16.970076Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-04T16:53:16.970086Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-04T16:53:16.990042Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:53:17.003745Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:53:17.008980Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-04T16:53:17.01008 ... 
27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-04-04T16:57:49.523655Z node 27 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715662 at step 7500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 7500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:57:49.523815Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:57:49.524288Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:57:49.524345Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:57:49.524422Z node 27 :TX_DATASHARD DEBUG: Found ready operation [7500:281474976715662] in PlanQueue unit at 72075186224037888 2025-04-04T16:57:49.524906Z node 27 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 7500:281474976715662 keys extracted: 0 2025-04-04T16:57:49.525201Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:57:49.525607Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:57:49.528862Z node 27 :TX_DATASHARD DEBUG: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 7500, txId# 281474976715662, at tablet# 72075186224037888 2025-04-04T16:57:49.529597Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:57:49.551168Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 7500} 2025-04-04T16:57:49.551380Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:57:49.551469Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:57:49.551530Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:57:49.551735Z node 27 :TX_DATASHARD DEBUG: Complete [7500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [27:415:2409], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:57:49.551844Z node 27 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-04T16:57:49.552072Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:57:49.553206Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-04T16:57:49.553516Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T16:57:49.553945Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-04-04T16:57:49.556526Z node 27 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-04T16:57:49.556687Z node 27 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:57:49.570152Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-04-04T16:57:49.570399Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-04-04T16:57:49.570584Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:57:49.570767Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 9000 from mediator time cast 2025-04-04T16:57:49.570900Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 0 change record(s): at tablet# 72075186224037888 2025-04-04T16:57:49.571007Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:57:49.571305Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:683:2579] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-04T16:57:49.571715Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:976:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-04-04T16:57:49.572246Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-04-04T16:57:49.572525Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:976:2779], at tablet# 72075186224037888 2025-04-04T16:57:49.572604Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-04-04T16:57:49.572862Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:976:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:57:49.573251Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1058:2779] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:57:49.573875Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T16:57:49.573941Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T16:57:49.574089Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 2 requestId: cookie: 2 2025-04-04T16:57:49.574362Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T16:57:49.574419Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T16:57:49.574490Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 
0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2025-04-04T16:57:49.574767Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2025-04-04T16:57:49.574979Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-04T16:57:49.575316Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-04T16:57:49.576444Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 count 1 nextOffset 4 batches 1 2025-04-04T16:57:49.577071Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 0 endOffset 3 curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000| size 93 WTime 7451 2025-04-04T16:57:49.577337Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T16:57:49.577447Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 93 2025-04-04T16:57:49.578827Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 3 count 1 size 93 actorID [27:918:2735] 2025-04-04T16:57:49.579027Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 size 93 2025-04-04T16:57:49.589548Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:57:49.589715Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-04T16:57:49.589801Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-04T16:57:49.590103Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2025-04-04T16:57:49.590411Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1058:2779] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2025-04-04T16:57:49.590536Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:976:2779] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-04T16:57:49.590763Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-04T16:57:49.590801Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-04-04T16:57:49.602148Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-04T16:57:49.757506Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T16:57:49.757581Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T16:57:49.757730Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-04-04T16:57:49.757836Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-04T16:57:49.758326Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 
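Every record in the trace above carries the same header shape: an ISO-8601 timestamp, a node id, a component tag and a severity, e.g. `2025-04-04T16:57:49.757506Z node 27 :PERSQUEUE DEBUG:`. A minimal parsing sketch for that header follows; it assumes only the header shape visible in this log, and the severity alternatives beyond DEBUG/INFO/NOTICE/WARN/ERROR are assumptions not seen in this excerpt:

    import re
    from datetime import datetime

    # Header shape observed in this trace:
    #   2025-04-04T16:57:49.757506Z node 27 :PERSQUEUE DEBUG: <message...>
    # EMERG/ALERT/CRIT/TRACE are assumed extra levels, not present in this excerpt.
    RECORD = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)"
        r" node (?P<node>\d+)"
        r" :(?P<component>\w+)"
        r" (?P<level>EMERG|ALERT|CRIT|ERROR|WARN|NOTICE|INFO|DEBUG|TRACE):"
    )

    def iter_records(text):
        """Yield (timestamp, node, component, level, start-of-message offset)."""
        for m in RECORD.finditer(text):
            ts = datetime.strptime(m.group("ts"), "%Y-%m-%dT%H:%M:%S.%fZ")
            yield ts, int(m.group("node")), m.group("component"), m.group("level"), m.end()
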
2025-04-04T16:57:49.758451Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T16:57:49.759376Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpDataIntegrityTrails::Ddl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:57:50.609011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:57:50.609113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:50.617590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:57:50.617671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:57:50.617717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:57:50.617755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:57:50.617887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:50.617968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:57:50.618352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:57:50.707501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:50.707578Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:50.727913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:57:50.728258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:57:50.728473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:57:50.748565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:57:50.748845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:57:50.749413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:50.751407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:57:50.758159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:50.759845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-04T16:57:50.759939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:50.760195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:57:50.760255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:50.760301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:57:50.760461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.770468Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:57:50.919402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:50.919673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.919917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:57:50.920184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:57:50.920253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.926435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:50.926612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:57:50.926871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.926955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:57:50.927034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:57:50.927080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:57:50.930165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.930270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:57:50.930316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:57:50.933315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.933417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.933475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:50.933547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:57:50.937874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:57:50.940672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:57:50.940901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:57:50.942051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:50.942197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:50.942288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:50.942635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:57:50.942717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:50.942882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:57:50.942982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:57:50.945878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:50.945934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:50.946119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:50.946178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:57:50.946586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.946634Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:57:50.946740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:50.946772Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:50.946812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:50.946841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:50.946878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:57:50.946919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:50.946952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:57:50.946984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:57:50.947363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:50.947412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:57:50.947446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:57:50.949582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:50.949727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:50.949780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T16:57:51.565564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:51.565618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T16:57:51.566256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.566315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:57:51.567003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:51.567151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T16:57:51.567192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T16:57:51.567231Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T16:57:51.567274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T16:57:51.567358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:16878 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 77F4C978-4AF5-4D42-B69C-41D61CC239A6 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:16878 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F850D811-20EA-40B2-92BD-D0AA14AAB6F7 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2025-04-04T16:57:51.569274Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:476:2435], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-04T16:57:51.569375Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:480:2437], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-04-04T16:57:51.569424Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:480:2437], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T16:57:51.569547Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:479:2436], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T16:57:51.577152Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:475:2433] 2025-04-04T16:57:51.577345Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:476:2435], sender# [1:475:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: Erasing txId 102 2025-04-04T16:57:51.581491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16878 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E32FC107-3FC7-4292-9212-824D082D8602 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-04T16:57:51.584280Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:476:2435], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-04T16:57:51.584341Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:476:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T16:57:51.584505Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:475:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 
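The mock S3 exchange above pairs each PUT's `content-md5` request header with the `ETag` the export actor later logs in `PutObjectResult`. Both encode the same MD5 digest, base64 in the header and hex in the ETag, whenever the upload is not multipart, which `multipart# 0` confirms here. A quick cross-check sketch using the scheme.pb values from this log:

    import base64, binascii

    def md5_header_matches_etag(content_md5_b64: str, etag_hex: str) -> bool:
        # Content-MD5 is base64 of the raw 16-byte MD5 digest; for simple
        # (non-multipart) uploads S3 reports the same digest as a hex ETag.
        digest = base64.b64decode(content_md5_b64)
        return binascii.hexlify(digest).decode() == etag_hex.lower()

    # Values taken verbatim from the scheme.pb PUT and its PutObjectResult above.
    assert md5_header_matches_etag("Myp3UygaBNGp6+7AMgyRnQ==",
                                   "332a7753281a04d1a9ebeec0320c919d")
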
2025-04-04T16:57:51.609026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T16:57:51.609116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T16:57:51.609279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T16:57:51.609403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T16:57:51.609465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:51.609602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:51.610180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T16:57:51.610218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T16:57:51.610370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T16:57:51.610471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T16:57:51.610512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:51.610544Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 
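The schemeshard trace encodes each operation's progress through its state machine as `Change state for txid <tx>:<part> <from> -> <to>` records: txId 1 earlier goes 2 -> 3 -> 128 -> 240, and the backup operation 102 completes with 129 -> 240 just below. A sketch that replays those transitions per operation:

    import re
    from collections import defaultdict

    TRANSITION = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

    def state_paths(text):
        """Map 'txId:partId' -> ordered list of (from_state, to_state) pairs."""
        paths = defaultdict(list)
        for m in TRANSITION.finditer(text):
            paths[m.group(1)].append((int(m.group(2)), int(m.group(3))))
        return dict(paths)
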
2025-04-04T16:57:51.610592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T16:57:51.610638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T16:57:51.610679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T16:57:51.610823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:51.614228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.614836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.615183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.615237Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T16:57:51.615350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:57:51.615396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:57:51.615440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:57:51.615498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:57:51.615543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T16:57:51.615635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:369:2336] message: TxId: 102 2025-04-04T16:57:51.615703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:57:51.615742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T16:57:51.615771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T16:57:51.615893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T16:57:51.618395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:57:51.618462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:452:2412] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 6959, MsgBus: 14508 2025-04-04T16:57:37.365533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503231277923339:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:37.383948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d7e/r3tmp/tmpxPAH36/pdisk_1.dat 2025-04-04T16:57:38.090770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:38.090857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:38.097579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:57:38.102496Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6959, node 1 2025-04-04T16:57:38.382891Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:38.382913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:38.382919Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:38.383234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14508 TClient is connected to server localhost:14508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:39.794661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:39.826194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:40.094943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:40.508753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:40.631865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:42.353226Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503231277923339:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:42.353336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:42.681224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503252752761464:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:42.681332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:42.999760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.050929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.127850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.192768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.300613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.347435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:43.463741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503257047729292:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.463843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.464097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503257047729297:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:43.468227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:43.525269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503257047729299:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:57:43.623427Z node 1 :TX_PROXY ERROR: Actor# [1:7489503257047729356:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27881, MsgBus: 32169 2025-04-04T16:57:46.056226Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503267400651197:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:46.056301Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d7e/r3tmp/tmpZWvYgr/pdisk_1.dat 2025-04-04T16:57:46.197991Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:46.220273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:46.220367Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:46.221920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27881, node 2 2025-04-04T16:57:46.284944Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:46.284967Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:46.284974Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:46.285076Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32169 TClient is connected to server localhost:32169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T16:57:46.723138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:57:46.742606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
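The same handful of WARN diagnostics, such as `Resource pool default not found` and `propose itself is undo unsafe`, recurs dozens of times across both nodes above and below; masking timestamps and per-run ids before counting makes that repetition visible. A minimal triage sketch; the masking patterns are assumptions tuned to the id shapes seen in this log:

    import re
    from collections import Counter

    SCRUB = [
        (re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z"), "<ts>"),
        (re.compile(r"\[\d+:\d+:\d+\]"), "<actor>"),
        (re.compile(r"\b\d{6,}\b"), "<id>"),
    ]

    def top_diagnostics(text, n=5):
        """Count WARN/ERROR diagnostics after masking timestamps, actor ids
        and long numeric ids, so that repeats group together."""
        counts = Counter()
        for m in re.finditer(r":(\w+) (WARN|ERROR): ([^\n]{1,120})", text):
            component, level, msg = m.groups()
            for pat, repl in SCRUB:
                msg = pat.sub(repl, msg)
            counts[(component, level, msg)] += 1
        return counts.most_common(n)
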
2025-04-04T16:57:46.829671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:46.990059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:47.061203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:49.350906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503280285554838:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:49.350992Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:49.386112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:57:49.427771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:57:49.461416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:57:49.498230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:57:49.533219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:57:49.628450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:57:49.717516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503280285555355:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:49.717584Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:49.717943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503280285555360:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:49.722902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:57:49.741096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503280285555362:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:57:49.801841Z node 2 :TX_PROXY ERROR: Actor# [2:7489503280285555417:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:51.056903Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503267400651197:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:51.057000Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 1315, MsgBus: 3447 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ac6/r3tmp/tmpjAzQJm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1315, node 1 TClient is connected to server localhost:3447 TClient is connected to server localhost:3447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
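Interleaved with the logs, the runner emits one verdict line per finished test (`>> Suite::Test [GOOD]`), and a `------- [TM] ... Test command err:` block carrying that test's own output. A sketch that pulls the verdicts back out of the stream; verdict words other than GOOD are assumptions, since only [GOOD] appears in this excerpt:

    import re

    VERDICT = re.compile(r">> ([\w:.+\-\[\]]+) \[(GOOD|FAIL|TIMEOUT|SKIPPED|CRASHED)\]")

    def verdicts(text):
        """Return [(test_name, verdict), ...] in stream order."""
        return [(m.group(1), m.group(2)) for m in VERDICT.finditer(text)]
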
|79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |79.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> QueryStats::Ranges [GOOD] >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |79.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 5400556809257628832 2025-04-04T16:57:47.241244Z 1 00h01m14.511536s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T16:57:47.243989Z 1 00h01m14.511536s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5511501354075970792] 2025-04-04T16:57:47.274946Z 1 00h01m14.511536s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink |79.5%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.5%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THiveTest::TestNoMigrationToSelf >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> THeavyPerfTest::TTestLoadEverything >> THiveTest::TestDrain >> THiveTest::TestUpdateChannelValues >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> THiveTest::TestFollowers >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink >> THiveTest::TestCreateTablet >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |79.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TStorageBalanceTest::TestScenario2 >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower >> THiveTest::TestLocalDisconnect |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |79.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-04-04T16:55:45.714171Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:45.862912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:45.890933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:45.891288Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:45.900491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:45.900761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:45.901049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:45.901175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:45.901312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:45.901465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:45.901575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:45.901705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:45.901848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:45.901990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.902145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:45.902309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:45.940849Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:45.941160Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:45.941254Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:45.941504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:45.941715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:45.941802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:45.941878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:45.941993Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:45.942098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:45.942148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:45.942184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:45.942427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:45.942522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:45.942577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:45.942612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:45.942752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:45.942846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:45.942914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:45.942968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 
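The columnshard bootstrap above runs a fixed chain of schema normalizers; each hand-off is logged as `event=normalizer_switched;description=CLASS_NAME=<name>` and each completion as a matching `normalizer_finished`. A sketch that recovers the chain order from the trace and flags any normalizer that never reported finishing:

    import re

    SWITCHED = re.compile(r"event=normalizer_switched;description=CLASS_NAME=(\w+);")
    FINISHED = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(\w+);")

    def normalizer_chain(text):
        """List normalizers in hand-off order as (name, finished) pairs."""
        started = [m.group(1) for m in SWITCHED.finditer(text)]
        done = {m.group(1) for m in FINISHED.finditer(text)}
        return [(name, name in done) for name in started]
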
2025-04-04T16:55:45.943074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:45.943118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:45.943161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:45.943246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:45.943290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:45.943322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:45.943827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-04-04T16:55:45.943989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=81; 2025-04-04T16:55:45.944084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-04T16:55:45.944221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=79; 2025-04-04T16:55:45.944455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:45.944536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:45.944622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:45.944870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:45.944936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.944976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.945177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:45.945243Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:45.945287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:45.945498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:45.945547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:45.945581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:45.945723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:45.945796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:45.945871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... ne=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:117:2768:0]; 2025-04-04T16:58:00.228429Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:118:2768:0]; 2025-04-04T16:58:00.228483Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:119:2768:0]; 2025-04-04T16:58:00.228537Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:120:2768:0]; 2025-04-04T16:58:00.228596Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:121:2768:0]; 2025-04-04T16:58:00.228658Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:122:2768:0]; 2025-04-04T16:58:00.228721Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:123:2768:0]; 2025-04-04T16:58:00.228779Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:124:2768:0]; 2025-04-04T16:58:00.228841Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:125:2768:0]; 2025-04-04T16:58:00.228902Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:126:2768:0]; 2025-04-04T16:58:00.228960Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:127:2768:0]; 2025-04-04T16:58:00.229019Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:128:2696:0]; 2025-04-04T16:58:00.229075Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:129:2696:0]; 2025-04-04T16:58:00.229137Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:130:2696:0]; 2025-04-04T16:58:00.229195Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:131:2696:0]; 2025-04-04T16:58:00.229255Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:132:8920:0]; 2025-04-04T16:58:00.229343Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:133:2776:0]; 2025-04-04T16:58:00.229432Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:134:2776:0]; 2025-04-04T16:58:00.229493Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:135:2776:0]; 2025-04-04T16:58:00.229552Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:136:2776:0]; 2025-04-04T16:58:00.229609Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:137:2776:0]; 2025-04-04T16:58:00.229667Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:138:2768:0]; 2025-04-04T16:58:00.229723Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:139:2768:0]; 2025-04-04T16:58:00.229780Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:140:2768:0]; 2025-04-04T16:58:00.229837Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:141:2768:0]; 2025-04-04T16:58:00.229894Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:142:2768:0]; 2025-04-04T16:58:00.229949Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:143:2768:0]; 2025-04-04T16:58:00.230003Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:144:2768:0]; 2025-04-04T16:58:00.230079Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:145:2768:0]; 2025-04-04T16:58:00.230152Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:146:2768:0]; 2025-04-04T16:58:00.230212Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:147:2768:0]; 2025-04-04T16:58:00.230302Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:148:2768:0]; 2025-04-04T16:58:00.230360Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:149:2768:0]; 2025-04-04T16:58:00.230422Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:150:2768:0]; 2025-04-04T16:58:00.230491Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:151:2768:0]; 2025-04-04T16:58:00.230545Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:152:2768:0]; 2025-04-04T16:58:00.230601Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:153:2768:0]; 2025-04-04T16:58:00.230675Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:154:2768:0]; 2025-04-04T16:58:00.230732Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:155:2768:0]; 2025-04-04T16:58:00.230793Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:156:2768:0]; 2025-04-04T16:58:00.230855Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:157:2768:0]; 2025-04-04T16:58:00.230942Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:158:2768:0]; 2025-04-04T16:58:00.231008Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:159:2768:0]; 2025-04-04T16:58:00.231066Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:160:2768:0]; 2025-04-04T16:58:00.231122Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:161:2768:0]; 2025-04-04T16:58:00.231179Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:162:2768:0]; 2025-04-04T16:58:00.231235Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:163:2768:0]; 2025-04-04T16:58:00.231296Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:164:2768:0]; 2025-04-04T16:58:00.231353Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:165:2768:0]; 2025-04-04T16:58:00.231408Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:166:2768:0]; 2025-04-04T16:58:00.231463Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:167:2768:0]; 2025-04-04T16:58:00.231524Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:168:2768:0]; 2025-04-04T16:58:00.231584Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:169:2768:0]; 2025-04-04T16:58:00.231663Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:170:2768:0]; 2025-04-04T16:58:00.231737Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:171:2768:0]; 2025-04-04T16:58:00.231814Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:172:2696:0]; 2025-04-04T16:58:00.231870Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:173:2696:0]; 2025-04-04T16:58:00.231929Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:174:2696:0]; 2025-04-04T16:58:00.231985Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:175:2696:0]; 2025-04-04T16:58:00.232048Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:176:8904:0]; 2025-04-04T16:58:00.953150Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T16:58:00.953931Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-04-04T16:58:01.056613Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-04-04T16:58:01.064701Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2213112;raw_bytes=2475629;count=1;records=26059} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100101848;raw_bytes=103700153;count=42;records=1100341} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T16:58:01.444966Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-04T16:58:01.445048Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;fline=with_appended.cpp:65;portions=47,;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c; 2025-04-04T16:58:01.445719Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::f5da13ee-117511f0-b86e8b7e-d9d0445c; 2025-04-04T16:58:01.445799Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:1;path_id:1;size:7587944;portions_count:47;); 2025-04-04T16:58:01.445855Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T16:58:01.445940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:58:01.446019Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=8; 2025-04-04T16:58:01.446094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-04-04T16:58:01.446133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T16:58:01.446177Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T16:58:01.446212Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T16:58:01.446308Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.944000s; 2025-04-04T16:58:01.446353Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T16:58:01.446539Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-04-04T16:58:01.447376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T16:58:01.448010Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=39;external_task_id=f5da13ee-117511f0-b86e8b7e-d9d0445c;mem=11009198;cpu=0; >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> BasicUsage::ReadWithRestarts [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 |79.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |79.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancer >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit >> Viewer::TabletMerging >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> Viewer::JsonAutocompleteStartOfDatabaseName >> Viewer::JsonStorageListingV2 >> Viewer::PDiskMerging >> Viewer::JsonAutocompleteEmpty >> Viewer::PDiskMerging [GOOD] >> Viewer::LevenshteinDistance [GOOD] >> Viewer::QueryExecuteScript >> Viewer::Cluster10000Tablets >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::CreateTempTable >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> Yq_1::Basic_EmptyDict [GOOD] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |79.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-04-04T16:56:47.587309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503016248565128:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:47.587398Z node 
1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0404 16:56:47.867946474 188398 dns_resolver.cc:162] no server name supplied in dns URI E0404 16:56:47.869488330 188398 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-04-04T16:56:48.589562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:48.878425Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:5608: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:5608 } ] 2025-04-04T16:56:48.962649Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:5608: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:5608 2025-04-04T16:56:49.593400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:50.293640Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:5608: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:5608 } ] 2025-04-04T16:56:50.594710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:56:51.410178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503033428434688:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.410310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:56:51.464003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503033428434688:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.589759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503033428434688:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:56:51.596130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a1f/r3tmp/tmppKEFMu/pdisk_1.dat 2025-04-04T16:56:51.832972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489503033428434688:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 5608, node 1 2025-04-04T16:56:51.924058Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:51.924098Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 TClient is connected to server localhost:3023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:52.296741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:52.430227Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-04-04T16:56:52.430286Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:52.430298Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-04-04T16:56:52.430666Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-04-04T16:56:52.430746Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:52.430753Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-04-04T16:56:52.432250Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-04-04T16:56:52.432275Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:52.432281Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-04-04T16:56:52.434546Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-04-04T16:56:52.434574Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:52.434589Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-04-04T16:56:52.434705Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-04-04T16:56:52.434722Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:52.434734Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-04-04T16:56:52.471499Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:56:52.471669Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2025-04-04T16:56:52.471695Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:52.471703Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-04-04T16:56:52.472842Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-04-04T16:56:52.472874Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:52.473219Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-04-04T16:56:52.474397Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-04-04T16:56:52.474418Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:52.474428Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-04-04T16:56:52.479119Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-04-04T16:56:52.479148Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:52.479154Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-04-04T16:56:52.487875Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-04-04T16:56:52.487936Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:52.487947Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-04-04T16:56:52.489139Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-04-04T16:56:52.489154Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:52.489160Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-04-04T16:56:52.493648Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-04-04T16:56:52.493675Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-04-04T16:56:52.493724Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-04-04T16:56:52.493765Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:52.493773Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-04-04T16:56:52.494939Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-04-04T16:56:52.494950Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:52.494954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-04-04T16:56:52.526043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503037723402574:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:52.526052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503037723402586:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:52.526336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:52.530384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503037723402591:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:52.530442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503037723402594:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:52.530500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, Datab ... ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268607Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268655Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268717Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268759Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268825Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268874Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268924Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.268986Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269027Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269095Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269131Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269192Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269235Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269304Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269381Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269449Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269492Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269553Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269592Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269649Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269699Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269759Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269795Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269856Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269898Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.269963Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270005Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270064Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270112Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270163Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270210Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270294Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270326Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270393Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270428Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270492Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270529Z 
node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270590Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270638Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270712Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270750Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270821Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270858Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270925Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.270960Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271037Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271074Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271137Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271190Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271250Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271294Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271359Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271406Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271466Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271517Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271574Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271624Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271679Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271725Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271786Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271830Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271892Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.271943Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272006Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272056Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272113Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272163Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272212Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272267Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272351Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272408Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272469Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272519Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272579Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272626Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272683Z node 
7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272740Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272790Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272856Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272894Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.272957Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273006Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273072Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273109Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273186Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273222Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273287Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273334Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273389Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273443Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273489Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273546Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273598Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273652Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273696Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273767Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273807Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273873Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273919Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.273983Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274018Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274099Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274161Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274218Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274291Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274348Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274399Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274459Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274507Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274565Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274624Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274696Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274750Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274792Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274848Z node 7 
:FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274903Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.274955Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275039Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275147Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275188Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275261Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275306Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275369Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275418Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275472Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275525Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275572Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275624Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275797Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-04-04T16:58:09.275943Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestRestartTablets >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestFollowersCrossDC_Easy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:129:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:132:2155] sender: [1:134:2058] recipient: [1:108:2140] 2025-04-04T16:55:22.370285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:22.370371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.370411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:22.370444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2025-04-04T16:55:22.370486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:22.370513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:22.370578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:22.370658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:22.370963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:22.449867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:22.449932Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:173:2058] recipient: [1:15:2062] 2025-04-04T16:55:22.465488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:22.465755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:22.465927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:22.471674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:22.471916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:22.472434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.472704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:22.475717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.477025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:22.477083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.477162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:22.477202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:22.477230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:22.477432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2208] Leader for TabletID 72057594037968897 is [1:213:2212] sender: [1:214:2058] recipient: [1:207:2208] 2025-04-04T16:55:22.487208Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:234:2058] recipient: [1:15:2062] 2025-04-04T16:55:22.612633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:22.612880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.613119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:22.613395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:22.613462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.616187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.616389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:22.616580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.616647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:22.616707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:22.616743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:22.618762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.618819Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:22.618857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:22.620841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.620918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.620963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.621036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.624937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:22.628861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:22.629078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:132:2155] sender: [1:249:2058] 
recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:22.630008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.630146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:22.630200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.630519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:22.630595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.630792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:22.630869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:22.633009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:22.633063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:22.633282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.633325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2203], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:55:22.633750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.633837Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:22.633946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:22.634003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.634050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:22.634089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.634128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:22.634172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.634213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
432855Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:58:11.433160Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-04T16:58:11.433302Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-04T16:58:11.433343Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-04T16:58:11.433385Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-04T16:58:11.433419Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-04T16:58:11.433461Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-04T16:58:11.435500Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.435613Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.435655Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-04T16:58:11.435936Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.436040Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.436071Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-04T16:58:11.436109Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-04T16:58:11.436154Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T16:58:11.436256Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-04T16:58:11.438289Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.438520Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T16:58:11.438596Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:58:11.438940Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T16:58:11.439091Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-04T16:58:11.439134Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-04T16:58:11.439183Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-04T16:58:11.439222Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-04T16:58:11.439265Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-04T16:58:11.439307Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-04T16:58:11.445202Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-04T16:58:11.445281Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-04T16:58:11.445435Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:58:11.445484Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-04T16:58:11.445508Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-04T16:58:11.445540Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T16:58:11.445569Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-04T16:58:11.445596Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-04T16:58:11.445640Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-04T16:58:11.448564Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.448706Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.448747Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.451970Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.452320Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T16:58:11.459785Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 344 RawX2: 489626274072 } TabletId: 72075186233409546 State: 4 2025-04-04T16:58:11.459886Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-04T16:58:11.462163Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T16:58:11.462775Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-04-04T16:58:11.462976Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T16:58:11.463280Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-04T16:58:11.463596Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:58:11.463641Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-04T16:58:11.463711Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T16:58:11.463759Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T16:58:11.463800Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2025-04-04T16:58:11.470211Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T16:58:11.470321Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-04T16:58:11.471079Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-04T16:58:11.471387Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-04T16:58:11.471439Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-04T16:58:11.472509Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-04T16:58:11.472614Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-04T16:58:11.472654Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:635:2561] 2025-04-04T16:58:11.479081Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 346 RawX2: 489626274073 } TabletId: 72075186233409547 State: 4 2025-04-04T16:58:11.479169Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-04T16:58:11.481171Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T16:58:11.481696Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-04-04T16:58:11.481895Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:11.482166Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T16:58:11.482670Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T16:58:11.482719Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T16:58:11.482792Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:58:11.488777Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T16:58:11.488860Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-04T16:58:11.489396Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-04T16:58:11.489836Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-04T16:58:11.489909Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> KqpPg::TableSelect+useSink [GOOD] >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> KqpPg::TableSelect-useSink >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::FuzzySearcherPriority [GOOD] >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 
2025-04-04T16:55:23.309510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:23.309576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:23.309664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:23.309690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:23.309726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:23.309763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:23.309818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:23.309876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:23.310112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:23.399053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:23.399123Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:23.421563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:23.421978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:23.422167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:23.429799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:23.429992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:23.430649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.431317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:23.434899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.436250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.436323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.436533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:23.436593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.436654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:23.436777Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:23.451267Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:23.588276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:23.588497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.588695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:23.588897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:23.588943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.590944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.591116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:23.591300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.591361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:23.591402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:23.591452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:23.593112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.593165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:23.593210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:23.595175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.595244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.595296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.595360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-04-04T16:55:23.604625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:23.609013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:23.609246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:23.610360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.610524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:23.610582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.610899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:23.610958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.611137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:23.611239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:23.613406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.613457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.613661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.613705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:23.614196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.614268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:23.614395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.614454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.614498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.614526Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.614562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:23.614595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.614652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... kgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:58:15.342679Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:58:15.343041Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 408us result status StatusSuccess 2025-04-04T16:58:15.344097Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 
SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:58:15.355424Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1188:2970] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T16:58:15.355555Z node 54 :CHANGE_EXCHANGE DEBUG: 
[AsyncIndexChangeSenderMain][72075186233409551:2][54:1157:2970] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T16:58:15.355745Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1188:2970] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743785895296783 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743785895296783 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743785895296783 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:58:15.358994Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1188:2970] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-04T16:58:15.359110Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1157:2970] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |79.6%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight [GOOD] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight >> KqpUserConstraint::KqpReadNull-UploadNull >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |79.6%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] |79.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveBalancerWithFollowers |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> TFlatTest::SplitEmptyTwice [GOOD] >> 
TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution >> THiveTest::TestStopTenant [GOOD] >> TScaleRecommenderTest::BasicTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-04-04T16:55:44.484198Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:44.595002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:44.613960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:44.614178Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:44.621463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:44.621700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:44.621955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:44.622056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:44.622138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:44.622221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:44.622317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:44.622438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:44.622510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:44.622587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.622671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:44.622761Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:44.651348Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:44.652309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:44.652411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:44.652648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:44.652866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:44.652943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:44.652989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:44.653109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:44.653174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:44.653215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:44.653245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:44.653459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:44.653567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:44.653627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:44.653675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:44.653772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:44.653832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:44.653883Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:44.653912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:44.653979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:44.654024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:44.654083Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:44.654140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:44.654179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:44.654211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:44.654670Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=72; 2025-04-04T16:55:44.654797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=60; 2025-04-04T16:55:44.654889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-04T16:55:44.654968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-04T16:55:44.655172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:44.655238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:44.655274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:44.655464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:44.655513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.655547Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.655732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:44.655782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:44.655825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:44.656064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:44.656115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:44.656152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:44.656271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:44.656334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:44.656399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;
column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;
column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;
index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-04T16:58:19.222455Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5923:7915];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=3; 2025-04-04T16:58:19.224304Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5923:7915];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> Viewer::QueryExecuteScript [GOOD] >> Viewer::Plan2SvgOK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 11507, MsgBus: 11733 2025-04-04T16:56:55.295876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503049873733568:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:55.295933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bf6/r3tmp/tmpqQ90zc/pdisk_1.dat 2025-04-04T16:56:55.928675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:55.931064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:55.931183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:55.935772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11507, node 1 2025-04-04T16:56:56.206420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:56.206448Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:56.206456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:56.206595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11733 TClient is connected to server localhost:11733 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:57.289860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:57.323171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:00.256235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503071348570500:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:00.256375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:00.256377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503071348570512:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:00.261151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:57:00.277436Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T16:57:00.277755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503071348570514:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:57:00.296282Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503049873733568:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:00.296433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:00.360994Z node 1 :TX_PROXY ERROR: Actor# [1:7489503071348570567:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:00.438104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28771, MsgBus: 31430 2025-04-04T16:57:01.896134Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503072446664475:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:01.896188Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bf6/r3tmp/tmp1SFsID/pdisk_1.dat 2025-04-04T16:57:02.077385Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:02.102937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:02.103025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:02.107988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28771, node 2 2025-04-04T16:57:02.258400Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:02.258428Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:02.258444Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:02.258600Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31430 TClient is connected to server localhost:31430 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T16:57:03.037244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:57:03.044197Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:57:06.554812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503093921501596:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:06.554931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:06.555447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503093921501631:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:06.560906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:57:06.580732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503093921501633:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:57:06.653423Z node 2 :TX_PROXY ERROR: Actor# [2:7489503093921501684:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:06.678887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T16:57:06.902849Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503072446664475:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:06.902965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14244, MsgBus: 25568 2025-04-04T16:57:09.465839Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503108774122773:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bf6/r3tmp/tmpPwnbFI/pdisk_1.dat 2025-04-04T16:57:09.502451Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:57:09.644629Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:09.687806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:09.687907Z node 3 ... 68897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:05.542729Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:05.544640Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29989, node 10 2025-04-04T16:58:05.627195Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:05.627231Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:05.627244Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:05.627458Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11510 TClient is connected to server localhost:11510 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:06.664289Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:10.286397Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489503347694316025:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:10.286513Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:10.821131Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503369169153080:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:10.821311Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:10.821518Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503369169153100:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:10.826237Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:58:10.841742Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489503369169153102:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:58:10.941736Z node 10 :TX_PROXY ERROR: Actor# [10:7489503369169153153:2342] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:10.974518Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T16:58:11.107489Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T16:58:11.271689Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7489503373464120688:2362], status: BAD_REQUEST, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiWriteTable!
<main>:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-04-04T16:58:11.272103Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ODU0ZTAxMTUtNDViMTBiYTMtZmVkZjg3M2YtODcyYTUwMmM=, ActorId: [10:7489503373464120686:2361], ActorState: ExecuteState, TraceId: 01jr0vt5cg6vzzsmwe117daavj, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 7935, MsgBus: 29333 2025-04-04T16:58:12.901595Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489503380062465703:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:12.901683Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bf6/r3tmp/tmpN5VKeF/pdisk_1.dat 2025-04-04T16:58:13.087029Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:13.132458Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:13.132602Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:13.135000Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7935, node 11 2025-04-04T16:58:13.250656Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:13.250692Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:13.250720Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:13.250944Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29333 TClient is connected to server localhost:29333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:14.103108Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:58:14.113498Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:17.901687Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489503380062465703:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:17.901828Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:18.360903Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503405832270114:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:18.361036Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:18.361478Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503405832270149:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:18.369199Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:58:18.400609Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7489503405832270152:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:58:18.467810Z node 11 :TX_PROXY ERROR: Actor# [11:7489503405832270203:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:18.507948Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:58:18.656186Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T16:58:18.771822Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7489503405832270445:2362], status: BAD_REQUEST, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiWriteTable!
<main>:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-04-04T16:58:18.772829Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=N2JkZTZjYTItNzNmM2U5YmYtNWZjNTE0NjAtODgwOGEyODE=, ActorId: [11:7489503405832270443:2361], ActorState: ExecuteState, TraceId: 01jr0vtcqq5cne2hvty4dxs7ag, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |79.6%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> TScaleRecommenderTest::BasicTest [GOOD] >> TStorageBalanceTest::TestScenario1 >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2025-04-04T16:55:45.327155Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:45.449629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:45.478227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:45.478573Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:45.487746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:45.488007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:45.488282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:45.488446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:45.488630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:45.488756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:45.488890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:45.489040Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:45.489164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:45.489298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.489415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:45.489560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:45.526451Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:45.526739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:45.526815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:45.527025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:45.527253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:45.527349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:45.527403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:45.527508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:45.527575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:45.527627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:45.527662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:45.527898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:45.527977Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:45.528039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:45.528080Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:45.528185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:45.528274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:45.528333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:45.528366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:45.528467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:45.528513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:45.528548Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:45.528604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:45.528656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:45.528718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:45.529189Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-04T16:55:45.529314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=56; 2025-04-04T16:55:45.529429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=65; 2025-04-04T16:55:45.529546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T16:55:45.529752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:45.529818Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:45.529866Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:45.530092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:45.530152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.530195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.530389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:45.530441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:45.530483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:45.530723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:45.530779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:45.530819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:45.530966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:45.531012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:45.531087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;c
olumn_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;co
lumn_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;in
dex_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-04T16:58:20.874804Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5923:7915];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=3; 2025-04-04T16:58:20.876339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5923:7915];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> TxUsage::WriteToTopic_Demo_23_RestartNo |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |79.6%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonStorageListingV1 >> Viewer::SelectStringWithBase64Encoding >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |79.6%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteEmptyColumns >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> KqpPg::ExplainColumnsReorder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2025-04-04T16:58:22.888586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:22.889120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:22.889198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ef/r3tmp/tmp43wM3J/pdisk_1.dat 2025-04-04T16:58:23.331558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:58:23.375882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:23.424794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:23.424931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:23.436649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:23.542159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:58:24.042719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:24.042926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:876:2717], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:24.043459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:24.049228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:58:24.199163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2720], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:58:24.278037Z node 1 :TX_PROXY ERROR: Actor# [1:961:2771] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:24.659448Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vthx4052091eygrz60z9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZhN2I2ZTItOTBjZTA0ZTYtNTY1OWJmODgtNTQ4YjAyNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:43.553621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:43.553710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.553771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:43.553845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:43.582483Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:43.583887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:43.583969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:43.584208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.584453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:43.584539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:43.584600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:43.584702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:43.584794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:43.584840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:43.584878Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:43.585088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.585162Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:43.585207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:43.585240Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:43.585336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:43.585399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:43.585484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:43.585522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:43.585598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:43.585667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:43.585711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:43.585775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:43.585836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:43.585889Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:43.586354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=102; 2025-04-04T16:55:43.586478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-04T16:55:43.586605Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=74; 2025-04-04T16:55:43.586740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=57; 2025-04-04T16:55:43.586946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:43.587014Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:43.587052Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:43.587193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:43.587251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.587277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.587389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:43.587420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:43.587448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:43.587587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:43.587619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:43.587641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:43.587720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:43.587752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:43.587804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
lumn_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;col
umn_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;colu
mn_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:210
9896;index_size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-04T16:58:24.653954Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:6064:8056];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=3; 2025-04-04T16:58:24.655814Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6064:8056];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |79.7%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 30314, MsgBus: 27584 2025-04-04T16:56:53.003260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503039048941265:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:53.003428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c36/r3tmp/tmpmXmsDU/pdisk_1.dat 2025-04-04T16:56:53.458917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:53.475722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:53.475830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:53.479541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30314, node 1 2025-04-04T16:56:53.578832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:53.578864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:53.578879Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:53.579029Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27584 TClient is connected to server localhost:27584 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:54.325170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:56.592380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503051933843608:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:56.592383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503051933843600:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:56.592502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:56.600874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:56:56.614744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503051933843614:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:56:56.679852Z node 1 :TX_PROXY ERROR: Actor# [1:7489503051933843665:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62049, MsgBus: 8044 2025-04-04T16:56:57.275282Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503058122488363:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:57.275539Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c36/r3tmp/tmpJwiCLq/pdisk_1.dat 2025-04-04T16:56:57.511843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:57.511957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:57.527018Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:57.535283Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62049, node 2 2025-04-04T16:56:57.683188Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:57.683221Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:57.683230Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:57.683378Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8044 TClient is connected to server localhost:8044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:58.371425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:01.367397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503075302358180:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.367504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.382899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503075302358192:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.388068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:57:01.409932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503075302358194:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:57:01.500891Z node 2 :TX_PROXY ERROR: Actor# [2:7489503075302358245:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6691, MsgBus: 18633 2025-04-04T16:57:02.444115Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503080514565124:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:02.510764Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c36/r3tmp/tmpFqvLj9/pdisk_1.dat 2025-04-04T16:57:02.900702Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:02.914748Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:02.914855Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:02.923604Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6691, node 3 2025-04-04T16:57:03.123016Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:03.123042Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:03.123054Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:03.123200Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18633 TClient is connected to server localhost:18633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } D ... 3359070086735:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:07.754898Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T16:58:07.771933Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489503359070086737:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T16:58:07.844476Z node 10 :TX_PROXY ERROR: Actor# [10:7489503359070086789:2451] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16202, MsgBus: 13101 2025-04-04T16:58:10.626216Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489503369425389240:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:10.626318Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c36/r3tmp/tmpaAq3lC/pdisk_1.dat 2025-04-04T16:58:10.885467Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:10.908414Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:10.908552Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:10.912610Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16202, node 11 2025-04-04T16:58:11.071247Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:11.071291Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:11.071317Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:11.071561Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13101 TClient is connected to server localhost:13101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:12.204017Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:58:15.626991Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489503369425389240:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:15.627114Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:16.446455Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503395195193684:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:16.446680Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:16.495376Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:58:16.611088Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:16.716650Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503395195193863:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:16.716891Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:16.717326Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503395195193869:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:16.724197Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T16:58:16.744048Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7489503395195193871:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T16:58:16.821525Z node 11 :TX_PROXY ERROR: Actor# [11:7489503395195193923:2451] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:24.605234Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:24.605775Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:24.605920Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c36/r3tmp/tmpWHkH4F/pdisk_1.dat 2025-04-04T16:58:25.052555Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:58:25.108090Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:25.173862Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:25.174142Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:25.191751Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:25.317516Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:644:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:25.317688Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:654:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:25.317829Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:25.328901Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T16:58:25.471439Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:658:2560], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T16:58:25.532583Z node 12 :TX_PROXY ERROR: Actor# [12:729:2600] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "cb110533-1d337225-fab14106-58443b23" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"a8535e6d-d15f2c97-51ea41dc-5726d102\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2025-04-04T16:56:36.600584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:56:36.600648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:36.685081Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.220915Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.237619Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.252593Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.266224Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.267421Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.282007Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.326864Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.327763Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.328379Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.328753Z node 1 
:NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.330036Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.345122Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.345542Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.346017Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-04T16:56:37.381064Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-04T16:56:38.362933Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:38.385659Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:38.918511Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:38.918929Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:39.367823Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:39.368335Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:39.368829Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:39.449152Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-04T16:56:39.823458Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-04T16:56:39.823922Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-04T16:56:40.177462Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-04-04T16:56:40.178444Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.178989Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-04-04T16:56:40.179358Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.179960Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-04-04T16:56:40.194083Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.195553Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.196041Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.210082Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.210580Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.211405Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.225595Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-04-04T16:56:40.580904Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.114687Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown 
node 2025-04-04T16:56:41.536523Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.615243Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.631292Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.645919Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.660418Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.660972Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.662354Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.662855Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.664152Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.664595Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.678570Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.692599Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.694606Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:41.695209Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:42.334520Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:42.637194Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:42.637827Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-04-04T16:56:42.654886Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.056748Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.057392Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.738848Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.739633Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.740236Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.740805Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.741870Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.804616Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.843673Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.844668Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.879668Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.894847Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: 
WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.930625Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.973786Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.975456Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.976032Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.995587Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-04T16:56:43.996309Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-04-04T16:56:44.046177Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-04-04T16:56:44.791115Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:44.791619Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-04-04T16:56:44.822373Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:44.880016Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-04-04T16:56:44.895025Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-04-04T16:56:44.912162Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.012882Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.014059Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-04-04T16:56:45.014870Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.015800Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-04-04T16:56:45.016453Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.033042Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.033664Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.097831Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.137803Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.179797Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.243513Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-04-04T16:56:45.244229Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.244904Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.291148Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-04-04T16:56:45.292049Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-04-04T16:56:45.382382Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-04-04T16:56:45.408297Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRON ... 
2.035361Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node
2025-04-04T16:58:12.052551Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node
2025-04-04T16:58:12.117987Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs
2025-04-04T16:58:12.237469Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node
2025-04-04T16:58:12.896540Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs
2025-04-04T16:58:12.899751Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs
2025-04-04T16:58:12.923132Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:13.241553Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node
2025-04-04T16:58:13.268781Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node
2025-04-04T16:58:13.308335Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node
2025-04-04T16:58:13.414455Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-04-04T16:58:13.417964Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node
2025-04-04T16:58:14.202492Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-04-04T16:58:14.753051Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node
2025-04-04T16:58:14.874097Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs
2025-04-04T16:58:14.913360Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:14.916578Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:14.985666Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:14.991273Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:15.049720Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:15.052551Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:15.084766Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs
2025-04-04T16:58:15.813283Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs
2025-04-04T16:58:15.900419Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs
2025-04-04T16:58:15.906106Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs
2025-04-04T16:58:15.929859Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired
2025-04-04T16:58:15.934048Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs
2025-04-04T16:58:15.938268Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired
2025-04-04T16:58:15.980657Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired
2025-04-04T16:58:16.002999Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs
2025-04-04T16:58:16.005955Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs
2025-04-04T16:58:16.075705Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Node has expired
2025-04-04T16:58:16.694174Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-04-04T16:58:16.735870Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-04-04T16:58:16.787146Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-04-04T16:58:16.809976Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-04-04T16:58:16.861228Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Node has expired
2025-04-04T16:58:17.418393Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs
2025-04-04T16:58:17.446584Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs
2025-04-04T16:58:17.448945Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:17.451512Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs
2025-04-04T16:58:17.455379Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:18.287843Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:18.313714Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node
2025-04-04T16:58:18.363921Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-04-04T16:58:18.365989Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-04-04T16:58:18.394802Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-04-04T16:58:18.398181Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-04-04T16:58:18.401972Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node
2025-04-04T16:58:19.147758Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node
2025-04-04T16:58:19.222968Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-04-04T16:58:19.225957Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-04-04T16:58:19.362308Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:19.364622Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:19.367060Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs
2025-04-04T16:58:19.369406Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:19.371083Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:19.834982Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node
2025-04-04T16:58:19.969014Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:19.972838Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:19.977602Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.120906Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.123804Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.164798Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.167677Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.175800Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.232937Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.235310Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.237553Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-04-04T16:58:20.284228Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node
2025-04-04T16:58:21.639699Z node 1 :NODE_BROKER ERROR: Cannot register node host3:2: ERROR_TEMP: No free node IDs
2025-04-04T16:58:21.644208Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:21.647635Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:21.651120Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs
2025-04-04T16:58:21.656827Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:21.659603Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:21.724100Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:21.731855Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs
2025-04-04T16:58:21.734719Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs
2025-04-04T16:58:21.737153Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:22.276127Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-04-04T16:58:22.338100Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-04-04T16:58:22.794848Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-04-04T16:58:22.890263Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-04-04T16:58:22.918149Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:22.922833Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:23.486451Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-04-04T16:58:23.489717Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-04-04T16:58:23.492595Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-04-04T16:58:23.495549Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node
2025-04-04T16:58:23.645577Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node
2025-04-04T16:58:23.653597Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs
2025-04-04T16:58:24.253835Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs
2025-04-04T16:58:24.353934Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:24.357873Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs
2025-04-04T16:58:24.387229Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired
2025-04-04T16:58:24.982960Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:25.067649Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:25.091804Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
2025-04-04T16:58:25.519542Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node
2025-04-04T16:58:25.553308Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node
>> DstCreator::GlobalConsistency
>> Viewer::Plan2SvgOK [GOOD]
>> Viewer::Plan2SvgBad
|79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
>> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldErase
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD]
>> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds
>> THiveTest::TestLockTabletExecutionBadUnlock
>> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless
|79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
|79.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
|79.7%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD]
Test command err:
2025-04-04T16:53:33.058020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502180378941920:2196];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:33.058472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e91/r3tmp/tmpQcuFXo/pdisk_1.dat
2025-04-04T16:53:34.104930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:53:34.204112Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:53:34.204427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:53:34.204501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:53:34.212546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:27546
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-04T16:53:34.944292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T16:53:36.192871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T16:53:36.624322Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.024s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000
2025-04-04T16:53:36.636465Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000
2025-04-04T16:53:36.661481Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000
2025-04-04T16:53:36.668572Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000
TClient::Ls request: /dc-1/Dir/TableOld
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743785616489 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED)
TClient::Ls request: /dc-1/Dir/TableOld
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743785616489 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED)
2025-04-04T16:53:38.046953Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502180378941920:2196];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:53:38.047407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-04-04T16:53:49.258841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T16:53:49.258861Z node 1 :IMPORT WARN: Table profiles were not loaded
waiting...
2025-04-04T16:54:00.791395Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.65, eph 1} end=0, 2 blobs 201r (max 201), put Spent{time=0.060s,wait=0.041s,interrupts=1} Part{ 1 pk, lobs 0 +0, (58078 0 0)b }, ecr=1.000
2025-04-04T16:54:00.802904Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.74, eph 1} end=0, 2 blobs 765r (max 765), put Spent{time=0.016s,wait=0.007s,interrupts=1} Part{ 1 pk, lobs 0 +0, (49939 0 0)b }, ecr=1.000
2025-04-04T16:54:01.280032Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.116, eph 2} end=0, 2 blobs 457r (max 458), put Spent{time=0.025s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (131770 0 0)b }, ecr=1.000
2025-04-04T16:54:01.304315Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.517, eph 1} end=0, 2 blobs 3r (max 3), put Spent{time=0.017s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (125 0 0)b }, ecr=1.000
2025-04-04T16:54:01.402939Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.128, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.070s,wait=0.040s,interrupts=1} Part{ 1 pk, lobs 0 +0, (172 0 0)b }, ecr=1.000
2025-04-04T16:54:01.436945Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.130, eph 1} end=0, 2 blobs 504r (max 504), put Spent{time=0.074s,wait=0.033s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31963 0 0)b }, ecr=1.000
2025-04-04T16:54:01.481586Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.129, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.148s,wait=0.057s,interrupts=1} Part{ 1 pk, lobs 0 +0, (119 0 0)b }, ecr=1.000
2025-04-04T16:54:01.485649Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.131, eph 1} end=0, 2 blobs 1509r (max 1509), put Spent{time=0.061s,wait=0.016s,interrupts=1} Part{ 1 pk, lobs 0 +0, (102982 0 0)b }, ecr=1.000
2025-04-04T16:54:01.527767Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.134, eph 2} end=0, 2 blobs 1527r (max 1527), put Spent{time=0.123s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (99609 0 0)b }, ecr=1.000
2025-04-04T16:54:01.664186Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.547, eph 1} end=0, 2 blobs 10001r (max 10001), put Spent{time=0.259s,wait=0.008s,interrupts=1} Part{ 1 pk, lobs 0 +0, (551673 0 0)b }, ecr=1.000
2025-04-04T16:54:02.137874Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.192, eph 3} end=0, 2 blobs 713r (max 714), put Spent{time=0.102s,wait=0.009s,interrupts=1} Part{ 1 pk, lobs 0 +0, (205442 0 0)b }, ecr=1.000
2025-04-04T16:54:02.198937Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.202, eph 3} end=0, 2 blobs 2280r (max 2283), put Spent{time=0.060s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (148674 0 0)b }, ecr=1.000
2025-04-04T16:54:02.484196Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1029, eph 2} end=0, 2 blobs 3r (max 5), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (125 0 0)b }, ecr=1.000
2025-04-04T16:54:02.486888Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.258, eph 4} end=0, 2 blobs 967r (max 968), put Spent{time=0.036s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (278560 0 0)b }, ecr=1.000
2025-04-04T16:54:02.584594Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.275, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.053s,wait=0.030s,interrupts=1} Part{ 1 pk, lobs 0 +0, (172 0 0)b }, ecr=1.000
2025-04-04T16:54:02.634698Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.276, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.105s,wait=0.033s,interrupts=1} Part{ 1 pk, lobs 0 +0, (119 0 0)b }, ecr=1.000
2025-04-04T16:54:02.654789Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.277, eph 2} end=0, 2 blobs 1011r (max 1011), put Spent{time=0.123s,wait=0.020s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63984 0 0)b }, ecr=1.000
2025-04-04T16:54:02.656198Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.274, eph 2} end=0, 2 blobs 3030r (max 3030), put Spent{time=0.127s,wait=0.021s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206690 0 0)b }, ecr=1.000
2025-04-04T16:54:02.750506Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.278, eph 4} end=0, 2 blobs 3033r (max 3036), put Spent{time=0.196s,wait=0.007s,interrupts=1} Part{ 1 pk, lobs 0 +0, (197759 0 0)b }, ecr=1.000
2025-04-04T16:54:02.896864Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1061, eph 2} end=0, 2 blobs 10001r (max 10508), put Spent{time=0.342s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (551673 0 0)b }, ecr=1.000
2025-04-04T16:54:03.178610Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.338, eph 5} end=0, 2 blobs 1222r (max 1223), put Spent{time=0.156s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (351945 0 0)b }, ecr=1.000
2025-04-04T16:54:03.258874Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.351, eph 5} end=0, 2 blobs 3819r (max 3822), put Spent{time=0.120s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (249009 0 0)b }, ecr=1.000
2025-04-04T16:54:03.499322Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1540, eph 3} end=0, 2 blobs 3r (max 5), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (125 0 0)b }, ecr=1.000
2025-04-04T16:54:03.596755Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.401, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.063s,wait=0.056s,interrupts=1} Part{ 1 pk, lobs 0 +0, (172 0 0)b }, ecr=1.000
2025-04-04T16:54:03.598029Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.402, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.064s,wait=0.057s,interrupts=1} Part{ 1 pk, lobs 0 +0, (11 ...
ode 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710690:0, at schemeshard: 72057594046644480
2025-04-04T16:58:16.507260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710690:0, at schemeshard: 72057594046644480
2025-04-04T16:58:16.507332Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710690:0 ProgressState, at schemeshard: 72057594046644480
2025-04-04T16:58:16.507892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 9
2025-04-04T16:58:16.508124Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710690:0 progress is 1/1
2025-04-04T16:58:16.508165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710690 ready parts: 1/1
2025-04-04T16:58:16.508198Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710690:0 progress is 1/1
2025-04-04T16:58:16.508234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710690 ready parts: 1/1
2025-04-04T16:58:16.508286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710690, ready parts: 1/1, is published: true
2025-04-04T16:58:16.508370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7489503395518585868:2418] message: TxId: 281474976710690
2025-04-04T16:58:16.508398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710690 ready parts: 1/1
2025-04-04T16:58:16.508429Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710690:0
2025-04-04T16:58:16.508444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710690:0
2025-04-04T16:58:16.508661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8
2025-04-04T16:58:16.509761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480
2025-04-04T16:58:16.523334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480
2025-04-04T16:58:16.523704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7
2025-04-04T16:58:16.526413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1
2025-04-04T16:58:16.526472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888
2025-04-04T16:58:16.577377Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
2025-04-04T16:58:16.583919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489503386928650523 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4
2025-04-04T16:58:16.584036Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480
2025-04-04T16:58:16.584789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480
2025-04-04T16:58:16.586317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489503391223618133 RawX2: 4503608217307446 } TabletId: 72075186224037890 State: 4
2025-04-04T16:58:16.590069Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found
2025-04-04T16:58:16.593747Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480
2025-04-04T16:58:16.594145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489503395518585624 RawX2: 4503608217307466 } TabletId: 72075186224037894 State: 4
2025-04-04T16:58:16.594223Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480
2025-04-04T16:58:16.594393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489503395518585620 RawX2: 4503608217307465 } TabletId: 72075186224037895 State: 4
2025-04-04T16:58:16.594443Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480
2025-04-04T16:58:16.594579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489503395518585630 RawX2: 4503608217307467 } TabletId: 72075186224037893 State: 4
2025-04-04T16:58:16.594668Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480
2025-04-04T16:58:16.594918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480
2025-04-04T16:58:16.595178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6
2025-04-04T16:58:16.595384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489503391223618142 RawX2: 4503608217307447 } TabletId: 72075186224037891 State: 4
2025-04-04T16:58:16.595430Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480
2025-04-04T16:58:16.596468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480
2025-04-04T16:58:16.597393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480
2025-04-04T16:58:16.597532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480
2025-04-04T16:58:16.597620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480
2025-04-04T16:58:16.597705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2
2025-04-04T16:58:16.597731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889
2025-04-04T16:58:16.597812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480
2025-04-04T16:58:16.602127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480
2025-04-04T16:58:16.602459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5
2025-04-04T16:58:16.602780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480
2025-04-04T16:58:16.602968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4
2025-04-04T16:58:16.603212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480
2025-04-04T16:58:16.603432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3
2025-04-04T16:58:16.603621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480
2025-04-04T16:58:16.603818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2
2025-04-04T16:58:16.603976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480
2025-04-04T16:58:16.604155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1
2025-04-04T16:58:16.604391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480
2025-04-04T16:58:16.604430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480
2025-04-04T16:58:16.604500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1
2025-04-04T16:58:16.618371Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found
2025-04-04T16:58:16.618456Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found
2025-04-04T16:58:16.618479Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found
2025-04-04T16:58:16.618505Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found
2025-04-04T16:58:16.618637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3
2025-04-04T16:58:16.618676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890
2025-04-04T16:58:16.618766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7
2025-04-04T16:58:16.618782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894
2025-04-04T16:58:16.618826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8
2025-04-04T16:58:16.618837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895
2025-04-04T16:58:16.618866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6
2025-04-04T16:58:16.618883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893
2025-04-04T16:58:16.618913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4
2025-04-04T16:58:16.618957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891
2025-04-04T16:58:16.619040Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480
2025-04-04T16:58:16.634021Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found
>> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsCheckLimits
|79.7%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log}
|79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
|79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
|79.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
>> KqpPg::InsertFromSelect_Serial-useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink
>> KqpPg::DropSequence [FAIL]
>> KqpPg::DeleteWithQueryService+useSink
>> THiveTest::TestHiveBalancerWithFollowers [GOOD]
>> THiveTest::TestHiveBalancerWithLimit
>> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors
|79.7%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight [GOOD]
>> BasicUsage::ReadSessionCorrectClose
>> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD]
>> Viewer::FloatPointJsonQuery
|79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut
|79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut
|79.8%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|79.8%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut
>> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD]
>> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname
>> KqpPg::TableArrayInsert-useSink [GOOD]
>> KqpPg::Returning+useSink
>> DstCreator::GlobalConsistency [GOOD]
>> DstCreator::KeyColumnNameMismatch
>> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD]
>> THiveTest::TestLockTabletExecutionLocalGone
>> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD]
>> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD]
>> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD]
|79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
|79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
|79.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
|79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD]
>> THiveTest::TestHiveBalancerWithLimit [GOOD]
>> THiveTest::TestHiveBalancerIgnoreTablet
>> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks
>> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD]
>> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower
>> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD]
>> THiveTest::TestLockTabletExecutionReconnect
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD]
>> Viewer::Plan2SvgBad [GOOD]
>> THiveTest::TestLockTabletExecutionLocalGone [GOOD]
>> THiveTest::TestLocalRegistrationInSharedHive
>> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD]
>> Viewer::JsonAutocompleteSimilarDatabaseNamePOST
>> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex
|79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk
|79.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk
|79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD]
Test command err:
2025-04-04T16:58:27.667709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:58:27.668238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:58:27.668303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001295/r3tmp/tmpdthb1i/pdisk_1.dat
2025-04-04T16:58:28.150965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T16:58:28.215960Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:58:28.267911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:58:28.268172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:58:28.280839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:58:28.376927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T16:58:28.425737Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569]
2025-04-04T16:58:28.426057Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:58:28.479261Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:58:28.479415Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-04T16:58:28.481493Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-04T16:58:28.481602Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-04T16:58:28.481667Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-04T16:58:28.482256Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-04T16:58:28.482453Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-04T16:58:28.482549Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1
2025-04-04T16:58:28.494718Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-04T16:58:28.525305Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-04T16:58:28.525572Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-04T16:58:28.525753Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579]
2025-04-04T16:58:28.525809Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:58:28.525848Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-04T16:58:28.525890Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:58:28.526488Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-04T16:58:28.526625Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-04T16:58:28.526803Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:58:28.526863Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:58:28.526919Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-04T16:58:28.527008Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:58:28.527114Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0]
2025-04-04T16:58:28.527707Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:58:28.528074Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-04T16:58:28.528190Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-04T16:58:28.530530Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:58:28.541772Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T16:58:28.541948Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-04T16:58:28.694775Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0]
2025-04-04T16:58:28.701492Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-04T16:58:28.701606Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:58:28.702519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:58:28.702570Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-04-04T16:58:28.702619Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-04-04T16:58:28.702940Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-04-04T16:58:28.703123Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-04T16:58:28.703825Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:58:28.703903Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-04-04T16:58:28.706378Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-04-04T16:58:28.706913Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:58:28.708846Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-04-04T16:58:28.708899Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:58:28.710119Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-04-04T16:58:28.710216Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:58:28.711763Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:58:28.712179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:58:28.712225Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:58:28.712285Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-04-04T16:58:28.712354Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms
2025-04-04T16:58:28.712409Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-04-04T16:58:28.712498Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:58:28.723703Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-04-04T16:58:28.723903Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-04-04T16:58:28.723965Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-04T16:58:28.734618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:58:28.734751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:58:28.734829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T16:58:28.740431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-04T16:58:28.747323Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:58:28.923139Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:58:28.926925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-04T16:58:29.010872Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T16:58:29.446041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vtpfw73bsgmfc5y7844h8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODlmMjQ1NTAtM2E0MGZlNzYtOWY1OWQ1NGEtNzI4ODBiNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T16:58:29.458113Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0]
2025-04-04T16:58:29.458422Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:58:29.477228Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T16 ... 86224037888
2025-04-04T16:58:34.446081Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:58:34.446125Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-04T16:58:34.446171Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:58:34.446255Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:661:2566], serverId# [2:670:2571], sessionId# [0:0:0]
2025-04-04T16:58:34.446687Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T16:58:34.446893Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-04T16:58:34.446967Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-04T16:58:34.448664Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:58:34.462848Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T16:58:34.462990Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-04T16:58:34.640667Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2592], serverId# [2:704:2594], sessionId# [0:0:0]
2025-04-04T16:58:34.641375Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-04-04T16:58:34.641432Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:58:34.642093Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:58:34.642148Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-04-04T16:58:34.642205Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-04-04T16:58:34.642490Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-04-04T16:58:34.642625Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-04-04T16:58:34.643384Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T16:58:34.643457Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-04-04T16:58:34.643933Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-04-04T16:58:34.644356Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-04-04T16:58:34.646155Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-04-04T16:58:34.646207Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:58:34.652759Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-04-04T16:58:34.652851Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:58:34.653910Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T16:58:34.653962Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T16:58:34.654007Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-04-04T16:58:34.654072Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms
2025-04-04T16:58:34.654127Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-04-04T16:58:34.654210Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T16:58:34.655296Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T16:58:34.657829Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-04-04T16:58:34.657914Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-04-04T16:58:34.658224Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-04-04T16:58:34.703082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:34.703272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:747:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:34.703881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:34.717999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:58:34.749084Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:34.953965Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:34.961412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:58:35.002096Z node 2 :TX_PROXY ERROR: Actor# [2:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:35.324670Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vtwa58fj99kg3p1wzzcyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmU4NjcyMjktMjA2YzAwY2EtNDZlMDcwYjctMWM4MjcxMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:58:35.325231Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:851:2687], serverId# [2:852:2688], sessionId# [0:0:0] 2025-04-04T16:58:35.325435Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:35.342867Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:35.343019Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:35.347461Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:859:2694], serverId# [2:860:2695], sessionId# [0:0:0] 2025-04-04T16:58:35.348497Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T16:58:35.362339Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T16:58:35.362440Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:35.362739Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T16:58:35.362779Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-04T16:58:35.363062Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:35.363126Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:35.363174Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:58:35.363244Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:35.363355Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:859:2694], serverId# [2:860:2695], sessionId# [0:0:0] 2025-04-04T16:58:35.364310Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:35.364661Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:35.364853Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:35.364903Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:35.364948Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T16:58:35.365175Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:35.365238Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-04T16:58:35.365843Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-04T16:58:35.366072Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T16:58:35.366189Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-04T16:58:35.372556Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-04T16:58:35.452138Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T16:58:35.452219Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-04T16:58:35.452744Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:35.452800Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:35.452846Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-04T16:58:35.453015Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:35.453097Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:35.453159Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |79.8%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: 2025-04-04T16:58:09.442755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503364656993988:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:09.442803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:09.887266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:09.887427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:09.893857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:09.920015Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8703, node 1 2025-04-04T16:58:10.011796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:10.011833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:10.011865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-04T16:58:10.012061Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:10.325471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:10.356094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:10.359337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:12.686815Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:12.686875Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:13.049861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503381836863875:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:13.050353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503381836863866:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:13.050471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:13.055097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:58:13.067551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503381836863894:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:58:13.129798Z node 1 :TX_PROXY ERROR: Actor# [1:7489503381836863945:2359] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:13.352530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:58:13.510810Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:13.510857Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:13.754697Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:13.754738Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:13.985699Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:13.985737Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:14.189028Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:14.189066Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:14.412017Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:14.412062Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:14.442706Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503364656993988:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:14.442782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:14.647420Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:14.647459Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:14.882691Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:14.882730Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:15.171742Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:15.171801Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:15.437188Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:15.437237Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:15.666373Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:15.666412Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:15.897848Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:15.897904Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:16.200327Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: 
test_ydb_token /Root 1 2025-04-04T16:58:16.200382Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:16.467118Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:16.467155Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:16.821352Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:16.821396Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:17.056774Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:17.056841Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:17.381109Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:17.381151Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:17.386867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-04T16:58:17.388829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-04T16:58:17.390863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-04T16:58:18.741496Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:18.741542Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:19.462971Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:19.463020Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:19.809244Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:19.809286Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:20.371911Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-04T16:58:20.374793Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743785900402, txId: 281474976710723] shutting down 2025-04-04T16:58:22.263223Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503423279831482:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:22.263293Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:22.423946Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:22.446400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:22.446504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:22.448402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29624, node 
2 2025-04-04T16:58:22.508294Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:22.508329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:22.508340Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:22.508470Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14620 2025-04-04T16:58:26.020046Z node 2 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-04-04T16:58:28.665409Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503449913134351:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:28.785876Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:58:28.908498Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:28.927543Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:28.927677Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:28.931189Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29104, node 3 2025-04-04T16:58:29.013084Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:29.013109Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:29.013126Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:29.013308Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28142 2025-04-04T16:58:33.315044Z node 3 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestGetStorageInfo >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |79.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink >> DstCreator::KeyColumnNameMismatch [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> KqpQueryService::ReturnAndCloseSameTime >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> KqpDocumentApi::RestrictWriteExplicitPrepare >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned 
[GOOD] >> THiveTest::TestExternalBootWhenLocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2025-04-04T16:58:03.098213Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:58:03.101964Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:03.102292Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:58:03.102933Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:58:03.104001Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:58:03.104059Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:58:03.104948Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2025-04-04T16:58:03.104991Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:58:03.105141Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:58:03.105557Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:58:03.120088Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:58:03.120141Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:58:03.122504Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:03.122753Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:03.122999Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:03.123157Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:03.123360Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:03.123599Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:03.123781Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:03.123817Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:58:03.123931Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:26:2073] 2025-04-04T16:58:03.123988Z 
node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:26:2073] 2025-04-04T16:58:03.124039Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:58:03.124086Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:58:03.124831Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:58:03.125246Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:58:03.125301Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:58:03.125342Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:58:03.125534Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:03.147305Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:58:03.147373Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:58:03.147416Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T16:58:03.153420Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T16:58:03.157658Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-04T16:58:03.157872Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:03.158079Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-04T16:58:03.158148Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-04T16:58:03.158186Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:58:03.167455Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:30:2063] 2025-04-04T16:58:03.167518Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:30:2063] 2025-04-04T16:58:03.167640Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:58:03.167790Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-04T16:58:03.168964Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2025-04-04T16:58:03.169011Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:51:2091] 2025-04-04T16:58:03.169061Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:58:03.169340Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 
GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:03.169569Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:03.169679Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:30:2063] 2025-04-04T16:58:03.169750Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-04T16:58:03.169862Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:51:2091] 2025-04-04T16:58:03.169894Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:58:03.169974Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:03.170099Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T16:58:03.170161Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-04T16:58:03.170399Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:26:2073] 2025-04-04T16:58:03.170460Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:26:2073] 2025-04-04T16:58:03.174931Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:30:2063] 2025-04-04T16:58:03.175263Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:58:03.175664Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:03.176395Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-04T16:58:03.176452Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-04T16:58:03.176498Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-04T16:58:03.176555Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:03.176750Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-04T16:58:03.176820Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-04T16:58:03.176866Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:58:03.177044Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-04T16:58:03.177173Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:58:03.177310Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 
2025-04-04T16:58:03.177348Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-04T16:58:03.177453Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T16:58:03.177489Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-04T16:58:03.177565Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:30:2063] 2025-04-04T16:58:03.177608Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:30:2063] 2025-04-04T16:58:03.177678Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-04T16:58:03.178114Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-04T16:58:03.178149Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:58:03.178289Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-04T16:58:03.178413Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {Ev ... 53Z node 30 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 1} 2025-04-04T16:58:38.697138Z node 30 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 2} 2025-04-04T16:58:38.697469Z node 31 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [30:324:2264] CurrentLeaderTablet: [30:338:2272] CurrentGeneration: 2 CurrentStep: 0} 2025-04-04T16:58:38.697597Z node 31 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [30:324:2264] CurrentLeaderTablet: [30:338:2272] CurrentGeneration: 2 CurrentStep: 0} 2025-04-04T16:58:38.697771Z node 31 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594046678944 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594046678944 Cookie: 0 CurrentLeader: [30:324:2264] CurrentLeaderTablet: [30:338:2272] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:58:38.697860Z node 31 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594046678944 followers: 0 2025-04-04T16:58:38.697997Z node 31 :TABLET_RESOLVER DEBUG: SelectForward node 31 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [30:324:2264] 2025-04-04T16:58:38.698196Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 30 [31:548:2089] 2025-04-04T16:58:38.698385Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [31:548:2089] 2025-04-04T16:58:38.698484Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [31:548:2089] 2025-04-04T16:58:38.698818Z node 30 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [31:548:2089] 2025-04-04T16:58:38.699145Z node 31 :PIPE_CLIENT DEBUG: 
TClient[72057594046678944] connected with status OK role: Leader [31:548:2089] 2025-04-04T16:58:38.699246Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [31:548:2089] 2025-04-04T16:58:38.699362Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [31:548:2089] 2025-04-04T16:58:38.699411Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [31:548:2089] 2025-04-04T16:58:38.699573Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [31:548:2089] 2025-04-04T16:58:38.699774Z node 30 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [31:547:2089] EventType# 271122945 2025-04-04T16:58:38.700000Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-04-04T16:58:38.700096Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:38.700422Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T16:58:38.700547Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:38.706890Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [31:554:2090] 2025-04-04T16:58:38.706965Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [31:554:2090] 2025-04-04T16:58:38.707030Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [31:555:2091] 2025-04-04T16:58:38.707068Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [31:555:2091] 2025-04-04T16:58:38.707472Z node 31 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:38.707569Z node 31 :TABLET_RESOLVER DEBUG: SelectForward node 31 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [30:323:2263] 2025-04-04T16:58:38.707738Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [31:554:2090] 2025-04-04T16:58:38.707810Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [31:555:2091] 2025-04-04T16:58:38.708024Z node 31 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:38.708307Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 30 [31:554:2090] 2025-04-04T16:58:38.708724Z node 31 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:38.708836Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [31:554:2090] 2025-04-04T16:58:38.708894Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [31:554:2090] 2025-04-04T16:58:38.709752Z node 30 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T16:58:38.709848Z node 30 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 
2025-04-04T16:58:38.709957Z node 30 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T16:58:38.710102Z node 30 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [31:554:2090] 2025-04-04T16:58:38.715424Z node 31 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [30:461:2364] CurrentLeaderTablet: [30:476:2376] CurrentGeneration: 1 CurrentStep: 0} 2025-04-04T16:58:38.715622Z node 31 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [30:461:2364] CurrentLeaderTablet: [30:476:2376] CurrentGeneration: 1 CurrentStep: 0} 2025-04-04T16:58:38.715766Z node 31 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [30:461:2364] CurrentLeaderTablet: [30:476:2376] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:58:38.715815Z node 31 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-04T16:58:38.715903Z node 31 :TABLET_RESOLVER DEBUG: SelectForward node 31 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [30:461:2364] 2025-04-04T16:58:38.716013Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 30 [31:555:2091] 2025-04-04T16:58:38.716661Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [31:555:2091] 2025-04-04T16:58:38.716748Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [31:555:2091] 2025-04-04T16:58:38.716896Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [31:554:2090] 2025-04-04T16:58:38.716970Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [31:554:2090] 2025-04-04T16:58:38.717029Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [31:554:2090] 2025-04-04T16:58:38.717161Z node 31 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [31:554:2090] 2025-04-04T16:58:38.717465Z node 30 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [31:555:2091] 2025-04-04T16:58:38.717796Z node 30 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [31:551:2090] EventType# 268959744 2025-04-04T16:58:38.718183Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-04T16:58:38.722427Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:38.722773Z node 30 :HIVE WARN: HIVE#72057594037927937 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:38.722977Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-04T16:58:38.723110Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:38.723347Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [31:555:2091] 
2025-04-04T16:58:38.723418Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [31:555:2091] 2025-04-04T16:58:38.723470Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [31:555:2091] 2025-04-04T16:58:38.723570Z node 31 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [31:555:2091] 2025-04-04T16:58:38.723946Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-04T16:58:38.724043Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:38.724216Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T16:58:38.724340Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:38.724597Z node 30 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [31:552:2091] EventType# 268959744 2025-04-04T16:58:38.724820Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-04T16:58:38.724871Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:38.725054Z node 30 :HIVE WARN: HIVE#72075186224037888 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:38.725188Z node 30 :HIVE WARN: HIVE#72075186224037888 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:38.725270Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-04T16:58:38.725334Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:38.725577Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-04T16:58:38.725642Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:38.725754Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T16:58:38.725828Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |79.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> 
KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [FAIL] >> KqpPg::CheckPgAutoParams+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-04-04T16:58:28.712599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503449681232222:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:28.712673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00175a/r3tmp/tmpWkQ9Yg/pdisk_1.dat 2025-04-04T16:58:29.226820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:29.233887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:29.234008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:29.244377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1194 TServer::EnableGrpc on GrpcPort 28191, node 1 2025-04-04T16:58:29.711971Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:29.711993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:29.712000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:29.712142Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:30.263752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:30.307148Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:30.316994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785910433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743785910342 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743785910433 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-04T16:58:30.463519Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:58:30.463701Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:58:30.463717Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T16:58:30.464303Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T16:58:32.421715Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743785910433, tx_id: 281474976710658 } } } 2025-04-04T16:58:32.422199Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T16:58:32.424101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:32.425725Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T16:58:32.431352Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2025-04-04T16:58:32.484287Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T16:58:32.484324Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743785912519 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... 
(TRUNCATED) 2025-04-04T16:58:33.532279Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503471454537988:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:33.532791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00175a/r3tmp/tmprhlqCw/pdisk_1.dat 2025-04-04T16:58:33.940722Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:33.974438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:33.974539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:33.979787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10530 TServer::EnableGrpc on GrpcPort 10681, node 2 2025-04-04T16:58:34.380496Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:34.380524Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:34.380538Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:34.380678Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:35.360774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:35.369184Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:35.377590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:35.500654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743785915410 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743785915599 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743785915410 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743785915599 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-04T16:58:35.578616Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:58:35.578791Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T16:58:35.578805Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T16:58:35.582882Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T16:58:38.539832Z node 2 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503471454537988:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:38.539935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:38.856121Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743785915501, tx_id: 281474976710658 } } } 2025-04-04T16:58:38.856491Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T16:58:38.858203Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T16:58:38.859404Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743785915599 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T16:58:38.859672Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |79.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: 2025-04-04T16:58:27.132877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:27.133513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:27.133580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ac/r3tmp/tmpCwMQe2/pdisk_1.dat 2025-04-04T16:58:27.657154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:58:27.706282Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:27.744883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:27.745054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:27.757139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:27.887269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:58:27.979078Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2585] 2025-04-04T16:58:27.979338Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:28.050110Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:28.050355Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:28.052510Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:58:28.052631Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:58:28.052706Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:58:28.053218Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:28.053675Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:28.053780Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:712:2585] in generation 1 2025-04-04T16:58:28.056438Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2587] 2025-04-04T16:58:28.056704Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:28.074964Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:28.075518Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2589] 2025-04-04T16:58:28.075791Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:28.087063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:28.088927Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:58:28.089057Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:58:28.089117Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:58:28.089565Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:28.091126Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:28.091212Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:737:2587] in generation 1 2025-04-04T16:58:28.091973Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:28.092095Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:28.093824Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-04T16:58:28.093901Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-04T16:58:28.093973Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-04T16:58:28.094510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:28.094699Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:28.094827Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:738:2589] in generation 1 2025-04-04T16:58:28.106676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:28.155823Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:58:28.156063Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:28.156243Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:742:2616] 2025-04-04T16:58:28.156309Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:58:28.156357Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:58:28.156405Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:28.156826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:28.156882Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:58:28.156966Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:28.157052Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-04-04T16:58:28.157085Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:58:28.157130Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:58:28.157170Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:58:28.157575Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:28.157633Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-04T16:58:28.157699Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:28.157792Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-04-04T16:58:28.157825Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T16:58:28.157853Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-04T16:58:28.157881Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T16:58:28.158078Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:58:28.158220Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:58:28.158577Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:28.158701Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:28.158777Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:58:28.158841Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:28.158915Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:58:28.159067Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:58:28.159307Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:677:2580], serverId# [1:704:2594], sessionId# [0:0:0] 2025-04-04T16:58:28.159391Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:58:28.159427Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:28.159462Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:58:28.159502Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:58:28.159560Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-04T16:58:28.159672Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-04T16:58:28.160364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:28.160665Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:58:28.160827Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:58:28.161549Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:678:2581], serverId# [1:710:2599], sessionId# [0:0:0] 2025-04-04T16:58:28.161627Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T16:58:28.161684Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:28.161722Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-04T16:58:28.161771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:58:28.162065Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:58:28.162357Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:58:28.162512Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T16:58:28.165414Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:28.165536Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:58:28.178995Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:28.179143Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:58:28.179410Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:58:28.179466Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T16:58:28.227664Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:679:2582], serverId# [1:761:2628], sessionId# [0:0:0] 2025-04-04T16:58:28.227895Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T16:58:28.228339Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... _DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:38.822470Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:58:38.822516Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:38.822982Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:672:2573], sessionId# [0:0:0] 2025-04-04T16:58:38.823150Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:38.823390Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:58:38.823468Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:58:38.825233Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:38.838909Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:38.839053Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:58:38.998304Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:704:2594], sessionId# [0:0:0] 2025-04-04T16:58:38.999976Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T16:58:39.000065Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:39.000724Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:39.000792Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:58:39.000844Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T16:58:39.001114Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T16:58:39.001267Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:58:39.001607Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:39.001686Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T16:58:39.002236Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:58:39.002763Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:39.004222Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T16:58:39.004266Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:39.004775Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T16:58:39.004850Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:39.005968Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:39.006008Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:58:39.006048Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T16:58:39.006108Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:58:39.006152Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T16:58:39.006233Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:39.007023Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:39.009553Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T16:58:39.009644Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T16:58:39.010122Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T16:58:39.019386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:39.019510Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:746:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:39.019601Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:39.025545Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:58:39.031264Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:39.204517Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:39.208520Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:58:39.245701Z node 3 :TX_PROXY ERROR: Actor# [3:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:39.333834Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0vv0h99165gynm71j76y6r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjNmMjRmZC03N2JjZDExNS05M2RmYzIwZC1lODZkM2VkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:58:39.342716Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:851:2687], serverId# [3:852:2688], sessionId# [0:0:0] 2025-04-04T16:58:39.343004Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:39.356537Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:39.356694Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:39.539909Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vv0x177n2a99y25293hmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWU5YWMzZGItNzkwMDM2ODAtNGQ4NjUzNjktMThiNDUwNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:58:39.542337Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-04-04T16:58:39.549902Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:890:2718], serverId# [3:891:2719], sessionId# [0:0:0] 2025-04-04T16:58:39.551151Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T16:58:39.562727Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T16:58:39.562821Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:39.562891Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-04T16:58:39.563700Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-04T16:58:39.563771Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:39.564092Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T16:58:39.564134Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-04-04T16:58:39.564274Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:890:2718], serverId# [3:891:2719], sessionId# [0:0:0] 2025-04-04T16:58:39.564387Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:39.564436Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:39.564487Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:58:39.564548Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-04T16:58:39.640328Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vv12d997dftm91fn9mbvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWU5YWMzZGItNzkwMDM2ODAtNGQ4NjUzNjktMThiNDUwNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:58:39.640912Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:39.652857Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:39.653018Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:39.667614Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MWU5YWMzZGItNzkwMDM2ODAtNGQ4NjUzNjktMThiNDUwNDg=, ActorId: [3:858:2693], ActorState: ExecuteState, TraceId: 01jr0vv12d997dftm91fn9mbvw, Create QueryResponse for error on request, msg: 2025-04-04T16:58:39.668713Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0vv12d997dftm91fn9mbvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWU5YWMzZGItNzkwMDM2ODAtNGQ4NjUzNjktMThiNDUwNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T16:58:39.669105Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:39.669581Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:39.669672Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> THiveTest::TestExternalBootWhenLocked [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:57:50.749877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:57:50.749972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:50.750008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:57:50.750045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:57:50.750089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:57:50.750114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:57:50.750280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:57:50.750379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:57:50.750755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:57:50.830842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:57:50.830912Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:50.847676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:57:50.847885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:57:50.848006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:57:50.859315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:57:50.859668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:57:50.860294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:50.861258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:57:50.867895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:50.869354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:50.869439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:50.869628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:57:50.869683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:50.869722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:57:50.869819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:57:50.876925Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:57:51.045205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:57:51.045442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.045674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:57:51.045942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:57:51.045999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.050630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:51.050831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:57:51.051032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.051111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:57:51.051170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:57:51.051201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:57:51.053744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.053817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:57:51.053858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:57:51.057026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.057111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.057164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:51.057211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:57:51.067283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:57:51.070451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:57:51.070668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:57:51.071765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:57:51.071915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:57:51.071957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:57:51.072275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:57:51.072327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-04T16:57:51.072482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:57:51.072563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:57:51.079023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:57:51.079100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:57:51.079280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:57:51.079319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:57:51.079709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:57:51.079758Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:57:51.079853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:51.079883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:51.079917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:57:51.079948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:51.079984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:57:51.080021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:57:51.080052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:57:51.080077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:57:51.080178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:57:51.080219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:57:51.080250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:57:51.082149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:51.082532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:57:51.082583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14029 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7AFEF660-EC04-4C37-97DA-685CD65858FA amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-04-04T16:58:39.162207Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3461:5425], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-04-04T16:58:39.162749Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3460:5424] 2025-04-04T16:58:39.162969Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3461:5425], sender# [1:3460:5424], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14029 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0A3AEC49-4509-4898-A34B-705B71077481 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-04-04T16:58:39.174913Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3461:5425], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-04-04T16:58:39.175214Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3460:5424] 2025-04-04T16:58:39.175347Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3461:5425], sender# [1:3460:5424], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14029 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EB3A3204-BA8D-4477-B3B2-E04E3A9F464B amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-04-04T16:58:39.185519Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3461:5425], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-04T16:58:39.185628Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3461:5425], success# 1, error# , multipart# 1, uploadId# 1 2025-04-04T16:58:39.196226Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3461:5425], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14029 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
E39940B5-AA20-4E8F-B661-CBF17C90776F amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-04-04T16:58:39.234934Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3461:5425], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-04-04T16:58:39.235423Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3460:5424], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T16:58:39.277261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T16:58:39.277339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T16:58:39.277522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T16:58:39.277671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T16:58:39.277738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:39.277777Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:58:39.277815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T16:58:39.277857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T16:58:39.278047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:39.283731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:58:39.284304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T16:58:39.284358Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T16:58:39.284477Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2025-04-04T16:58:39.284510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:58:39.284546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T16:58:39.284593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:58:39.284636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T16:58:39.284723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T16:58:39.284773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T16:58:39.284827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T16:58:39.284872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T16:58:39.284991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T16:58:39.297737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:58:39.297811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3444:5409] TestWaitNotification: OK eventTxId 102 >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2025-04-04T16:57:59.419682Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.423693Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.423944Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:57:59.424555Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 
2025-04-04T16:57:59.425709Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:57:59.425773Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:59.426757Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:72:2075] ControllerId# 72057594037932033 2025-04-04T16:57:59.426807Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.426933Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.427332Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:59.442680Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.442750Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.445116Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:80:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.445322Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:81:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.445465Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.445615Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.445805Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.446041Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.446668Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.446720Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:59.446802Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:72:2075] 2025-04-04T16:57:59.446851Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:72:2075] 2025-04-04T16:57:59.446932Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:59.446982Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:59.447910Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:59.448043Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.450953Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.451100Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:59.451946Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# 
[2:94:2073] ControllerId# 72057594037932033 2025-04-04T16:57:59.451986Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.452050Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.452252Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:59.453081Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.453130Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.454492Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:100:2077] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.454603Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:101:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.454724Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:102:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.454822Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:103:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.454912Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:104:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.455015Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:105:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.455130Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:106:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.455153Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:59.455204Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:94:2073] 2025-04-04T16:57:59.455228Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:94:2073] 2025-04-04T16:57:59.455255Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:59.455295Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:59.455604Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:59.455688Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.457714Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.457799Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:59.458569Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:111:2073] ControllerId# 72057594037932033 2025-04-04T16:57:59.458611Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.458701Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.458905Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 
2025-04-04T16:57:59.459932Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.460022Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.461631Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:117:2077] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.461816Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:118:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.461943Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:119:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.462065Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:120:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.462215Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:121:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.462410Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:122:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.462578Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:123:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.462607Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:59.462691Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:111:2073] 2025-04-04T16:57:59.462724Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:111:2073] 2025-04-04T16:57:59.462777Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:59.462815Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:59.463228Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:59.463850Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:59.464048Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:94:2073] 2025-04-04T16:57:59.464097Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:59.464139Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:59.464280Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:59.464416Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:111:2073] 2025-04-04T16:57:59.464454Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:59.464494Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:59.464657Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:59.486544Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:72:2075] 2025-04-04T16:57:59.486638Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 
2025-04-04T16:57:59.486682Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:59.488493Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeLis ... st# true Marker# BPP21 2025-04-04T16:58:41.515562Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.043 sample PartId# [72057594037927937:2:8:0:0:200:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 9.398 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-04-04T16:58:41.515834Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:200:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T16:58:41.516034Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-04-04T16:58:41.516386Z node 59 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [59:94:2092], reason = ReasonStop Marker# TSYS29 2025-04-04T16:58:41.516443Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-04-04T16:58:41.516698Z node 59 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-04-04T16:58:41.516760Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-04-04T16:58:41.516996Z node 59 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2025-04-04T16:58:41.522775Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [59:95:2092] 2025-04-04T16:58:41.522869Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [59:95:2092] 2025-04-04T16:58:41.522963Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [59:94:2092] EventType# 268960257 2025-04-04T16:58:41.523566Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [59:438:2350] 2025-04-04T16:58:41.523643Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [59:438:2350] 2025-04-04T16:58:41.523945Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-04-04T16:58:41.524047Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:41.524230Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T16:58:41.524328Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:41.524609Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-04-04T16:58:41.524704Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:41.524822Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T16:58:41.524919Z node 59 
:TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:41.525607Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [59:452:2357] 2025-04-04T16:58:41.525678Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [59:452:2357] 2025-04-04T16:58:41.525852Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:41.525933Z node 59 :TABLET_RESOLVER DEBUG: SelectForward node 59 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [59:372:2298] 2025-04-04T16:58:41.526047Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [59:452:2357] 2025-04-04T16:58:41.526139Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [59:452:2357] 2025-04-04T16:58:41.526289Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [59:452:2357] 2025-04-04T16:58:41.526370Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [59:452:2357] 2025-04-04T16:58:41.526488Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-04T16:58:41.526739Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:41.526948Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T16:58:41.527052Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-04T16:58:41.527093Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T16:58:41.527167Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:372:2298] CurrentLeaderTablet: [59:387:2310] CurrentGeneration: 1 CurrentStep: 0} 2025-04-04T16:58:41.527279Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:372:2298] CurrentLeaderTablet: [59:387:2310] CurrentGeneration: 1 CurrentStep: 0} 2025-04-04T16:58:41.527404Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [59:372:2298] CurrentLeaderTablet: [59:387:2310] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:58:41.527575Z node 59 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-04T16:58:41.528187Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [60:454:2092] 2025-04-04T16:58:41.528245Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [60:454:2092] 2025-04-04T16:58:41.528358Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:41.528432Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 
allowFollowers 0 winner: [59:321:2262] 2025-04-04T16:58:41.528503Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [60:454:2092] 2025-04-04T16:58:41.528560Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [60:454:2092] 2025-04-04T16:58:41.528631Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 59 [60:454:2092] 2025-04-04T16:58:41.528808Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [60:454:2092] 2025-04-04T16:58:41.528899Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:454:2092] 2025-04-04T16:58:41.529276Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [60:454:2092] 2025-04-04T16:58:41.529726Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [60:454:2092] 2025-04-04T16:58:41.529788Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [60:454:2092] 2025-04-04T16:58:41.529845Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [60:454:2092] 2025-04-04T16:58:41.529961Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:454:2092] 2025-04-04T16:58:41.530048Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [60:454:2092] 2025-04-04T16:58:41.530104Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [60:454:2092] 2025-04-04T16:58:41.530510Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [60:441:2087] EventType# 268697624 2025-04-04T16:58:41.530733Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-04-04T16:58:41.530822Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:41.531069Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-04T16:58:41.531168Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:41.542808Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] bootstrap ActorId# [59:457:2360] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:58:41.543001Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:58:41.543091Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:58:41.543204Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2025-04-04T16:58:41.543287Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2025-04-04T16:58:41.543528Z node 59 :BS_PROXY DEBUG: Send to queueActorId# [59:74:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 
2025-04-04T16:58:41.546127Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-04T16:58:41.546305Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-04T16:58:41.546440Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:58:41.546717Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.072 sample PartId# [72057594037927937:2:9:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 3.698 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-04-04T16:58:41.546953Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T16:58:41.547124Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 >> KqpQueryService::ExecuteQuery >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> KqpPg::Returning+useSink [GOOD] >> KqpPg::Returning-useSink |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |79.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> YdbProxy::MakeDirectory |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |79.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> YdbProxy::CreateTable >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> 
DistributedEraseTests::ConditionalEraseRowsAsyncIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD]
Test command err:
2025-04-04T16:58:28.891787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:28.892398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:28.892465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001286/r3tmp/tmpXAmkY4/pdisk_1.dat 2025-04-04T16:58:29.322535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:58:29.381092Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:29.423855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:29.424042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:29.435864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:29.531811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:58:29.598948Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2585] 2025-04-04T16:58:29.599390Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:29.654770Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:29.654944Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:29.657021Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:58:29.657142Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:58:29.657226Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:58:29.657724Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:29.658202Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:29.658330Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:712:2585] in generation 1 2025-04-04T16:58:29.660937Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2587] 2025-04-04T16:58:29.661236Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:29.673648Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:29.674134Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2589] 2025-04-04T16:58:29.674358Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:29.683039Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:29.684602Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:58:29.684716Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:58:29.684786Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:58:29.685106Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:29.686129Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:29.686188Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:737:2587] in generation 1 2025-04-04T16:58:29.686828Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:29.686906Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:29.688346Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-04T16:58:29.688416Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-04T16:58:29.688455Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-04T16:58:29.688800Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:29.688949Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:29.689028Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:738:2589] in generation 1 2025-04-04T16:58:29.700141Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:29.735131Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:58:29.735379Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:29.735537Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:742:2616] 2025-04-04T16:58:29.735592Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:58:29.735629Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:58:29.735663Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:29.736042Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:29.736084Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:58:29.736144Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:29.736229Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-04-04T16:58:29.736277Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:58:29.736302Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:58:29.736327Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:58:29.736668Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:29.736719Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-04T16:58:29.736773Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:29.736838Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-04-04T16:58:29.736860Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T16:58:29.736882Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-04T16:58:29.736902Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T16:58:29.737066Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:58:29.737185Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:58:29.737425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:29.737487Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:29.737542Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:58:29.737586Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:29.737692Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:58:29.737776Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:58:29.737900Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:677:2580], serverId# [1:704:2594], sessionId# [0:0:0] 2025-04-04T16:58:29.737969Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:58:29.738000Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:29.738023Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:58:29.738053Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:58:29.738085Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-04T16:58:29.738135Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-04T16:58:29.738784Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:29.739044Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:58:29.739145Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:58:29.739646Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:678:2581], serverId# [1:710:2599], sessionId# [0:0:0] 2025-04-04T16:58:29.739712Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T16:58:29.739747Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:29.739774Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-04T16:58:29.739823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:58:29.740044Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:58:29.740241Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:58:29.740310Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T16:58:29.742440Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:29.742531Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:58:29.753465Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:29.753594Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:58:29.753736Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:58:29.753770Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T16:58:29.803366Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:679:2582], serverId# [1:761:2628], sessionId# [0:0:0] 2025-04-04T16:58:29.803608Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T16:58:29.803814Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-04T16:58:45.109575Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-04T16:58:45.110045Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack init split/merge destination OpId 281474976715664 2025-04-04T16:58:45.112617Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-04-04T16:58:45.113711Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715664 2025-04-04T16:58:45.113769Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-04-04T16:58:45.115166Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715664 at state Ready 2025-04-04T16:58:45.127055Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715664 2025-04-04T16:58:45.127501Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2025-04-04T16:58:45.129815Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T16:58:45.129882Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2025-04-04T16:58:45.131196Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T16:58:45.131240Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2025-04-04T16:58:45.132926Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T16:58:45.132969Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-04-04T16:58:45.142913Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T16:58:45.142972Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 
72075186224037889, table# 8, finished edge# 0, front# 0 2025-04-04T16:58:45.144198Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T16:58:45.144242Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-04-04T16:58:45.144763Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715664 2025-04-04T16:58:45.144984Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715664 2025-04-04T16:58:45.145058Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715664 2025-04-04T16:58:45.145092Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715664 2025-04-04T16:58:45.145141Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715664 2025-04-04T16:58:45.145403Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715664 2025-04-04T16:58:45.145660Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715664 2025-04-04T16:58:45.145699Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715664 2025-04-04T16:58:45.145729Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715664 2025-04-04T16:58:45.145757Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715664 2025-04-04T16:58:45.145861Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715664 2025-04-04T16:58:45.146599Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715664 2025-04-04T16:58:45.146813Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-04-04T16:58:45.146944Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-04-04T16:58:45.147307Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1228:2945], serverId# [3:1229:2946], sessionId# [0:0:0] 2025-04-04T16:58:45.147348Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1227:2944], serverId# [3:1230:2947], sessionId# [0:0:0] 2025-04-04T16:58:45.147487Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2025-04-04T16:58:45.148298Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2025-04-04T16:58:45.150189Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715664 
2025-04-04T16:58:45.158428Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-04T16:58:45.158604Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:58:45.158752Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-04T16:58:45.158861Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1233:2950] 2025-04-04T16:58:45.158900Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-04T16:58:45.158959Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-04T16:58:45.158999Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T16:58:45.159339Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715664 2025-04-04T16:58:45.160387Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-04-04T16:58:45.160450Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T16:58:45.160676Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T16:58:45.160713Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:45.160747Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-04T16:58:45.160780Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T16:58:45.160843Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715664 2025-04-04T16:58:45.160889Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2025-04-04T16:58:45.160965Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T16:58:45.161026Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-04T16:58:45.161073Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1235:2952] 2025-04-04T16:58:45.161097Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-04T16:58:45.161155Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-04T16:58:45.161183Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T16:58:45.161350Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1228:2945], serverId# [3:1229:2946], sessionId# [0:0:0] 2025-04-04T16:58:45.161479Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715664 2025-04-04T16:58:45.162171Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T16:58:45.162206Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:45.162233Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-04T16:58:45.162287Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
72075186224037892 2025-04-04T16:58:45.162373Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 2000 2025-04-04T16:58:45.162427Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-04T16:58:45.162495Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-04T16:58:45.162520Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T16:58:45.162819Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1227:2944], serverId# [3:1230:2947], sessionId# [0:0:0] 2025-04-04T16:58:45.163101Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2025-04-04T16:58:45.163138Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-04T16:58:45.186892Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715664 2025-04-04T16:58:45.190341Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-04T16:58:45.193102Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-04T16:58:45.193168Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-04T16:58:45.193592Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:58:45.193643Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2025-04-04T16:58:45.193784Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1096:2842], serverId# [3:1097:2843], sessionId# [0:0:0] 2025-04-04T16:58:45.193964Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715664 2025-04-04T16:58:45.194032Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T16:58:45.194080Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |79.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] Test command err: 
2025-04-04T16:57:57.925188Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:57.929110Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:57.929362Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:57.930325Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:44:2071] ControllerId# 72057594037932033 2025-04-04T16:57:57.930366Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:57.930472Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:57.930886Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:57.932208Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:57.932261Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:57.934360Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:50:2075] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.934547Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:51:2076] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.934704Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:52:2077] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.934861Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:53:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.935031Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:54:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.935186Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:55:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.935334Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:56:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.935369Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:57.935452Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:44:2071] 2025-04-04T16:57:57.935502Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:44:2071] 2025-04-04T16:57:57.935678Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:57.935738Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:57.936246Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:57.936297Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:57.938847Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 
VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:57.939058Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:57:57.939731Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:57:57.940925Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:57:57.940990Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:57.941883Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:66:2075] ControllerId# 72057594037932033 2025-04-04T16:57:57.941916Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:57.942015Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:57.942225Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:57.957782Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:57.957832Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:57.959633Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:74:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.959812Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:75:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.959959Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:76:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.960111Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:77:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.960252Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:78:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.960721Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:79:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.960879Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:80:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.960908Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:57.960978Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:66:2075] 2025-04-04T16:57:57.961013Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:66:2075] 2025-04-04T16:57:57.961055Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:57.961093Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:57.962060Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:66:2075] 2025-04-04T16:57:57.962137Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 
2025-04-04T16:57:57.962189Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:57.962369Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:57.962558Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:57.962704Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:44:2071] 2025-04-04T16:57:57.962747Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:57.962776Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:57.962897Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:57.977331Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:66:2075] 2025-04-04T16:57:57.977399Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:57.977440Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T16:57:57.982165Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T16:57:57.983681Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-04T16:57:57.983935Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:57.983973Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T16:57:57.984104Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T16:57:57.984264Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-04T16:57:57.984724Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:57:57.984931Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:57.985207Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:57.985419Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:57:57.985603Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false 
PendingEvents.size# 0 2025-04-04T16:57:57.985748Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:48:2064] 2025-04-04T16:57:57.985769Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:48:2064] 2025-04-04T16:57:57.985796Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-04T16:57:57.985829Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-04T16:57:57.985875Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-04T16:57:57.985893Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-04T16:57:57.985917Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:57:57.985961Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:70:2064] 2025-04-04T16:57:57.985974Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:70:2064] 2025-04-04T16:57:57.986024Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-0 ... TabletId# 72075186224037888 RecordGeneration# 3 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2025-04-04T16:58:47.446465Z node 22 :BS_PROXY_COLLECT DEBUG: [a81f1bc553f4d9cc] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 3 Channel# 0 VDisk# [80000000:1:0:0:0]} Marker# DSPC01 2025-04-04T16:58:47.446512Z node 22 :BS_PROXY_COLLECT INFO: [a81f1bc553f4d9cc] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 3 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2025-04-04T16:58:47.447370Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [22:682:2481] 2025-04-04T16:58:47.447431Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [22:682:2481] 2025-04-04T16:58:47.447764Z node 22 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:47.447845Z node 22 :TABLET_RESOLVER DEBUG: SelectForward node 22 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [23:552:2092] 2025-04-04T16:58:47.448010Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 23 [22:682:2481] 2025-04-04T16:58:47.448100Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944] ::Bootstrap [23:686:2143] 2025-04-04T16:58:47.448130Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944] lookup [23:686:2143] 2025-04-04T16:58:47.448229Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594046678944 entry.State: StNormal ev: {EvForward TabletID: 72057594046678944 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:47.448270Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [22:324:2264] 2025-04-04T16:58:47.448314Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [22:682:2481] 2025-04-04T16:58:47.448385Z node 22 :PIPE_CLIENT 
DEBUG: TClient[72075186224037888]::SendEvent [22:682:2481] 2025-04-04T16:58:47.448450Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 22 [23:686:2143] 2025-04-04T16:58:47.448523Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [23:686:2143] 2025-04-04T16:58:47.448553Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [23:686:2143] 2025-04-04T16:58:47.448825Z node 22 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [23:686:2143] 2025-04-04T16:58:47.449066Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [22:682:2481] 2025-04-04T16:58:47.449131Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] immediate retry [22:682:2481] 2025-04-04T16:58:47.449179Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [22:682:2481] 2025-04-04T16:58:47.449308Z node 22 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-04-04T16:58:47.449505Z node 22 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:47.449623Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [23:686:2143] 2025-04-04T16:58:47.449678Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [23:686:2143] 2025-04-04T16:58:47.449771Z node 22 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:47.449909Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T16:58:47.449985Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-04T16:58:47.450020Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T16:58:47.450086Z node 22 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [22:643:2452] CurrentLeaderTablet: [22:645:2453] CurrentGeneration: 3 CurrentStep: 0} 2025-04-04T16:58:47.450193Z node 22 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [22:643:2452] CurrentLeaderTablet: [22:645:2453] CurrentGeneration: 3 CurrentStep: 0} 2025-04-04T16:58:47.450348Z node 22 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [22:643:2452] CurrentLeaderTablet: [22:645:2453] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-04-04T16:58:47.450418Z node 22 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-04-04T16:58:47.450491Z node 22 :TABLET_RESOLVER DEBUG: SelectForward node 22 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [22:643:2452] 2025-04-04T16:58:47.450604Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [22:682:2481] 2025-04-04T16:58:47.450712Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [22:682:2481] 2025-04-04T16:58:47.450793Z node 23 :PIPE_CLIENT DEBUG: 
TClient[72057594046678944] send [23:686:2143] 2025-04-04T16:58:47.450847Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [23:686:2143] 2025-04-04T16:58:47.450948Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [23:686:2143] 2025-04-04T16:58:47.451099Z node 22 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [22:682:2481] 2025-04-04T16:58:47.451234Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [22:682:2481] 2025-04-04T16:58:47.451270Z node 22 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [22:682:2481] 2025-04-04T16:58:47.451424Z node 22 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [23:685:2143] EventType# 271122945 2025-04-04T16:58:47.451598Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-04-04T16:58:47.451687Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:47.451950Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T16:58:47.452045Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:47.453565Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [23:692:2144] 2025-04-04T16:58:47.453607Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [23:692:2144] 2025-04-04T16:58:47.453834Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:47.453880Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [22:323:2263] 2025-04-04T16:58:47.453940Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [23:692:2144] 2025-04-04T16:58:47.464514Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 22 [23:692:2144] 2025-04-04T16:58:47.464899Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [23:692:2144] 2025-04-04T16:58:47.464946Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [23:692:2144] 2025-04-04T16:58:47.465721Z node 22 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [23:692:2144] 2025-04-04T16:58:47.466622Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [23:692:2144] 2025-04-04T16:58:47.466690Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [23:692:2144] 2025-04-04T16:58:47.466728Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [23:692:2144] 2025-04-04T16:58:47.466843Z node 23 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [23:692:2144] 2025-04-04T16:58:47.467331Z node 22 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [23:690:2144] EventType# 268959744 2025-04-04T16:58:47.467552Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, 
NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-04-04T16:58:47.467635Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:47.467840Z node 22 :HIVE WARN: HIVE#72057594037927937 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:47.467955Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{24, redo 152b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-04T16:58:47.468048Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{42, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:47.468690Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [22:701:2486] 2025-04-04T16:58:47.468751Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [22:701:2486] 2025-04-04T16:58:47.468855Z node 22 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:47.468932Z node 22 :TABLET_RESOLVER DEBUG: SelectForward node 22 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [22:323:2263] 2025-04-04T16:58:47.469005Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [22:701:2486] 2025-04-04T16:58:47.469067Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [22:701:2486] 2025-04-04T16:58:47.469151Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [22:701:2486] 2025-04-04T16:58:47.469225Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [22:701:2486] 2025-04-04T16:58:47.469348Z node 22 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [22:701:2486] 2025-04-04T16:58:47.469510Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [22:701:2486] 2025-04-04T16:58:47.469570Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [22:701:2486] 2025-04-04T16:58:47.469623Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [22:701:2486] 2025-04-04T16:58:47.469700Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [22:701:2486] 2025-04-04T16:58:47.469746Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [22:701:2486] 2025-04-04T16:58:47.469820Z node 22 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [22:700:2485] EventType# 268697616 >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup >> Viewer::JsonAutocompleteColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> YdbProxy::AlterTable [GOOD] >> KqpQueryService::ExecuteQuery [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> DemoTx::Scenario_1 >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpOrder >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] >> KqpPg::DeleteWithQueryService-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> 
YdbProxy::AlterTable [GOOD] Test command err: 2025-04-04T16:58:45.625645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503523001402492:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:45.639093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d0a/r3tmp/tmpBwStib/pdisk_1.dat 2025-04-04T16:58:46.280002Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:46.320337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:46.320443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:46.327475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2484 TServer::EnableGrpc on GrpcPort 27979, node 1 2025-04-04T16:58:46.769490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:46.769518Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:46.769527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:46.769697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:47.292509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:58:47.325043Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:50.377630Z node 1 :TX_PROXY ERROR: Actor# [1:7489503544476239508:2307] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T16:58:50.409477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:50.579251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:58:50.614749Z node 1 :TX_PROXY ERROR: Actor# [1:7489503544476239625:2387] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } 2025-04-04T16:58:50.617883Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503523001402492:2189];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:50.617967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TStorageBalanceTest::TestScenario1 [GOOD] >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario1 [GOOD] Test command err: 2025-04-04T16:57:59.073741Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.077104Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.077422Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:57:59.078101Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:57:59.079264Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:57:59.079322Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:59.080155Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 
0 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2025-04-04T16:57:59.080191Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.080316Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.080651Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:59.103921Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.103979Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.106167Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.106397Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.106564Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.106758Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.106918Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.107113Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.107269Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.107299Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:59.107389Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:26:2073] 2025-04-04T16:57:59.107439Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:26:2073] 2025-04-04T16:57:59.107506Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:59.107554Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:59.108270Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:59.108650Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:57:59.108746Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:59.108790Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:59.109016Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:59.126695Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:57:59.126766Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:59.126807Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T16:57:59.132156Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T16:57:59.133083Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap 
Drives# [] Record# {} 2025-04-04T16:57:59.133270Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:57:59.133523Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-04T16:57:59.133595Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-04T16:57:59.133629Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:57:59.141493Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:30:2063] 2025-04-04T16:57:59.141564Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:30:2063] 2025-04-04T16:57:59.141685Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:59.141847Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-04T16:57:59.142409Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2025-04-04T16:57:59.142461Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:51:2091] 2025-04-04T16:57:59.142516Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:57:59.142852Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.143102Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:59.143215Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:30:2063] 2025-04-04T16:57:59.143303Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-04T16:57:59.143405Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:51:2091] 2025-04-04T16:57:59.143437Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:57:59.143510Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:59.143863Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T16:57:59.143925Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-04T16:57:59.144135Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:26:2073] 
2025-04-04T16:57:59.144201Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:26:2073] 2025-04-04T16:57:59.148696Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:30:2063] 2025-04-04T16:57:59.149025Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:57:59.149459Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:57:59.150180Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-04T16:57:59.150223Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-04T16:57:59.150292Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-04T16:57:59.150324Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:57:59.150477Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-04T16:57:59.150532Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-04T16:57:59.150572Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:57:59.150758Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-04T16:57:59.150844Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:57:59.150963Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:57:59.150992Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-04T16:57:59.151059Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T16:57:59.151086Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-04T16:57:59.151146Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:30:2063] 2025-04-04T16:57:59.151169Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:30:2063] 2025-04-04T16:57:59.151228Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-04T16:57:59.151611Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-04T16:57:59.151648Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:59.151781Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-04T16:57:59.151895Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {Ev ... 
{539, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{361, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-04T16:58:51.695145Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{539, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:51.707291Z node 22 :BS_PROXY_PUT INFO: [c665abdb4b1a4dd0] bootstrap ActorId# [22:3854:5139] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:182:0:0:250:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:58:51.707448Z node 22 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] Id# [72057594037927937:2:182:0:0:250:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:58:51.707501Z node 22 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] restore Id# [72057594037927937:2:182:0:0:250:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:58:51.707559Z node 22 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:182:0:0:250:1] Marker# BPG33 2025-04-04T16:58:51.707597Z node 22 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:182:0:0:250:1] Marker# BPG32 2025-04-04T16:58:51.707743Z node 22 :BS_PROXY DEBUG: Send to queueActorId# [22:34:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:182:0:0:250:1] FDS# 250 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:58:51.709938Z node 22 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] received {EvVPutResult Status# OK ID# [72057594037927937:2:182:0:0:250:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 197 } Cost# 81968 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 198 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-04T16:58:51.710065Z node 22 :BS_PROXY_PUT DEBUG: [c665abdb4b1a4dd0] Result# TEvPutResult {Id# [72057594037927937:2:182:0:0:250:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-04T16:58:51.710166Z node 22 :BS_PROXY_PUT INFO: [c665abdb4b1a4dd0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:182:0:0:250:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:58:51.710337Z node 22 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.777 sample PartId# [72057594037927937:2:182:0:0:250:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 22 } TEvVPutResult{ TimestampMs# 2.979 VDiskId# [0:1:0:0:0] NodeId# 22 Status# OK } ] } 2025-04-04T16:58:51.710623Z node 22 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:182:0:0:250:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T16:58:51.710861Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} commited cookie 1 for step 182 2025-04-04T16:58:51.711291Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-04T16:58:51.711356Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 
2025-04-04T16:58:51.711609Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{362, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-04T16:58:51.711672Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:51.711784Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [22:509:2451] 2025-04-04T16:58:51.711817Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [22:509:2451] 2025-04-04T16:58:51.711869Z node 22 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [22:472:2425] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) *************************--------------------------------------------------------------------------- (0.25) 2025-04-04T16:58:51.815845Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-04T16:58:51.815942Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:51.816036Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004057440}: tablet 72075186224037928 wasn't changed 2025-04-04T16:58:51.816075Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004057440}: tablet 72075186224037928 skipped channel 0 2025-04-04T16:58:51.816176Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004057440}: tablet 72075186224037928 skipped channel 1 2025-04-04T16:58:51.816203Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004057440}: tablet 72075186224037928 skipped channel 2 2025-04-04T16:58:51.816283Z node 22 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004057440}(72075186224037928)::Execute - TryToBoot was not successfull 2025-04-04T16:58:51.816354Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{363, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-04T16:58:51.816400Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{541, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:51.828625Z node 22 :BS_PROXY_PUT INFO: [8723cb048062d23c] bootstrap ActorId# [22:3856:5141] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:183:0:0:250:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:58:51.828740Z node 22 :BS_PROXY_PUT DEBUG: [8723cb048062d23c] Id# [72057594037927937:2:183:0:0:250:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:58:51.828788Z node 22 :BS_PROXY_PUT DEBUG: [8723cb048062d23c] restore Id# [72057594037927937:2:183:0:0:250:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:58:51.828832Z node 22 :BS_PROXY_PUT DEBUG: [8723cb048062d23c] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:183:0:0:250:1] Marker# BPG33 2025-04-04T16:58:51.828864Z node 22 :BS_PROXY_PUT DEBUG: [8723cb048062d23c] Sending missing VPut part# 0 to# 0 blob Id# 
[72057594037927937:2:183:0:0:250:1] Marker# BPG32 2025-04-04T16:58:51.828966Z node 22 :BS_PROXY DEBUG: Send to queueActorId# [22:34:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:183:0:0:250:1] FDS# 250 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:58:51.841055Z node 22 :BS_PROXY_PUT DEBUG: [8723cb048062d23c] received {EvVPutResult Status# OK ID# [72057594037927937:2:183:0:0:250:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 198 } Cost# 81968 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 199 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-04T16:58:51.841202Z node 22 :BS_PROXY_PUT DEBUG: [8723cb048062d23c] Result# TEvPutResult {Id# [72057594037927937:2:183:0:0:250:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-04T16:58:51.841276Z node 22 :BS_PROXY_PUT INFO: [8723cb048062d23c] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:183:0:0:250:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:58:51.841424Z node 22 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.559 sample PartId# [72057594037927937:2:183:0:0:250:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 22 } TEvVPutResult{ TimestampMs# 12.707 VDiskId# [0:1:0:0:0] NodeId# 22 Status# OK } ] } 2025-04-04T16:58:51.841738Z node 22 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:183:0:0:250:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T16:58:51.841962Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} commited cookie 1 for step 183 2025-04-04T16:58:51.842463Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-04-04T16:58:51.842528Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:51.842792Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{364, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-04-04T16:58:51.842845Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:58:51.842975Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [22:509:2451] 2025-04-04T16:58:51.843008Z node 22 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [22:509:2451] 2025-04-04T16:58:51.843065Z node 22 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [22:472:2425] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) *************************--------------------------------------------------------------------------- (0.25) 2025-04-04T16:58:51.946773Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-04T16:58:51.946858Z node 22 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:58:51.946973Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003160320}: tablet 72075186224037899 wasn't changed 2025-04-04T16:58:51.947029Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003160320}: tablet 72075186224037899 skipped channel 0 2025-04-04T16:58:51.947115Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003160320}: tablet 72075186224037899 skipped channel 1 2025-04-04T16:58:51.947171Z node 22 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003160320}: tablet 72075186224037899 skipped channel 2 2025-04-04T16:58:51.947237Z node 22 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923003160320}(72075186224037899)::Execute - TryToBoot was not successfull 2025-04-04T16:58:51.947304Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{365, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-04-04T16:58:51.947354Z node 22 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{543, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] Test command err: 2025-04-04T16:58:20.571833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:722:2425], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:20.572034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:58:20.572192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:20.572899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:719:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:20.573026Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:20.573159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:20.957022Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:21.157029Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T16:58:21.172039Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T16:58:21.714625Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 14185, node 1 TClient is connected to server localhost:16408 2025-04-04T16:58:21.994459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:21.994530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:21.994567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:21.994861Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:58:25.309037Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503433728412515:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:25.309109Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:25.543954Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:25.582220Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:25.582520Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:25.585093Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31334, node 3 2025-04-04T16:58:25.736980Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:25.737018Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:25.737033Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:25.737237Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22481 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:26.225019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:26.259742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:26.264937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:26.269352Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T16:58:29.003137Z node 3 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:29.003212Z node 3 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:30.102071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489503455203249677:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:30.102071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489503455203249701:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:30.102216Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:30.108005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:58:30.126970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489503455203249711:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:58:30.217362Z node 3 :TX_PROXY ERROR: Actor# [3:7489503455203249762:2360] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:30.310395Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489503433728412515:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:30.310508Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:30.539238Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2UwMGZiMWYtY2EwZjE5MDItZTkxNDU5ZDgtYWYwMDBiMzg=, ActorId: [3:7489503455203249670:2339], ActorState: ExecuteState, TraceId: 01jr0vtqtm9jr1vpry7mgz4j4a, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction 2025-04-04T16:58:33.317000Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503471176649666:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:33.317548Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:33.586673Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:33.621196Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:33.621291Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:33.631724Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8029, node 4 2025-04-04T16:58:33.821725Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:33.821751Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:33.821761Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:33.821891Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27015 2025-04-04T16:58:34.467772Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:58:34.472494Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:34.479605Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-04T16:58:38.319247Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489503471176649666:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:38.319385Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:38.690848Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999894s 2025-04-04T16:58:38.691026Z node 4 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T16:58:38.691062Z node 4 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T16:58:39.756442Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503496946454073:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don ... false data# peer# 2025-04-04T16:58:50.093191Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b4780] received request Name# Coordination/CreateNode ok# false data# peer# 2025-04-04T16:58:50.093668Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b4080] received request Name# Coordination/AlterNode ok# false data# peer# 2025-04-04T16:58:50.093966Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b3980] received request Name# Coordination/DropNode ok# false data# peer# 2025-04-04T16:58:50.094307Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b3280] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-04-04T16:58:50.094613Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b2b80] received request Name# CreateDatabase ok# false data# peer# 2025-04-04T16:58:50.094927Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b1d80] received request Name# GetDatabaseStatus ok# false data# peer# 2025-04-04T16:58:50.095217Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b2480] received request Name# AlterDatabase ok# false data# peer# 2025-04-04T16:58:50.095302Z node 5 :GRPC_SERVER DEBUG: [0x51b0004de780] received request Name# ListDatabases ok# false data# peer# 2025-04-04T16:58:50.095464Z node 5 :GRPC_SERVER DEBUG: [0x51b0004dd980] received request Name# RemoveDatabase ok# false data# peer# 2025-04-04T16:58:50.095681Z node 5 :GRPC_SERVER DEBUG: [0x51b0004dcb80] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-04-04T16:58:50.095723Z node 5 :GRPC_SERVER DEBUG: [0x51b0004dc480] received request Name# GetScaleRecommendation ok# false data# peer# 2025-04-04T16:58:50.095991Z node 5 :GRPC_SERVER DEBUG: [0x51b0004da880] received request Name# ListEndpoints ok# false data# peer# 2025-04-04T16:58:50.096032Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d7e80] received request Name# WhoAmI ok# false data# peer# 2025-04-04T16:58:50.096292Z node 5 :GRPC_SERVER DEBUG: [0x51b0004db680] received request Name# NodeRegistration ok# false data# peer# 2025-04-04T16:58:50.096320Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d8580] received request Name# Scan ok# false data# peer# 2025-04-04T16:58:50.096536Z node 5 :GRPC_SERVER DEBUG: [0x51b0004dd280] received request Name# GetShardLocations ok# false data# peer# 2025-04-04T16:58:50.096601Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d7780] received request Name# DescribeTable ok# false data# peer# 2025-04-04T16:58:50.096788Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d7080] received request Name# CreateSnapshot ok# false data# peer# 2025-04-04T16:58:50.096874Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d5b80] received request Name# RefreshSnapshot ok# false data# peer# 2025-04-04T16:58:50.097034Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d4d80] received request Name# DiscardSnapshot ok# false data# peer# 2025-04-04T16:58:50.097132Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d4680] received request Name# List ok# false data# peer# 2025-04-04T16:58:50.097380Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d3880] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-04-04T16:58:50.097628Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d3180] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-04-04T16:58:50.097859Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d2380] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-04-04T16:58:50.098106Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d1c80] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-04-04T16:58:50.098772Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d0e80] received request Name# 
RateLimiter/DescribeResource ok# false data# peer# 2025-04-04T16:58:50.098823Z node 5 :GRPC_SERVER DEBUG: [0x51b0004d0780] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-04-04T16:58:50.099066Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cf280] received request Name# CreateStream ok# false data# peer# 2025-04-04T16:58:50.099127Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b1680] received request Name# ListStreams ok# false data# peer# 2025-04-04T16:58:50.099342Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b0f80] received request Name# DeleteStream ok# false data# peer# 2025-04-04T16:58:50.099444Z node 5 :GRPC_SERVER DEBUG: [0x51b0004cf980] received request Name# DescribeStream ok# false data# peer# 2025-04-04T16:58:50.099611Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b0880] received request Name# ListShards ok# false data# peer# 2025-04-04T16:58:50.099768Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a2180] received request Name# SetWriteQuota ok# false data# peer# 2025-04-04T16:58:50.099872Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a2f80] received request Name# UpdateStream ok# false data# peer# 2025-04-04T16:58:50.100073Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b0180] received request Name# PutRecord ok# false data# peer# 2025-04-04T16:58:50.100138Z node 5 :GRPC_SERVER DEBUG: [0x51b0004afa80] received request Name# PutRecords ok# false data# peer# 2025-04-04T16:58:50.100397Z node 5 :GRPC_SERVER DEBUG: [0x51b0004af380] received request Name# GetRecords ok# false data# peer# 2025-04-04T16:58:50.100425Z node 5 :GRPC_SERVER DEBUG: [0x51b0004aec80] received request Name# GetShardIterator ok# false data# peer# 2025-04-04T16:58:50.100692Z node 5 :GRPC_SERVER DEBUG: [0x51b00048d880] received request Name# DescribeLimits ok# false data# peer# 2025-04-04T16:58:50.100694Z node 5 :GRPC_SERVER DEBUG: [0x51b00048ed80] received request Name# SubscribeToShard ok# false data# peer# 2025-04-04T16:58:50.100956Z node 5 :GRPC_SERVER DEBUG: [0x51b00048d180] received request Name# DescribeStreamSummary ok# false data# peer# 2025-04-04T16:58:50.101020Z node 5 :GRPC_SERVER DEBUG: [0x51b00048ca80] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-04-04T16:58:50.101220Z node 5 :GRPC_SERVER DEBUG: [0x51b00048c380] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-04-04T16:58:50.101475Z node 5 :GRPC_SERVER DEBUG: [0x51b00048bc80] received request Name# UpdateShardCount ok# false data# peer# 2025-04-04T16:58:50.101736Z node 5 :GRPC_SERVER DEBUG: [0x51b00048b580] received request Name# UpdateStreamMode ok# false data# peer# 2025-04-04T16:58:50.101988Z node 5 :GRPC_SERVER DEBUG: [0x51b00048ae80] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-04-04T16:58:50.102368Z node 5 :GRPC_SERVER DEBUG: [0x51b00048a780] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-04-04T16:58:50.102691Z node 5 :GRPC_SERVER DEBUG: [0x51b00048df80] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-04-04T16:58:50.102895Z node 5 :GRPC_SERVER DEBUG: [0x51b00048a080] received request Name# ListStreamConsumers ok# false data# peer# 2025-04-04T16:58:50.102949Z node 5 :GRPC_SERVER DEBUG: [0x51b000489980] received request Name# AddTagsToStream ok# false data# peer# 2025-04-04T16:58:50.103159Z node 5 :GRPC_SERVER DEBUG: [0x51b000489280] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-04-04T16:58:50.103196Z node 5 :GRPC_SERVER DEBUG: [0x51b000488b80] received request Name# EnableEnhancedMonitoring ok# false 
data# peer# 2025-04-04T16:58:50.103450Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a6780] received request Name# ListTagsForStream ok# false data# peer# 2025-04-04T16:58:50.103491Z node 5 :GRPC_SERVER DEBUG: [0x51b0004abb80] received request Name# MergeShards ok# false data# peer# 2025-04-04T16:58:50.103704Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a6080] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-04T16:58:50.103745Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a5980] received request Name# SplitShard ok# false data# peer# 2025-04-04T16:58:50.103934Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a4480] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-04T16:58:50.103996Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a3680] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-04T16:58:50.104168Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a1a80] received request Name# SelfCheck ok# false data# peer# 2025-04-04T16:58:50.104243Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a0c80] received request Name# NodeCheck ok# false data# peer# 2025-04-04T16:58:50.104404Z node 5 :GRPC_SERVER DEBUG: [0x51b00047ce80] received request Name# CreateSession ok# false data# peer# 2025-04-04T16:58:50.104524Z node 5 :GRPC_SERVER DEBUG: [0x51b000488480] received request Name# DeleteSession ok# false data# peer# 2025-04-04T16:58:50.104656Z node 5 :GRPC_SERVER DEBUG: [0x51b000487d80] received request Name# AttachSession ok# false data# peer# 2025-04-04T16:58:50.104792Z node 5 :GRPC_SERVER DEBUG: [0x51b000486f80] received request Name# BeginTransaction ok# false data# peer# 2025-04-04T16:58:50.104888Z node 5 :GRPC_SERVER DEBUG: [0x51b000486880] received request Name# CommitTransaction ok# false data# peer# 2025-04-04T16:58:50.105051Z node 5 :GRPC_SERVER DEBUG: [0x51b000486180] received request Name# RollbackTransaction ok# false data# peer# 2025-04-04T16:58:50.105131Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a0580] received request Name# ExecuteQuery ok# false data# peer# 2025-04-04T16:58:50.105307Z node 5 :GRPC_SERVER DEBUG: [0x51b00049f080] received request Name# ExecuteScript ok# false data# peer# 2025-04-04T16:58:50.105374Z node 5 :GRPC_SERVER DEBUG: [0x51b00047d580] received request Name# FetchScriptResults ok# false data# peer# 2025-04-04T16:58:50.105558Z node 5 :GRPC_SERVER DEBUG: [0x51b00046a880] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-04T16:58:50.105595Z node 5 :GRPC_SERVER DEBUG: [0x51b00046a180] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-04T16:58:50.105818Z node 5 :GRPC_SERVER DEBUG: [0x51b000469a80] received request Name# RestartTablet ok# false data# peer# 2025-04-04T16:58:50.105861Z node 5 :GRPC_SERVER DEBUG: [0x51b000469380] received request Name# CreateLogStore ok# false data# peer# 2025-04-04T16:58:50.106093Z node 5 :GRPC_SERVER DEBUG: [0x51b000468c80] received request Name# DescribeLogStore ok# false data# peer# 2025-04-04T16:58:50.106100Z node 5 :GRPC_SERVER DEBUG: [0x51b000468580] received request Name# DropLogStore ok# false data# peer# 2025-04-04T16:58:50.106360Z node 5 :GRPC_SERVER DEBUG: [0x51b000467e80] received request Name# AlterLogStore ok# false data# peer# 2025-04-04T16:58:50.106609Z node 5 :GRPC_SERVER DEBUG: [0x51b000467780] received request Name# CreateLogTable ok# false data# peer# 2025-04-04T16:58:50.106888Z node 5 :GRPC_SERVER DEBUG: [0x51b000467080] received request Name# DescribeLogTable ok# false data# peer# 2025-04-04T16:58:50.107141Z node 5 :GRPC_SERVER DEBUG: [0x51b000466980] received 
request Name# DropLogTable ok# false data# peer# 2025-04-04T16:58:50.107404Z node 5 :GRPC_SERVER DEBUG: [0x51b000466280] received request Name# AlterLogTable ok# false data# peer# 2025-04-04T16:58:50.107421Z node 5 :GRPC_SERVER DEBUG: [0x51b000465b80] received request Name# Login ok# false data# peer# 2025-04-04T16:58:50.107701Z node 5 :GRPC_SERVER DEBUG: [0x51b000465480] received request Name# DescribeReplication ok# false data# peer# 2025-04-04T16:58:50.107748Z node 5 :GRPC_SERVER DEBUG: [0x51b000464d80] received request Name# DescribeView ok# false data# peer# >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink >> YdbProxy::OAuthToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: 2025-04-04T16:58:27.519333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:27.520163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:27.520256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c1/r3tmp/tmpg2s6zP/pdisk_1.dat 2025-04-04T16:58:27.947454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:58:27.990736Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:28.031011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:28.031219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:28.043622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:28.138462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:58:28.208850Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2585] 2025-04-04T16:58:28.209291Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:28.260749Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:28.260889Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:28.262895Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:58:28.262998Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:58:28.263055Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:58:28.263481Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:28.263895Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:28.263962Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:712:2585] in generation 1 2025-04-04T16:58:28.266124Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2587] 2025-04-04T16:58:28.266362Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:28.278010Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:28.278512Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2589] 2025-04-04T16:58:28.278739Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:28.288153Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:28.289760Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T16:58:28.289868Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T16:58:28.289917Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T16:58:28.290375Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:28.291713Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:28.291798Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:737:2587] in generation 1 2025-04-04T16:58:28.292446Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:28.292540Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:58:28.294098Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-04T16:58:28.294177Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-04T16:58:28.294267Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-04T16:58:28.295019Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:58:28.295167Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:58:28.295261Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:738:2589] in generation 1 2025-04-04T16:58:28.306505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:28.332813Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:58:28.333065Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:28.333249Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:742:2616] 2025-04-04T16:58:28.333326Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:58:28.333368Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:58:28.333420Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:58:28.333805Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:28.333846Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T16:58:28.333905Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:28.333968Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-04-04T16:58:28.333992Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T16:58:28.334014Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T16:58:28.334060Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T16:58:28.334485Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:58:28.334520Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-04T16:58:28.334569Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:58:28.334695Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-04-04T16:58:28.334719Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T16:58:28.334741Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-04T16:58:28.334764Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T16:58:28.334943Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:58:28.335052Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:58:28.335300Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:58:28.335364Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:28.335421Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:58:28.335466Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:58:28.335520Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T16:58:28.335595Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T16:58:28.335791Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:677:2580], serverId# [1:704:2594], sessionId# [0:0:0] 2025-04-04T16:58:28.335858Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T16:58:28.335893Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:28.335918Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T16:58:28.335950Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T16:58:28.335990Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-04T16:58:28.336047Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-04T16:58:28.336924Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:58:28.337226Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:58:28.337335Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:58:28.338095Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:678:2581], serverId# [1:710:2599], sessionId# [0:0:0] 2025-04-04T16:58:28.338169Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T16:58:28.338205Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:28.338234Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-04T16:58:28.338298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T16:58:28.338568Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T16:58:28.338804Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T16:58:28.338877Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T16:58:28.341148Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:58:28.341247Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T16:58:28.352140Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:58:28.352268Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:58:28.352373Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T16:58:28.352431Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T16:58:28.397212Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:679:2582], serverId# [1:761:2628], sessionId# [0:0:0] 2025-04-04T16:58:28.397470Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T16:58:28.397699Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... 80, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-04T16:58:55.248542Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-04T16:58:55.248737Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-04-04T16:58:55.249085Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:55.249316Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-04T16:58:55.249364Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 1 2025-04-04T16:58:55.249406Z node 3 :TX_DATASHARD DEBUG: Found ready operation [2500:281474976715667] in PlanQueue unit at 72075186224037893 2025-04-04T16:58:55.249617Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037893 loaded tx from db 2500:281474976715667 keys extracted: 0 2025-04-04T16:58:55.249769Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:58:55.263930Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2500} 2025-04-04T16:58:55.264043Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-04T16:58:55.279314Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 2500} 2025-04-04T16:58:55.279446Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T16:58:55.279528Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715667 2025-04-04T16:58:55.279602Z node 3 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T16:58:55.279670Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715667] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1432:3073], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T16:58:55.279810Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 } 2025-04-04T16:58:55.279889Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T16:58:55.280232Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1432:3073] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037891, status# 2 2025-04-04T16:58:55.280819Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2025-04-04T16:58:55.281178Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1236:2955], at tablet# 72075186224037891 2025-04-04T16:58:55.281254Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2025-04-04T16:58:55.281344Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715667 2025-04-04T16:58:55.281445Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2025-04-04T16:58:55.281539Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037893 2025-04-04T16:58:55.281911Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-04T16:58:55.281950Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2025-04-04T16:58:55.281994Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715667] at 72075186224037893 for LoadAndWaitInRS 2025-04-04T16:58:55.282881Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:55.283177Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2025-04-04T16:58:55.303427Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-04T16:58:55.303534Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715667] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1432:3073], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T16:58:55.303631Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2025-04-04T16:58:55.303693Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037893 2025-04-04T16:58:55.303849Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2025-04-04T16:58:55.303883Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2025-04-04T16:58:55.303999Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2025-04-04T16:58:55.304059Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2025-04-04T16:58:55.304203Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1432:3073] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037893, status# 2 2025-04-04T16:58:55.304257Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1432:3073] Reply: txId# 281474976715667, status# OK, error# 2025-04-04T16:58:55.304413Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715667 2025-04-04T16:58:55.304620Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-04-04T16:58:55.304685Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037891 2025-04-04T16:58:55.305000Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T16:58:55.305044Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:55.305083Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-04T16:58:55.305207Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1427:3069], serverId# [3:1428:3070], sessionId# [0:0:0] 2025-04-04T16:58:55.306991Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037893 2025-04-04T16:58:55.307474Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037893 2025-04-04T16:58:55.307750Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-04T16:58:55.307828Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:55.307907Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for WaitForStreamClearance 2025-04-04T16:58:55.308212Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:55.308305Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-04T16:58:55.309062Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2025-04-04T16:58:55.309235Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2025-04-04T16:58:55.418646Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2025-04-04T16:58:55.418760Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037893 2025-04-04T16:58:55.419040Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-04T16:58:55.419093Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:55.419148Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for ReadTableScan 
2025-04-04T16:58:55.419308Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:55.419388Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-04T16:58:55.419445Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-04T16:58:55.420843Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-04-04T16:58:55.421170Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-04-04T16:58:55.421372Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T16:58:55.421411Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:55.421455Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for WaitForStreamClearance 2025-04-04T16:58:55.421665Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:55.421726Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-04T16:58:55.431612Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2025-04-04T16:58:55.431789Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2025-04-04T16:58:55.438622Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-04T16:58:55.438716Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715669, at: 72075186224037892 2025-04-04T16:58:55.438973Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T16:58:55.439020Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-04-04T16:58:55.439069Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for ReadTableScan 2025-04-04T16:58:55.439219Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:58:55.439321Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-04T16:58:55.439374Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |80.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2025-04-04T16:58:52.739864Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] 
oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:58:52.782368Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:58:52.805121Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:58:52.806425Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:58:52.853959Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T16:58:52.911330Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |80.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit [GOOD] >> TServiceAccountServiceTest::Get >> TServiceAccountServiceTest::Get [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-04-04T16:58:46.625044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503527331919739:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:46.821555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cf2/r3tmp/tmpKyvuCp/pdisk_1.dat 2025-04-04T16:58:47.100671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:47.124544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:47.124654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:47.128424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23626 TServer::EnableGrpc on GrpcPort 20455, node 1 2025-04-04T16:58:47.562337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:47.562358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:47.562365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:47.562477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:23626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:48.155213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:48.174960Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:51.741693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503546172011811:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:51.741752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cf2/r3tmp/tmp7X1lIJ/pdisk_1.dat 2025-04-04T16:58:51.937999Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:51.957382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:51.957460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:51.959622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13664 TServer::EnableGrpc on GrpcPort 24530, node 2 2025-04-04T16:58:52.454913Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:52.454939Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:52.454955Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:52.455070Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13664 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:53.084877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:53.093233Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> TDatabaseResolverTests::Ydb_Serverless >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit >> TDatabaseResolverTests::PostgreSQL >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> YdbProxy::CreateCdcStream [GOOD] >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteScheme >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::SelectStringWithNoBase64Encoding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-04-04T16:59:00.359399Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. 
|80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2025-04-04T16:58:47.579784Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503529972602766:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:47.580192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ceb/r3tmp/tmpqJx1g7/pdisk_1.dat 2025-04-04T16:58:48.265703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:48.265799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:48.270467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:48.349733Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21749 TServer::EnableGrpc on GrpcPort 24941, node 1 2025-04-04T16:58:48.791150Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:48.791180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:48.791189Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:48.791317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:49.678155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:58:52.632883Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503529972602766:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:52.633187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:52.661623Z node 1 :TX_PROXY ERROR: Actor# [1:7489503551447439751:2309] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2025-04-04T16:58:52.684488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:52.949632Z node 1 :TX_PROXY ERROR: Actor# [1:7489503551447439839:2369] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:54.101494Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503558224106110:2245];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ceb/r3tmp/tmpUgO7nt/pdisk_1.dat 2025-04-04T16:58:54.328113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:58:54.369629Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:54.422064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:54.422165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:54.423189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9749 TServer::EnableGrpc on GrpcPort 6946, node 2 2025-04-04T16:58:55.037939Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:55.037965Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:55.037973Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:55.038095Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9749 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:55.657139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:55.665202Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:58.796909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:58:59.046541Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503558224106110:2245];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:59.046665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:59.238766Z node 2 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][2:7489503579698943233:2342] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T16:58:59.482898Z node 2 :TX_PROXY ERROR: Actor# [2:7489503579698943305:2460] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-04-04T16:59:01.656337Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
|80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> KqpQueryService::AlterCdcTopic >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> DataShardSnapshots::VolatileSnapshotSplit |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |80.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> DataShardSnapshots::MvccSnapshotTailCleanup >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:58:59.076500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:58:59.076645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:58:59.076710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:58:59.076747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:58:59.076796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:58:59.076823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:58:59.076909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:58:59.076986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:58:59.077298Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2025-04-04T16:58:59.176864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:58:59.176934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:59.193802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:59.194116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:58:59.194368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:58:59.206015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:58:59.206388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:58:59.207248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:59.208318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:58:59.214727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:59.216468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:59.216560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:59.216798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:58:59.216858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:59.216906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:58:59.217065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.225024Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:58:59.373867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:58:59.374192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.374476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:58:59.374783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:58:59.374853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.377328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:59.377518Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:58:59.377755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.377811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:58:59.377850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:58:59.377895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:58:59.380059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.380122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:58:59.380182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:58:59.382092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.382155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.382205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:59.382305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:58:59.386496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:58:59.388687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:58:59.388912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:58:59.390026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:59.390185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:58:59.390259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:59.390630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:58:59.390705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:59.390928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:58:59.391025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:58:59.393475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:59.393523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:59.393730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:59.393772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:58:59.394119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:59.394178Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:58:59.394306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:59.394366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:59.394428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:59.394464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:59.394506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:58:59.394557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:59.394597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:58:59.394634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:58:59.394742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:58:59.394791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:58:59.394832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:58:59.397129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:59.397258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:59.397302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
AT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-04T16:59:03.343522Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-04T16:59:03.343734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T16:59:03.347211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T16:59:03.347607Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:03.347652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T16:59:03.347958Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:03.348001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-04T16:59:03.348597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T16:59:03.348658Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-04T16:59:03.348796Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T16:59:03.348844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T16:59:03.348898Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T16:59:03.348949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T16:59:03.348993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-04T16:59:03.349045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T16:59:03.349111Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T16:59:03.349168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T16:59:03.349355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T16:59:03.349414Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-04T16:59:03.349460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T16:59:03.350711Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T16:59:03.350815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T16:59:03.350852Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T16:59:03.350905Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T16:59:03.350962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T16:59:03.351056Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-04T16:59:03.357404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T16:59:03.366740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T16:59:03.366805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T16:59:03.367361Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T16:59:03.367475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T16:59:03.367517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:760:2671] TestWaitNotification: OK eventTxId 105 2025-04-04T16:59:03.896239Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:03.896574Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 378us result status StatusSuccess 2025-04-04T16:59:03.897256Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" 
ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:03.979052Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:59:03.979427Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 394us result status StatusSuccess 2025-04-04T16:59:03.980091Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: 
"\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 >> Viewer::JsonAutocompleteColumnsPOST [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 2373, MsgBus: 7988 2025-04-04T16:56:57.412968Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503058097879093:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:57.414158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bce/r3tmp/tmpgVX6Ag/pdisk_1.dat 2025-04-04T16:56:58.065720Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:58.093811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:58.093922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:58.099953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2373, node 1 2025-04-04T16:56:58.346956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:58.346984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:58.346997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:58.347179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7988 TClient is connected to server localhost:7988 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:59.093549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:59.110229Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:01.581334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503075277748810:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.581502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.618350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:57:01.775144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503075277748946:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.775248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.775556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503075277748951:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.783547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:57:01.799206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503075277748953:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:57:01.893254Z node 1 :TX_PROXY ERROR: Actor# [1:7489503075277749005:2421] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:02.370461Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503058097879093:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:02.370543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 1 1 Trying to start YDB, gRPC: 13275, MsgBus: 63276 2025-04-04T16:57:04.133629Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503086007531209:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bce/r3tmp/tmp70INQ4/pdisk_1.dat 2025-04-04T16:57:04.332949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:57:04.449470Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:04.487839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:04.487938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:04.499047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13275, node 2 2025-04-04T16:57:04.725401Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:04.725424Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:04.725431Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:04.725545Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63276 TClient is connected to server localhost:63276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:57:05.583598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:57:09.078712Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503086007531209:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:09.078770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:09.407553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503107482368188:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:09.407627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:09.445417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T16:57:09.565859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503107482368319:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:09.565945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:09.566290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503107482368324:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:09.570579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:57:09.597392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503107482368326:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:57:09.653758Z node 2 :TX_PROXY ERROR: Actor# [2:7489503107482368377:2420] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 T ... 9503435925687541:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:30.051519Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:31.703310Z node 10 :KQP_PROXY ERROR: TraceId: "01jr0vtm7cbft3qhkaced4pa2p", Request deadline has expired for 0.292012s seconds (NYdb::V3::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Trying to start YDB, gRPC: 16807, MsgBus: 11164 2025-04-04T16:58:33.070664Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489503466352905113:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:33.123542Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bce/r3tmp/tmpBv8uu4/pdisk_1.dat 2025-04-04T16:58:33.430342Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:33.446689Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:33.446827Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:33.448725Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16807, node 11 2025-04-04T16:58:33.579032Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:33.579064Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:33.579076Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:33.579255Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11164 TClient is connected to server localhost:11164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:58:35.170731Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:35.286482Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:38.045235Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489503466352905113:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:38.045351Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:40.512053Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503500712644020:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:40.512181Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:40.538082Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:58:40.686807Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503500712644126:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:40.686997Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:40.690886Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503500712644131:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:40.697623Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:58:40.724972Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7489503500712644133:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:58:40.822792Z node 11 :TX_PROXY ERROR: Actor# [11:7489503500712644185:2408] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4504, MsgBus: 17623 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bce/r3tmp/tmpj2PqHP/pdisk_1.dat 2025-04-04T16:58:43.542481Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:43.589623Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:43.594835Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:43.594978Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:43.599910Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4504, node 12 2025-04-04T16:58:43.671527Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:43.671559Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:43.671574Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:43.671770Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17623 TClient is connected to server localhost:17623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:44.828471Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:50.553590Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489503544598780010:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.553746Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.585532Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:58:50.723456Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489503544598780115:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.723591Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.723730Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489503544598780120:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.731284Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:58:50.748136Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489503544598780122:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:58:50.827193Z node 12 :TX_PROXY ERROR: Actor# [12:7489503544598780173:2404] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] Test command err: Trying to start YDB, gRPC: 4911, MsgBus: 65481 2025-04-04T16:58:43.853630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503512449651532:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:43.862806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018c4/r3tmp/tmp27gp8a/pdisk_1.dat 2025-04-04T16:58:44.484856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:44.485013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:44.495808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4911, node 1 2025-04-04T16:58:44.627021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:44.730814Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:58:44.890941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:44.890969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:44.890976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:44.891443Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65481 TClient is connected to server localhost:65481 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:45.942715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:58:45.989523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:46.180956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:46.414916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:46.505769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:48.418488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503533924489649:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:48.418633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:48.725606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:58:48.772613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:58:48.820684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:58:48.853208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503512449651532:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:48.853279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:48.877713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:58:49.000956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:58:49.062670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:58:49.137302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503538219457461:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:49.137384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:49.137565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503538219457466:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:49.141959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:58:49.161165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503538219457468:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:58:49.230910Z node 1 :TX_PROXY ERROR: Actor# [1:7489503538219457524:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23038, MsgBus: 11716 2025-04-04T16:58:51.580637Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503548829380281:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:51.580937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018c4/r3tmp/tmpXi6WR2/pdisk_1.dat 2025-04-04T16:58:51.800654Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:51.828549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:51.828649Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:51.830448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23038, node 2 2025-04-04T16:58:52.017070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:52.017093Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:52.017101Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:52.017204Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11716 TClient is connected to server localhost:11716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:58:52.753176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:58:52.761679Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:52.776392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:52.878395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:53.249999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:53.394012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:56.442419Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503570304218389:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:56.442532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:56.532754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:58:56.581583Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503548829380281:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:56.581732Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:56.623145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:58:56.700411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:58:56.784867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:58:56.866993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:58:57.003876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:58:57.111977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503574599186209:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:57.112054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:57.112391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503574599186214:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:57.116363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:58:57.143231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503574599186216:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:58:57.221157Z node 2 :TX_PROXY ERROR: Actor# [2:7489503574599186272:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:58.619543Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdlOTIwNjUtZTE1NTExYzktMTYzMWE5MTktNDEzOTkwMzI=, ActorId: [2:7489503578894153839:2493], ActorState: ReadyState, TraceId: 01jr0vvknf70p9ncz55pj9cgpy, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 3553, MsgBus: 16948 2025-04-04T16:58:59.686732Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503582297244591:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:59.686803Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018c4/r3tmp/tmpHblbtC/pdisk_1.dat 2025-04-04T16:58:59.822488Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:59.851210Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:59.851305Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:59.853245Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3553, node 3 2025-04-04T16:58:59.961769Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:59.961798Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:59.961807Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:59.961947Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16948 TClient is connected to server localhost:16948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:59:00.538062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:03.833206Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489503599477114438:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:03.833297Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:03.833697Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489503599477114450:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:03.837679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:59:03.850265Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489503599477114452:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:59:03.921287Z node 3 :TX_PROXY ERROR: Actor# [3:7489503599477114503:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:03.949127Z node 3 :TX_PROXY ERROR: Actor# [3:7489503599477114534:2348] txid# 281474976715660, issues: { message: "Type \'TzTimestamp\' specified for column \'payload\' is not supported by storage" severity: 1 } 2025-04-04T16:59:03.949520Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzBiMTUyOWMtYTE1NjhhZjgtYjBmNTY1MTMtMzk3MzQ1MjE=, ActorId: [3:7489503599477114436:2330], ActorState: ExecuteState, TraceId: 01jr0vvnjp85jygysw38dx68rb, Create QueryResponse for error on request, msg:
>> StoragePool::TestDistributionRandomMin7p [GOOD]
>> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD]
Test command err: 2025-04-04T16:58:14.573708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:14.574016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:14.574066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 9147, node 1 TClient is connected to server localhost:14261 2025-04-04T16:58:23.424537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:336:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:23.424715Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:23.424922Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19641, node 2 TClient is connected to server localhost:3671 2025-04-04T16:58:34.269474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:34.269692Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:34.269996Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 1285, node 3 TClient is connected to server localhost:24827 2025-04-04T16:58:47.148309Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:310:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:47.149078Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:47.149450Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 63366, node 4 TClient is connected to server localhost:12921 2025-04-04T16:59:01.443086Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:336:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:01.443295Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:01.443551Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21984, node 5 TClient is connected to server localhost:3640
|80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD]
Test command err: 2025-04-04T16:58:57.690785Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503571338113022:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:57.690842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ad4/r3tmp/tmpAh0S2o/pdisk_1.dat 2025-04-04T16:58:58.674944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:58.675101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:58.743130Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:58.752166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:58.758433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:21895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:59.113934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ad4/r3tmp/tmpuz4A5i/pdisk_1.dat 2025-04-04T16:59:02.279053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:02.287185Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:02.314547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:02.314622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:02.316307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:59:02.523562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:59:02.533006Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
>> DataShardSnapshots::MvccSnapshotAndSplit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD]
Test command err: Took 16.080098 seconds
>> THiveTest::TestCreateTabletReboots [GOOD]
>> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk
>> KqpPg::SelectIndex+useSink [GOOD]
>> KqpPg::SelectIndex-useSink
>> TStorageTenantTest::CreateTableInsideSubDomain
|80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart
|80.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart
>> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD]
>> THiveTest::TestCreateTabletAndReassignGroupsWithReboots
|80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart
>> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39
>> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain
>> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD]
>> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink
>> DemoTx::Scenario_1 [GOOD]
>> TSubDomainTest::StartAndStopTenanNode
>> TSubDomainTest::UserAttributes
>> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD]
>> KqpPg::InsertNoTargetColumns_Alter+useSink
>> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD]
>> DataShardSnapshots::UncommittedChangesRenameTable-UseSink
>> DataShardSnapshots::VolatileSnapshotSplit [GOOD]
>> DataShardSnapshots::VolatileSnapshotMerge
>> THiveTest::TestHiveBalancerWithSpareNodes [GOOD]
>> KqpQueryService::AlterCdcTopic [GOOD]
>> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD]
>> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink
>> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD]
>> DemoTx::Scenario_2
>> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD]
>> Viewer::JsonAutocompleteScheme [GOOD]
>> Viewer::JsonAutocompleteSchemePOST
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD]
Test command err: Trying to start YDB, gRPC: 19878, MsgBus: 65503 2025-04-04T16:58:40.549393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503500374060639:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:40.549442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018c7/r3tmp/tmpmptsuR/pdisk_1.dat 2025-04-04T16:58:41.176180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:41.208946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:41.209056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:58:41.220848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19878, node 1 2025-04-04T16:58:41.581344Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:41.581386Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:41.581395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:41.581551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65503 TClient is connected to server localhost:65503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:42.624481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:42.650958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:42.868031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:43.070385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:43.169421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:45.779284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503521848898677:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:45.779698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:45.779770Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503500374060639:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:45.779925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:45.849827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:58:45.892443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:58:45.964124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:58:46.012190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:58:46.061375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:58:46.135799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:58:46.250308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503526143866500:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:46.250434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:46.250927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503526143866505:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:46.255791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:58:46.274332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503526143866507:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:58:46.358467Z node 1 :TX_PROXY ERROR: Actor# [1:7489503526143866561:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:47.544752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:58:47.650732Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489503530438834189:2503], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-04T16:58:47.652464Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWUxMmVjOTMtNDJkNTEyOTItMmZkZDBhZTAtYjMyOGRiN2M=, ActorId: [1:7489503530438834119:2493], ActorState: ExecuteState, TraceId: 01jr0vv8xv3q7f95w9ygqet52v, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 2753, MsgBus: 28398 2025-04-04T16:58:49.027341Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503532552871166:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:49.027388Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018c7/r3tmp/tmpE0Kq6e/pdisk_1.dat 2025-04-04T16:58:49.401598Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:49.458790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:49.458886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:49.465044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2753, node 2 2025-04-04T16:58:49.670544Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:49.670570Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:49.670578Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:49.670716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28398 TClient is connected to server localhost:28398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:50.285002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSub ... 5-04-04T16:59:01.310799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:59:01.329371Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489503591459306069:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:59:01.388612Z node 3 :TX_PROXY ERROR: Actor# [3:7489503591459306120:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:01.424994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-04T16:59:01.527020Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489503569984469079:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:01.527119Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:59:01.794698Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489503591459306274:2358], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:65: Error: Failed to convert type: Struct<'id':Int32,'val':Null> to Struct<'id':Int32,'val':Int32>
:2:65: Error: Failed to convert 'val': Null to Int32
:2:65: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T16:59:01.796372Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmUwMmU3MGMtM2E5MmUzYmQtZjEzZmJkZWMtZGMzYWQ1NTg=, ActorId: [3:7489503591459306272:2357], ActorState: ExecuteState, TraceId: 01jr0vvprhcmmq19941gq55z7n, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T16:59:01.831922Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8457, MsgBus: 6176 2025-04-04T16:59:03.260581Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503599206883262:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:03.261324Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018c7/r3tmp/tmpmGZwwy/pdisk_1.dat 2025-04-04T16:59:03.377846Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:03.403972Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:03.404084Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:03.407106Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8457, node 4 2025-04-04T16:59:03.478828Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:03.478853Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:03.478861Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:03.478992Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6176 TClient is connected to server localhost:6176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:59:04.025954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:04.049903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:04.164697Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:04.406644Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:04.508368Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:07.244461Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503616386754200:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:07.244595Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:07.277795Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.316978Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.401259Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.445981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.484827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.531165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.717367Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503616386754726:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:07.717536Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:07.717978Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503616386754731:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:07.724593Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T16:59:07.738713Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489503616386754733:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T16:59:07.834130Z node 4 :TX_PROXY ERROR: Actor# [4:7489503616386754790:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:08.287576Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489503599206883262:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:08.287699Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:59:09.011145Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T16:59:09.232064Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037919:1][4:7489503624976689852:2513] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:18:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T16:59:09.410995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T16:59:09.480603Z node 4 :TX_PROXY ERROR: Actor# [4:7489503624976690044:3897] txid# 281474976715674, issues: { message: "Cannot change partition count. Use split/merge instead" severity: 1 } 2025-04-04T16:59:09.481158Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2VlODgxNzMtNjFkZjk1MmEtYmYxNGEyNjgtNzQzNjRkZjI=, ActorId: [4:7489503624976689953:2529], ActorState: ExecuteState, TraceId: 01jr0vvy8jfpc2qq7brehprfjs, Create QueryResponse for error on request, msg: Query failed, status: BAD_REQUEST:
: Error: Cannot change partition count. Use split/merge instead, code: 2017
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD]
Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T16:55:25.172673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:25.172749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:25.172854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:25.172890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:25.172930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:25.172960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:25.173047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:25.173124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:25.173398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:25.257613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:25.257662Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:25.272970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:25.273374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:25.273560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:25.280195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:25.280365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:25.280931Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:25.281505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:25.284343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:25.285556Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:25.285618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:25.285788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:25.285834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:25.285869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:25.285981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:25.292340Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:25.403969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:25.404184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:25.404388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:25.404621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:25.404680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:25.406912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:25.407084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:25.407307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:25.407371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:25.407404Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:25.407452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:25.412827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:25.412889Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:25.412926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:25.416676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:25.416756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:25.416796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:25.416872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:25.420743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:25.424905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:25.425123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:25.426134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:25.426303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:25.426362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:25.426631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:25.426686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:25.426881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:25.426970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:25.429862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-04T16:55:25.429930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:25.430139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:25.430186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:25.430676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:25.430724Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:25.430866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:25.430928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:25.430973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:25.431005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:25.431047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:25.431086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:25.431130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... nBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:10.334687Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:59:10.334993Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 362us result status StatusSuccess 2025-04-04T16:59:10.335930Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:10.347210Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:794:2609] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T16:59:10.347339Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:716:2609] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T16:59:10.347552Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:794:2609] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743785950313121 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743785950313121 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743785950313121 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:59:10.350821Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:794:2609] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-04T16:59:10.350960Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:716:2609] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD]
Test command err: 2025-04-04T16:57:59.568346Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.572168Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1
PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.572388Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:59.573343Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:153:2076] ControllerId# 72057594037932033 2025-04-04T16:57:59.573389Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.573494Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.573867Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:59.576109Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.576173Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.578363Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:152:2075] Create Queue# [5:159:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.578525Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:152:2075] Create Queue# [5:160:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.578700Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:152:2075] Create Queue# [5:161:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.578903Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:152:2075] Create Queue# [5:162:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.579074Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:152:2075] Create Queue# [5:163:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.579222Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:152:2075] Create Queue# [5:164:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.579369Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:152:2075] Create Queue# [5:165:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.579401Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:59.579483Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:153:2076] 2025-04-04T16:57:59.579550Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:153:2076] 2025-04-04T16:57:59.579604Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:59.579658Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:59.580232Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:59.580330Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.583091Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.583245Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:59.584085Z node 6 :BS_NODE DEBUG: 
{NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:172:2076] ControllerId# 72057594037932033 2025-04-04T16:57:59.584131Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.584199Z node 6 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.584378Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:59.585262Z node 6 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.585313Z node 6 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.587121Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:171:2075] Create Queue# [6:178:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.587280Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:171:2075] Create Queue# [6:179:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.587479Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:171:2075] Create Queue# [6:180:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.587619Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:171:2075] Create Queue# [6:181:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.587783Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:171:2075] Create Queue# [6:182:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.587941Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:171:2075] Create Queue# [6:183:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.588092Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:171:2075] Create Queue# [6:184:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.588121Z node 6 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:59.588188Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [6:172:2076] 2025-04-04T16:57:59.588217Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [6:172:2076] 2025-04-04T16:57:59.588254Z node 6 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:59.588292Z node 6 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:59.588683Z node 6 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:59.588924Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.591651Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.591852Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:57:59.592519Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:57:59.593691Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} 
StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:57:59.593776Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:59.594708Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:194:2079] ControllerId# 72057594037932033 2025-04-04T16:57:59.594747Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.594820Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.595055Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:59.603320Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:145:2068] 2025-04-04T16:57:59.603378Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:145:2068] 2025-04-04T16:57:59.613888Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.613940Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.615828Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:193:2078] Create Queue# [1:201:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.615979Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:193:2078] Create Queue# [1:202:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.616135Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:193:2078] Create Queue# [1:203:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.616290Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:193:2078] Create Queue# [1:204:2087] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.616417Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:193:2078] Create Queue# [1:205:2088] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.616581Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:193:2078] Create Queue# [1:206:2089] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.616710Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:193:2078] Create Queue# [1:207:2090] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:59.616736Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:59.616809Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:194:2079] 2025-04-04T16:57:59.616862Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:194:2079] 2025-04-04T16:57:59.616909Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:59.616947Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:59.617722Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:59.617855Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:59.620810Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:59.620942Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} 
StartLocalProxy GroupId# 0 2025-04-04T16:57:59.621892Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:213:2076] ControllerId# 72057594037932033 2025-04-04T16:57:59.621936Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:59.622005Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:59.622180Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:59.623084Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:59.623125Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:59.624898Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:212:2075] Create Queue# [2:219:2080] ta ... 566Z node 60 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037892 CurrentLeader: [65:1937:2263] CurrentLeaderTablet: [65:1943:2266] CurrentGeneration: 3 CurrentStep: 0} 2025-04-04T16:59:08.866622Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037892 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037892 Cookie: 0 CurrentLeader: [65:1937:2263] CurrentLeaderTablet: [65:1943:2266] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2025-04-04T16:59:08.866668Z node 60 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037892 followers: 0 2025-04-04T16:59:08.866711Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [65:1937:2263] 2025-04-04T16:59:08.866810Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037892] forward result remote node 65 [60:2066:2735] 2025-04-04T16:59:08.866971Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037892] remote node connected [60:2066:2735] 2025-04-04T16:59:08.867024Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037892]::SendEvent [60:2066:2735] 2025-04-04T16:59:08.867275Z node 65 :PIPE_SERVER DEBUG: [72075186224037892] Accept Connect Originator# [60:2066:2735] 2025-04-04T16:59:08.867828Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037892] connected with status OK role: Leader [60:2066:2735] 2025-04-04T16:59:08.867883Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037892] send queued [60:2066:2735] 2025-04-04T16:59:08.869197Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [60:2070:2737] 2025-04-04T16:59:08.869237Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [60:2070:2737] 2025-04-04T16:59:08.869292Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:59:08.869337Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [65:1287:2097] 2025-04-04T16:59:08.869441Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result remote node 65 [60:2070:2737] 2025-04-04T16:59:08.869545Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037893] remote node connected [60:2070:2737] 2025-04-04T16:59:08.869608Z node 60 
:PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [60:2070:2737] 2025-04-04T16:59:08.869814Z node 65 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [60:2070:2737] 2025-04-04T16:59:08.870174Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [60:2070:2737] 2025-04-04T16:59:08.870233Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [60:2070:2737] 2025-04-04T16:59:08.871433Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [60:2073:2739] 2025-04-04T16:59:08.871474Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [60:2073:2739] 2025-04-04T16:59:08.871531Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:59:08.871572Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [64:1291:2098] 2025-04-04T16:59:08.871660Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 64 [60:2073:2739] 2025-04-04T16:59:08.871792Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [60:2073:2739] 2025-04-04T16:59:08.871829Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [60:2073:2739] 2025-04-04T16:59:08.872207Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connect request undelivered [60:2073:2739] 2025-04-04T16:59:08.872248Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] immediate retry [60:2073:2739] 2025-04-04T16:59:08.872277Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [60:2073:2739] 2025-04-04T16:59:08.872327Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal 2025-04-04T16:59:08.872463Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:59:08.872547Z node 60 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:59:08.872705Z node 60 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-04-04T16:59:08.872759Z node 60 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-04-04T16:59:08.872799Z node 60 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-04-04T16:59:08.872849Z node 60 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [65:1939:2264] CurrentLeaderTablet: [65:1945:2267] CurrentGeneration: 3 CurrentStep: 0} 2025-04-04T16:59:08.872937Z node 60 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [65:1939:2264] CurrentLeaderTablet: [65:1945:2267] CurrentGeneration: 3 CurrentStep: 0} 2025-04-04T16:59:08.873013Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [65:1939:2264] CurrentLeaderTablet: [65:1945:2267] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 
2025-04-04T16:59:08.873066Z node 60 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-04-04T16:59:08.873113Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [65:1939:2264] 2025-04-04T16:59:08.873356Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 65 [60:2073:2739] 2025-04-04T16:59:08.873490Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [60:2073:2739] 2025-04-04T16:59:08.873530Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [60:2073:2739] 2025-04-04T16:59:08.873814Z node 65 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [60:2073:2739] 2025-04-04T16:59:08.874174Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [60:2073:2739] 2025-04-04T16:59:08.874206Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [60:2073:2739] 2025-04-04T16:59:08.875059Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [60:2077:2741] 2025-04-04T16:59:08.875091Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [60:2077:2741] 2025-04-04T16:59:08.875131Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:59:08.875161Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [65:1783:2191] 2025-04-04T16:59:08.875217Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result remote node 65 [60:2077:2741] 2025-04-04T16:59:08.875274Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037895] remote node connected [60:2077:2741] 2025-04-04T16:59:08.875302Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [60:2077:2741] 2025-04-04T16:59:08.875467Z node 65 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [60:2077:2741] 2025-04-04T16:59:08.875673Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [60:2077:2741] 2025-04-04T16:59:08.875697Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [60:2077:2741] 2025-04-04T16:59:08.876435Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [60:2080:2743] 2025-04-04T16:59:08.876470Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [60:2080:2743] 2025-04-04T16:59:08.876519Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:59:08.876561Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [65:1785:2193] 2025-04-04T16:59:08.876641Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result remote node 65 [60:2080:2743] 2025-04-04T16:59:08.876730Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037896] remote node connected [60:2080:2743] 2025-04-04T16:59:08.876773Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [60:2080:2743] 2025-04-04T16:59:08.876944Z node 65 :PIPE_SERVER DEBUG: [72075186224037896] Accept 
Connect Originator# [60:2080:2743] 2025-04-04T16:59:08.877243Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [60:2080:2743] 2025-04-04T16:59:08.877281Z node 60 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [60:2080:2743] 2025-04-04T16:59:08.878721Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [60:2082:2744] 2025-04-04T16:59:08.878785Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [60:2082:2744] 2025-04-04T16:59:08.878882Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:59:08.878968Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [60:587:2273] 2025-04-04T16:59:08.879086Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [60:2082:2744] 2025-04-04T16:59:08.879202Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [60:2082:2744] 2025-04-04T16:59:08.879292Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [60:2082:2744] 2025-04-04T16:59:08.879372Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:2082:2744] 2025-04-04T16:59:08.879544Z node 60 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [60:2082:2744] 2025-04-04T16:59:08.879852Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [60:2082:2744] 2025-04-04T16:59:08.879924Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [60:2082:2744] 2025-04-04T16:59:08.879983Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [60:2082:2744] 2025-04-04T16:59:08.880055Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [60:2082:2744] 2025-04-04T16:59:08.880107Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [60:2082:2744] 2025-04-04T16:59:08.880196Z node 60 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [60:559:2268] EventType# 268697616
>> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD]
>> THiveTest::TestCreateTabletChangeToExternal
>> TSubDomainTest::UserAttributes [GOOD]
>> TSubDomainTest::UserAttributesApplyIf
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD]
Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058]
recipient: [1:108:2140] 2025-04-04T16:55:18.040506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:18.040610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:18.040733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:18.040776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:18.040825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:18.040865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:18.040937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:18.041019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:18.041358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:18.146388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:18.146455Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:18.168464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:18.168972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:18.169167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:18.195581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:18.195817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:18.196577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:18.198183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:18.206213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:18.207886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:18.207985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:18.208215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:18.208273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:18.208314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:18.208452Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:18.218104Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:18.392890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:18.393162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.393426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:18.393675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:18.393738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.407609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:18.407811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:18.408086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.408165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:18.408210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:18.408266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:18.413247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.413337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:18.413386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:18.419685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.419767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.419818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:18.419899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-04-04T16:55:18.432753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:18.439402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:18.439654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:18.440759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:18.440942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:18.440998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:18.441310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:18.441379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:18.442009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:18.442156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:18.445246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:18.445319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:18.445545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:18.445593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:18.446092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.446148Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:18.446345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:18.446420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:18.446475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:18.446518Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:18.446560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:18.446602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:18.446669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... me: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 
72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 
PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:11.158967Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:59:11.159302Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 1.07ms result status StatusSuccess 2025-04-04T16:59:11.160305Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: 
"compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> TSubDomainTest::Boot >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestExternalBoot >> TSubDomainTest::LsLs >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> BasicUsage::ReadSessionCorrectClose [GOOD] >> BasicUsage::ConflictingWrites >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> Viewer::TabletMerging [GOOD] >> Viewer::TabletMergingPacked >> THiveTest::TestExternalBoot [GOOD] >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-04T16:59:08.597059Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503621120986773:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:08.597442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f90/r3tmp/tmp4gK6of/pdisk_1.dat 2025-04-04T16:59:09.238938Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:09.252142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:09.252243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:09.262230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10107 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T16:59:09.524726Z node 1 :TX_PROXY DEBUG: actor# [1:7489503621120986883:2139] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:09.524793Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503625415954614:2437] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:09.524924Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503621120986909:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:09.524969Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503621120986909:2153], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:09.525166Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:09.527275Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986528:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503625415954620:2438] 2025-04-04T16:59:09.527308Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986525:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503625415954619:2438] 2025-04-04T16:59:09.527368Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503621120986525:2051] Subscribe: subscriber# [1:7489503625415954619:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:09.527373Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503621120986528:2054] Subscribe: subscriber# [1:7489503625415954620:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:09.527432Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986531:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503625415954621:2438] 2025-04-04T16:59:09.527458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954619:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503621120986525:2051] 2025-04-04T16:59:09.527473Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503621120986531:2057] Subscribe: subscriber# 
[1:7489503625415954621:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:09.527495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954620:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503621120986528:2054] 2025-04-04T16:59:09.527535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986525:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503625415954619:2438] 2025-04-04T16:59:09.527567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954621:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503621120986531:2057] 2025-04-04T16:59:09.527566Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986528:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503625415954620:2438] 2025-04-04T16:59:09.527584Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986531:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503625415954621:2438] 2025-04-04T16:59:09.527613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503625415954616:2438] 2025-04-04T16:59:09.527661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503625415954617:2438] 2025-04-04T16:59:09.527726Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503625415954615:2438][/dc-1] Set up state: owner# [1:7489503621120986909:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:09.527866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503625415954618:2438] 2025-04-04T16:59:09.527911Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503625415954615:2438][/dc-1] Path was already updated: owner# [1:7489503621120986909:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:09.527956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954619:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503625415954616:2438], cookie# 1 2025-04-04T16:59:09.527974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954620:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503625415954617:2438], cookie# 1 2025-04-04T16:59:09.527990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954621:2438][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503625415954618:2438], cookie# 1 2025-04-04T16:59:09.528592Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986525:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503625415954619:2438], cookie# 1 2025-04-04T16:59:09.528625Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986528:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503625415954620:2438], cookie# 1 2025-04-04T16:59:09.528642Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986531:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503625415954621:2438], cookie# 1 2025-04-04T16:59:09.528669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954619:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503621120986525:2051], cookie# 1 2025-04-04T16:59:09.528684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954620:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503621120986528:2054], cookie# 1 2025-04-04T16:59:09.528699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503625415954621:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503621120986531:2057], cookie# 1 2025-04-04T16:59:09.528742Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503625415954616:2438], cookie# 1 2025-04-04T16:59:09.528766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:09.528792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503625415954617:2438], cookie# 1 2025-04-04T16:59:09.528812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:09.528833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503625415954618:2438], cookie# 1 2025-04-04T16:59:09.528848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503625415954615:2438][/dc-1] Unexpected sync response: sender# [1:7489503625415954618:2438], cookie# 1 2025-04-04T16:59:09.603928Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503621120986909:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { 
Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:09.604248Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503621120986909:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... ators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_USER_0_kind_storage-pool-number-1" Kind: "storage-pool-number-1" } StoragePools { Name: "name_USER_0_kind_storage-pool-number-2" Kind: "storage-pool-number-2" } StoragePools { Name: "name_USER_0_kind_test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [3:7489503631428453503:2355] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:11.439782Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503627133485825:2107], cacheItem# { Subscriber: { Subscriber: [3:7489503631428453503:2355] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743785949941 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:11.440376Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503631428453510:2356], recipient# [3:7489503631428453502:2354], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: 
KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:59:11.440476Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489503627133485825:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:11.440519Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7489503627133485825:2107], path# /dc-1/USER_0/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-04-04T16:59:11.440729Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489503631428453511:2357][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:11.442013Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489503631428453515:2357][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7489503621120986525:2051] 2025-04-04T16:59:11.442047Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489503631428453516:2357][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7489503621120986528:2054] 2025-04-04T16:59:11.442079Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489503631428453511:2357][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7489503631428453512:2357] 2025-04-04T16:59:11.442137Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489503631428453511:2357][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7489503631428453513:2357] 2025-04-04T16:59:11.442165Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489503631428453511:2357][/dc-1/USER_0/.metadata/initialization/migrations] Set up state: owner# [3:7489503627133485825:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:11.442207Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489503627133485825:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 } 2025-04-04T16:59:11.442266Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489503627133485825:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489503631428453511:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:11.442335Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503627133485825:2107], cacheItem# { Subscriber: { Subscriber: [3:7489503631428453511:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:11.442347Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489503631428453517:2357][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [1:7489503621120986531:2057] 2025-04-04T16:59:11.442382Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489503631428453511:2357][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7489503631428453514:2357] 2025-04-04T16:59:11.443098Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489503631428453511:2357][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7489503627133485825:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:11.443134Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503631428453518:2358], recipient# [3:7489503631428453501:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:11.443232Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503621120986531:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489503631428453517:2357] 2025-04-04T16:59:12.450417Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489503627133485825:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:12.450574Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503627133485825:2107], cacheItem# { Subscriber: { Subscriber: [3:7489503631428453511:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:12.450688Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503635723420816:2359], recipient# [3:7489503635723420815:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:13.458485Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489503627133485825:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:13.458606Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503627133485825:2107], cacheItem# { Subscriber: { Subscriber: [3:7489503631428453511:2357] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:13.458713Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503640018388114:2360], recipient# [3:7489503640018388113:2326], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD]
>> DataShardSnapshots::ShardRestartWholeShardLockBasic
>> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed
>> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink
>> DataShardSnapshots::VolatileSnapshotMerge [GOOD]
>> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate
>> KqpPg::SelectIndex-useSink [GOOD]
>> KqpPg::TableDeleteAllData+useSink
>> TSubDomainTest::UserAttributesApplyIf [GOOD]
>> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit [GOOD]
>> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD]
>> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed
|80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
>> Viewer::TabletMergingPacked [GOOD]
>> Viewer::VDiskMerging
|80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
|80.1%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut
>> TSubDomainTest::Boot [GOOD]
>> TSubDomainTest::CheckAccessCopyTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD]
Test command err:
2025-04-04T16:59:09.649107Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503623376831225:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:09.658267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160f/r3tmp/tmpkVzbh9/pdisk_1.dat 2025-04-04T16:59:10.089519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:10.089667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:10.092069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:10.112241Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:16684 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T16:59:10.305825Z node 1 :TX_PROXY DEBUG: actor# [1:7489503623376831327:2099] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:10.305892Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503627671798917:2257] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:10.306022Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503623376831351:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:10.306069Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503623376831351:2113], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:10.308625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:10.313981Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831038:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503627671798922:2258] 2025-04-04T16:59:10.314070Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503623376831038:2049] Subscribe: subscriber# [1:7489503627671798922:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.314199Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831041:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503627671798923:2258] 2025-04-04T16:59:10.314220Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503623376831041:2052] Subscribe: subscriber# [1:7489503627671798923:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.314263Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831044:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503627671798924:2258] 2025-04-04T16:59:10.314299Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503623376831044:2055] Subscribe: subscriber#
[1:7489503627671798924:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.314392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798922:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503623376831038:2049] 2025-04-04T16:59:10.314436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798923:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503623376831041:2052] 2025-04-04T16:59:10.314460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798924:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503623376831044:2055] 2025-04-04T16:59:10.314502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503627671798919:2258] 2025-04-04T16:59:10.314536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503627671798920:2258] 2025-04-04T16:59:10.314603Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503627671798918:2258][/dc-1] Set up state: owner# [1:7489503623376831351:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:10.314748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503627671798921:2258] 2025-04-04T16:59:10.314794Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503627671798918:2258][/dc-1] Path was already updated: owner# [1:7489503623376831351:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:10.314833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798922:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503627671798919:2258], cookie# 1 2025-04-04T16:59:10.314849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798923:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503627671798920:2258], cookie# 1 2025-04-04T16:59:10.314884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798924:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503627671798921:2258], cookie# 1 2025-04-04T16:59:10.314924Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831038:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503627671798922:2258] 2025-04-04T16:59:10.314947Z node 
1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831038:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503627671798922:2258], cookie# 1 2025-04-04T16:59:10.314967Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831041:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503627671798923:2258] 2025-04-04T16:59:10.315005Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831041:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503627671798923:2258], cookie# 1 2025-04-04T16:59:10.315024Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831044:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503627671798924:2258] 2025-04-04T16:59:10.315037Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503623376831044:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503627671798924:2258], cookie# 1 2025-04-04T16:59:10.318514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798922:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503623376831038:2049], cookie# 1 2025-04-04T16:59:10.318607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798923:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503623376831041:2052], cookie# 1 2025-04-04T16:59:10.318637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503627671798924:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503623376831044:2055], cookie# 1 2025-04-04T16:59:10.318700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503627671798919:2258], cookie# 1 2025-04-04T16:59:10.318758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:10.318783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503627671798920:2258], cookie# 1 2025-04-04T16:59:10.318819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:10.318845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503627671798921:2258], cookie# 1 2025-04-04T16:59:10.318860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503627671798918:2258][/dc-1] Unexpected sync response: sender# [1:7489503627671798921:2258], cookie# 1 2025-04-04T16:59:10.382860Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503623376831351:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:10.383335Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503623376831351:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... ATOR NOTICE: [2:7489503643544633684:2241] Ack update: ack to# [2:7489503643544633504:2138], cookie# 281474976715662, pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 6 2025-04-04T16:59:13.874722Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7489503643544633123:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 6 }: sender# [2:7489503643544633762:2299] 2025-04-04T16:59:13.874731Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: [2:7489503643544633684:2241] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7489503643544633686:2243], cookie# 281474976715662 2025-04-04T16:59:13.874740Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: [2:7489503643544633684:2241] Ack for unknown update (already acked?): sender# [2:7489503643544633686:2243], cookie# 281474976715662 2025-04-04T16:59:13.874807Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-04T16:59:13.874857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-04T16:59:13.874865Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-04T16:59:13.874877Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-04T16:59:13.874898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T16:59:13.874993Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 0 2025-04-04T16:59:13.875208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-04-04T16:59:13.875429Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-04-04T16:59:13.939881Z node 2 :TX_PROXY DEBUG: actor# [2:7489503643544633418:2101] Handle TEvNavigate describe path /dc-1/USER_0 2025-04-04T16:59:13.939965Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503643544633826:2352] HANDLE EvNavigateScheme /dc-1/USER_0 2025-04-04T16:59:13.940114Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489503643544633442:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:13.940272Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503643544633757:2299][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7489503643544633442:2115], cookie# 10 2025-04-04T16:59:13.940344Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503643544633761:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7489503643544633758:2299], cookie# 10 2025-04-04T16:59:13.940376Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503643544633762:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7489503643544633759:2299], cookie# 10 2025-04-04T16:59:13.940420Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503643544633763:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7489503643544633760:2299], cookie# 10 2025-04-04T16:59:13.940471Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7489503643544633126:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7489503643544633763:2299], cookie# 10 2025-04-04T16:59:13.940524Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503643544633763:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7489503643544633126:2055], cookie# 10 2025-04-04T16:59:13.940554Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503643544633757:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7489503643544633760:2299], cookie# 10 2025-04-04T16:59:13.940580Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503643544633757:2299][/dc-1/USER_0] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:13.940606Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7489503643544633120:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7489503643544633761:2299], cookie# 10 2025-04-04T16:59:13.940655Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7489503643544633123:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7489503643544633762:2299], cookie# 10 2025-04-04T16:59:13.940687Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503643544633761:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 
Partial: 0 }: sender# [2:7489503643544633120:2049], cookie# 10 2025-04-04T16:59:13.940700Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503643544633762:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7489503643544633123:2052], cookie# 10 2025-04-04T16:59:13.940718Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503643544633757:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7489503643544633758:2299], cookie# 10 2025-04-04T16:59:13.940750Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503643544633757:2299][/dc-1/USER_0] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:13.940779Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503643544633757:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7489503643544633759:2299], cookie# 10 2025-04-04T16:59:13.940797Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503643544633757:2299][/dc-1/USER_0] Unexpected sync response: sender# [2:7489503643544633759:2299], cookie# 10 2025-04-04T16:59:13.940845Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7489503643544633442:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-04T16:59:13.940940Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7489503643544633442:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7489503643544633757:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743785953721 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:13.941050Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489503643544633442:2115], cacheItem# { Subscriber: { Subscriber: [2:7489503643544633757:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743785953721 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-04-04T16:59:13.941333Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489503643544633827:2353], recipient# [2:7489503643544633826:2352], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:59:13.941385Z node 2 :TX_PROXY DEBUG: Actor# 
[2:7489503643544633826:2352] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:13.941452Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503643544633826:2352] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-04-04T16:59:13.942325Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503643544633826:2352] Handle TEvDescribeSchemeResult Forward to# [2:7489503643544633825:2351] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743785953721 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743785953721 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Processing... 
(TRUNCATED)
>> TSubDomainTest::LsLs [GOOD]
>> TSubDomainTest::LsAltered
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBoot [GOOD]
Test command err:
2025-04-04T16:57:58.881572Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:58.884618Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:58.884827Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:57:58.885262Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:57:58.886078Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:57:58.886136Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:58.886865Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:72:2075] ControllerId# 72057594037932033 2025-04-04T16:57:58.886911Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:58.887019Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:58.887298Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:58.903337Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:58.903411Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:58.905028Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:80:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.905179Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:81:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.905292Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:82:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.905482Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:83:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.905603Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:84:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.905734Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:85:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.905907Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:71:2074] Create Queue# [1:86:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.905927Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:58.905995Z
node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:72:2075] 2025-04-04T16:57:58.906033Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:72:2075] 2025-04-04T16:57:58.906076Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:58.906115Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:58.906863Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:58.906966Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:58.909114Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:58.909227Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:58.909863Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:94:2073] ControllerId# 72057594037932033 2025-04-04T16:57:58.909894Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:58.909952Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:58.910078Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:58.910908Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:58.910990Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:58.912310Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:100:2077] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.912400Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:101:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.912510Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:102:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.912636Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:103:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.912777Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:104:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.912874Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:105:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.912950Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:93:2072] Create Queue# [2:106:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.912965Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:58.913013Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:94:2073] 2025-04-04T16:57:58.913034Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:94:2073] 2025-04-04T16:57:58.913059Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:58.913093Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} 
Bootstrap 2025-04-04T16:57:58.913407Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:58.913479Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:58.915390Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:58.915485Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:58.916032Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:111:2073] ControllerId# 72057594037932033 2025-04-04T16:57:58.916056Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:58.916116Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:58.916295Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:58.916939Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:58.916993Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:58.918592Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:117:2077] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.918776Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:118:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.918897Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:119:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.919049Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:120:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.919170Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:121:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.919312Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:122:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.919457Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:110:2072] Create Queue# [3:123:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:58.919482Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:58.919545Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:111:2073] 2025-04-04T16:57:58.919570Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:111:2073] 2025-04-04T16:57:58.919623Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:58.919649Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:58.919931Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:58.920285Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:58.920502Z node 2 :PIPE_CLIENT 
DEBUG: TClient[72057594037932033] queue send [2:94:2073] 2025-04-04T16:57:58.920544Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:58.920583Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:58.920705Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:58.920789Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:111:2073] 2025-04-04T16:57:58.920815Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:58.920834Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:58.920957Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:58.933301Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:72:2075] 2025-04-04T16:57:58.933387Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:58.933416Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:58.935392Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeLis ... rd result local node, try to connect [28:316:2291] 2025-04-04T16:59:15.003603Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [28:316:2291] 2025-04-04T16:59:15.003707Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [28:316:2291] 2025-04-04T16:59:15.003855Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [28:316:2291] 2025-04-04T16:59:15.003895Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [28:316:2291] 2025-04-04T16:59:15.003931Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [28:316:2291] 2025-04-04T16:59:15.003994Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [28:282:2268] EventType# 268637702 2025-04-04T16:59:15.004211Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-04-04T16:59:15.004312Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:59:15.004562Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T16:59:15.004676Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:59:15.005017Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-04T16:59:15.005112Z node 28 :TABLET_EXECUTOR 
DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:59:15.005572Z node 28 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004718016}(72075186224037888)::Execute - TryToBoot was not successfull 2025-04-04T16:59:15.005701Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-04-04T16:59:15.005804Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:59:15.018374Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] bootstrap ActorId# [28:319:2294] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:693:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:59:15.018568Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Id# [72057594037927937:2:4:0:0:693:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:59:15.018619Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] restore Id# [72057594037927937:2:4:0:0:693:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:59:15.018708Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:693:1] Marker# BPG33 2025-04-04T16:59:15.018755Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:693:1] Marker# BPG32 2025-04-04T16:59:15.018921Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:34:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:693:1] FDS# 693 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:59:15.025479Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:693:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85456 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-04T16:59:15.025648Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-04T16:59:15.025735Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:59:15.025880Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.763 sample PartId# [72057594037927937:2:4:0:0:693:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 7.379 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-04-04T16:59:15.026019Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T16:59:15.026357Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 
4 2025-04-04T16:59:15.026848Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:59:15.026974Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T16:59:15.027029Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-04T16:59:15.027074Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T16:59:15.027116Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T16:59:15.027198Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T16:59:15.027243Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T16:59:15.027656Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [28:323:2297] 2025-04-04T16:59:15.027715Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [28:323:2297] 2025-04-04T16:59:15.027812Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [28:323:2297] 2025-04-04T16:59:15.027927Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:59:15.028021Z node 28 :TABLET_RESOLVER DEBUG: SelectForward node 28 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [28:275:2264] 2025-04-04T16:59:15.028119Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [28:323:2297] 2025-04-04T16:59:15.028191Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [28:323:2297] 2025-04-04T16:59:15.028281Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [28:323:2297] 2025-04-04T16:59:15.028429Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [28:323:2297] 2025-04-04T16:59:15.028652Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [28:323:2297] 2025-04-04T16:59:15.028723Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [28:323:2297] 2025-04-04T16:59:15.028786Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [28:323:2297] 2025-04-04T16:59:15.028867Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [28:323:2297] 2025-04-04T16:59:15.028924Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [28:323:2297] 2025-04-04T16:59:15.029009Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [28:322:2296] EventType# 268697624 2025-04-04T16:59:15.029256Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-04-04T16:59:15.029364Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T16:59:15.029639Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-04T16:59:15.029745Z node 
28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T16:59:15.043340Z node 28 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] bootstrap ActorId# [28:326:2300] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T16:59:15.043568Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Id# [72057594037927937:2:5:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T16:59:15.043655Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] restore Id# [72057594037927937:2:5:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T16:59:15.043762Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG33 2025-04-04T16:59:15.043843Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG32 2025-04-04T16:59:15.044076Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:34:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T16:59:15.047759Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-04T16:59:15.047961Z node 28 :BS_PROXY_PUT DEBUG: [6173685a7ad4b3c4] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-04T16:59:15.048094Z node 28 :BS_PROXY_PUT INFO: [6173685a7ad4b3c4] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T16:59:15.048311Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.006 sample PartId# [72057594037927937:2:5:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 4.744 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-04-04T16:59:15.048547Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T16:59:15.048759Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> KqpQueryService::TableSink_OltpOrder [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> 
TExportToS3Tests::ShouldExcludeBackupTableFromStats >> Viewer::JsonStorageListingV2 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> TSubDomainTest::LsAltered [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> TExportToS3Tests::CheckItemProgress ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpOrder [GOOD] Test command err: Trying to start YDB, gRPC: 6705, MsgBus: 1076 2025-04-04T16:58:37.660867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503484795689406:2107];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:37.665770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018d5/r3tmp/tmpYHBnRa/pdisk_1.dat 2025-04-04T16:58:38.182809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:38.184480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:38.184576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:38.190642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6705, node 1 2025-04-04T16:58:38.538802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:38.538823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:38.538829Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:38.538935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1076 TClient is connected to server localhost:1076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:39.392184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:41.540032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503501975559188:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:41.540140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:41.844020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:58:42.062264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503506270526637:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:42.062361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:42.062781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503506270526642:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:42.066942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:58:42.102431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503506270526644:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:58:42.210669Z node 1 :TX_PROXY ERROR: Actor# [1:7489503506270526697:2435] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:42.659460Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503484795689406:2107];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:42.659532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24402, MsgBus: 16171 2025-04-04T16:58:45.811115Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503521619755521:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:45.811196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018d5/r3tmp/tmpOK1l95/pdisk_1.dat 2025-04-04T16:58:45.977750Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:45.997492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:45.997628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:45.999518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24402, node 2 2025-04-04T16:58:46.191024Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:46.191064Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:46.191101Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:46.191282Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16171 TClient is connected to server localhost:16171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:58:46.835562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:58:46.856779Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:58:50.679917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503543094592651:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.680008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.695449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T16:58:50.766736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503543094592753:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.766813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.766980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503543094592758:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:50.771354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T16:58:50.789248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503543094592760:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T16:58:50.814161Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503521619755521:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:50.814737Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:50.860513Z node 2 :TX_PROXY ERROR: Actor# [2:7489503543094592811:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to sta ... 4.255084Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489503645876362454:2342] TxId: 281474976710755. Ctx: { TraceId: 01jr0vw2w01kmk9ysgwv2c3ytn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T16:59:14.256056Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, ActorId: [3:7489503577156882262:2342], ActorState: ExecuteState, TraceId: 01jr0vw2w01kmk9ysgwv2c3ytn, Create QueryResponse for error on request, msg: 2025-04-04T16:59:14.389699Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=97; 2025-04-04T16:59:14.389854Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 97 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T16:59:14.389955Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 97 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T16:59:14.390292Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362495:2427], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7489503585746817164:2427]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7489503645876362495:2427].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T16:59:14.390353Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362487:2427], SessionActorId: [3:7489503585746817164:2427], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7489503585746817164:2427]. isRollback=0 2025-04-04T16:59:14.390522Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI3ZTFiMmEtMzYzMjFhN2MtZTg0NTg3ZjAtZmEyYjFhZmE=, ActorId: [3:7489503585746817164:2427], ActorState: ExecuteState, TraceId: 01jr0vw30maft5ggmhjhfxfajv, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7489503645876362488:2427] from: [3:7489503645876362487:2427] 2025-04-04T16:59:14.390580Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489503645876362488:2427] TxId: 281474976710756. Ctx: { TraceId: 01jr0vw30maft5ggmhjhfxfajv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjI3ZTFiMmEtMzYzMjFhN2MtZTg0NTg3ZjAtZmEyYjFhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T16:59:14.391671Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI3ZTFiMmEtMzYzMjFhN2MtZTg0NTg3ZjAtZmEyYjFhZmE=, ActorId: [3:7489503585746817164:2427], ActorState: ExecuteState, TraceId: 01jr0vw30maft5ggmhjhfxfajv, Create QueryResponse for error on request, msg: 2025-04-04T16:59:14.595031Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=98; 2025-04-04T16:59:14.595365Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362532:2342], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7489503577156882262:2342]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7489503645876362532:2342].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T16:59:14.595428Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362522:2342], SessionActorId: [3:7489503577156882262:2342], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7489503577156882262:2342]. isRollback=0 2025-04-04T16:59:14.595625Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, ActorId: [3:7489503577156882262:2342], ActorState: ExecuteState, TraceId: 01jr0vw36p0ptp8qycp2h78ppq, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7489503645876362523:2342] from: [3:7489503645876362522:2342] 2025-04-04T16:59:14.595689Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489503645876362523:2342] TxId: 281474976710757. Ctx: { TraceId: 01jr0vw36p0ptp8qycp2h78ppq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T16:59:14.596661Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, ActorId: [3:7489503577156882262:2342], ActorState: ExecuteState, TraceId: 01jr0vw36p0ptp8qycp2h78ppq, Create QueryResponse for error on request, msg: 2025-04-04T16:59:14.781247Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=99; 2025-04-04T16:59:14.781537Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362568:2342], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7489503577156882262:2342]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7489503645876362568:2342].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T16:59:14.781588Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362559:2342], SessionActorId: [3:7489503577156882262:2342], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7489503577156882262:2342]. isRollback=0 2025-04-04T16:59:14.781740Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, ActorId: [3:7489503577156882262:2342], ActorState: ExecuteState, TraceId: 01jr0vw3cne7pergjjdf6vv0dk, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7489503645876362560:2342] from: [3:7489503645876362559:2342] 2025-04-04T16:59:14.781796Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489503645876362560:2342] TxId: 281474976710758. Ctx: { TraceId: 01jr0vw3cne7pergjjdf6vv0dk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T16:59:14.782849Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, ActorId: [3:7489503577156882262:2342], ActorState: ExecuteState, TraceId: 01jr0vw3cne7pergjjdf6vv0dk, Create QueryResponse for error on request, msg: 2025-04-04T16:59:14.924889Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=100; 2025-04-04T16:59:14.925236Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362605:2342], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7489503577156882262:2342]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7489503645876362605:2342].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T16:59:14.925295Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503645876362595:2342], SessionActorId: [3:7489503577156882262:2342], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7489503577156882262:2342]. isRollback=0 2025-04-04T16:59:14.925487Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, ActorId: [3:7489503577156882262:2342], ActorState: ExecuteState, TraceId: 01jr0vw3hdfq50bhs46e2bqnv0, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7489503645876362596:2342] from: [3:7489503645876362595:2342] 2025-04-04T16:59:14.925554Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489503645876362596:2342] TxId: 281474976710759. Ctx: { TraceId: 01jr0vw3hdfq50bhs46e2bqnv0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T16:59:14.926562Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTAzYzUwZGYtOTEyMTRhMzUtYzMzMTVmYzEtMzI5NTNmNmI=, ActorId: [3:7489503577156882262:2342], ActorState: ExecuteState, TraceId: 01jr0vw3hdfq50bhs46e2bqnv0, Create QueryResponse for error on request, msg: 2025-04-04T16:59:15.063939Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=101; 2025-04-04T16:59:15.064192Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503650171329938:2427], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7489503585746817164:2427]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7489503650171329938:2427].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T16:59:15.064252Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489503650171329927:2427], SessionActorId: [3:7489503585746817164:2427], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7489503585746817164:2427]. isRollback=0 2025-04-04T16:59:15.064374Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI3ZTFiMmEtMzYzMjFhN2MtZTg0NTg3ZjAtZmEyYjFhZmE=, ActorId: [3:7489503585746817164:2427], ActorState: ExecuteState, TraceId: 01jr0vw3ntazwrpkzv4cd6j4f7, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7489503650171329928:2427] from: [3:7489503650171329927:2427] 2025-04-04T16:59:15.064605Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489503650171329928:2427] TxId: 281474976710760. Ctx: { TraceId: 01jr0vw3ntazwrpkzv4cd6j4f7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjI3ZTFiMmEtMzYzMjFhN2MtZTg0NTg3ZjAtZmEyYjFhZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T16:59:15.065443Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI3ZTFiMmEtMzYzMjFhN2MtZTg0NTg3ZjAtZmEyYjFhZmE=, ActorId: [3:7489503585746817164:2427], ActorState: ExecuteState, TraceId: 01jr0vw3ntazwrpkzv4cd6j4f7, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit [GOOD] Test command err: 2025-04-04T16:55:58.489068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502804528177843:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:58.489512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:55:58.679248Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec0/r3tmp/tmpnoIJpl/pdisk_1.dat 2025-04-04T16:55:58.879523Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:55:58.927720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:58.927865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28237, node 1 2025-04-04T16:55:58.930019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:59.047118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000ec0/r3tmp/yandexgCmCzm.tmp 2025-04-04T16:55:59.047142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ec0/r3tmp/yandexgCmCzm.tmp 2025-04-04T16:55:59.047295Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000ec0/r3tmp/yandexgCmCzm.tmp 2025-04-04T16:55:59.047400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:55:59.126965Z INFO: TTestServer started on Port 2702 GrpcPort 28237 TClient is connected to server localhost:2702 PQClient connected to localhost:28237 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:55:59.608193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:59.634949Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:59.652542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:55:59.806221Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:55:59.817514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-04T16:56:02.243870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502821708047659:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:02.243870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502821708047647:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:02.243988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:02.249283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T16:56:02.262075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502821708047661:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T16:56:02.535864Z node 1 :TX_PROXY ERROR: Actor# [1:7489502821708047725:2450] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.568612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:56:02.611321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:56:02.615698Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502821708047735:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:56:02.617942Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDRjMTJjYy1jMDQwYjk2Mi02NjkyYTY5OS03OTcyNjk1MA==, ActorId: [1:7489502821708047644:2337], ActorState: ExecuteState, TraceId: 01jr0vp7dh9hezq7exe3a7kvay, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:56:02.620793Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:56:02.761260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489502826003015314:2634] 2025-04-04T16:56:03.486410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502804528177843:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:03.486527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T16:56:08.196540Z :WriteToTopic_Demo_20_RestartNo INFO: TTopicSdkTestSetup started 2025-04-04T16:56:08.232629Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T16:56:08.267278Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489502847477852071:2793] connected; active server actors: 1 2025-04-04T16:56:08.267594Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-04T16:56:08.268384Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T16:56:08.268560Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T16:56:08.271083Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T16:56:08.279129Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:56:08.281669Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T16:56:08.282473Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:56:08.282722Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T16:56:08.282741Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:56:08.282759Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T16:56:08.282790Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:56:08.282821Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:56:08.282859Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T16:56:08.283448Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T16:56:08.283525Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489502847477852085:2454], now have 1 active actors on pipe 2025-04-04T16:56:08.310674Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489502847477852070:2792], now have 1 active actors on pipe 2025-04-04T16:56:08.363916Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489502804528178052 RawX2: 4294969482 } TxId: 281474976710672 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKin ... nt. Number of messages 1, size 7000000 bytes 0 1 2025-04-04T16:59:15.028208Z :DEBUG: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] [] Commit offsets [0, 1). Partition stream id: 1 2025-04-04T16:59:15.031078Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 1 } } } } 2025-04-04T16:59:15.031475Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 1 prev 0 end 2 by cookie 2 2025-04-04T16:59:15.034527Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-04-04T16:59:15.034585Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-04-04T16:59:15.034761Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 1 (startOffset 0) session test-consumer_12_1_14292833330477155419_v1 2025-04-04T16:59:15.034972Z node 12 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-04T16:59:15.035996Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-04T16:59:15.036026Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:59:15.036132Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-04-04T16:59:15.036184Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 1 endOffset 2 with cookie 2 2025-04-04T16:59:15.036215Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 1 2025-04-04T16:59:15.039299Z :DEBUG: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 1 } } 2025-04-04T16:59:15.039982Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-04T16:59:15.040055Z :DEBUG: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] [] The application data is transferred to the client. Number of messages 1, size 7000000 bytes 2025-04-04T16:59:15.042442Z :DEBUG: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] [] Returning serverBytesSize = 0 to budget 0 1 2025-04-04T16:59:15.080778Z :DEBUG: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] [] Commit offsets [1, 2). Partition stream id: 1 2025-04-04T16:59:15.087022Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 1 end: 2 } } } } 2025-04-04T16:59:15.087509Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 2 prev 1 end 2 by cookie 3 2025-04-04T16:59:15.088335Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-04-04T16:59:15.088389Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-04-04T16:59:15.088548Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 2 (startOffset 0) session test-consumer_12_1_14292833330477155419_v1 2025-04-04T16:59:15.088766Z node 12 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-04T16:59:15.090507Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 2 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T16:59:15.090592Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T16:59:15.090666Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 3 2025-04-04T16:59:15.090794Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-04-04T16:59:15.090838Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 2 endOffset 2 with cookie 3 2025-04-04T16:59:15.090886Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 2 2025-04-04T16:59:15.091912Z :DEBUG: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 2 } } 2025-04-04T16:59:15.597035Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:59:15.597092Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:15.794471Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:1:2 2025-04-04T16:59:15.794557Z :INFO: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1003 BytesRead: 14000000 MessagesRead: 2 BytesReadCompressed: 14000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:59:15.806305Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 checking auth because of timeout 2025-04-04T16:59:15.806385Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 auth for : test-consumer 2025-04-04T16:59:15.807803Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 Handle describe topics response 2025-04-04T16:59:15.807932Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 auth is DEAD 2025-04-04T16:59:15.808026Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 auth ok: topics# 1, initDone# 1 2025-04-04T16:59:16.870431Z :INFO: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] Closing read session. 
Close timeout: 0.000000s 2025-04-04T16:59:16.870504Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:1:2 2025-04-04T16:59:16.870559Z :INFO: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2080 BytesRead: 14000000 MessagesRead: 2 BytesReadCompressed: 14000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T16:59:16.870700Z :NOTICE: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T16:59:16.870756Z :DEBUG: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] [] Abort session to cluster 2025-04-04T16:59:16.871259Z :NOTICE: [/Root] [/Root] [5a97c7b0-1eb8da2c-fe4f90e1-742feecb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T16:59:16.872798Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2025-04-04T16:59:16.872864Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0] PartitionId [0] Generation [2] Write session will now close 2025-04-04T16:59:16.872912Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0] PartitionId [0] Generation [2] Write session: aborting 2025-04-04T16:59:16.873461Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2025-04-04T16:59:16.873504Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0] PartitionId [0] Generation [2] Write session: destroy 2025-04-04T16:59:16.874508Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 grpc read done: success# 0, data# { } 2025-04-04T16:59:16.874543Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 grpc read failed 2025-04-04T16:59:16.874575Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 grpc closed 2025-04-04T16:59:16.874617Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_14292833330477155419_v1 is DEAD 2025-04-04T16:59:16.874788Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0 grpc read done: success: 0 data: 2025-04-04T16:59:16.874807Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0 grpc read failed 2025-04-04T16:59:16.874844Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0 grpc closed 2025-04-04T16:59:16.874865Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|a93807bb-cbc3a66e-2a1923d9-eae75491_0 is DEAD 2025-04-04T16:59:16.875457Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T16:59:16.876196Z node 12 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [12:7489503646595026789:2553] disconnected; active server actors: 1 2025-04-04T16:59:16.876231Z node 12 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [12:7489503646595026789:2553] client test-consumer disconnected session test-consumer_12_1_14292833330477155419_v1 2025-04-04T16:59:16.876344Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489503642300059423:2534] destroyed 2025-04-04T16:59:16.876369Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_12_1_14292833330477155419_v1 2025-04-04T16:59:16.876392Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489503646595026792:2556] destroyed 2025-04-04T16:59:16.876439Z node 12 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_12_1_14292833330477155419_v1 2025-04-04T16:59:16.876576Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] 
TPartition::DropOwner.
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD]
Test command err:
2025-04-04T16:59:09.691908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503622589984809:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:09.692059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001617/r3tmp/tmp1JsNpD/pdisk_1.dat 2025-04-04T16:59:10.213148Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:10.225933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:10.226065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:10.234142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16506 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T16:59:10.497147Z node 1 :TX_PROXY DEBUG: actor# [1:7489503622589985040:2114] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:10.497203Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503626884952805:2431] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:10.497311Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503622589985089:2138], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:10.497357Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503622589985089:2138], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:10.497561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:10.499712Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984723:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503626884952812:2433] 2025-04-04T16:59:10.499790Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503622589984723:2053] Subscribe: subscriber# [1:7489503626884952812:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.499858Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984726:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503626884952813:2433] 2025-04-04T16:59:10.499878Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503622589984726:2056] Subscribe: subscriber# [1:7489503626884952813:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.500030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984720:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503626884952811:2433] 2025-04-04T16:59:10.500043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952812:2433][/dc-1] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503622589984723:2053] 2025-04-04T16:59:10.500068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952813:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503622589984726:2056] 2025-04-04T16:59:10.500090Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503622589984720:2050] Subscribe: subscriber# [1:7489503626884952811:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.500111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503626884952809:2433] 2025-04-04T16:59:10.500139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503626884952810:2433] 2025-04-04T16:59:10.500200Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984723:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503626884952812:2433] 2025-04-04T16:59:10.500205Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503626884952807:2433][/dc-1] Set up state: owner# [1:7489503622589985089:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:10.500217Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984726:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503626884952813:2433] 2025-04-04T16:59:10.500370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952811:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503622589984720:2050] 2025-04-04T16:59:10.500419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952811:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503626884952808:2433], cookie# 1 2025-04-04T16:59:10.500463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952812:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503626884952809:2433], cookie# 1 2025-04-04T16:59:10.500485Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952813:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503626884952810:2433], cookie# 1 2025-04-04T16:59:10.500513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503626884952808:2433] 2025-04-04T16:59:10.500580Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503626884952807:2433][/dc-1] Path was already updated: owner# [1:7489503622589985089:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 
72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:10.500682Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984720:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503626884952811:2433] 2025-04-04T16:59:10.500713Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984720:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503626884952811:2433], cookie# 1 2025-04-04T16:59:10.500734Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984723:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503626884952812:2433], cookie# 1 2025-04-04T16:59:10.500751Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503622589984726:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503626884952813:2433], cookie# 1 2025-04-04T16:59:10.500810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952811:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503622589984720:2050], cookie# 1 2025-04-04T16:59:10.500831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952812:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503622589984723:2053], cookie# 1 2025-04-04T16:59:10.500846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503626884952813:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503622589984726:2056], cookie# 1 2025-04-04T16:59:10.500873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503626884952808:2433], cookie# 1 2025-04-04T16:59:10.500897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:10.500933Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503626884952809:2433], cookie# 1 2025-04-04T16:59:10.500955Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:10.500986Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503626884952810:2433], cookie# 1 2025-04-04T16:59:10.501001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503626884952807:2433][/dc-1] Unexpected sync response: sender# [1:7489503626884952810:2433], cookie# 1 TClient::Ls response: 2025-04-04T16:59:10.555677Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503622589985089:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:10.556086Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503622589985089:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersi ... 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:19.503630Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489503667206895395:2556][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489503650027025234:2053] 2025-04-04T16:59:19.503649Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489503667206895396:2556][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489503650027025237:2056] 2025-04-04T16:59:19.503675Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489503667206895378:2556][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489503667206895391:2556] 2025-04-04T16:59:19.503696Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489503667206895378:2556][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489503667206895392:2556] 2025-04-04T16:59:19.503710Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503654321992907:2147], cacheItem# { Subscriber: { Subscriber: [3:7489503667206895376:2554] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true 
ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:19.503735Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489503667206895378:2556][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7489503654321992907:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:19.503748Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489503654321992907:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-04T16:59:19.503761Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489503667206895378:2556][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489503667206895393:2556] 2025-04-04T16:59:19.503783Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489503667206895378:2556][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7489503654321992907:2147], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:19.503799Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489503650027025231:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489503667206895394:2556] 2025-04-04T16:59:19.503812Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489503650027025234:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489503667206895395:2556] 2025-04-04T16:59:19.503825Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489503650027025237:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489503667206895396:2556] 2025-04-04T16:59:19.510406Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489503654321992907:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489503667206895377:2555] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:19.510523Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503654321992907:2147], cacheItem# { Subscriber: { Subscriber: [3:7489503667206895377:2555] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:19.510615Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489503654321992907:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 
2025-04-04T16:59:19.510720Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489503654321992907:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489503667206895378:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:19.510776Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503654321992907:2147], cacheItem# { Subscriber: { Subscriber: [3:7489503667206895378:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:19.510861Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503667206895397:2557], recipient# [3:7489503667206895374:2313], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:19.510919Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503667206895398:2558], recipient# [3:7489503667206895375:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:20.145115Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489503654321992907:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:20.145284Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503654321992907:2147], cacheItem# { Subscriber: { Subscriber: [3:7489503658616960713:2523] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:20.145421Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503671501862702:2562], recipient# [3:7489503671501862701:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:20.513540Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489503654321992907:2147], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:20.513709Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503654321992907:2147], cacheItem# { Subscriber: { Subscriber: [3:7489503667206895378:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:20.513813Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489503671501862707:2563], recipient# [3:7489503671501862706:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD]
Test command err:
2025-04-04T16:59:14.692889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503647247474857:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:14.693009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001600/r3tmp/tmpNVt4aG/pdisk_1.dat 2025-04-04T16:59:15.137003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:15.159617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:15.159725Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:15.161408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10286 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T16:59:15.381950Z node 1 :TX_PROXY DEBUG: actor# [1:7489503647247474953:2098] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:15.382007Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503651542442547:2258] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:15.382130Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503647247474976:2111], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:15.382166Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503647247474976:2111], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:15.382424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:15.385330Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474667:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503651542442552:2259] 2025-04-04T16:59:15.385401Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503647247474667:2049] Subscribe: subscriber# [1:7489503651542442552:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.385500Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474673:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503651542442554:2259] 2025-04-04T16:59:15.385530Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503647247474673:2055] Subscribe: subscriber# [1:7489503651542442554:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.385578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442552:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503647247474667:2049] 2025-04-04T16:59:15.385607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442554:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503647247474673:2055] 2025-04-04T16:59:15.385698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503651542442549:2259] 2025-04-04T16:59:15.385725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503651542442551:2259] 2025-04-04T16:59:15.385796Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503651542442548:2259][/dc-1] Set up state: owner# [1:7489503647247474976:2111], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:15.385964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442552:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503651542442549:2259], cookie# 1 2025-04-04T16:59:15.385979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442553:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503651542442550:2259], cookie# 1 2025-04-04T16:59:15.385995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442554:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503651542442551:2259], cookie# 1 2025-04-04T16:59:15.386042Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474667:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503651542442552:2259] 2025-04-04T16:59:15.386068Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474667:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503651542442552:2259], cookie# 1 2025-04-04T16:59:15.386087Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474673:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503651542442554:2259] 2025-04-04T16:59:15.386098Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474673:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503651542442554:2259], cookie# 1 2025-04-04T16:59:15.390364Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474670:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503651542442553:2259] 2025-04-04T16:59:15.390441Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503647247474670:2052] Subscribe: subscriber# [1:7489503651542442553:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.390547Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474670:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503651542442553:2259], cookie# 1 2025-04-04T16:59:15.390607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442552:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503647247474667:2049], cookie# 1 2025-04-04T16:59:15.390622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442554:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503647247474673:2055], cookie# 1 2025-04-04T16:59:15.390675Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442553:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503647247474670:2052] 2025-04-04T16:59:15.390699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503651542442553:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503647247474670:2052], cookie# 1 2025-04-04T16:59:15.390797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503651542442549:2259], cookie# 1 2025-04-04T16:59:15.390828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 
1, successes# 1, faulires# 0 2025-04-04T16:59:15.390867Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503651542442551:2259], cookie# 1 2025-04-04T16:59:15.390907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:15.390956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503651542442550:2259] 2025-04-04T16:59:15.391054Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503651542442548:2259][/dc-1] Path was already updated: owner# [1:7489503647247474976:2111], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:15.391081Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503651542442550:2259], cookie# 1 2025-04-04T16:59:15.391103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503651542442548:2259][/dc-1] Unexpected sync response: sender# [1:7489503651542442550:2259], cookie# 1 2025-04-04T16:59:15.391134Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503647247474670:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503651542442553:2259] 2025-04-04T16:59:15.465222Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503647247474976:2111], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:15.465587Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503647247474976:2111], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: 
[OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... teTable: true } 2025-04-04T16:59:19.232329Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503665142411728:2325] Handle TEvDescribeSchemeResult Forward to# [2:7489503665142411727:2324] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743785958754 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743785958754 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingPara... 
(TRUNCATED) TClient::Ls request: /dc-1 2025-04-04T16:59:19.234476Z node 2 :TX_PROXY DEBUG: actor# [2:7489503660847444021:2094] Handle TEvNavigate describe path /dc-1 2025-04-04T16:59:19.234511Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503665142411731:2328] HANDLE EvNavigateScheme /dc-1 2025-04-04T16:59:19.234582Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489503660847444056:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:19.234670Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503660847444322:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7489503660847444056:2114], cookie# 4 2025-04-04T16:59:19.234722Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503660847444326:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7489503660847444323:2258], cookie# 4 2025-04-04T16:59:19.234739Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503660847444327:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7489503660847444324:2258], cookie# 4 2025-04-04T16:59:19.234755Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503660847444328:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7489503660847444325:2258], cookie# 4 2025-04-04T16:59:19.234780Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7489503660847443744:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7489503660847444326:2258], cookie# 4 2025-04-04T16:59:19.234803Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7489503660847443747:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7489503660847444327:2258], cookie# 4 2025-04-04T16:59:19.234825Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7489503660847443750:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7489503660847444328:2258], cookie# 4 2025-04-04T16:59:19.234852Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503660847444326:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7489503660847443744:2049], cookie# 4 2025-04-04T16:59:19.234868Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503660847444327:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7489503660847443747:2052], cookie# 4 2025-04-04T16:59:19.234883Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7489503660847444328:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7489503660847443750:2055], cookie# 4 2025-04-04T16:59:19.234914Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503660847444322:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7489503660847444323:2258], cookie# 4 2025-04-04T16:59:19.234932Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503660847444322:2258][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:19.234958Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503660847444322:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: 
sender# [2:7489503660847444324:2258], cookie# 4 2025-04-04T16:59:19.234980Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503660847444322:2258][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:19.235010Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503660847444322:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7489503660847444325:2258], cookie# 4 2025-04-04T16:59:19.235023Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7489503660847444322:2258][/dc-1] Unexpected sync response: sender# [2:7489503660847444325:2258], cookie# 4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743785958733 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743785958754 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId:... (TRUNCATED) 2025-04-04T16:59:19.235057Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7489503660847444056:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T16:59:19.235131Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7489503660847444056:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7489503660847444322:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743785958733 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:19.235209Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489503660847444056:2114], cacheItem# { Subscriber: { Subscriber: [2:7489503660847444322:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743785958733 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-04T16:59:19.235342Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489503665142411732:2329], recipient# [2:7489503665142411731:2328], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T16:59:19.235373Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503665142411731:2328] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:19.235431Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503665142411731:2328] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-04T16:59:19.235927Z node 2 :TX_PROXY DEBUG: Actor# [2:7489503665142411731:2328] Handle TEvDescribeSchemeResult Forward to# [2:7489503665142411730:2327] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 66 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743785958733 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 >> DemoTx::Scenario_2 [GOOD] >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> Viewer::JsonAutocompleteSchemePOST [GOOD] >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> DemoTx::Scenario_3 >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::AuditCancelledExport [GOOD] >> KqpPg::CheckPgAutoParams-useSink [GOOD] >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TExportToS3Tests::Checksums ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-04-04T16:58:33.334559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:33.335044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:33.335114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 30090, node 1 TClient is connected to server localhost:15495 2025-04-04T16:58:44.489750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:336:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:44.489938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:44.490173Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6298, node 2 TClient is connected to server localhost:8503 2025-04-04T16:58:57.243719Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:57.243980Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:57.244285Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 25830, node 3 TClient is connected to server localhost:18404 2025-04-04T16:59:09.004735Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:310:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:09.005384Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:09.005699Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 64076, node 4 TClient is connected to server localhost:15895 2025-04-04T16:59:20.730286Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:336:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:20.730506Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:20.730780Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 7832, node 5 TClient is connected to server localhost:18958 >> TExportToS3Tests::UidAsIdempotencyKey >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::ChecksumsWithCompression |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-04-04T16:59:16.561574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:59:16.561643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:16.561669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:59:16.561695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:59:16.561737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:59:16.561758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:59:16.561797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:16.561856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:59:16.562150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:16.633637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:59:16.633710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:16.649281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:16.649583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-04T16:59:16.649803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:16.670546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:16.670887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:16.671552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:16.672534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:16.677900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:16.679346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:16.679415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:16.679596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:16.679650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:16.679688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:16.679827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.686807Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-04T16:59:16.822869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:16.823115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.823380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:16.823684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:16.823739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.826596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:16.826909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:59:16.827219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.827281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:16.827319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:16.827357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:16.829914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.829990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:16.830051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:16.832605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.832689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.832735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:16.832844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:16.836858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:16.839345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:16.839553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:16.840636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:16.840793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:16.840856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:16.841188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:16.841247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:16.841442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:16.841527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:16.845315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-04T16:59:16.845392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:16.845601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:16.845662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:16.846030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:16.846086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:16.846189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:16.846220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:16.846282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:16.846329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:16.846376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:16.846417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:16.846450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:16.846478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:16.846555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:16.846596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:16.846670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:16.855180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:16.855358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:16.855401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:29096" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: false EnablePermissions: false } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:24.717707Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:24.717872Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T16:59:24.718288Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:24.718359Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:24.719794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:59:24.719866Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-04T16:59:24.721500Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:24.721857Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-04-04T16:59:24.722272Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-04-04T16:59:24.722375Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-04-04T16:59:24.722776Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:24.722851Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-04-04T16:59:24.722913Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-04-04T16:59:24.722951Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2025-04-04T16:59:24.725372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-04-04T16:59:24.725455Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-04-04T16:59:24.725925Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:24.725981Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:24.726166Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-04-04T16:59:24.729431Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-04-04T16:59:24.729711Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:24.729766Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:24.729906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-04-04T16:59:24.730452Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-04-04T16:59:24.730582Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-04-04T16:59:24.731358Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-04T16:59:24.731540Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-04-04T16:59:24.734385Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-04T16:59:24.735082Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:59:24.735146Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:563:2522] TestWaitNotification: OK eventTxId 102 AUDIT LOG buffer(7): 2025-04-04T16:59:23.830331Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T16:59:23.880193Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T16:59:24.324873Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none} 2025-04-04T16:59:24.333417Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T16:59:24.367441Z: component=schemeshard, 
tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T16:59:24.721740Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T16:59:24.730918Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-04-04T16:59:23.873115Z, end_time=2025-04-04T16:59:53.923115Z AUDIT LOG checked line: 2025-04-04T16:59:24.730918Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-04-04T16:59:23.873115Z, end_time=2025-04-04T16:59:53.923115Z >> TExportToS3Tests::Checksums [GOOD] >> TExportToS3Tests::Changefeeds |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-04-04T16:59:09.202521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503624602204341:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:09.202581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001632/r3tmp/tmp9wsM9C/pdisk_1.dat 2025-04-04T16:59:09.677534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:09.684364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:09.684468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:09.688804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8595 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T16:59:09.940246Z node 1 :TX_PROXY DEBUG: actor# [1:7489503624602204581:2123] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:09.940295Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503624602205039:2439] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:09.940421Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503624602204615:2144], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:09.940482Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503624602204615:2144], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:09.940670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:09.942496Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204247:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503624602205045:2440] 2025-04-04T16:59:09.942555Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503624602204247:2053] Subscribe: subscriber# [1:7489503624602205045:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:09.942619Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204250:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503624602205046:2440] 2025-04-04T16:59:09.942634Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503624602204250:2056] Subscribe: subscriber# [1:7489503624602205046:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:09.942720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205045:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624602204247:2053] 2025-04-04T16:59:09.942760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205046:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624602204250:2056] 2025-04-04T16:59:09.942815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624602205042:2440] 2025-04-04T16:59:09.942844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624602205043:2440] 2025-04-04T16:59:09.942899Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503624602205040:2440][/dc-1] Set up state: owner# [1:7489503624602204615:2144], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:09.943002Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204244:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: 
sender# [1:7489503624602205044:2440] 2025-04-04T16:59:09.943039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205044:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503624602205041:2440], cookie# 1 2025-04-04T16:59:09.943056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205045:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503624602205042:2440], cookie# 1 2025-04-04T16:59:09.943066Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503624602204244:2050] Subscribe: subscriber# [1:7489503624602205044:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:09.943070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205046:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503624602205043:2440], cookie# 1 2025-04-04T16:59:09.943105Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204247:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503624602205045:2440] 2025-04-04T16:59:09.943119Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204244:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503624602205044:2440], cookie# 1 2025-04-04T16:59:09.943122Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204247:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503624602205045:2440], cookie# 1 2025-04-04T16:59:09.943139Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204250:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503624602205046:2440] 2025-04-04T16:59:09.943151Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204250:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503624602205046:2440], cookie# 1 2025-04-04T16:59:09.943175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205044:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624602204244:2050] 2025-04-04T16:59:09.943204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205044:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624602204244:2050], cookie# 1 2025-04-04T16:59:09.943239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205045:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624602204247:2053], cookie# 1 2025-04-04T16:59:09.943258Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503624602205046:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624602204250:2056], cookie# 1 2025-04-04T16:59:09.943308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624602205041:2440] 2025-04-04T16:59:09.943350Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503624602205040:2440][/dc-1] Path was already updated: owner# [1:7489503624602204615:2144], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other 
state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:09.943384Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624602205041:2440], cookie# 1 2025-04-04T16:59:09.943407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:09.943422Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624602205042:2440], cookie# 1 2025-04-04T16:59:09.943436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:09.943455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624602205043:2440], cookie# 1 2025-04-04T16:59:09.943470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503624602205040:2440][/dc-1] Unexpected sync response: sender# [1:7489503624602205043:2440], cookie# 1 2025-04-04T16:59:09.943485Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624602204244:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503624602205044:2440] 2025-04-04T16:59:09.990488Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503624602204615:2144], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:09.990889Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503624602204615:2144], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... 357Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541880:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7489503689051444982:2759] 2025-04-04T16:59:24.858365Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: [main][5:7489503689051444973:2759][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [5:7489503676166542228:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:24.858381Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541883:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7489503689051444985:2760] 2025-04-04T16:59:24.858395Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7489503676166542228:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-04T16:59:24.858406Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541883:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [5:7489503689051444992:2761] 2025-04-04T16:59:24.858416Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7489503676166541883:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-04T16:59:24.858446Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7489503676166542228:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [5:7489503689051444973:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:24.858454Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7489503676166541883:2053] Subscribe: subscriber# [5:7489503689051444992:2761], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:24.858484Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541883:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7489503689051444984:2759] 2025-04-04T16:59:24.858513Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541886:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [5:7489503689051444993:2761] 2025-04-04T16:59:24.858525Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7489503676166541886:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-04T16:59:24.858540Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7489503676166542228:2127], cacheItem# { Subscriber: { Subscriber: [5:7489503689051444973:2759] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: 
DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:24.858562Z node 5 :SCHEME_BOARD_REPLICA INFO: [5:7489503676166541886:2056] Subscribe: subscriber# [5:7489503689051444993:2761], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:24.858580Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7489503689051444991:2761][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7489503676166541880:2050] 2025-04-04T16:59:24.858592Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541886:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7489503689051444986:2759] 2025-04-04T16:59:24.858602Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7489503689051444992:2761][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7489503676166541883:2053] 2025-04-04T16:59:24.858616Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541880:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7489503689051444991:2761] 2025-04-04T16:59:24.858625Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][5:7489503689051444993:2761][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7489503676166541886:2056] 2025-04-04T16:59:24.858669Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541883:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7489503689051444992:2761] 2025-04-04T16:59:24.858688Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7489503689051444975:2761][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7489503689051444988:2761] 2025-04-04T16:59:24.858722Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7489503689051444975:2761][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7489503689051444989:2761] 2025-04-04T16:59:24.858743Z node 5 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][5:7489503689051444975:2761][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [5:7489503676166542228:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:24.858754Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7489503689051444994:2762], recipient# [5:7489503689051444962:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] 
} 2025-04-04T16:59:24.858764Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7489503689051444975:2761][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [5:7489503689051444990:2761] 2025-04-04T16:59:24.858787Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: [main][5:7489503689051444975:2761][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [5:7489503676166542228:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:24.858794Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7489503676166541886:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7489503689051444993:2761] 2025-04-04T16:59:24.858819Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [5:7489503676166542228:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-04T16:59:24.858872Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [5:7489503676166542228:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [5:7489503689051444975:2761] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T16:59:24.858927Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7489503676166542228:2127], cacheItem# { Subscriber: { Subscriber: [5:7489503689051444975:2761] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:24.859005Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7489503689051444995:2763], recipient# [5:7489503689051444972:2316], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:25.519722Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7489503676166542228:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:25.519972Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7489503676166542228:2127], cacheItem# { Subscriber: { Subscriber: [5:7489503680461510328:2745] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:25.520148Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7489503693346412296:2767], recipient# [5:7489503693346412295:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:59:21.375385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:59:21.375445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:21.375470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:59:21.375495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:59:21.375531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:59:21.375551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:59:21.375604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:21.375675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:59:21.375978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:21.439802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:59:21.439850Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:21.452654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:21.452942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:59:21.453104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:21.464450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:21.464784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:21.465571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:21.467566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:21.473664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:21.475087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:21.475158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:21.475383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:21.475434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:21.475505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:21.475634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.481956Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:59:21.607796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:21.608075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.608302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:21.608591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:21.608654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.611467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:21.611669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:59:21.611877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.611954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:21.611989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:21.612024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:21.614504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.614573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:21.614619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:21.617619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.617687Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.617729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:21.617784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:21.621378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:21.623943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:21.624156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:21.625156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:21.625324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:21.625385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:21.625699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:21.625750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-04-04T16:59:21.625922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:21.626032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:21.629328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:21.629379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:21.629566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:21.629617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:21.629981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:21.630025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:21.630115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:21.630145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:21.630181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:21.630210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:21.630273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:21.630314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:21.630346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:21.630375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:21.630445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:21.630485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:21.630516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:21.632394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:21.632529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:21.632566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
el: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:26.714607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-04T16:59:26.714748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-04-04T16:59:26.715145Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:26.715276Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:26.715342Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-04T16:59:26.715502Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-04T16:59:26.715660Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:23250 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 539E1949-7D30-43EE-803C-36FFEB684C02 amz-sdk-request: attempt=1 content-length: 73 content-md5: a9Su4FHJt26Hhw4HV0+Ocg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2025-04-04T16:59:26.761988Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:26.762048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T16:59:26.762316Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:26.762354Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-04-04T16:59:26.762511Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.762565Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:26.764950Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 
72057594046678944, cookie: 281474976710759 2025-04-04T16:59:26.765134Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-04T16:59:26.765165Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-04T16:59:26.765199Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-04T16:59:26.765233Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T16:59:26.765335Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:23250 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 27CDC2E1-2AC8-48E0-AFD8-6CBCC8AD6B8F amz-sdk-request: attempt=1 content-length: 78 content-md5: 5v+lOCwt7SV92xRPjSiuqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json.sha256 / / 78 FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:23250 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4D72C960-7DB3-488D-A4EC-BC8BE0BD6D75 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-04-04T16:59:26.771209Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:23250 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B7DE052E-C345-48C4-A810-C4ADCF3BE1FE amz-sdk-request: attempt=1 content-length: 74 content-md5: NWNhlq1fHKxcSj+x5Xq9NQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb.sha256 / / 74 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:23250 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6DF8E69A-49AE-4DB2-BA12-A3FB6B5A885C amz-sdk-request: attempt=1 content-length: 27 content-md5: CTqKvdXJPw0OgRdlsoR71Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 27 REQUEST: PUT /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:23250 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A301CB50-83E3-424C-857B-213ADAC901FA amz-sdk-request: attempt=1 content-length: 76 content-md5: gmOXObjloPe2DGxtDsgfpg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.sha256 / 
/ 76 2025-04-04T16:59:26.794948Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-04T16:59:26.794995Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-04T16:59:26.795093Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-04T16:59:26.795184Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-04T16:59:26.795247Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:26.795292Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.795327Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T16:59:26.795371Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-04T16:59:26.795531Z node 4 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:26.797514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.797865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.797910Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-04T16:59:26.798018Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T16:59:26.798048Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T16:59:26.798084Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T16:59:26.798110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T16:59:26.798143Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-04-04T16:59:26.798200Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:122:2148] message: TxId: 281474976710759 2025-04-04T16:59:26.798260Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T16:59:26.798295Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-04T16:59:26.798318Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-04T16:59:26.798433Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T16:59:26.800224Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-04T16:59:26.800290Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-04T16:59:26.801791Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T16:59:26.801842Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:476:2437] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23552, MsgBus: 22195 2025-04-04T16:56:53.477267Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503041752427497:2169];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:53.482317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1d/r3tmp/tmpbs54ac/pdisk_1.dat 2025-04-04T16:56:54.141768Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:54.188958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:54.189115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:54.202720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23552, node 1 2025-04-04T16:56:54.453967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:54.453987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:54.453994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:54.454118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22195 TClient is connected to server localhost:22195 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:55.284183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:55.324289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:56:57.826438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-04T16:56:58.180386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-04-04T16:56:58.337174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-04T16:56:58.475054Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503041752427497:2169];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:58.475138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
2025-04-04T16:56:58.529120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-04-04T16:56:58.713861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-04T16:56:58.822677Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2025-04-04T16:56:58.898137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T16:56:58.983804Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2025-04-04T16:56:59.098333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.186832Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-04-04T16:56:59.305664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.390359Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-04-04T16:56:59.483691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-04T16:56:59.653399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.725138Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-04T16:56:59.782871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.866234Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-04T16:56:59.918862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-04T16:57:00.056351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.135150Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-04T16:57:00.183755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.260122Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-04T16:57:00.316491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.384370Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-04T16:57:00.435507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-04T16:57:00.590801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-04T16:57:00.739130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.806753Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-04-04T16:57:00.913301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-04-04T16:57:01.026473Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-04T16:57:01.103763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710726:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-04-04T16:57:01.276360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710729:0, ... 
delivered;self_id=[14:7489503640573285460:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:13.247827Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1d/r3tmp/tmpFoAkOE/pdisk_1.dat 2025-04-04T16:59:13.659477Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:13.687808Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:13.687944Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:13.689724Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23322, node 14 2025-04-04T16:59:13.757180Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:13.757224Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:13.757244Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:13.757438Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64289 TClient is connected to server localhost:64289 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:59:14.904124Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:14.916594Z node 14 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:59:18.248609Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7489503640573285460:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:18.248740Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:59:19.958880Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7489503666343089890:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:19.959043Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:19.960286Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7489503666343089911:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:19.966461Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:59:19.985446Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7489503666343089913:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:59:20.058938Z node 14 :TX_PROXY ERROR: Actor# [14:7489503670638057260:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:20.332763Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:59:20.720885Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:59:21.605354Z node 14 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [14:7489503674933024912:2402], owner: [14:7489503666343089862:2324], statement id: 0 2025-04-04T16:59:21.605770Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MWZiOTY1OTMtNjAxMzhlOTItM2Q4NTgyYjctODk4MGZhY2I=, ActorId: [14:7489503674933024910:2401], ActorState: ExecuteState, TraceId: 01jr0vwa1f97fx7v75p9p51ww0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T16:59:21.899759Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7489503674933024943:2412], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
<main>: Error: At function: PgSetItem
<main>:1:1: Error: At function: PgWhere
<main>:2:55: Error: At function: PgOp
<main>:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-04T16:59:21.900598Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NzkwYjk4NmUtNjQ3YzBiZWItNDU4MzhhYTItZWRiNjllMjA=, ActorId: [14:7489503674933024940:2410], ActorState: ExecuteState, TraceId: 01jr0vwac82xt2r100kzkr7z7v, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T16:59:21.939111Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7489503674933024955:2418], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
<main>: Error: At function: PgSetItem
<main>:1:1: Error: At function: PgWhere
<main>:2:57: Error: At function: PgAnd
<main>:2:67: Error: At function: PgOp
<main>:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-04-04T16:59:21.939419Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=Y2E0YTFmYTgtNTJhOTA0NzktMWNjZDNkYWQtYzRjYWNkNzM=, ActorId: [14:7489503674933024952:2416], ActorState: ExecuteState, TraceId: 01jr0vwadt0rj5mz90d3g8fje9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T16:59:21.962903Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vwaf2fshmwney4x5w5x7h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=YmExMmUxMWItODFmMDc4ZDctN2NiZWQ3MzEtZjgzNjg2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-04T16:59:21.963233Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=YmExMmUxMWItODFmMDc4ZDctN2NiZWQ3MzEtZjgzNjg2Njc=, ActorId: [14:7489503674933024965:2422], ActorState: ExecuteState, TraceId: 01jr0vwaf2fshmwney4x5w5x7h, Create QueryResponse for error on request, msg: 2025-04-04T16:59:22.018327Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:59:22.155318Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T16:59:22.275879Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7489503679227992438:2448], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiWriteTable!
<main>:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-04-04T16:59:22.276643Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ODNjZjcxNmUtZTJhM2RiNDUtZTc1ZDhhMzMtMWE1YWEzYmU=, ActorId: [14:7489503679227992435:2446], ActorState: ExecuteState, TraceId: 01jr0vwar48eb3kk2prept0yrk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T16:59:22.315538Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7489503679227992455:2456], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiWriteTable!
<main>:1:1: Error: Failed to convert type: List> to List>
<main>:1:1: Error: Failed to convert 'id': pgunknown to Optional
<main>: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-04-04T16:59:22.318235Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=N2YyMzA0OGQtZTRjMWE3YWYtYmQyNDljN2QtZjYzMmQ5ZGU=, ActorId: [14:7489503679227992452:2454], ActorState: ExecuteState, TraceId: 01jr0vwasfasn4x351mde6q69b, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T16:59:22.801409Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0vwatr70tdm4basn75chqy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=Y2VkODYzM2EtZTVlMWExNmUtMzk5ODhkMDEtMjhkYTc4MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-04-04T16:59:22.801971Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=Y2VkODYzM2EtZTVlMWExNmUtMzk5ODhkMDEtMjhkYTc4MmU=, ActorId: [14:7489503679227992464:2460], ActorState: ExecuteState, TraceId: 01jr0vwatr70tdm4basn75chqy, Create QueryResponse for error on request, msg: 2025-04-04T16:59:22.853453Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T16:59:23.489476Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2025-04-04T16:59:23.538461Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TExportToS3Tests::Changefeeds [GOOD] >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> TExportToS3Tests::TablePermissions [GOOD] |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |80.2%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:59:22.433058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:59:22.433152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:22.433188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:59:22.433223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:59:22.433281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:59:22.433313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:59:22.433383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:22.433473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-04T16:59:22.433823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:22.519422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:59:22.519487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:22.532591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:22.532868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:59:22.533091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:22.545266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:22.545592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:22.546345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:22.549910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:22.557401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:22.558802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:22.558861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:22.559066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:22.559115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:22.559154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:22.559284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.570913Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:59:22.702398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:22.702765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.703070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:22.703447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:22.703529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.706919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-04T16:59:22.707116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:59:22.707385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.707479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:22.707530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:22.707565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:22.710000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.710049Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:22.710076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:22.714859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.714952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.715012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:22.715078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:22.719669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:22.722194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:22.722839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:22.724079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:22.724252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:22.724317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:22.724701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:22.724769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:22.724962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:22.725081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:22.728342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:22.728401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:22.728572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:22.728631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:22.728989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:22.729052Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:22.729167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:22.729210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:22.729256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:22.729285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:22.729319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:22.729362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:22.729399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:22.729430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:22.729500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:22.729538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:22.729567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:22.731551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:22.731694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:22.731734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046678944 2025-04-04T16:59:28.929337Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-04T16:59:28.929384Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-04-04T16:59:28.929570Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:28.930051Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.930110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.930128Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-04T16:59:28.930149Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-04T16:59:28.930170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:59:28.930819Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.930879Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.930912Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-04T16:59:28.930932Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7 2025-04-04T16:59:28.930957Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-04T16:59:28.931004Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-04T16:59:28.937374Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-04T16:59:28.937561Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-04T16:59:28.937622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-04-04T16:59:28.937674Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-04-04T16:59:28.938669Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-04-04T16:59:28.938796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-04-04T16:59:28.938948Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010 2025-04-04T16:59:28.939406Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:28.939529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 17179871339 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:28.939580Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944 2025-04-04T16:59:28.939699Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-04-04T16:59:28.939768Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-04T16:59:28.939807Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-04T16:59:28.939878Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-04-04T16:59:28.939923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-04T16:59:28.939988Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:28.940076Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-04T16:59:28.940115Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-04-04T16:59:28.940167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-04-04T16:59:28.940208Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-04-04T16:59:28.940273Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-04-04T16:59:28.940349Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-04T16:59:28.940407Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-04-04T16:59:28.940456Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-04-04T16:59:28.940493Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615 2025-04-04T16:59:28.941811Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.943574Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:28.943627Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:28.943799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-04-04T16:59:28.943902Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:28.943940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-04-04T16:59:28.943976Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:204:2206], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-04-04T16:59:28.944763Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.944844Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.944882Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-04T16:59:28.944933Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-04T16:59:28.944985Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:59:28.945429Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.945507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.945532Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-04-04T16:59:28.945558Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-04-04T16:59:28.945587Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-04T16:59:28.945662Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-04-04T16:59:28.945709Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:124:2150] 2025-04-04T16:59:28.950765Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.951129Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-04-04T16:59:28.951235Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-04-04T16:59:28.951300Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-04-04T16:59:28.951340Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-04T16:59:28.951367Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-04-04T16:59:28.951397Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-04-04T16:59:28.953224Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-04T16:59:28.953309Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T16:59:28.953369Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1386:3175] TestWaitNotification: OK eventTxId 105 >> TSubDomainTest::ConsistentCopyTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:59:26.164702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:59:26.164796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:26.164831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:59:26.164865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:59:26.164917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:59:26.164945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-04-04T16:59:26.165017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:26.165109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:59:26.165442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:26.251176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:59:26.251231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:26.263583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:26.263842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:59:26.264012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:26.274508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:26.275058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:26.275750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:26.276122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:26.282971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:26.284319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:26.284386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:26.284547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:26.284592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:26.284627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:26.284755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.292884Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:59:26.431985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:26.432269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.432510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:26.432801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-04-04T16:59:26.432877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.435492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:26.435646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:59:26.435834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.435927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:26.435983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:26.436018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:26.438097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.438161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:26.438197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:26.440134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.440186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.440233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:26.440296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.444052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:26.446453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:26.446681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:26.447834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:26.447996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:26.448066Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:26.448385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:26.448476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:26.448654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:26.448760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:26.453492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:26.453540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:26.453671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:26.453710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:26.453961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.453995Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:26.454069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:26.454093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.454145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:26.454173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.454203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:26.454230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.454285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:26.454313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:26.454393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:26.454431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:26.454468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:26.456138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:26.456244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:26.456274Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, co ... HEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-04-04T16:59:29.324329Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:29.324499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:29.324548Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:29.324628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-04-04T16:59:29.324794Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:29.326510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-04-04T16:59:29.326624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-04-04T16:59:29.326983Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:29.327127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 12884904044 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:29.327183Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-04T16:59:29.327310Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-04T16:59:29.327437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:18095 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 00691B56-2992-456A-844B-3CF0966BB073 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2025-04-04T16:59:29.369079Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:29.369152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: 
[OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T16:59:29.369370Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:29.369404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-04-04T16:59:29.371455Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:29.371536Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:29.372161Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-04T16:59:29.372242Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-04T16:59:29.372268Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-04T16:59:29.372301Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-04T16:59:29.372335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T16:59:29.372405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:18095 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 40E5156F-5C5B-4606-A55B-E0C74F3AE4E6 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:18095 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CAB02D4F-1C39-40AC-870B-841224E7E91E amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-04-04T16:59:29.378084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:18095 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C9CFC081-CAFC-442B-B3F4-CF1BEE021EF7 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 
2025-04-04T16:59:29.392830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 454 RawX2: 12884904311 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-04T16:59:29.392898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-04T16:59:29.393049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 454 RawX2: 12884904311 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-04T16:59:29.393162Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 454 RawX2: 12884904311 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-04T16:59:29.393231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:29.393274Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:29.393317Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T16:59:29.393359Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-04T16:59:29.393528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:29.395787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:29.396128Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T16:59:29.396174Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-04T16:59:29.396310Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T16:59:29.396367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T16:59:29.396407Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T16:59:29.396432Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T16:59:29.396463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-04-04T16:59:29.396531Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2025-04-04T16:59:29.396571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T16:59:29.396608Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-04T16:59:29.396635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-04T16:59:29.396738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T16:59:29.398919Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-04T16:59:29.399007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-04T16:59:29.401256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T16:59:29.401314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:482:2443] TestWaitNotification: OK eventTxId 103 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-04T16:59:29.979415Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:29.983048Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:29.983339Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:29.983384Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:29.983461Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:29.983514Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:29.983557Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:29.983626Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:29.984274Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-04-04T16:59:29.984378Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:29.998564Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.001358Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.001516Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.002413Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.002579Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.003016Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.003354Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:267:2257] 2025-04-04T16:59:30.005887Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-04T16:59:30.005978Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:267:2257] 2025-04-04T16:59:30.006052Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.006113Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.007078Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:270:2259], now have 1 active actors on pipe 2025-04-04T16:59:30.062767Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.067133Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.067494Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-04T16:59:30.067544Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.067580Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-04T16:59:30.067633Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.067680Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.067734Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-04T16:59:30.068412Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:405:2360], now have 1 active actors on pipe 2025-04-04T16:59:30.068534Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.068705Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.071049Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.071189Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.071993Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.072103Z node 3 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.072453Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.072695Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:413:2366] 2025-04-04T16:59:30.074771Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-04T16:59:30.074830Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:413:2366] 2025-04-04T16:59:30.074892Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.074947Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.075664Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:416:2368], now have 1 active actors on pipe 2025-04-04T16:59:30.092688Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.096944Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.097256Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-04T16:59:30.097304Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.097345Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:30.097389Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.097451Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.097507Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:30.098228Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:465:2405], now have 1 active actors on pipe 2025-04-04T16:59:30.098319Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.098506Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.100863Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.100984Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.101767Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.101895Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.102271Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.102498Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:473:2411] 2025-04-04T16:59:30.104363Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:30.104440Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:473:2411] 2025-04-04T16:59:30.104491Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 b ... 38] doesn't have tx info 2025-04-04T16:59:30.825201Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.825245Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:30.825288Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.825344Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.825420Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:30.826365Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:463:2403], now have 1 active actors on pipe 2025-04-04T16:59:30.826440Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.826646Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 7(current 0) received from actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.829082Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.829211Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.829946Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 7 actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.830067Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.830615Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.830921Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:471:2409] 2025-04-04T16:59:30.833261Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:30.833343Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:471:2409] 2025-04-04T16:59:30.833409Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.833469Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.834692Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:474:2411], now have 1 active actors on pipe 2025-04-04T16:59:30.857998Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.864226Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.864479Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:30.864527Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.864563Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:30.864601Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.864648Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.864711Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:30.865348Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:523:2448], now have 1 active actors on pipe 2025-04-04T16:59:30.865440Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.865691Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:30.868833Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 
2025-04-04T16:59:30.869012Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.869974Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:30.870125Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.870570Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.870878Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:531:2454] 2025-04-04T16:59:30.872558Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:30.872616Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:531:2454] 2025-04-04T16:59:30.872662Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.872711Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.873528Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:534:2456], now have 1 active actors on pipe 2025-04-04T16:59:30.874879Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:542:2459], now have 1 active actors on pipe 2025-04-04T16:59:30.874993Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:543:2460], now have 1 active actors on pipe 2025-04-04T16:59:30.875099Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:544:2460], now have 1 active actors on pipe 2025-04-04T16:59:30.885995Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:549:2464], now have 1 active actors on pipe 2025-04-04T16:59:30.909953Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.913366Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.913740Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:30.913792Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.913941Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.914792Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.914862Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:30.914987Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.915399Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.915759Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:606:2509] 2025-04-04T16:59:30.917575Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T16:59:30.918874Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-04T16:59:30.919215Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T16:59:30.919603Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T16:59:30.919861Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-04T16:59:30.919914Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T16:59:30.919958Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T16:59:30.920006Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:30.920067Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:606:2509] 2025-04-04T16:59:30.920131Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.920191Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-04T16:59:30.921101Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:543:2460] destroyed 2025-04-04T16:59:30.921396Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:542:2459] destroyed
RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } }
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD]
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD]
Test command err: RandomSeed# 18120785817437616759
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:59:25.392805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:59:25.392900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:25.392942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:59:25.392975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:59:25.393032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:59:25.393066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:59:25.393140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:25.393250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0,
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:59:25.393592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:25.479424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:59:25.479483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:25.491248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:25.491525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:59:25.491691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:25.503535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:25.503831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:25.504490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:25.508561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:25.515983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:25.517460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:25.517537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:25.517726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:25.517800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:25.517852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:25.518001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.526983Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:59:25.730971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:25.731281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.731575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:25.732102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:25.732210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.739424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:25.739591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:59:25.739812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.739891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:25.739928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:25.739962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:25.743416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.743498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:25.743541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:25.747323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.747398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.747441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:25.747484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:25.752283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:25.759647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:25.759969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:25.761036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:25.761198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:25.761264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:25.761550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:25.761608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:25.761769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:25.761846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:25.767537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:25.767592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:25.767761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:25.767814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:25.768171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:25.768218Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:25.768310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:25.768344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:25.768378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:25.768406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:25.768441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:25.768485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:25.768519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:25.768544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:25.768607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:25.768640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:25.768683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:25.770573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:25.770699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:25.770733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
04-04T16:59:30.501187Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-04T16:59:30.501308Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2025-04-04T16:59:30.501360Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2025-04-04T16:59:30.507086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-04T16:59:30.507334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-04T16:59:30.507398Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:30.507510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2025-04-04T16:59:30.507689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:30.510017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2025-04-04T16:59:30.510164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2025-04-04T16:59:30.510958Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:30.511100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 12884904044 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:30.511166Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2025-04-04T16:59:30.511316Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2025-04-04T16:59:30.511493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 HEADERS: Host: localhost:19081 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 81E4882B-691C-462E-A755-43434B9CDC11 amz-sdk-request: attempt=1 content-length: 73 content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 2025-04-04T16:59:30.548057Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:30.548115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-04T16:59:30.548347Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:30.548416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2025-04-04T16:59:30.548855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-04T16:59:30.548923Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710765 2025-04-04T16:59:30.549857Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-04-04T16:59:30.549964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-04-04T16:59:30.550020Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-04-04T16:59:30.550055Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-04T16:59:30.550088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-04-04T16:59:30.550193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:19081 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 635A8915-CB15-4BC7-88F4-AE3BF8C7756D amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 2025-04-04T16:59:30.554607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:19081 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1230F38B-0F67-4B7B-ABA3-30A0F3C82611 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 2025-04-04T16:59:30.571591Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 814 RawX2: 12884904637 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-04T16:59:30.571652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-04-04T16:59:30.571790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 814 RawX2: 12884904637 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-04T16:59:30.571915Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 814 RawX2: 12884904637 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-04-04T16:59:30.571982Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:30.572025Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-04T16:59:30.572066Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T16:59:30.572129Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2025-04-04T16:59:30.572315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:30.574499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-04T16:59:30.574840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-04-04T16:59:30.574897Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-04-04T16:59:30.575025Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-04-04T16:59:30.575051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-04-04T16:59:30.575098Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-04-04T16:59:30.575127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-04-04T16:59:30.575156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2025-04-04T16:59:30.575239Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710765 2025-04-04T16:59:30.575280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-04-04T16:59:30.575310Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2025-04-04T16:59:30.575336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2025-04-04T16:59:30.575448Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T16:59:30.577310Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-04-04T16:59:30.577376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2025-04-04T16:59:30.578997Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T16:59:30.579063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:844:2771] TestWaitNotification: OK eventTxId 104
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD]
Test command err: 2025-04-04T16:59:14.170028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503644838048187:2106];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:14.172626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001607/r3tmp/tmp6GmtQB/pdisk_1.dat 2025-04-04T16:59:14.751833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:14.751981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:14.763964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:14.806571Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4193 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-04T16:59:15.006870Z node 1 :TX_PROXY DEBUG: actor# [1:7489503644838048393:2103] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:15.006942Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503649133015968:2258] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:15.007053Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503644838048445:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:15.007095Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503644838048445:2126], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:15.007360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:15.015511Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048093:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503649133015974:2259] 2025-04-04T16:59:15.015508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048090:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503649133015973:2259] 2025-04-04T16:59:15.015591Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503644838048090:2049] Subscribe: subscriber# [1:7489503649133015973:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.015589Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503644838048093:2052] Subscribe: subscriber# [1:7489503649133015974:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.015680Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048096:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503649133015975:2259] 2025-04-04T16:59:15.015715Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503644838048096:2055] Subscribe: subscriber# [1:7489503649133015975:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.015732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015973:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503644838048090:2049] 2025-04-04T16:59:15.015793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015974:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503644838048093:2052] 2025-04-04T16:59:15.015832Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015975:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503644838048096:2055] 2025-04-04T16:59:15.015837Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048090:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503649133015973:2259] 2025-04-04T16:59:15.015859Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048093:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7489503649133015974:2259] 2025-04-04T16:59:15.015873Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048096:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503649133015975:2259] 2025-04-04T16:59:15.015882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503649133015970:2259] 2025-04-04T16:59:15.015923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503649133015971:2259] 2025-04-04T16:59:15.015983Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503649133015969:2259][/dc-1] Set up state: owner# [1:7489503644838048445:2126], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:15.016209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503649133015972:2259] 2025-04-04T16:59:15.016282Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503649133015969:2259][/dc-1] Path was already updated: owner# [1:7489503644838048445:2126], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:15.016349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015973:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649133015970:2259], cookie# 1 2025-04-04T16:59:15.016368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015974:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649133015971:2259], cookie# 1 2025-04-04T16:59:15.016405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015975:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649133015972:2259], cookie# 1 2025-04-04T16:59:15.016531Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048090:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649133015973:2259], cookie# 1 2025-04-04T16:59:15.016613Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048093:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649133015974:2259], cookie# 1 2025-04-04T16:59:15.016637Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503644838048096:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649133015975:2259], cookie# 1 2025-04-04T16:59:15.016685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015973:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503644838048090:2049], cookie# 1 2025-04-04T16:59:15.016711Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7489503649133015974:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503644838048093:2052], cookie# 1 2025-04-04T16:59:15.016727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649133015975:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503644838048096:2055], cookie# 1 2025-04-04T16:59:15.016785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503649133015970:2259], cookie# 1 2025-04-04T16:59:15.016820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:15.016837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503649133015971:2259], cookie# 1 2025-04-04T16:59:15.016865Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:15.016924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503649133015972:2259], cookie# 1 2025-04-04T16:59:15.016943Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649133015969:2259][/dc-1] Unexpected sync response: sender# [1:7489503649133015972:2259], cookie# 1 2025-04-04T16:59:15.080263Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503644838048445:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:15.080661Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503644838048445:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... sult: self# [6:7489503710541080512:2254], recipient# [6:7489503710541080484:2321], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:29.328932Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7489503710541080484:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:29.461629Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489503692817496898:2106], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:29.461783Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489503692817496898:2106], cacheItem# { Subscriber: { Subscriber: [7:7489503709997367751:3126] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:29.461841Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489503692817496898:2106], cacheItem# { Subscriber: { Subscriber: [7:7489503709997367753:3127] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:29.461960Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489503709997367776:3132], recipient# [7:7489503709997367747:2401], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:29.462173Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7489503709997367747:2401], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:29.554942Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7489503693361211051:2106], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:29.555101Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7489503693361211051:2106], cacheItem# { Subscriber: { Subscriber: [6:7489503710541080485:2247] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:29.555153Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7489503693361211051:2106], cacheItem# { Subscriber: { Subscriber: [6:7489503710541080492:2248] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:29.555290Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7489503710541080513:2255], recipient# [6:7489503710541080484:2321], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:29.555537Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7489503710541080484:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:30.146331Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7489503693361211051:2106], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:30.146497Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7489503693361211051:2106], cacheItem# { Subscriber: { Subscriber: [6:7489503710541080500:2249] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:30.146605Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7489503714836047811:2256], recipient# [6:7489503714836047810:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:30.146753Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:30.185124Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489503693361210799:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:30.185232Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:59:30.210454Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7489503693361211051:2106], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:30.210619Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7489503693361211051:2106], cacheItem# { Subscriber: { Subscriber: [6:7489503697656178550:2237] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:30.210749Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7489503714836047813:2257], recipient# [6:7489503714836047812:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
Test command err: 2025-04-04T16:59:30.111866Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.117671Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.118037Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:30.118086Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.118159Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:30.118212Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.118279Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.118405Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:30.119197Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:260:2252], now have 1 active actors on pipe 2025-04-04T16:59:30.119335Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.140614Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [1:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.144230Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.144382Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.145251Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [1:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark:
6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.145394Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.145839Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.146200Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [1:268:2258] 2025-04-04T16:59:30.148346Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-04-04T16:59:30.148405Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:268:2258] 2025-04-04T16:59:30.148457Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.148506Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.149271Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:271:2260], now have 1 active actors on pipe 2025-04-04T16:59:30.208153Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.211963Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.212308Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-04T16:59:30.212361Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.212405Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-04T16:59:30.212466Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.212531Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.212591Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-04T16:59:30.213322Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:406:2361], now have 1 active actors on pipe 2025-04-04T16:59:30.213389Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.213588Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [1:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.216135Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 
0 } 2025-04-04T16:59:30.216273Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.217242Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [1:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.217350Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.217734Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.217993Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [1:414:2367] 2025-04-04T16:59:30.220164Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-04T16:59:30.220249Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:414:2367] 2025-04-04T16:59:30.220315Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.220366Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.221131Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:417:2369], now have 1 active actors on pipe 2025-04-04T16:59:30.246972Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.251595Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.251928Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-04T16:59:30.251976Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.252015Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:30.252083Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.252179Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.252261Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:30.252959Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:466:2406], now have 1 active actors on pipe 2025-04-04T16:59:30.253082Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.253281Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [1:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.256093Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.256252Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.257148Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [1:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.257272Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.257686Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.257964Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [1:474:2412] 2025-04-04T16:59:30.259923Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
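The "config: ..." payloads in the update/apply/applied lines above are printed in protobuf text format, so a dump copied verbatim from this log can be re-parsed offline to inspect individual fields (CacheSize, LifetimeSeconds, PartitionIds, and so on). A minimal C++ sketch, assuming the dumped message is NKikimrPQ::TPQTabletConfig from ydb/core/protos/pqconfig.proto — the type name and header path are assumptions based on the YDB sources, not something the log states:

    #include <string>
    #include <google/protobuf/text_format.h>
    #include <ydb/core/protos/pqconfig.pb.h>  // assumed header for TPQTabletConfig

    // Re-parse one "Config applied ... config: ..." dump copied from this log.
    // TextFormat is the same encoding the tablet used to print it, so a
    // verbatim copy should round-trip into the message unchanged.
    bool ReparseConfigDump(const std::string& dump, NKikimrPQ::TPQTabletConfig* cfg) {
        return google::protobuf::TextFormat::ParseFromString(dump, cfg);
    }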
2025-04-04T16:59:30.259999Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [1:474:2412] 2025-04-04T16:59:30.260056Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.260109Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.260980Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:477:2414], ... SQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:31.780903Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:31.782953Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:31.783080Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.783692Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:31.783812Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.784089Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.784254Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:472:2410] 2025-04-04T16:59:31.785832Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:31.785885Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:472:2410] 2025-04-04T16:59:31.785933Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.785983Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-04-04T16:59:31.786819Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:475:2412], now have 1 active actors on pipe 2025-04-04T16:59:31.807221Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:31.811736Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:31.812151Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:31.812219Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:31.812266Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:31.812316Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:31.812377Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.812449Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:31.813240Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:524:2449], now have 1 active actors on pipe 2025-04-04T16:59:31.813369Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:31.813585Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.817320Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.817477Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.818178Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.818382Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.818840Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.819053Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:532:2455] 2025-04-04T16:59:31.820702Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
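Each reconfiguration above follows the same three-line pattern: "Config update version N(current M)" on receipt, "Apply new config", then "Config applied version N". In this run M is always 0 because every tablet first reports "no config, start with empty partitions and default config". A hedged reconstruction of the version gate that wording implies — not the actual YDB code; ui64 is the codebase's unsigned 64-bit typedef:

    // Only strictly newer config versions are applied; a stale proposal
    // would produce no "Config applied" line.
    void OnUpdateConfig(ui64 proposed, ui64 current) {
        if (proposed <= current) {
            return;  // duplicate or out-of-date proposal, ignore
        }
        // persist the new config, bump current to proposed,
        // then log "Config applied version <proposed>"
    }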
2025-04-04T16:59:31.820766Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:532:2455] 2025-04-04T16:59:31.820813Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.820859Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:31.821481Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:535:2457], now have 1 active actors on pipe 2025-04-04T16:59:31.823155Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:544:2460], now have 1 active actors on pipe 2025-04-04T16:59:31.823965Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:547:2461], now have 1 active actors on pipe 2025-04-04T16:59:31.824131Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:546:2461], now have 1 active actors on pipe 2025-04-04T16:59:31.824292Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:548:2461], now have 1 active actors on pipe 2025-04-04T16:59:31.824985Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:561:2472], now have 1 active actors on pipe 2025-04-04T16:59:31.858580Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:31.861802Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:31.862174Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:31.862232Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:31.862412Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:31.862920Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.862974Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:31.863102Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.863506Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.863715Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:618:2517] 2025-04-04T16:59:31.865796Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T16:59:31.867136Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-04T16:59:31.867436Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T16:59:31.867758Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T16:59:31.868027Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-04T16:59:31.868072Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T16:59:31.868115Z node 3 :PERSQUEUE INFO: 
[rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T16:59:31.868160Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:31.868219Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:618:2517] 2025-04-04T16:59:31.868278Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.868334Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:31.869359Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:544:2460] destroyed 2025-04-04T16:59:31.869434Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server disconnected, pipe [3:546:2461] destroyed 2025-04-04T16:59:31.869529Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:547:2461] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-04T16:59:31.321716Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:31.325751Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:31.326081Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:31.326132Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:31.326190Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:31.326258Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:31.326310Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.326413Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:31.327131Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-04T16:59:31.327287Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:31.360679Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:31.363578Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:31.363744Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.364639Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:31.364792Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.365174Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.365517Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-04T16:59:31.367948Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
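Three MessageBus status values recur in these transcripts: Status: 1 on successful RESPONSE/RESULT blocks (ErrorCode: OK), Status: 128 on the "is not created" / "not a topic" assert checks, and Status: 130 on the "Timeout while waiting for response ... Marker# PQ16" checks. The numeric values are taken from the log itself; mapping them to the NMsgBusProxy enumerator names below is an assumption (check ydb/core/protos/msgbus.proto before relying on it):

    // Assumed names for the Status values observed in this log.
    enum EAssumedMsgBusStatus : unsigned int {
        MSTATUS_OK      = 1,    // successful RESPONSE/RESULT, ErrorCode: OK
        MSTATUS_ERROR   = 128,  // UNKNOWN_TOPIC / "not a topic" asserts
        MSTATUS_TIMEOUT = 130,  // "Timeout while waiting for response" asserts
    };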
2025-04-04T16:59:31.368023Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-04T16:59:31.368085Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.368140Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:31.368983Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-04T16:59:31.452479Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:31.461992Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:31.462395Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-04T16:59:31.462449Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:31.462491Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:31.462532Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:31.462578Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.462644Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:31.463385Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:403:2358], now have 1 active actors on pipe 2025-04-04T16:59:31.463477Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:31.463676Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 2(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:31.466355Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:31.466529Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.467457Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 2 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:31.467604Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.468006Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.468231Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:411:2364] 2025-04-04T16:59:31.470367Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:31.470442Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:411:2364] 2025-04-04T16:59:31.470520Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.470598Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:31.471426Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:414:2366], now have 1 active actors on pipe 2025-04-04T16:59:31.494533Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:31.499235Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:31.499567Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:31.499625Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:31.499687Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:31.499729Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:31.499779Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.499864Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:31.500550Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:463:2403], now have 1 active actors on pipe 2025-04-04T16:59:31.500707Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:31.500916Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 3(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.503609Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.503740Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.504575Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config 
applied version 3 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.504698Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.505088Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.505325Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:471:2409] 2025-04-04T16:59:31.507441Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:31.507514Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:471:2409] 2025-04-04T16:59:31.507582Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.507639Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:31.508526Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:474:2411], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-04-04T16:59:31.517826Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:483:2414], now have 1 active actors on pipe 2025-04-04T16:59:31.518454Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:486:2415], now have 1 active actors on pipe 2025-04-04T16:59:31.518850Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:487:2415], now have 1 active actors on pipe 2025-04-04T16:59:31.519524Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:483:2414] destroyed 2025-04-04T16:59:31.520172Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:487:2415] destroyed 2025-04-04T16:59:31.520221Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [2:486:2415] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |80.3%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |80.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-04T16:59:30.858841Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.868573Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.868814Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:30.868857Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.868902Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:30.868948Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.868992Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.869067Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:30.869671Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-04-04T16:59:30.869796Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.883095Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.886319Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.886458Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.887299Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.887444Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.887802Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.888128Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-04T16:59:30.890527Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
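The paired "server connected, pipe [x:y:z], now have 1 active actors on pipe" and "server disconnected, pipe [...] destroyed" lines track tablet-pipe sessions opened from the test's request actor to each PQ tablet; the burst of disconnects near the end of a test is that actor tearing its pipes down once every answer (or failure) has arrived. A client-side sketch, under the assumption that the NTabletPipe helpers from ydb/core/tablet are used the way other YDB actors use them; the TEvUpdateConfig constructor arguments are elided because the log does not show them:

    // Open a pipe to one PQ tablet (id copied from the log) and send a request.
    const ui64 pqTabletId = 72057594037928037ULL;
    TActorId pipe = ctx.Register(NTabletPipe::CreateClient(ctx.SelfID, pqTabletId));
    NTabletPipe::SendData(ctx, pipe, new TEvPersQueue::TEvUpdateConfig(/* ... */));
    // Closing the pipe later is what the tablet logs as
    // "server disconnected, pipe [...] destroyed".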
2025-04-04T16:59:30.890608Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-04T16:59:30.890698Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.890759Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.891678Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-04T16:59:30.946573Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.954707Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.955110Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-04T16:59:30.955172Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.955235Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-04T16:59:30.955283Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.955337Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.955406Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-04T16:59:30.956237Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-04-04T16:59:30.956385Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.956587Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.958922Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.959058Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.959875Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:30.959973Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:30.960268Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:30.960438Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:413:2366] 2025-04-04T16:59:30.961870Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-04T16:59:30.961917Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:413:2366] 2025-04-04T16:59:30.961973Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:30.962011Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:30.962672Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-04-04T16:59:30.988765Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:30.994214Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:30.994570Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-04T16:59:30.994619Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:30.994692Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:30.994735Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:30.994786Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:30.994864Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:30.995590Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:465:2405], now have 1 active actors on pipe 2025-04-04T16:59:30.995735Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:30.995929Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.999312Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:30.999500Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.000424Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:31.000646Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.001118Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.001406Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:473:2411] 2025-04-04T16:59:31.003536Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:31.003623Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:473:2411] 2025-04-04T16:59:31.003721Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.003791Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... EBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:32.589230Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:32.589272Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:32.589329Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.589395Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:32.590165Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:461:2401], now have 1 active actors on pipe 2025-04-04T16:59:32.590231Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:32.590453Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:32.593275Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:32.593413Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.593969Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:32.594077Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:32.594407Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:32.594626Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:469:2407] 2025-04-04T16:59:32.596012Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:32.596079Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:469:2407] 2025-04-04T16:59:32.596175Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:32.596218Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:32.597013Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:472:2409], now have 1 active actors on pipe 2025-04-04T16:59:32.617217Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:32.622364Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:32.622761Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:32.622811Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:32.622855Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:32.622899Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:32.622953Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.623017Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:32.623768Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:521:2446], now have 1 active actors on pipe 2025-04-04T16:59:32.623891Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:32.624105Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:32.627350Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { 
PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:32.627497Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.628180Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [4:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:32.628308Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:32.628808Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:32.629027Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:529:2452] 2025-04-04T16:59:32.631147Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:32.631224Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:529:2452] 2025-04-04T16:59:32.631285Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:32.631347Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:32.632292Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:532:2454], now have 1 active actors on pipe 2025-04-04T16:59:32.633557Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:538:2457], now have 1 active actors on pipe 2025-04-04T16:59:32.633725Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:539:2458], now have 1 active actors on pipe 2025-04-04T16:59:32.634022Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:540:2458], now have 1 active actors on pipe 2025-04-04T16:59:32.645164Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:548:2465], now have 1 active actors on pipe 2025-04-04T16:59:32.670565Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:32.676084Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:32.676387Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:32.676433Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:32.676583Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:32.677093Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.677155Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:32.677261Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:32.677610Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:32.677815Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:605:2510] 2025-04-04T16:59:32.679405Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T16:59:32.680591Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-04T16:59:32.680924Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T16:59:32.681226Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T16:59:32.681408Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-04T16:59:32.681448Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T16:59:32.681494Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T16:59:32.681528Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:32.681589Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:605:2510] 2025-04-04T16:59:32.681654Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:32.681693Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-04T16:59:32.682439Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:539:2458] destroyed 2025-04-04T16:59:32.682678Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:538:2457] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-04T16:59:31.557240Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:31.563066Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:31.563532Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:31.563599Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:31.563676Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:31.563761Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:31.563824Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.563917Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:31.564715Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:260:2252], now have 1 active actors on pipe 2025-04-04T16:59:31.564854Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:31.585273Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:31.590726Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:31.590921Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.591935Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:31.592118Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.592615Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.593012Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:268:2258] 2025-04-04T16:59:31.600109Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
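Two initialization shapes appear in this log. A partition bootstrapped over empty state runs only TInitConfigStep and TInitInternalFieldsStep before "Initializing completed" and comes up as generation 2; when a tablet is restarted over existing disk state (the 72057594037928139 restarts above), the full step chain runs, TInitEndWriteTimestampStep reports "skipped because already initialized", and the partition comes up as generation 3. The observed order, kept here for reference:

    // Step order observed when a PQ partition re-initializes over existing
    // state; fresh partitions stop after the second step.
    static const char* kObservedInitSteps[] = {
        "TInitConfigStep",     "TInitInternalFieldsStep",
        "TInitDiskStatusStep", "TInitMetaStep",
        "TInitInfoRangeStep",  "TInitDataRangeStep",
        "TInitDataStep",       "TInitEndWriteTimestampStep",
    };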
2025-04-04T16:59:31.600203Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2258] 2025-04-04T16:59:31.600277Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.600348Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:31.601436Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:271:2260], now have 1 active actors on pipe 2025-04-04T16:59:31.675257Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:31.678816Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:31.679054Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:31.679091Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:31.679123Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:31.679159Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:31.679203Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.679248Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:31.679849Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:404:2359], now have 1 active actors on pipe 2025-04-04T16:59:31.679922Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:31.680051Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.687535Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.687686Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:31.688570Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:31.688732Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:31.689091Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:31.689290Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:412:2365] 2025-04-04T16:59:31.690961Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:31.691023Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:412:2365] 2025-04-04T16:59:31.691069Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:31.691107Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:31.691768Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:415:2367], now have 1 active actors on pipe 2025-04-04T16:59:31.693188Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:423:2370], now have 1 active actors on pipe 2025-04-04T16:59:31.693355Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:425:2371], now have 1 active actors on pipe 2025-04-04T16:59:31.693488Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:423:2370] destroyed 2025-04-04T16:59:31.693857Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:425:2371] destroyed 2025-04-04T16:59:32.166694Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:32.175803Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:32.176200Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:32.176250Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:32.176279Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:32.176310Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:32.176367Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.176417Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:32.176986Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-04-04T16:59:32.177068Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:32.177270Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:32.183291Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:32.183452Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.184152Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:32.184311Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:32.184682Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:32.184898Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:267:2257] 2025-04-04T16:59:32.187303Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing c ... 
xId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:32.318385Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:32.318437Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:32.318498Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.318586Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:32.319488Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:464:2404], now have 1 active actors on pipe 2025-04-04T16:59:32.319563Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:32.319807Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 5(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:32.323350Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:32.323513Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.324533Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:32.324674Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:32.325125Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:32.325423Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:472:2410] 2025-04-04T16:59:32.327673Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:32.327757Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:472:2410] 2025-04-04T16:59:32.327825Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:32.327885Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-04-04T16:59:32.328848Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:475:2412], now have 1 active actors on pipe 2025-04-04T16:59:32.359869Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:32.378647Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:32.379046Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:32.379109Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:32.379166Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:32.379216Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:32.379268Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.379339Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:32.380170Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:524:2449], now have 1 active actors on pipe 2025-04-04T16:59:32.380299Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:32.380501Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:32.384003Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:32.384166Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.385111Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:32.385272Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:32.385703Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:32.385959Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:532:2455] 2025-04-04T16:59:32.390437Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
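Each 'Config update version N(current M)' line above records a monotonic version gate: the tablet reports the incoming Version next to the one it currently holds (0 for a fresh tablet) before applying it. A rough sketch of the implied check, assuming (not verified against the TPersQueue source) that it is a strict greater-than comparison:

    #include <cstdint>

    // Hypothetical illustration of the gate implied by lines like
    // "Config update version 6(current 0)"; not the actual tablet code.
    bool ShouldApplyConfig(uint64_t currentVersion, uint64_t proposedVersion) {
        // Only strictly newer configs are applied; stale or replayed
        // updates would be dropped.
        return proposedVersion > currentVersion;
    }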
2025-04-04T16:59:32.390545Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:532:2455] 2025-04-04T16:59:32.390623Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:32.390695Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:32.391726Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:535:2457], now have 1 active actors on pipe 2025-04-04T16:59:32.393551Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:544:2461], now have 1 active actors on pipe 2025-04-04T16:59:32.393620Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:543:2460], now have 1 active actors on pipe 2025-04-04T16:59:32.393736Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:545:2461], now have 1 active actors on pipe 2025-04-04T16:59:32.404859Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:550:2465], now have 1 active actors on pipe 2025-04-04T16:59:32.433292Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:32.436702Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:32.437064Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:32.437113Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:32.437276Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:32.438072Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:32.438127Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:32.438273Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:32.438638Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:32.438905Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:607:2510] 2025-04-04T16:59:32.440999Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T16:59:32.442638Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-04T16:59:32.442991Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T16:59:32.443331Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T16:59:32.443557Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-04T16:59:32.443608Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T16:59:32.443655Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
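Note the contrast with the earlier bootstraps: a freshly created partition (generation 2) goes from TInitConfigStep and TInitInternalFieldsStep straight to 'Initializing completed', while the restarted tablet above (generation 3) walks the full chain before declaring the partition ready. The step order observed in this log, written out as an illustrative enumeration (the real steps are separate initializer classes, not an enum):

    // Initializer steps in the order they appear in the log above.
    enum class EPartitionInitStep {
        InitConfigStep,
        InitInternalFieldsStep,
        InitDiskStatusStep,         // appears only after the tablet restart
        InitMetaStep,
        InitInfoRangeStep,
        InitDataRangeStep,
        InitDataStep,
        InitEndWriteTimestampStep,  // skipped here: "already initialized"
    };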
2025-04-04T16:59:32.443699Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:32.443769Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:607:2510] 2025-04-04T16:59:32.443854Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:32.443914Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:32.444901Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:544:2461] destroyed 2025-04-04T16:59:32.444979Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:543:2460] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> 
TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# 
PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> TTxLocatorTest::TestZeroRange >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:58:30.026502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:58:30.026619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:58:30.026675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:58:30.026712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:58:30.026753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:58:30.026781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:58:30.026847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:58:30.026965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:58:30.027346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:30.110356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:58:30.110419Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:30.122291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:30.122571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:58:30.122752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:58:30.133474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:58:30.133820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:58:30.134474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:30.135432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:58:30.141157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:30.142619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:30.142714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:30.142901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:58:30.142950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:30.142994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:58:30.143146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.149987Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: 
[1:15:2062] 2025-04-04T16:58:30.308683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:58:30.308929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.309153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:58:30.309391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:58:30.309459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.313067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:30.313216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:58:30.313407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.313486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:58:30.313535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:58:30.313568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:58:30.316581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.316649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:58:30.316696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:58:30.318849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.318912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.318949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:30.319008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:58:30.322816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:58:30.325037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:58:30.325231Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:58:30.326379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:30.326522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:58:30.326570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:30.326896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:58:30.326953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:30.327123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:58:30.327324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:58:30.329625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:30.329685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:30.329877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:30.329922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:58:30.330274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.330346Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:58:30.330468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:30.330507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:30.330545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:30.330582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:30.330630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:58:30.330694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:30.330731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:58:30.330759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:58:30.330838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:58:30.330884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:58:30.330915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:58:30.332775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:30.332881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:30.332922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 34.340323Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-04T16:59:34.340462Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-04T16:59:34.340504Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-04T16:59:34.340555Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-04T16:59:34.340652Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T16:59:34.340794Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T16:59:34.395268Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:323:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:34.395365Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:34.395481Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-04T16:59:34.395562Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:34.395602Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-04T16:59:34.395638Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-04T16:59:34.395672Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-04-04T16:59:34.395876Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:323:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T16:59:34.396062Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-04T16:59:34.396508Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:323:2307], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 49 Memory: 124260 Storage: 14054 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 44 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-04T16:59:34.396571Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T16:59:34.396643Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0049 2025-04-04T16:59:34.396771Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T16:59:34.396818Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T16:59:34.407370Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:330:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:34.407489Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:34.407582Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2025-04-04T16:59:34.407656Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:34.407696Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409547 2025-04-04T16:59:34.407730Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-04-04T16:59:34.407763Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409547 2025-04-04T16:59:34.407952Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:330:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T16:59:34.408113Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-04-04T16:59:34.408503Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:330:2311], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27 Memory: 119336 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 44 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-04T16:59:34.408560Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T16:59:34.408615Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0027 2025-04-04T16:59:34.408750Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T16:59:34.443724Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T16:59:34.443827Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T16:59:34.443867Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-04T16:59:34.443946Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-04T16:59:34.443985Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-04T16:59:34.444114Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-04T16:59:34.444190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T16:59:34.444243Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-04T16:59:34.444346Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-04-04T16:59:34.444446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T16:59:34.444485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T16:59:34.444522Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 
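The PersistSingleStats lines show per-shard stats being folded into per-path totals: pathId 2 accumulates RowCount 100 / DataSize 13940 from shard 72057594046678944:1 and RowCount 0 / DataSize 0 from shard 72057594046678944:2. A minimal sketch of that fold, with hypothetical struct and field names (the actual schemeshard bookkeeping is more involved):

    #include <cstdint>
    #include <map>

    // Illustrative fold of the TEvPeriodicTableStats payloads seen above.
    struct TShardStats { uint64_t RowCount = 0; uint64_t DataSize = 0; };
    struct TPathTotals { uint64_t RowCount = 0; uint64_t DataSize = 0; };

    void AddShardStats(std::map<uint64_t, TPathTotals>& totals,
                       uint64_t pathId, const TShardStats& s) {
        auto& t = totals[pathId];
        t.RowCount += s.RowCount;  // 100 + 0 for pathId 2 in this run
        t.DataSize += s.DataSize;  // 13940 + 0
    }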
2025-04-04T16:59:34.444567Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-04-04T16:59:34.444676Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T16:59:34.455792Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T16:59:34.455904Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T16:59:34.455944Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T16:59:34.479377Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1342:3263], Recipient [3:323:2307]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:34.479477Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:34.479536Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409546, clientId# [3:1341:3262], serverId# [3:1342:3263], sessionId# [0:0:0] 2025-04-04T16:59:34.479799Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1340:3261], Recipient [3:323:2307]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-04-04T16:59:34.482580Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1345:3266], Recipient [3:330:2311]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:34.482636Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:34.482696Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409547, clientId# [3:1344:3265], serverId# [3:1345:3266], sessionId# [0:0:0] 2025-04-04T16:59:34.482855Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1343:3264], Recipient [3:330:2311]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TTxLocatorTest::TestZeroRange [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-04-04T16:59:35.775765Z node 1 
:TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:59:35.776296Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:59:35.777090Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:35.778864Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:35.779411Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:35.788996Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:35.789125Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:35.789167Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:35.789232Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:59:35.789427Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:35.789526Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:35.789650Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:59:35.790425Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#0 2025-04-04T16:59:35.790996Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:35.791121Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:35.791236Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-04-04T16:59:35.791283Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 0 to# 0 expected SUCCESS >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-04-04T16:59:34.778048Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:34.781415Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:34.781626Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:34.781663Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:34.781707Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:34.781748Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:34.781784Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:34.781833Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:34.782379Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-04-04T16:59:34.782474Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:34.799131Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:34.803985Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:34.804145Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:34.805079Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:34.805239Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:34.805606Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:34.805913Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-04T16:59:34.808368Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-04T16:59:34.808444Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-04T16:59:34.808514Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:34.808577Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:34.809432Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-04T16:59:34.858942Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:34.861477Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:34.861697Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-04-04T16:59:34.861732Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:34.861773Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-04-04T16:59:34.861813Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:34.861850Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:34.861902Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-04-04T16:59:34.862477Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:404:2359], now have 1 active actors on pipe 2025-04-04T16:59:34.862618Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:34.862806Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:34.865001Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:34.865095Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:34.865684Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:34.865771Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:34.866036Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:34.866190Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:412:2365] 2025-04-04T16:59:34.867514Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-04-04T16:59:34.867557Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:412:2365] 2025-04-04T16:59:34.867593Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:34.867634Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:34.868186Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:415:2367], now have 1 active actors on pipe 2025-04-04T16:59:34.880669Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:34.883796Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:34.884025Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-04-04T16:59:34.884056Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:34.884086Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-04-04T16:59:34.884115Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:34.884149Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:34.884191Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-04-04T16:59:34.884678Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:464:2404], now have 1 active actors on pipe 2025-04-04T16:59:34.884758Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:34.884895Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:34.886640Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:34.886761Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:34.887319Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-04-04T16:59:34.887444Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:34.887685Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:34.887838Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:472:2410] 2025-04-04T16:59:34.889301Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-04-04T16:59:34.889343Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:472:2410] 2025-04-04T16:59:34.889404Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:34.889453Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... edTxs.size=0 2025-04-04T16:59:35.638130Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:35.638185Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:35.638875Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:522:2447], now have 1 active actors on pipe 2025-04-04T16:59:35.638981Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:35.639164Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:35.641708Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:35.641868Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:35.642793Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:35.642949Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] 
Start initializing step TInitConfigStep 2025-04-04T16:59:35.643327Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:35.643559Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:530:2453] 2025-04-04T16:59:35.645509Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:35.645582Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:530:2453] 2025-04-04T16:59:35.645641Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:35.645687Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:35.646527Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:533:2455], now have 1 active actors on pipe 2025-04-04T16:59:35.647655Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:539:2458], now have 1 active actors on pipe 2025-04-04T16:59:35.647969Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:35.648220Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:540:2459], now have 1 active actors on pipe 2025-04-04T16:59:35.648264Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:541:2459], now have 1 active actors on pipe 2025-04-04T16:59:35.648391Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:35.648537Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:35.659551Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:549:2466], now have 1 active actors on pipe 2025-04-04T16:59:35.683166Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:35.686487Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:35.686820Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:35.686865Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:35.686994Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:35.687815Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:35.687871Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:35.687996Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:35.688333Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:35.688552Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:606:2511] 2025-04-04T16:59:35.690458Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T16:59:35.691780Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-04T16:59:35.692098Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T16:59:35.692382Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T16:59:35.692604Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-04T16:59:35.692643Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T16:59:35.692685Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T16:59:35.692738Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:35.692809Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:606:2511] 2025-04-04T16:59:35.692862Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:35.692907Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-04T16:59:35.693726Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:540:2459] destroyed 2025-04-04T16:59:35.693797Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:539:2458] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TTxLocatorTest::TestWithReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-04-04T16:59:36.424780Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:59:36.425295Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:59:36.426069Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:36.427812Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.428307Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:36.437071Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.437193Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.437231Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.437276Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:59:36.437413Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.437478Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:36.437557Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:59:36.438886Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2114] requested range size#100000 2025-04-04T16:59:36.439415Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#100000 2025-04-04T16:59:36.439614Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2106] requested range size#100000 2025-04-04T16:59:36.439985Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#100000 2025-04-04T16:59:36.440214Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2110] requested range size#100000 2025-04-04T16:59:36.440744Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.440851Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2112] requested range size#100000 2025-04-04T16:59:36.441085Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2116] requested range size#100000 2025-04-04T16:59:36.441295Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.441438Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.441606Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.441743Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2118] requested range size#100000 2025-04-04T16:59:36.441938Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.442030Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.442094Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2120] requested range size#100000 2025-04-04T16:59:36.442316Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.442357Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2122] requested range size#100000 2025-04-04T16:59:36.442520Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.442591Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.442733Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-04T16:59:36.442777Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2114] TEvAllocateResult from# 0 to# 100000 2025-04-04T16:59:36.442921Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.443077Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-04T16:59:36.443107Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 100000 to# 200000 2025-04-04T16:59:36.443198Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-04T16:59:36.443234Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:71:2106] TEvAllocateResult from# 200000 to# 300000 2025-04-04T16:59:36.443372Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.443514Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.443583Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-04T16:59:36.443607Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 300000 to# 400000 2025-04-04T16:59:36.443685Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-04-04T16:59:36.443707Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:75:2110] TEvAllocateResult from# 400000 to# 500000 2025-04-04T16:59:36.443837Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.443877Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-04T16:59:36.443906Z node 1 :TX_ALLOCATOR DEBUG: 
tablet# 72057594046447617 Send to Sender# [1:77:2112] TEvAllocateResult from# 500000 to# 600000 2025-04-04T16:59:36.443976Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.444013Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.444068Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-04T16:59:36.444117Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2116] TEvAllocateResult from# 600000 to# 700000 2025-04-04T16:59:36.444184Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.444265Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.444298Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-04T16:59:36.444316Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:83:2118] TEvAllocateResult from# 700000 to# 800000 2025-04-04T16:59:36.444372Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.444450Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-04T16:59:36.444469Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:85:2120] TEvAllocateResult from# 800000 to# 900000 2025-04-04T16:59:36.444575Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.444620Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.444679Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-04T16:59:36.444706Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:87:2122] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-04T16:59:36.453070Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:128:2162] requested range size#100000 2025-04-04T16:59:36.453422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:110:2144] requested range size#100000 2025-04-04T16:59:36.453781Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:112:2146] requested range size#100000 2025-04-04T16:59:36.454017Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.454154Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.454300Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:114:2148] requested range size#100000 2025-04-04T16:59:36.454727Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:116:2150] requested range size#100000 2025-04-04T16:59:36.454919Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.454960Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.455112Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:15:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.455213Z node 1 :TX_ALLOCATOR DEBUG: tablet# ... 000 2025-04-04T16:59:36.512501Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.512561Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2025-04-04T16:59:36.512584Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:396:2430] TEvAllocateResult from# 8300000 to# 8400000 2025-04-04T16:59:36.512688Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.512770Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2025-04-04T16:59:36.512790Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:398:2432] TEvAllocateResult from# 8400000 to# 8500000 2025-04-04T16:59:36.512930Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.512991Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2025-04-04T16:59:36.513011Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:400:2434] TEvAllocateResult from# 8500000 to# 8600000 2025-04-04T16:59:36.513131Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.513188Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2025-04-04T16:59:36.513210Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:402:2436] TEvAllocateResult from# 8600000 to# 8700000 2025-04-04T16:59:36.513278Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-04-04T16:59:36.513306Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:404:2438] TEvAllocateResult from# 8700000 to# 8800000 2025-04-04T16:59:36.513403Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.513492Z node 1 :TABLET_MAIN DEBUG: Put 
Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.513556Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-04-04T16:59:36.513577Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:406:2440] TEvAllocateResult from# 8800000 to# 8900000 2025-04-04T16:59:36.513663Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-04-04T16:59:36.513692Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:408:2442] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-04T16:59:36.517813Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:430:2464] requested range size#100000 2025-04-04T16:59:36.518214Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:432:2466] requested range size#100000 2025-04-04T16:59:36.518604Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:434:2468] requested range size#100000 2025-04-04T16:59:36.518790Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.518989Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.519135Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:436:2470] requested range size#100000 2025-04-04T16:59:36.519401Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.519524Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:438:2472] requested range size#100000 2025-04-04T16:59:36.519647Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.519891Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:440:2474] requested range size#100000 2025-04-04T16:59:36.520015Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.520164Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.520286Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:442:2476] requested range size#100000 2025-04-04T16:59:36.520633Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:444:2478] requested range size#100000 2025-04-04T16:59:36.520740Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.520903Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.520952Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.521060Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:446:2480] requested range size#100000 2025-04-04T16:59:36.521171Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.521314Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.521475Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-04-04T16:59:36.521523Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:430:2464] TEvAllocateResult from# 9000000 to# 9100000 2025-04-04T16:59:36.521601Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.521721Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:448:2482] requested range size#100000 2025-04-04T16:59:36.521835Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.521975Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-04T16:59:36.522009Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:432:2466] TEvAllocateResult from# 9100000 to# 9200000 2025-04-04T16:59:36.522110Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-04T16:59:36.522135Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:434:2468] TEvAllocateResult from# 9200000 to# 9300000 2025-04-04T16:59:36.522177Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.522220Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.522383Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-04T16:59:36.522405Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:436:2470] TEvAllocateResult from# 9300000 to# 9400000 2025-04-04T16:59:36.522492Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.522625Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-04T16:59:36.522679Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:438:2472] TEvAllocateResult from# 9400000 to# 9500000 2025-04-04T16:59:36.522793Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved 
to# 9600000 2025-04-04T16:59:36.522814Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:440:2474] TEvAllocateResult from# 9500000 to# 9600000 2025-04-04T16:59:36.522856Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.522985Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-04T16:59:36.523009Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:442:2476] TEvAllocateResult from# 9600000 to# 9700000 2025-04-04T16:59:36.523056Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.523092Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.523187Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-04T16:59:36.523215Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:444:2478] TEvAllocateResult from# 9700000 to# 9800000 2025-04-04T16:59:36.523330Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-04T16:59:36.523353Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:446:2480] TEvAllocateResult from# 9800000 to# 9900000 2025-04-04T16:59:36.523397Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:36.523506Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-04T16:59:36.523539Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:448:2482] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS >> TTxLocatorTest::TestAllocateAllByPieces >> TTxLocatorTest::TestAllocateAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-04T16:59:36.111792Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:36.116594Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:36.116941Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:36.116986Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:36.117054Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:36.117116Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:36.117167Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:36.117238Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:36.117997Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-04-04T16:59:36.118147Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:36.144056Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:36.147417Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:36.147596Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:36.148527Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:36.148702Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:36.149122Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:36.149457Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-04T16:59:36.152018Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-04T16:59:36.152090Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-04T16:59:36.152158Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:36.152233Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:36.153116Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-04T16:59:36.222109Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:36.226959Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:36.227323Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:36.227400Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:36.227449Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:36.227496Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:36.227570Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:36.227634Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:36.228475Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:404:2359], now have 1 active actors on pipe 2025-04-04T16:59:36.228548Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:36.228746Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:36.231820Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:36.231975Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:36.232892Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:36.233049Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:36.233494Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:36.233727Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:412:2365] 2025-04-04T16:59:36.235989Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:36.236080Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:412:2365] 2025-04-04T16:59:36.236152Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:36.236205Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:36.237102Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:415:2367], now have 1 active actors on pipe 2025-04-04T16:59:36.238587Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:421:2370], now have 1 active actors on pipe 2025-04-04T16:59:36.238934Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:36.239237Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:423:2371], now have 1 active actors on pipe 2025-04-04T16:59:36.239376Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:36.239719Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:421:2370] destroyed 2025-04-04T16:59:36.240063Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:423:2371] destroyed 2025-04-04T16:59:36.709539Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:36.714389Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:36.714791Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:36.714849Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:36.714890Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:36.714934Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:36.714986Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:36.715055Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:36.715745Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:259:2251], now have 1 active actors on pipe 2025-04-04T16:59:36.715862Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:36.716082Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:36.718899Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:36.719067Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:36.719769Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 L ... txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:36.867986Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:36.868362Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:36.868578Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:532:2455] 2025-04-04T16:59:36.870329Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-04-04T16:59:36.870396Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:532:2455] 2025-04-04T16:59:36.870440Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:36.870475Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:36.871198Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:535:2457], now have 1 active actors on pipe 2025-04-04T16:59:36.872470Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:541:2460], now have 1 active actors on pipe 2025-04-04T16:59:36.872796Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:36.873023Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:542:2461], now have 1 active actors on pipe 2025-04-04T16:59:36.873061Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:543:2461], now have 1 active actors on pipe 2025-04-04T16:59:36.873221Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:36.873401Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T16:59:36.885087Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:551:2468], now have 1 active actors on pipe 2025-04-04T16:59:36.909457Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:36.912029Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:36.912453Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:36.912512Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:36.912672Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:36.913499Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:36.913549Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:36.913654Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:36.914006Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:36.914315Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:608:2513] 2025-04-04T16:59:36.916148Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T16:59:36.917786Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-04-04T16:59:36.918300Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T16:59:36.918692Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T16:59:36.918970Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-04-04T16:59:36.919017Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T16:59:36.919064Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T16:59:36.919103Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:36.919156Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:608:2513] 2025-04-04T16:59:36.919235Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:36.919290Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-04T16:59:36.920239Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:542:2461] destroyed 2025-04-04T16:59:36.920318Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:541:2460] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 92 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 92 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 
0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 26841, MsgBus: 22682 2025-04-04T16:56:52.474593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503035514034580:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:52.478524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c73/r3tmp/tmp23PNZB/pdisk_1.dat 2025-04-04T16:56:52.881745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:52.900612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:52.900734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:52.903619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26841, node 1 2025-04-04T16:56:53.019854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:53.019905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:53.019919Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:53.020052Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22682 TClient is connected to server localhost:22682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:53.645479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:56:53.661079Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2025-04-04T16:56:56.346587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:56:56.599730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:56:56.688239Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:56:56.718648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503052693904452:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:56.718748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:56.718823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503052693904458:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:56.722785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T16:56:56.735039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503052693904466:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T16:56:56.843843Z node 1 :TX_PROXY ERROR: Actor# [1:7489503052693904519:2464] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } f f t t 18 2025-04-04T16:56:57.264048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:56:57.343451Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:56:57.351519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T16:56:57.468685Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503035514034580:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:57.468752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-04-04T16:56:57.831068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T16:56:57.918132Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:56:57.927899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T16:56:57.995477Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-04-04T16:56:58.420870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T16:56:58.553976Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:56:58.562379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T16:56:58.623564Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-04-04T16:56:59.016328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.066418Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:56:59.072232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.116342Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2025-04-04T16:56:59.585277Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.678867Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:56:59.685824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.744480Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2025-04-04T16:57:00.053033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.162887Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:57:00.172162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.241211Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2025-04-04T16:57:00.684679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.792654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-04T16:57:00.904182Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2025-04-04T16:57:01.477262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-04T16:57:01.553149Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:57:01.560703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-04-04T16:57:01.645033Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2025-04-04T16:57:02.065644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-04T16:57:02.152750Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T16:57:02.162064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOp ... 
715658 completed, doublechecking } 2025-04-04T16:59:16.330372Z node 10 :TX_PROXY ERROR: Actor# [10:7489503654503079663:2347] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:16.378225Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T16:59:16.870830Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18978, MsgBus: 3515 2025-04-04T16:59:18.684964Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489503664668497581:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:18.685056Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c73/r3tmp/tmp0cIOEO/pdisk_1.dat 2025-04-04T16:59:18.955563Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:18.980410Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:18.980565Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:18.982495Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18978, node 11 2025-04-04T16:59:19.069389Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:19.069421Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:19.069435Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:19.069652Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3515 TClient is connected to server localhost:3515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:59:19.986193Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:23.686371Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489503664668497581:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:23.686468Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:59:24.399635Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503690438302000:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:24.399813Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:24.400345Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503690438302035:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:24.404878Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:59:24.419857Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7489503690438302037:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:59:24.491228Z node 11 :TX_PROXY ERROR: Actor# [11:7489503690438302088:2342] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:24.531317Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T16:59:25.238446Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22239, MsgBus: 10935 2025-04-04T16:59:27.393208Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7489503701707430316:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:27.393302Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c73/r3tmp/tmpMf70gO/pdisk_1.dat 2025-04-04T16:59:27.637772Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:27.666384Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:27.666552Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:27.675965Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22239, node 12 2025-04-04T16:59:27.756118Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:27.756154Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:27.756167Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:27.756377Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10935 TClient is connected to server localhost:10935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:59:29.028354Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:29.037592Z node 12 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:59:32.398393Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7489503701707430316:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:32.398514Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:59:34.078602Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489503731772202047:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:34.079234Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489503731772202039:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:34.079368Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:34.086907Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:59:34.108659Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489503731772202061:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:59:34.177017Z node 12 :TX_PROXY ERROR: Actor# [12:7489503731772202112:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:34.215202Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 >> TTxLocatorTest::TestWithReboot [GOOD] >> TTxLocatorTest::TestAllocateAll [GOOD] |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-04-04T16:59:37.563951Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:59:37.564460Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:59:37.565250Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:37.567147Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.567719Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:37.579324Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.579464Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.579517Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.579579Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:59:37.579777Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.579875Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:37.579998Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:59:37.580761Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#8796093022207 2025-04-04T16:59:37.581307Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.581437Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.581571Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-04-04T16:59:37.581648Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-04-04T16:59:37.589774Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2108] requested range size#8796093022207 2025-04-04T16:59:37.590578Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.590675Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.590793Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-04-04T16:59:37.590842Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2108] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-04-04T16:59:37.591288Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2112] requested range size#8796093022207 2025-04-04T16:59:37.591750Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.591824Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.591942Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-04-04T16:59:37.591982Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2112] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-04-04T16:59:37.592376Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2116] requested range size#8796093022207 2025-04-04T16:59:37.592761Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.592817Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.592917Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-04-04T16:59:37.592959Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2116] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected 
SUCCESS 2025-04-04T16:59:37.593417Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2120] requested range size#8796093022207 2025-04-04T16:59:37.593764Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.593819Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.593970Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-04-04T16:59:37.594028Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2120] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-04-04T16:59:37.594477Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2124] requested range size#8796093022207 2025-04-04T16:59:37.594877Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.594944Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.595106Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-04-04T16:59:37.595161Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:90:2124] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-04-04T16:59:37.595577Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:94:2128] requested range size#8796093022207 2025-04-04T16:59:37.595885Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.595958Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.596087Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-04-04T16:59:37.596138Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:94:2128] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-04-04T16:59:37.596881Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:98:2132] requested range size#8796093022207 2025-04-04T16:59:37.597251Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.597329Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.597443Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-04-04T16:59:37.597500Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:98:2132] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 
2025-04-04T16:59:37.598050Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:102:2136] requested range size#8796093022207 2025-04-04T16:59:37.598408Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.598486Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.598590Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-04-04T16:59:37.598646Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:102:2136] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-04-04T16:59:37.599142Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:106:2140] requested range size#8796093022207 2025-04-04T16:59:37.599502Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.599596Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.599691Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2025-04-04T16:59:37.599738Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:106:2140] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2025-04-04T16:59:37.600226Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:110:2144] requested range size#8796093022207 2025-04-04T16:59:37.600554Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.600639Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.600723Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2025-04-04T16:59:37.600771Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:110:2144] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2025-04-04T16:59:37.601323Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:114:2148] requested range size#8796093022207 2025-04-04T16:59:37.601649Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.601707Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... 
e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:150:2184] requested range size#8796093022207 2025-04-04T16:59:37.612218Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.612276Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.612411Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2025-04-04T16:59:37.612451Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:150:2184] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2025-04-04T16:59:37.613056Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:154:2188] requested range size#8796093022207 2025-04-04T16:59:37.613410Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.613484Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.613574Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2025-04-04T16:59:37.613615Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:154:2188] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-04-04T16:59:37.614436Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:158:2192] requested range size#8796093022207 2025-04-04T16:59:37.614836Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.614936Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.615043Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-04-04T16:59:37.615084Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:158:2192] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-04-04T16:59:37.615824Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:162:2196] requested range size#8796093022207 2025-04-04T16:59:37.616156Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.616230Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.616297Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-04-04T16:59:37.616332Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:162:2196] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 
2025-04-04T16:59:37.617120Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:166:2200] requested range size#8796093022207 2025-04-04T16:59:37.617453Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.617545Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.617666Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-04-04T16:59:37.617720Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:166:2200] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-04-04T16:59:37.618550Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:170:2204] requested range size#8796093022207 2025-04-04T16:59:37.618889Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.618984Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.619078Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-04-04T16:59:37.619122Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:170:2204] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-04-04T16:59:37.619901Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:174:2208] requested range size#8796093022207 2025-04-04T16:59:37.620245Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.620346Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.620438Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-04-04T16:59:37.620475Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:174:2208] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-04-04T16:59:37.621243Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:178:2212] requested range size#8796093022207 2025-04-04T16:59:37.621572Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.621712Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.621806Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-04-04T16:59:37.621844Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:178:2212] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected 
SUCCESS 2025-04-04T16:59:37.622678Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:182:2216] requested range size#8796093022207 2025-04-04T16:59:37.623010Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.623069Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.623186Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-04-04T16:59:37.623229Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:182:2216] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-04-04T16:59:37.624115Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:186:2220] requested range size#8796093022207 2025-04-04T16:59:37.624418Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.624492Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.624606Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-04-04T16:59:37.624645Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:186:2220] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-04-04T16:59:37.625541Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:190:2224] requested range size#8796093022207 2025-04-04T16:59:37.625934Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.626010Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.626094Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-04-04T16:59:37.626135Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:190:2224] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-04-04T16:59:37.627113Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:194:2228] requested range size#8796093022207 2025-04-04T16:59:37.627568Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.627641Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.627727Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-04-04T16:59:37.627772Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:194:2228] TEvAllocateResult from# 272678883688417 to# 281474976710624 
expected SUCCESS 2025-04-04T16:59:37.628704Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:198:2232] requested range size#31 2025-04-04T16:59:37.629110Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.629206Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.629327Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2025-04-04T16:59:37.629377Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:198:2232] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2025-04-04T16:59:37.630404Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:202:2236] requested range size#1 2025-04-04T16:59:37.630545Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-04-04T16:59:37.630595Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:202:2236] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2025-04-04T16:59:37.395372Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:59:37.395901Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:59:37.396636Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:37.399582Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.400264Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:37.412377Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.412538Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.412591Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.412659Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:59:37.412847Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.412982Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:37.413102Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:59:37.414456Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2114] requested range size#100000 2025-04-04T16:59:37.415133Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#100000 2025-04-04T16:59:37.415405Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2106] requested range size#100000 2025-04-04T16:59:37.415866Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#100000 2025-04-04T16:59:37.416196Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2110] requested range size#100000 2025-04-04T16:59:37.416619Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.416721Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2112] requested range size#100000 2025-04-04T16:59:37.416903Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2116] requested range size#100000 2025-04-04T16:59:37.417089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.417213Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.417373Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.417497Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2118] requested range size#100000 2025-04-04T16:59:37.417678Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.417806Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.417878Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2120] requested range size#100000 2025-04-04T16:59:37.418112Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.418168Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2122] requested range size#100000 2025-04-04T16:59:37.418421Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.418506Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.418613Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-04-04T16:59:37.418678Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2114] TEvAllocateResult from# 0 to# 100000 2025-04-04T16:59:37.418819Z node 1 
:TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.418948Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-04-04T16:59:37.418977Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 100000 to# 200000 2025-04-04T16:59:37.419065Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-04-04T16:59:37.419100Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:71:2106] TEvAllocateResult from# 200000 to# 300000 2025-04-04T16:59:37.419235Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.419378Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.419440Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-04-04T16:59:37.419463Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 300000 to# 400000 2025-04-04T16:59:37.419527Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-04-04T16:59:37.419547Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:75:2110] TEvAllocateResult from# 400000 to# 500000 2025-04-04T16:59:37.419658Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.419692Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-04-04T16:59:37.419721Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:77:2112] TEvAllocateResult from# 500000 to# 600000 2025-04-04T16:59:37.419805Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.419857Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.419922Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-04-04T16:59:37.419949Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2116] TEvAllocateResult from# 600000 to# 700000 2025-04-04T16:59:37.420102Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.420199Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.420246Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-04-04T16:59:37.420272Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# 
[1:83:2118] TEvAllocateResult from# 700000 to# 800000 2025-04-04T16:59:37.420355Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.420434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-04-04T16:59:37.420460Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:85:2120] TEvAllocateResult from# 800000 to# 900000 2025-04-04T16:59:37.420576Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.420618Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.420665Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-04-04T16:59:37.420687Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:87:2122] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-04T16:59:37.425032Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2025-04-04T16:59:37.426304Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-04-04T16:59:37.427218Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2025-04-04T16:59:37.427291Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: [[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2025-04-04T16:59:37.427528Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2025-04-04T16:59:37.427587Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2025-04-04T16:59:37.427640Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2025-04-04T16:59:37.427683Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... 
000000 Reserved to# 9100000 2025-04-04T16:59:37.787017Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:614:2545] TEvAllocateResult from# 9000000 to# 9100000 2025-04-04T16:59:37.787128Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.787247Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-04-04T16:59:37.787273Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:616:2547] TEvAllocateResult from# 9100000 to# 9200000 2025-04-04T16:59:37.787435Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.787486Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.787548Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-04-04T16:59:37.787581Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:628:2559] TEvAllocateResult from# 9200000 to# 9300000 2025-04-04T16:59:37.787714Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-04-04T16:59:37.787732Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:630:2561] TEvAllocateResult from# 9300000 to# 9400000 2025-04-04T16:59:37.787785Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.787816Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.787847Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-04-04T16:59:37.787863Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:632:2563] TEvAllocateResult from# 9400000 to# 9500000 2025-04-04T16:59:37.787946Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-04-04T16:59:37.787965Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:618:2549] TEvAllocateResult from# 9500000 to# 9600000 2025-04-04T16:59:37.788041Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.788089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.788123Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-04-04T16:59:37.788138Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:620:2551] TEvAllocateResult from# 9600000 to# 9700000 2025-04-04T16:59:37.788253Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
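The TestWithReboot output above shows the allocator's core contract: every TEvAllocate is answered by a TTxReserve that hands out a contiguous, non-overlapping range, and the reservation cursor only moves forward, so the ten concurrent requesters asking for 100000 ids each receive [0,100000) through [900000,1000000) with no overlap. A minimal sketch of that arithmetic, with the 2^48-1 ceiling this log later reports as 281474976710655 (the class and names below are illustrative, not YDB's actual TTxAllocator code):

    #include <cstdint>
    #include <optional>
    #include <utility>

    class TRangeAllocator {
    public:
        // Largest reservable txid, 2^48 - 1; shows up in the log as the
        // "Reserved to# 281474976710655" ceiling.
        static constexpr uint64_t MaxTxId = 281474976710655ULL;

        // Returns a half-open range [from, to) on success; std::nullopt
        // corresponds to the "Successed# 0 ... status# IMPOSIBLE" entries.
        std::optional<std::pair<uint64_t, uint64_t>> Reserve(uint64_t size) {
            if (size == 0 || size > MaxTxId - Next) {
                return std::nullopt;
            }
            const uint64_t from = Next;
            Next += size;                      // cursor never moves backwards
            return std::make_pair(from, Next); // "from# ... to# ..." in the log
        }

    private:
        uint64_t Next = 0; // persisted via the executor log in the real tablet
    };

Replaying this log's requests through such a cursor reproduces every reported range, including the post-reboot ones: the generation-11 allocations resume at 9000000 because the cursor is restored from persisted state rather than reset to zero.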
2025-04-04T16:59:37.788329Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-04-04T16:59:37.788354Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:622:2553] TEvAllocateResult from# 9700000 to# 9800000 2025-04-04T16:59:37.788404Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-04-04T16:59:37.788428Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:624:2555] TEvAllocateResult from# 9800000 to# 9900000 2025-04-04T16:59:37.788533Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.788613Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.788711Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-04-04T16:59:37.788740Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:626:2557] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-04-04T16:59:37.792812Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2025-04-04T16:59:37.794082Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2025-04-04T16:59:37.794934Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2025-04-04T16:59:37.794989Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2025-04-04T16:59:37.795120Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-04-04T16:59:37.795159Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-04T16:59:37.795217Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795252Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-04T16:59:37.795291Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795355Z node 1 :TABLET_MAIN DEBUG: 
TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795383Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795406Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2025-04-04T16:59:37.795427Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795467Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795562Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2025-04-04T16:59:37.795601Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-04-04T16:59:37.795627Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-04-04T16:59:37.795643Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795668Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-04-04T16:59:37.795696Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-04-04T16:59:37.795726Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2025-04-04T16:59:37.795750Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2025-04-04T16:59:37.795791Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2025-04-04T16:59:37.795813Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2025-04-04T16:59:37.795831Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2025-04-04T16:59:37.795852Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - 
THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2025-04-04T16:59:37.796035Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:37.797142Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.800857Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:37.801148Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:37.802014Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:59:37.802090Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.802193Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.802343Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2025-04-04T16:59:37.865133Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:59:37.865511Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:59:37.866261Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:37.868180Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.868745Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:37.877481Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.877596Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.877652Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.877699Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:59:37.877838Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.877911Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:37.877993Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:59:37.878701Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#281474976710655 2025-04-04T16:59:37.879214Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.879344Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:37.879468Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2025-04-04T16:59:37.879525Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2025-04-04T16:59:37.883233Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2108] requested range size#1 2025-04-04T16:59:37.883375Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-04-04T16:59:37.883405Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:74:2108] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> TTxLocatorTest::TestImposibleSize [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |80.4%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-04-04T16:59:38.393942Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:59:38.394583Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:59:38.395478Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:38.397427Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.398047Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:38.411272Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.411465Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.411531Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.411602Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:59:38.411838Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.411957Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:38.412100Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T16:59:38.412936Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#281474976710656 2025-04-04T16:59:38.413104Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-04-04T16:59:38.417369Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-04T16:59:38.418041Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#123456 2025-04-04T16:59:38.418749Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.418853Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.418969Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-04-04T16:59:38.419012Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-04-04T16:59:38.419498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#281474976587200 2025-04-04T16:59:38.419648Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2025-04-04T16:59:38.419707Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-04-04T16:59:38.420092Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2114] requested range size#246912 2025-04-04T16:59:38.420537Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.420597Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:38.420702Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-04-04T16:59:38.420741Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2114] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-04-04T16:59:38.421196Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2118] requested range size#281474976340288 2025-04-04T16:59:38.421323Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-04-04T16:59:38.421364Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:84:2118] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TTxLocatorTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:58:30.709582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:58:30.709698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:58:30.709738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:58:30.709775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:58:30.709842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:58:30.709883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:58:30.709947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:58:30.710042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-04T16:58:30.710481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:58:30.803114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:58:30.803172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:30.818055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:58:30.818364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:58:30.818535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:58:30.830166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:58:30.830594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:58:30.831331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:30.832428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:58:30.839247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:30.840503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:30.840580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:30.840777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:58:30.840832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:30.840878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:58:30.841009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:58:30.849035Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:58:31.002672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:58:31.002948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.003187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:58:31.003515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:58:31.003605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.008562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:31.008719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:58:31.008892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.008961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:58:31.009020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:58:31.009062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:58:31.013476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.013553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:58:31.013595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:58:31.018840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.018934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.018992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:31.019068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.031134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:58:31.034828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:58:31.035217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:58:31.037279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:31.037505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:58:31.037576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:31.038000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:58:31.038130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:31.038341Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:58:31.038475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:58:31.043620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:31.043698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:31.043922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:31.043978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:58:31.044429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.044529Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:58:31.044661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:31.044705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.044767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:31.044823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.044868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:58:31.044920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.044965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:58:31.045004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:58:31.045082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:58:31.045140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:58:31.045182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:58:31.048201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:31.048344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:31.048387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T16:59:38.777394Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T16:59:38.815458Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:355:2335]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T16:59:38.815534Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T16:59:38.815614Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:355:2335], Recipient [3:355:2335]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T16:59:38.815646Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T16:59:38.851014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [3:355:2335]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-04T16:59:38.851102Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-04T16:59:38.851209Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:355:2335]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-04T16:59:38.851263Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-04T16:59:38.851302Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-04-04T16:59:38.851387Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-04-04T16:59:38.851451Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-04-04T16:59:38.941292Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:764:2649]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:38.941412Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:38.941508Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-04-04T16:59:38.941580Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:38.941638Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409552 2025-04-04T16:59:38.941686Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-04-04T16:59:38.941731Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409552 2025-04-04T16:59:38.941883Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:766:2650]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:38.941947Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-04T16:59:38.942014Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-04-04T16:59:38.942063Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:38.942096Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409553 
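Interleaved in this compaction trace are two periodic loops: each datashard pushes TEvPeriodicTableStats (DataSize, RowCount, CPU, LastFullCompactionTs and so on) up to its schemeshard, and the schemeshard's background compaction queue, configured at the start of this test's output with Timeout# 600s, Rate# 1, InflightLimit# 1 and MinCompactionRepeatDelaySeconds# 600, uses those stats to decide whether a shard is worth enqueueing. A rough, hypothetical version of that gate is enough to see why the serverless shards here (reporting DataSize: 0, RowCount: 0, with compaction disabled for the domain) are never compacted; the function and parameter names are assumptions, not the schemeshard's real API:

    #include <cstdint>

    struct TShardStats {
        uint64_t DataSize = 0;             // copied from TEvPeriodicTableStats
        uint64_t RowCount = 0;
        uint64_t LastFullCompactionTs = 0; // seconds
    };

    bool WantsBackgroundCompaction(const TShardStats& s,
                                   uint64_t nowSeconds,
                                   uint64_t minRepeatDelaySeconds, // 600 in this run
                                   bool compactionEnabledForDomain) {
        if (!compactionEnabledForDomain) {
            return false; // the "ServerlessAfterDisable" case in this test
        }
        if (s.DataSize == 0 && s.RowCount == 0) {
            return false; // empty shard, nothing to gain from compaction
        }
        return nowSeconds - s.LastFullCompactionTs >= minRepeatDelaySeconds;
    }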
2025-04-04T16:59:38.942134Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-04-04T16:59:38.942176Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409553 2025-04-04T16:59:38.942350Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:764:2649]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T16:59:38.942472Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-04-04T16:59:38.942605Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:766:2650]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T16:59:38.942732Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-04-04T16:59:38.943117Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:766:2650], Recipient [3:892:2749]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 21 Memory: 119336 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 118 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-04T16:59:38.943173Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T16:59:38.943228Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0021 2025-04-04T16:59:38.943339Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T16:59:38.943405Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T16:59:38.943635Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:764:2649], Recipient [3:892:2749]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 
RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 30 Memory: 119336 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 118 TableOwnerId: 72075186233409549 FollowerId: 0 2025-04-04T16:59:38.943673Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T16:59:38.943709Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.003 2025-04-04T16:59:38.943807Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T16:59:38.955884Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:892:2749]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T16:59:38.955963Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T16:59:38.956040Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:892:2749], Recipient [3:892:2749]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T16:59:38.956082Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T16:59:38.966573Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [3:892:2749]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-04-04T16:59:38.966677Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-04-04T16:59:38.966980Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:892:2749]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-04-04T16:59:38.967029Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-04-04T16:59:38.967091Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-04-04T16:59:38.967170Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-04-04T16:59:38.967232Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-04-04T16:59:38.967351Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269746180, Sender [3:2037:3854], Recipient [3:892:2749]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-04T16:59:38.967405Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-04-04T16:59:38.989390Z node 3 :TX_DATASHARD TRACE: 
StateWork, received event# 269877761, Sender [3:2040:3857], Recipient [3:764:2649]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:38.989481Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:38.989547Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409552, clientId# [3:2039:3856], serverId# [3:2040:3857], sessionId# [0:0:0] 2025-04-04T16:59:38.989792Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2038:3855], Recipient [3:764:2649]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-04-04T16:59:38.990409Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2043:3860], Recipient [3:766:2650]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:38.990461Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:38.990493Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409553, clientId# [3:2042:3859], serverId# [3:2043:3860], sessionId# [0:0:0] 2025-04-04T16:59:38.990646Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2041:3858], Recipient [3:766:2650]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> TTxLocatorTest::Boot [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |80.4%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2025-04-04T16:59:41.010990Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T16:59:41.011498Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T16:59:41.012315Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T16:59:41.014065Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:41.014783Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T16:59:41.026319Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:41.026477Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:41.026533Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:41.026611Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T16:59:41.026885Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T16:59:41.026987Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T16:59:41.027103Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.4%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] |80.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |80.5%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |80.5%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> DemoTx::Scenario_3 [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |80.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |80.5%| [LD] {RESULT} 
$(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> DemoTx::Scenario_4 |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> Bloom::Conf [GOOD] >> Bloom::Hashes >> TPartBtreeIndexIteration::NoNodes_Groups >> Bloom::Hashes [GOOD] >> Bloom::Rater ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> BuildStatsHistogram::Ten_Serial_Log >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> Bloom::Rater [GOOD] >> Bloom::Dipping >> BasicUsage::ConflictingWrites [GOOD] >> BasicUsage::TWriteSession_WriteEncoded |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |80.5%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> TChargeBTreeIndex::NoNodes >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External |80.6%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::FewNodes |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |80.6%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> TIterator::External >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> TIterator::External [GOOD] >> TIterator::Single >> DataCleanup::CleanupDataNoTables [GOOD] >> DataCleanup::CleanupDataNoTablesWithRestart [GOOD] >> DataCleanup::CleanupDataLog >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> DataCleanup::CleanupDataLog [GOOD] >> DataCleanup::CleanupData [GOOD] >> DataCleanup::CleanupDataMultipleFamilies >> VDiskRestart::Simple [GOOD] >> DataCleanup::CleanupDataMultipleFamilies [GOOD] >> DataCleanup::CleanupDataMultipleTables >> DataCleanup::CleanupDataMultipleTables [GOOD] >> DataCleanup::CleanupDataWithFollowers [GOOD] >> DataCleanup::CleanupDataMultipleTimes >> DataCleanup::CleanupDataMultipleTimes [GOOD] >> DataCleanup::CleanupDataEmptyTable >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> DataCleanup::CleanupDataEmptyTable [GOOD] >> DataCleanup::CleanupDataWithRestarts >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> TScreen::Sequential [GOOD] >> TScreen::Random |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-04-04T16:59:45.289650Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:45.294800Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:45.295190Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-04-04T16:59:45.295244Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:45.295345Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-04-04T16:59:45.295447Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:45.295520Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:45.295599Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-04-04T16:59:45.296341Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-04-04T16:59:45.296571Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:45.315035Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:45.318331Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:45.318529Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:45.319358Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-04-04T16:59:45.319514Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:45.319917Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:45.320182Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-04-04T16:59:45.321968Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-04-04T16:59:45.322026Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-04-04T16:59:45.322092Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:45.322146Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:45.322969Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-04-04T16:59:45.390799Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:59:45.394078Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:59:45.394394Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-04-04T16:59:45.394453Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:59:45.394493Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-04-04T16:59:45.394524Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:59:45.394565Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:45.394623Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-04-04T16:59:45.395268Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-04-04T16:59:45.395362Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T16:59:45.395538Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:45.398367Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:45.398474Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:59:45.399227Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:98:2133] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-04-04T16:59:45.399324Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T16:59:45.399674Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T16:59:45.399877Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:413:2366] 2025-04-04T16:59:45.401697Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-04-04T16:59:45.401752Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:413:2366] 2025-04-04T16:59:45.401818Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T16:59:45.401862Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-04-04T16:59:45.402593Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-04-04T16:59:45.403997Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:422:2371], now have 1 active actors on pipe 2025-04-04T16:59:45.404342Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:424:2372], now have 1 active actors on pipe 2025-04-04T16:59:45.404570Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:422:2371] destroyed 2025-04-04T16:59:45.404848Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:424:2372] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> DataCleanup::CleanupDataWithRestarts [GOOD] >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations [GOOD] >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::WideKey >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> 
BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory |80.6%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.6%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> TIterator::Single [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TIterator::SingleReverse >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> 
BuildStatsBTreeIndex::Single_Groups_History >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> TExportToS3Tests::RebootDuringCompletion >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History |80.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |80.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T16:55:16.748989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:16.749073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.749198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:16.749252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:16.749301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:16.749336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:16.749399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.749470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:16.749752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:16.852738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:16.852802Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:16.875440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:16.875996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:16.876208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:16.884836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:16.885037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:16.885782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:16.891803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:16.895855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:16.897308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:16.897396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:16.897620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:16.897678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:16.897720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:16.897864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:16.905431Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:17.050175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools 
{ Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:17.050427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.050690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:17.050954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:17.051019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:17.055866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:17.055960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:17.056006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:17.057898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.057952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:17.057987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:17.059930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.059981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.060023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.060088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.063886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:17.065717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:17.065891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 
5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:17.066865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.067004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.067073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.067336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:17.067391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.067558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:17.067636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:17.072336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:17.072389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:17.072591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.072630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:17.086957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.087045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:17.087185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.087253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.087301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.087330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.087367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:17.087407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.087442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 373us result status StatusSuccess 2025-04-04T16:59:46.852640Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:46.859294Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:59:46.859583Z node 104 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 341us result status StatusSuccess 2025-04-04T16:59:46.860415Z node 104 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::ThreeLeveledLRU >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] Test command err: 2025-04-04T16:59:15.054073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503649747388521:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:15.054118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f5/r3tmp/tmp3fSOTP/pdisk_1.dat 2025-04-04T16:59:15.580982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:15.581125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:15.585256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:15.638808Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6408 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T16:59:15.867137Z node 1 :TX_PROXY DEBUG: actor# [1:7489503649747388747:2140] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:15.867261Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503649747389191:2445] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:15.868061Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503649747388788:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:15.868132Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503649747388788:2157], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:15.868856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:15.884112Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421091:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503649747389196:2446] 2025-04-04T16:59:15.884196Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503645452421091:2051] Subscribe: subscriber# [1:7489503649747389196:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.884803Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421094:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503649747389197:2446] 2025-04-04T16:59:15.884847Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503645452421094:2054] Subscribe: subscriber# [1:7489503649747389197:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.884904Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421097:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503649747389198:2446] 2025-04-04T16:59:15.884923Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503645452421097:2057] Subscribe: subscriber# [1:7489503649747389198:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:15.891718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389196:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503645452421091:2051] 2025-04-04T16:59:15.895434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389197:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503645452421094:2054] 2025-04-04T16:59:15.895499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389198:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503645452421097:2057] 2025-04-04T16:59:15.895552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503649747389193:2446] 2025-04-04T16:59:15.895604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7489503649747389192:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503649747389194:2446] 2025-04-04T16:59:15.895690Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503649747389192:2446][/dc-1] Set up state: owner# [1:7489503649747388788:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:15.895913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503649747389195:2446] 2025-04-04T16:59:15.896009Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503649747389192:2446][/dc-1] Path was already updated: owner# [1:7489503649747388788:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:15.896080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389196:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649747389193:2446], cookie# 1 2025-04-04T16:59:15.896108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389197:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649747389194:2446], cookie# 1 2025-04-04T16:59:15.896123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389198:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649747389195:2446], cookie# 1 2025-04-04T16:59:15.896210Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421091:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503649747389196:2446] 2025-04-04T16:59:15.896258Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421091:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649747389196:2446], cookie# 1 2025-04-04T16:59:15.896414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421094:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503649747389197:2446] 2025-04-04T16:59:15.896465Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421094:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649747389197:2446], cookie# 1 2025-04-04T16:59:15.896496Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421097:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503649747389198:2446] 2025-04-04T16:59:15.896511Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503645452421097:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503649747389198:2446], cookie# 1 2025-04-04T16:59:15.934543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389196:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503645452421091:2051], cookie# 1 2025-04-04T16:59:15.934585Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389197:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503645452421094:2054], cookie# 1 2025-04-04T16:59:15.934601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503649747389198:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503645452421097:2057], cookie# 1 2025-04-04T16:59:15.934643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503649747389193:2446], cookie# 1 2025-04-04T16:59:15.934690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:15.934712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503649747389194:2446], cookie# 1 2025-04-04T16:59:15.934733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:15.934756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503649747389195:2446], cookie# 1 2025-04-04T16:59:15.934779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503649747389192:2446][/dc-1] Unexpected sync response: sender# [1:7489503649747389195:2446], cookie# 1 2025-04-04T16:59:15.969274Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503649747388788:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:15.969679Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503649747388788:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:47.614875Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7489503787122765449:2150], recipient# [13:7489503787122765448:2321], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.666917Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7489503744388989794:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.667168Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7489503787338662922:2202], recipient# [11:7489503787338662921:2324], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.673726Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:47.722529Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7489503744173092376:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.722751Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7489503744173092376:2109], 
cacheItem# { Subscriber: { Subscriber: [13:7489503778532830823:2138] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:47.722835Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7489503744173092376:2109], cacheItem# { Subscriber: { Subscriber: [13:7489503778532830824:2139] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:47.722945Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7489503744173092376:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.723045Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7489503744173092376:2109], cacheItem# { Subscriber: { Subscriber: [13:7489503748468059709:2126] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T16:59:47.723159Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7489503787122765451:2151], recipient# [13:7489503778532830821:2316], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.723234Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7489503787122765452:2152], recipient# [13:7489503787122765450:2322], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-04-04T16:59:47.723641Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:7489503778532830821:2316], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:47.886674Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7489503744388989794:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.886833Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7489503744388989794:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.886940Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7489503787338662925:2203], recipient# [11:7489503787338662923:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.887010Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7489503787338662926:2204], recipient# [11:7489503787338662924:2326], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.887320Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:47.887352Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:47.986488Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7489503744388989794:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:47.986748Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# 
[11:7489503787338662927:2205], recipient# [11:7489503778748728315:2319], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] }
2025-04-04T16:59:47.990525Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7489503778748728315:2319], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
>> DBase::VersionPureParts [GOOD]
>> DBase::VersionCompactedMem
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False
>> TExportToS3Tests::DropSourceTableBeforeTransferring
>> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD]
|80.6%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> BuildStatsFlatIndex::Single
|80.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log}
>> BuildStatsFlatIndex::Single [GOOD]
>> BuildStatsFlatIndex::Single_Slices
>> TChargeBTreeIndex::NoNodes_Groups_History [GOOD]
>> TChargeBTreeIndex::OneNode
>> BuildStatsFlatIndex::Single_Slices [GOOD]
>> BuildStatsFlatIndex::Single_History
>> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink
>> BuildStatsFlatIndex::Single_History [GOOD]
>> BuildStatsFlatIndex::Single_History_Slices
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140]
2025-04-04T16:55:21.931983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:55:21.932083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:55:21.932212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:55:21.932256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:55:21.932311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:55:21.932364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:55:21.932440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:55:21.932518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created:
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:21.932868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:22.018150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:22.018225Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:22.040039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:22.040547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:22.040742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:22.052051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:22.052252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:22.052931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.053893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:22.057425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.058717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:22.058791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.058983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:22.059035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:22.059075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:22.059195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:22.066477Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:22.219992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:22.220204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.220428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:22.220660Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:22.220717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.223155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.223310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:22.223545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.223609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:22.223644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:22.223692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:22.225593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.225652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:22.225686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:22.227698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.227763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.227811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.227885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.231743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:22.235419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:22.235604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:22.236562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:22.236695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:22.236748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.236997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:22.237044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:22.237202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:22.237279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:22.246039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:22.246103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:22.246347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:22.246398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:22.246823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:22.246873Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:22.247011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:22.247072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.247116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:22.247148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.247183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:22.247221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:22.247267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false 
IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:49.294978Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T16:59:49.295286Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 357us result status StatusSuccess 2025-04-04T16:59:49.296176Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 
1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:49.310841Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:850:2680] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T16:59:49.310961Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:789:2680] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T16:59:49.311123Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:850:2680] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743785989264354 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 
4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743785989264354 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743785989264354 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T16:59:49.318882Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:850:2680] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-04T16:59:49.319021Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:789:2680] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> TSharedPageCache::ThreeLeveledLRU [GOOD] >> TSharedPageCache::S3FIFO >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky |80.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> 
BuildStatsFlatIndex::Single_Groups_Slices [GOOD]
>> BuildStatsFlatIndex::Single_Groups_History
>> DBase::VersionCompactedMem [GOOD]
>> DBase::VersionCompactedParts
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False
>> TExportToS3Tests::DropSourceTableBeforeTransferring
>> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD]
>> BuildStatsFlatIndex::Single
>> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex
>> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD]
>> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD]
>> BuildStatsHistogram::Three_Serial_Small_2_Levels
>> TPartBtreeIndexIteration::NoNodes_History [GOOD]
>> TPartBtreeIndexIteration::OneNode
|80.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest
>> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD]
>> TFlatTableExecutor_StickyPages::TestStickyAll
>> BuildStatsFlatIndex::Single_Groups_History [GOOD]
>> BuildStatsFlatIndex::Single_Groups_History_Slices
>> TExportToS3Tests::RebootDuringCompletion [GOOD]
>> TExportToS3Tests::RebootDuringAbortion
>> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD]
>> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets
>> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD]
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky
>> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD]
>> TExportToS3Tests::DropCopiesBeforeTransferring2
>> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD]
>> BuildStatsHistogram::Three_Serial_Small_1_Level
|80.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD]
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky
>> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD]
>> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD]
>> BuildStatsMixedIndex::Single
>> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD]
>> BuildStatsFlatIndex::Mixed
>> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD]
>> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRows
>> DataShardSnapshots::VolatileSnapshotTimeout [GOOD]
>> DataShardSnapshots::VolatileSnapshotTimeoutRefresh
>> BuildStatsMixedIndex::Single [GOOD]
>> BuildStatsMixedIndex::Single_Slices
>> BuildStatsFlatIndex::Mixed [GOOD]
>> BuildStatsFlatIndex::Mixed_Groups
>> BuildStatsMixedIndex::Single_Slices [GOOD]
>> BuildStatsMixedIndex::Single_History
>> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD]
>> TExportToS3Tests::DropCopiesBeforeTransferring2
>> BuildStatsMixedIndex::Single_History [GOOD]
>> BuildStatsMixedIndex::Single_History_Slices
>> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs
>> DBase::VersionCompactedParts [GOOD]
>> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD]
>> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD]
>> DBase::UncommittedChangesVisibility [GOOD]
>> DBase::UncommittedChangesCommitWithUpdates
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace
|80.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace
>> BuildStatsFlatIndex::Mixed_Groups [GOOD]
>> BuildStatsFlatIndex::Mixed_Groups_History
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD]
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite
>> DBase::UncommittedChangesCommitWithUpdates [GOOD]
>> DBase::ReplayNewTable [GOOD]
>> DBase::SnapshotNewTable [GOOD]
>> DBase::KIKIMR_15598_Many_MemTables
>> TSharedPageCache::S3FIFO [GOOD]
>> TSharedPageCache::ClockPro
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations
>> BuildStatsFlatIndex::Serial [GOOD]
>> BuildStatsFlatIndex::Serial_Groups
>> BuildStatsMixedIndex::Single_History_Slices [GOOD]
>> BuildStatsMixedIndex::Single_Groups
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|80.7%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|80.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> BuildStatsFlatIndex::Serial_Groups [GOOD]
>> BuildStatsFlatIndex::Serial_Groups_History
>> TChargeBTreeIndex::OneNode_Groups [GOOD]
>> TChargeBTreeIndex::OneNode_History
|80.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD]
>> BuildStatsMixedIndex::Single_Groups [GOOD]
>> BuildStatsMixedIndex::Single_Groups_Slices
>> KqpPg::V1CreateTable [GOOD]
>> KqpPg::ValuesInsert+useSink
>> KqpPg::PgAggregate+useSink [GOOD]
>> KqpPg::PgAggregate-useSink
>> TPartBtreeIndexIteration::OneNode [GOOD]
>> TPartBtreeIndexIteration::OneNode_Groups
>> BuildStatsMixedIndex::Single_Groups_Slices [GOOD]
>> BuildStatsMixedIndex::Single_Groups_History
>> TExportToS3Tests::RebootDuringAbortion [GOOD]
>> TExportToS3Tests::ExportStartTime
>> TExportToS3Tests::ShouldCheckQuotas [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47
>> BuildStatsMixedIndex::Single_Groups_History [GOOD]
>> BuildStatsMixedIndex::Single_Groups_History_Slices
>> TChargeBTreeIndex::OneNode_History [GOOD]
>> TChargeBTreeIndex::OneNode_Groups_History
>> TSharedPageCache::ClockPro [GOOD]
>> TSharedPageCache::ReplacementPolicySwitch
>> YdbMonitoring::SelfCheckWithNodesDying
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable
>> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD]
>> BuildStatsMixedIndex::Mixed
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD]
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink
>> TSharedPageCache::ReplacementPolicySwitch [GOOD]
>> TSharedPageCache::Compaction_BTreeIndex
>> BuildStatsMixedIndex::Mixed [GOOD]
>> BuildStatsMixedIndex::Mixed_Groups
>> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD]
>> TExportToS3Tests::ExportIndexTablePartitioningSettings
>> TSharedPageCache::Compaction_BTreeIndex [GOOD]
>> TSharedPageCache::Compaction_FlatIndex
>> BuildStatsMixedIndex::Mixed_Groups [GOOD]
>> BuildStatsMixedIndex::Mixed_Groups_History
>> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink
>> TSharedPageCache::Compaction_FlatIndex [GOOD]
>> TSharedPageCache::Compaction_ZeroCache_BTreeIndex
>> TChargeBTreeIndex::FewNodes_Groups_History [GOOD]
>> NFwd_TBlobs::SieveFiltered [GOOD]
>> NFwd_TBlobs::Simple [GOOD]
>> NFwd_TBlobs::Trace [GOOD]
>> NFwd_TFlatIndexCache::Basics [GOOD]
>> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD]
>> NFwd_TFlatIndexCache::GetTwice [GOOD]
>> NFwd_TFlatIndexCache::ForwardTwice [GOOD]
>> NFwd_TFlatIndexCache::Skip_Done [GOOD]
>> NFwd_TFlatIndexCache::Skip_Done_None [GOOD]
>> NFwd_TFlatIndexCache::Skip_Keep [GOOD]
>> NFwd_TFlatIndexCache::Skip_Wait [GOOD]
>> NFwd_TFlatIndexCache::Trace [GOOD]
>> NFwd_TFlatIndexCache::End [GOOD]
>> NFwd_TFlatIndexCache::Slices [GOOD]
>> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD]
>> NOther::Blocks [GOOD]
>> NPage::Encoded [GOOD]
>> NPage::ABI_002
>> TExportToS3Tests::ExportStartTime [GOOD]
>> TExportToS3Tests::ExportPartitioningSettings
>> BuildStatsMixedIndex::Mixed_Groups_History [GOOD]
>> BuildStatsMixedIndex::Serial
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD]
>> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink
>> TIterator::Mixed [GOOD]
>> TIterator::MixedReverse
>> YdbOlapStore::ManyTables
>> BuildStatsMixedIndex::Serial [GOOD]
>> BuildStatsMixedIndex::Serial_Groups
>> TGRpcNewCoordinationClient::CreateDropDescribe
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD]
>> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache
>> TSharedPageCache::Compaction_ZeroCache_BTreeIndex [GOOD]
>> TSharedPageCache::Compaction_ZeroCache_FlatIndex
>> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD]
>> TFlatTableExecutor_Cold::ColdBorrowScan
>> NPage::ABI_002 [GOOD]
>> NPage::GroupIdEncoding [GOOD]
>> NPageCollection::Align [GOOD]
>> NPageCollection::Meta
>> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD]
>> TFlatTableExecutor_ColumnGroups::TestManyRows
>> NPageCollection::Meta [GOOD]
>> NPageCollection::PagesToBlobsConverter [GOOD]
>> NPageCollection::Grow [GOOD]
>> NPageCollection::Groups [GOOD]
>> NPageCollection::Chop [GOOD]
>> NPageCollection::CookieAllocator [GOOD]
>> NProto::LargeGlobId [GOOD]
>> Redo::ABI_008 [GOOD]
>> Self::Literals [GOOD]
>> BuildStatsMixedIndex::Serial_Groups [GOOD]
>> BuildStatsMixedIndex::Serial_Groups_History
>> TSharedPageCache::Compaction_ZeroCache_FlatIndex [GOOD]
>> TSwitchableCache::Touch [GOOD]
>> TSwitchableCache::Erase [GOOD]
>> TSwitchableCache::EvictNext [GOOD]
>> TSwitchableCache::UpdateLimit [GOOD]
>> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD]
>> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD]
>> TSwitchableCache::Switch_RotatePages_Force [GOOD]
>> TSwitchableCache::Switch_RotatePages_Evicts [GOOD]
>> TSwitchableCache::Switch_Touch [GOOD]
>> TSwitchableCache::Switch_Erase [GOOD]
>> TSwitchableCache::Switch_EvictNext [GOOD]
>> TSwitchableCache::Switch_UpdateLimit [GOOD]
>> TVersions::WreckHead
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient:
[1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T16:59:19.546816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:59:19.546914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:19.546956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:59:19.546990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:59:19.547045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:59:19.547075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:59:19.547128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:59:19.547204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:59:19.547548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:19.628849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:59:19.628912Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:19.643766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:19.644060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:59:19.644267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:19.660220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:19.660553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:19.661270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:19.662084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:19.667163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:19.668679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:19.668748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:19.668939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:19.668988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:19.669030Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:19.669194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.676157Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:59:19.795406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:19.795662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.795917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:19.796215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:19.796281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.798907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:19.799078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:59:19.799340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.799415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:19.799454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:19.799488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:19.802557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.802644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:19.802708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:19.804871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.804929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.804978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:19.805043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:19.809266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:19.819568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:19.819854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:19.821093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:19.821269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:19.821336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:19.821646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:19.821699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:19.821883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:19.821973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:19.831717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:19.831783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:19.831982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:19.832044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:19.832445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:19.832543Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:19.832654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:19.832702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:19.832751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:19.832784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:19.832827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:19.832872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:19.832908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:19.832939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:19.833023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:19.833060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:19.833098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:19.835344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:19.835463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:19.835501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 57594046678944 2025-04-04T16:59:55.521370Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-04-04T16:59:55.521446Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-04-04T16:59:55.521600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:55.528995Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-04T16:59:55.529136Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-04T16:59:55.529179Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-04T16:59:55.529227Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-04T16:59:55.529273Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:59:55.530493Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-04T16:59:55.530597Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-04T16:59:55.530627Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 
281474976720762 2025-04-04T16:59:55.530682Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-04T16:59:55.530727Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:59:55.530817Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-04-04T16:59:55.544156Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-04T16:59:55.544463Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-04-04T16:59:55.544528Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-04-04T16:59:55.544599Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-04-04T16:59:55.545914Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2025-04-04T16:59:55.546056Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2025-04-04T16:59:55.548557Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:55.548702Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:55.548772Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2025-04-04T16:59:55.548953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-04-04T16:59:55.549078Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-04-04T16:59:55.549127Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-04T16:59:55.549188Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-04-04T16:59:55.549236Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-04T16:59:55.549312Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:55.549401Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T16:59:55.549450Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2025-04-04T16:59:55.549517Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-04-04T16:59:55.549569Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2025-04-04T16:59:55.549622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2025-04-04T16:59:55.549701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T16:59:55.549757Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2025-04-04T16:59:55.549801Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-04T16:59:55.549844Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-04T16:59:55.551023Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-04-04T16:59:55.551149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 FAKE_COORDINATOR: Erasing txId 281474976720762 2025-04-04T16:59:55.553636Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:55.553682Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:55.553853Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T16:59:55.553996Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:55.554042Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2025-04-04T16:59:55.554102Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 2025-04-04T16:59:55.555020Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-04T16:59:55.555122Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-04-04T16:59:55.555163Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2025-04-04T16:59:55.555221Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-04T16:59:55.555280Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T16:59:55.556259Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762
2025-04-04T16:59:55.556352Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762
2025-04-04T16:59:55.556383Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762
2025-04-04T16:59:55.556415Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-04-04T16:59:55.556450Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-04T16:59:55.556533Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1
2025-04-04T16:59:55.556584Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:288:2275]
2025-04-04T16:59:55.560263Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762
2025-04-04T16:59:55.560452Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762
2025-04-04T16:59:55.560526Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762
2025-04-04T16:59:55.560588Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762
2025-04-04T16:59:55.560633Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute
2025-04-04T16:59:55.560665Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762
2025-04-04T16:59:55.560702Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295
2025-04-04T16:59:55.562477Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete
2025-04-04T16:59:55.562581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-04T16:59:55.562642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:708:2648]
TestWaitNotification: OK eventTxId 102
>> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution
>> DBase::KIKIMR_15598_Many_MemTables [GOOD]
>> Memtable::Basics [GOOD]
>> Memtable::BasicsReverse [GOOD]
>> Memtable::Markers [GOOD]
>> Memtable::Overlap [GOOD] >> Memtable::Wreck
>> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution
>> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance
>> Memtable::Wreck [GOOD] >> Memtable::Erased
>> Memtable::Erased [GOOD]
>> NFwd_TBlobs::MemTableTest [GOOD]
>> NFwd_TBlobs::Lower [GOOD]
>> NFwd_TBlobs::Sieve [GOOD]
>> NFwd_TBlobs::Basics [GOOD]
>> NFwd_TBlobs::Shuffle [GOOD]
>> NFwd_TBlobs::Grow [GOOD]
>> NFwd_TBlobs::Filtered [GOOD]
>> NFwd_TBTreeIndexCache::Basics [GOOD]
>> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD]
>> NFwd_TBTreeIndexCache::GetTwice [GOOD]
>> NFwd_TBTreeIndexCache::ForwardTwice [GOOD]
>> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Done [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Keep [GOOD]
>> NFwd_TBTreeIndexCache::Skip_Wait [GOOD]
>> NFwd_TBTreeIndexCache::Trace_BTree [GOOD]
>> NFwd_TBTreeIndexCache::Trace_Data [GOOD]
>> NFwd_TBTreeIndexCache::End [GOOD]
>> NFwd_TBTreeIndexCache::Slices [GOOD]
>> NFwd_TBTreeIndexCache::ManyApplies [GOOD]
>> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution
>> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution
>> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution
>> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution
>> TExportToS3Tests::ExportPartitioningSettings [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD]
Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b}
| PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30
| > {0, a, false, 0}
| PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61
| > {1, b, true, 10}
| PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93
| > {2, c, false, 20}
| PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126
| > {3, d, true, 30}
| PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160
| > {4, e, false, 40}
| PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195
| > {5, f, true, 50}
| PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231
| > {6, g, false, 60}
| PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268
| > {7, h, true, 70}
| PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306
| > {8, i, false, 80}
| PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345
| > {9, j, true, 90}
| PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385
+ BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b}
| + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b}
| | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b}
| | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30
| | | > {0, a, false, 0}
| | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61
| | | > {1, b, true, 10}
| | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93
| | > {2, c, false, 20}
| | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b}
| | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126
| | | > {3, d, true, 30}
| | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160
| | | > {4, e, false, 40}
| | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195
| | > {5, f,
true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 
GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | 
| | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 
Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, 
[38, +2)row
| ERowOp 1: {38} {Set 1 Uint32 : 3800}
| ERowOp 1: {39} {Set 1 Uint32 : 3900}
>> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD]
>> Charge::Lookups [GOOD]
>> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups
>> Charge::ByKeysGroups [GOOD]
>> Charge::ByKeysGroupsLimits [GOOD]
>> Charge::ByKeysLimits [GOOD]
>> Charge::ByKeysReverse [GOOD]
>> Charge::ByKeysHistory [GOOD]
>> Charge::ByKeysIndex [GOOD]
>> Charge::ByRows [GOOD] >> Charge::ByRowsReverse
>> Charge::ByRowsReverse [GOOD]
>> Charge::ByRowsLimits [GOOD]
>> Charge::ByRowsLimitsReverse [GOOD]
>> DBase::Basics [GOOD]
>> DBase::Defaults [GOOD]
>> DBase::Garbage [GOOD]
>> DBase::Affects [GOOD]
>> DBase::Annex [GOOD]
>> DBase::AnnexRollbackChanges [GOOD]
>> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD]
>> DBase::EraseCacheWithUncommittedChanges [GOOD]
>> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD]
>> DBase::AlterAndUpsertChangesVisibility [GOOD]
>> DBase::DropModifiedTable [GOOD]
>> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History
>> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_History
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NFwd_TBTreeIndexCache::ManyApplies [GOOD]
Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:46.644585Z
00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.011 II| FAKE_ENV: Starting storage for BS group 0
00000.011 II| FAKE_ENV: Starting storage for BS group 1
00000.011 II| FAKE_ENV: Starting storage for BS group 2
00000.011 II| FAKE_ENV: Starting storage for BS group 3
00000.019 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.020 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b}
00000.020 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.020 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {146b, 4}
00000.020 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3}
00000.020 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.020 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.020 II| FAKE_ENV: All BS storage groups are stopped
00000.020 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.020 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:46.669982Z
00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.008 II| FAKE_ENV: Starting storage for BS group 0
00000.009 II| FAKE_ENV: Starting storage for BS group 1
00000.009 II| FAKE_ENV: Starting storage for BS group 2
00000.009 II| FAKE_ENV: Starting storage for BS group 3
00000.017 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.018 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b}
00000.018 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.018 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {292b, 8}
00000.018 II| FAKE_ENV: DS.1 gone, left {210b, 6}, put {210b, 6}
00000.018 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.018 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.018 II| FAKE_ENV: All BS storage groups are stopped
00000.018 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s
00000.018 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II|
FAKE_ENV: Born at 2025-04-04T16:59:46.693863Z
00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.008 II| FAKE_ENV: Starting storage for BS group 0
00000.009 II| FAKE_ENV: Starting storage for BS group 1
00000.009 II| FAKE_ENV: Starting storage for BS group 2
00000.009 II| FAKE_ENV: Starting storage for BS group 3
00000.050 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.051 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 138b} miss {0 0b}
00000.051 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.051 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1179b, 13}
00000.051 II| FAKE_ENV: DS.1 gone, left {907b, 3}, put {1861b, 12}
00000.051 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {132b, 2}
00000.051 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2}
00000.051 II| FAKE_ENV: All BS storage groups are stopped
00000.051 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.051 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:46.752550Z
00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.008 II| FAKE_ENV: Starting storage for BS group 0
00000.009 II| FAKE_ENV: Starting storage for BS group 1
00000.009 II| FAKE_ENV: Starting storage for BS group 2
00000.009 II| FAKE_ENV: Starting storage for BS group 3
00000.039 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.040 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 102505b} miss {0 0b}
00000.040 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.040 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11}
00000.040 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103950b, 10}
00000.040 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.040 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.040 II| FAKE_ENV: All BS storage groups are stopped
00000.040 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.040 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:46.798106Z
00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.008 II| FAKE_ENV: Starting storage for BS group 0
00000.008 II| FAKE_ENV: Starting storage for BS group 1
00000.008 II| FAKE_ENV: Starting storage for BS group 2
00000.008 II| FAKE_ENV: Starting storage for BS group 3
00000.085 II| TABLET_SAUSAGECACHE: Wakeup 1
00000.086 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.087 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {19 512947b} miss {0 0b}
00000.087 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.087 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23}
00000.087 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {209976b, 21}
00000.087 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4}
00000.087 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4}
00000.087 II| FAKE_ENV: All BS storage groups are stopped
00000.087 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s
00000.087 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:46.891029Z
00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.008 II| FAKE_ENV: Starting storage for BS group 0
00000.008 II| FAKE_ENV: Starting storage for BS group 1
00000.008 II| FAKE_ENV: Starting storage for BS group 2
00000.008 II| FAKE_ENV: Starting storage for BS group 3
00000.053 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.054 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {6 307515b} miss {0 0b}
00000.054 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.054 II| FAKE_ENV: DS.1 gone, left {1253b, 3}, put {311413b, 22}
00000.054 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.054 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.054 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1828b, 23}
00000.054 II| FAKE_ENV: All BS storage groups are stopped
00000.054 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.054 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:46.951612Z
00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.007 II| FAKE_ENV: Starting storage for BS group 0
00000.008 II| FAKE_ENV: Starting storage for BS group 1
00000.008 II| FAKE_ENV: Starting storage for BS group 2
00000.008 II| FAKE_ENV: Starting storage for BS group 3
00000.041 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors
00000.043 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307719b} miss {0 0b}
00000.043 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.043 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31}
00000.043 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310280b, 16}
00000.043 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.043 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.043 II| FAKE_ENV: All BS storage groups are stopped
00000.043 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s
00000.043 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:47.007557Z
00000.016 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.016 II| FAKE_ENV: Starting storage for BS group 0
00000.017 II| FAKE_ENV: Starting storage for BS group 1
00000.017 II| FAKE_ENV: Starting storage for BS group 2
00000.017 II| FAKE_ENV: Starting storage for BS group 3
00000.113 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.114 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {4 194770b} miss {0 0b}
00000.114 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.115 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1768b, 27}
00000.115 II| FAKE_ENV: DS.1 gone, left {734b, 6}, put {197775b, 24}
00000.115 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.119 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.119 II| FAKE_ENV: All BS storage groups are stopped
00000.119 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.119 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:47.133332Z
00000.032 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.032 II| FAKE_ENV: Starting storage for BS group 0
00000.033 II| FAKE_ENV: Starting storage for BS group 1
00000.034 II| FAKE_ENV: Starting storage for BS group 2
00000.034 II| FAKE_ENV: Starting storage for BS group 3
00000.040 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.041 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b}
00000.041 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.041 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7}
00000.041 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5}
00000.041 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.041 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.041 II| FAKE_ENV: All BS storage groups are stopped
00000.041 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.041 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:47.186382Z
00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.011 II| FAKE_ENV: Starting storage for BS group 0
00000.012 II| FAKE_ENV: Starting storage for BS group 1
00000.012 II| FAKE_ENV: Starting storage for BS group 2
00000.012 II| FAKE_ENV: Starting storage for BS group 3
00000.110 II| TABLET_SAUSAGECACHE: Wakeup 1
00000.111 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.112 NN| TABLET_SAUSAGECACHE: Poison cache serviced 7 reqs hit {10 410124b} miss {0 0b}
00000.112 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.112 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1494b, 23}
00000.112 II| FAKE_ENV: DS.1 gone, left {506b, 4}, put {310620b, 20}
00000.112 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.112 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.113 II| FAKE_ENV: All BS storage groups are stopped
00000.113 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s
00000.113 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:47.308965Z
00000.014 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.015 II| FAKE_ENV: Starting storage for BS group 0
00000.015 II| FAKE_ENV: Starting storage for BS group 1
00000.015 II| FAKE_ENV: Starting storage for BS group 2
00000.015 II| FAKE_ENV: Starting storage for BS group 3
00000.049 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors
00000.050 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 102505b} miss {0 0b}
00000.050 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.050 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11}
00000.050 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.0 ... 
t32 : 400}
| ERowOp 1: {5} {Set 1 Uint32 : 500}
+ Rows{3} Label{34 rev 1, 50b}, [6, +2)row
| ERowOp 1: {6} {Set 1 Uint32 : 600}
| ERowOp 1: {7} {Set 1 Uint32 : 700}
+ Rows{4} Label{44 rev 1, 50b}, [8, +2)row
| ERowOp 1: {8} {Set 1 Uint32 : 800}
| ERowOp 1: {9} {Set 1 Uint32 : 900}
+ Rows{5} Label{54 rev 1, 50b}, [10, +2)row
| ERowOp 1: {10} {Set 1 Uint32 : 1000}
| ERowOp 1: {11} {Set 1 Uint32 : 1100}
+ Rows{7} Label{74 rev 1, 50b}, [12, +2)row
| ERowOp 1: {12} {Set 1 Uint32 : 1200}
| ERowOp 1: {13} {Set 1 Uint32 : 1300}
+ Rows{8} Label{84 rev 1, 50b}, [14, +2)row
| ERowOp 1: {14} {Set 1 Uint32 : 1400}
| ERowOp 1: {15} {Set 1 Uint32 : 1500}
+ Rows{9} Label{94 rev 1, 50b}, [16, +2)row
| ERowOp 1: {16} {Set 1 Uint32 : 1600}
| ERowOp 1: {17} {Set 1 Uint32 : 1700}
+ Rows{11} Label{114 rev 1, 50b}, [18, +2)row
| ERowOp 1: {18} {Set 1 Uint32 : 1800}
| ERowOp 1: {19} {Set 1 Uint32 : 1900}
+ Rows{12} Label{124 rev 1, 50b}, [20, +2)row
| ERowOp 1: {20} {Set 1 Uint32 : 2000}
| ERowOp 1: {21} {Set 1 Uint32 : 2100}
+ Rows{13} Label{134 rev 1, 50b}, [22, +2)row
| ERowOp 1: {22} {Set 1 Uint32 : 2200}
| ERowOp 1: {23} {Set 1 Uint32 : 2300}
+ Rows{15} Label{154 rev 1, 50b}, [24, +2)row
| ERowOp 1: {24} {Set 1 Uint32 : 2400}
| ERowOp 1: {25} {Set 1 Uint32 : 2500}
+ Rows{16} Label{164 rev 1, 50b}, [26, +2)row
| ERowOp 1: {26} {Set 1 Uint32 : 2600}
| ERowOp 1: {27} {Set 1 Uint32 : 2700}
+ Rows{17} Label{174 rev 1, 50b}, [28, +2)row
| ERowOp 1: {28} {Set 1 Uint32 : 2800}
| ERowOp 1: {29} {Set 1 Uint32 : 2900}
+ Rows{19} Label{194 rev 1, 50b}, [30, +2)row
| ERowOp 1: {30} {Set 1 Uint32 : 3000}
| ERowOp 1: {31} {Set 1 Uint32 : 3100}
+ Rows{20} Label{204 rev 1, 50b}, [32, +2)row
| ERowOp 1: {32} {Set 1 Uint32 : 3200}
| ERowOp 1: {33} {Set 1 Uint32 : 3300}
+ Rows{21} Label{214 rev 1, 50b}, [34, +2)row
| ERowOp 1: {34} {Set 1 Uint32 : 3400}
| ERowOp 1: {35} {Set 1 Uint32 : 3500}
+ Rows{24} Label{244 rev 1, 50b}, [36, +2)row
| ERowOp 1: {36} {Set 1 Uint32 : 3600}
| ERowOp 1: {37} {Set 1 Uint32 : 3700}
+ Rows{25} Label{254 rev 1, 50b}, [38, +2)row
| ERowOp 1: {38} {Set 1 Uint32 : 3800}
| ERowOp 1: {39} {Set 1 Uint32 : 3900}
Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b
+ BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b}
| + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0
| | | > {2}
| | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0
| | | > {4}
| | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0
| | > {6}
| | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0
| | | > {8}
| | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0
| | | > {10}
| | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0
| | > {12}
| | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0
| | | > {14}
| | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0
| | | > {16}
| | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0
| > {18}
| + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0
| | | > {20}
| | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0
| | | > {22}
| | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0
| | > {24}
| | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0
| | | > {26}
| | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0
| | | > {28}
| | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0
| | > {30}
| | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b}
| | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0
| | | > {32}
| | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0
| | | > {34}
| | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0
| | | > {36}
| | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0
| | | > {38}
| | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0
+ Rows{0} Label{04 rev 1, 50b}, [0, +2)row
| ERowOp 1: {0} {Set 1 Uint32 : 0}
| ERowOp 1: {1} {Set 1 Uint32 : 100}
+ Rows{1} Label{14 rev 1, 50b}, [2, +2)row
| ERowOp 1: {2} {Set 1 Uint32 : 200}
| ERowOp 1: {3} {Set 1 Uint32 : 300}
+ Rows{2} Label{24 rev 1, 50b}, [4, +2)row
| ERowOp 1: {4} {Set 1 Uint32 : 400}
| ERowOp 1: {5} {Set 1 Uint32 : 500}
+ Rows{3} Label{34 rev 1, 50b}, [6, +2)row
| ERowOp 1: {6} {Set 1 Uint32 : 600}
| ERowOp 1: {7} {Set 1 Uint32 : 700}
+ Rows{4} Label{44 rev 1, 50b}, [8, +2)row
| ERowOp 1: {8} {Set 1 Uint32 : 800}
| ERowOp 1: {9} {Set 1 Uint32 : 900}
+ Rows{5} Label{54 rev 1, 50b}, [10, +2)row
| ERowOp 1: {10} {Set 1 Uint32 : 1000}
| ERowOp 1: {11} {Set 1 Uint32 : 1100}
+ Rows{7} Label{74 rev 1, 50b}, [12, +2)row
| ERowOp 1: {12} {Set 1 Uint32 : 1200}
| ERowOp 1: {13} {Set 1 Uint32 : 1300}
+ Rows{8} Label{84 rev 1, 50b}, [14, +2)row
| ERowOp 1: {14} {Set 1 Uint32 : 1400}
| ERowOp 1: {15} {Set 1 Uint32 : 1500}
+ Rows{9} Label{94 rev 1, 50b}, [16, +2)row
| ERowOp 1: {16} {Set 1 Uint32 : 1600}
| ERowOp 1: {17} {Set 1 Uint32 : 1700}
+ Rows{11} Label{114 rev 1, 50b}, [18, +2)row
| ERowOp 1: {18} {Set 1 Uint32 : 1800}
| ERowOp 1: {19} {Set 1 Uint32 : 1900}
+ Rows{12} Label{124 rev 1, 50b}, [20, +2)row
| ERowOp 1: {20} {Set 1 Uint32 : 2000}
| ERowOp 1: {21} {Set 1 Uint32 : 2100}
+ Rows{13} Label{134 rev 1, 50b}, [22, +2)row
| ERowOp 1: {22} {Set 1 Uint32 : 2200}
| ERowOp 1: {23} {Set 1 Uint32 : 2300}
+ Rows{15} Label{154 rev 1, 50b}, [24, +2)row
| ERowOp 1: {24} {Set 1 Uint32 : 2400}
| ERowOp 1: {25} {Set 1 Uint32 : 2500}
+ Rows{16} Label{164 rev 1, 50b}, [26, +2)row
| ERowOp 1: {26} {Set 1 Uint32 : 2600}
| ERowOp 1: {27} {Set 1 Uint32 : 2700}
+ Rows{17} Label{174 rev 1, 50b}, [28, +2)row
| ERowOp 1: {28} {Set 1 Uint32 : 2800}
| ERowOp 1: {29} {Set 1 Uint32 : 2900}
+ Rows{19} Label{194 rev 1, 50b}, [30, +2)row
| ERowOp 1: {30} {Set 1 Uint32 : 3000}
| ERowOp 1: {31} {Set 1 Uint32 : 3100}
+ Rows{20} Label{204 rev 1, 50b}, [32, +2)row
| ERowOp 1: {32} {Set 1 Uint32 : 3200}
| ERowOp 1: {33} {Set 1 Uint32 : 3300}
+ Rows{21} Label{214 rev 1, 50b}, [34, +2)row
| ERowOp 1: {34} {Set 1 Uint32 : 3400}
| ERowOp 1: {35} {Set 1 Uint32 : 3500}
+ Rows{24} Label{244 rev 1, 50b}, [36, +2)row
| ERowOp 1: {36} {Set 1 Uint32 : 3600}
| ERowOp 1: {37} {Set 1 Uint32 : 3700}
+ Rows{25} Label{254 rev 1, 50b}, [38, +2)row
| ERowOp 1: {38} {Set 1 Uint32 : 3800}
| ERowOp 1: {39} {Set 1 Uint32 : 3900}
Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b
+ BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b}
| + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0
| | | > {2}
| | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0
| | | > {4}
| | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0
| | > {6}
| | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0
| | | > {8}
| | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0
| | | > {10}
| | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0
| | > {12}
| | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0
| | | > {14}
| | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0
| | | > {16}
| | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0
| > {18}
| + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0
| | | > {20}
| | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0
| | | > {22}
| | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0
| | > {24}
| | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b}
| | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0
| | | > {26}
| | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0
| | | > {28}
| | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0
| | > {30}
| | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b}
| | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0
| | | > {32}
| | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0
| | | > {34}
| | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0
| | | > {36}
| | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0
| | | > {38}
| | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0
+ Rows{0} Label{04 rev 1, 50b}, [0, +2)row
| ERowOp 1: {0} {Set 1 Uint32 : 0}
| ERowOp 1: {1} {Set 1 Uint32 : 100}
+ Rows{1} Label{14 rev 1, 50b}, [2, +2)row
| ERowOp 1: {2} {Set 1 Uint32 : 200}
| ERowOp 1: {3} {Set 1 Uint32 : 300}
+ Rows{2} Label{24 rev 1, 50b}, [4, +2)row
| ERowOp 1: {4} {Set 1 Uint32 : 400}
| ERowOp 1: {5} {Set 1 Uint32 : 500}
+ Rows{3} Label{34 rev 1, 50b}, [6, +2)row
| ERowOp 1: {6} {Set 1 Uint32 : 600}
| ERowOp 1: {7} {Set 1 Uint32 : 700}
+ Rows{4} Label{44 rev 1, 50b}, [8, +2)row
| ERowOp 1: {8} {Set 1 Uint32 : 800}
| ERowOp 1: {9} {Set 1 Uint32 : 900}
+ Rows{5} Label{54 rev 1, 50b}, [10, +2)row
| ERowOp 1: {10} {Set 1 Uint32 : 1000}
| ERowOp 1: {11} {Set 1 Uint32 : 1100}
+ Rows{7} Label{74 rev 1, 50b}, [12, +2)row
| ERowOp 1: {12} {Set 1 Uint32 : 1200}
| ERowOp 1: {13} {Set 1 Uint32 : 1300}
+ Rows{8} Label{84 rev 1, 50b}, [14, +2)row
| ERowOp 1: {14} {Set 1 Uint32 : 1400}
| ERowOp 1: {15} {Set 1 Uint32 : 1500}
+ Rows{9} Label{94 rev 1, 50b}, [16, +2)row
| ERowOp 1: {16} {Set 1 Uint32 : 1600}
| ERowOp 1: {17} {Set 1 Uint32 : 1700}
+ Rows{11} Label{114 rev 1, 50b}, [18, +2)row
| ERowOp 1: {18} {Set 1 Uint32 : 1800}
| ERowOp 1: {19} {Set 1 Uint32 : 1900}
+ Rows{12} Label{124 rev 1, 50b}, [20, +2)row
| ERowOp 1: {20} {Set 1 Uint32 : 2000}
| ERowOp 1: {21} {Set 1 Uint32 : 2100}
+ Rows{13} Label{134 rev 1, 50b}, [22, +2)row
| ERowOp 1: {22} {Set 1 Uint32 : 2200}
| ERowOp 1: {23} {Set 1 Uint32 : 2300}
+ Rows{15} Label{154 rev 1, 50b}, [24, +2)row
| ERowOp 1: {24} {Set 1 Uint32 : 2400}
| ERowOp 1: {25} {Set 1 Uint32 : 2500}
+ Rows{16} Label{164 rev 1, 50b}, [26, +2)row
| ERowOp 1: {26} {Set 1 Uint32 : 2600}
| ERowOp 1: {27} {Set 1 Uint32 : 2700}
+ Rows{17} Label{174 rev 1, 50b}, [28, +2)row
| ERowOp 1: {28} {Set 1 Uint32 : 2800}
| ERowOp 1: {29} {Set 1 Uint32 : 2900}
+ Rows{19} Label{194 rev 1, 50b}, [30, +2)row
| ERowOp 1: {30} {Set 1 Uint32 : 3000}
| ERowOp 1: {31} {Set 1 Uint32 : 3100}
+ Rows{20} Label{204 rev 1, 50b}, [32, +2)row
| ERowOp 1: {32} {Set 1 Uint32 : 3200}
| ERowOp 1: {33} {Set 1 Uint32 : 3300}
+ Rows{21} Label{214 rev 1, 50b}, [34, +2)row
| ERowOp 1: {34} {Set 1 Uint32 : 3400}
| ERowOp 1: {35} {Set 1 Uint32 : 3500}
+ Rows{24} Label{244 rev 1, 50b}, [36, +2)row
| ERowOp 1: {36} {Set 1 Uint32 : 3600}
| ERowOp 1: {37} {Set 1 Uint32 : 3700}
+ Rows{25} Label{254 rev 1, 50b}, [38, +2)row
| ERowOp 1: {38} {Set 1 Uint32 : 3800}
| ERowOp 1: {39} {Set 1 Uint32 : 3900}
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
|80.7%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
>> Viewer::ServerlessNodesPage [GOOD]
>> Viewer::ServerlessWithExclusiveNodes
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
>> TExportToS3Tests::EnableChecksumsPersistance [GOOD]
>> YdbYqlClient::TestColumnOrder
>> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD]
>> DataShardSnapshots::RepeatableReadAfterSplitRace
>> TChargeBTreeIndex::FewNodes_History [GOOD]
>> TChargeBTreeIndex::FewNodes_Sticky
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD]
>> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink
>> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD]
>> TFlatTableExecutor_CompactionScan::TestCompactionScan
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportPartitioningSettings [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T16:59:50.925709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:59:50.925857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:59:50.925923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:59:50.925982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:59:50.926076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:59:50.926126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:59:50.928450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:59:50.931621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T16:59:50.932692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:59:51.024689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T16:59:51.024757Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:59:51.044117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:59:51.044451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T16:59:51.044642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T16:59:51.075645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T16:59:51.076011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T16:59:51.076814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T16:59:51.082645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T16:59:51.115280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:59:51.116983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:59:51.117075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:59:51.117305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T16:59:51.117362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:59:51.117406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T16:59:51.117579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.139403Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T16:59:51.399841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T16:59:51.400126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.400374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T16:59:51.400703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T16:59:51.400782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.403828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T16:59:51.404095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T16:59:51.404364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.404445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T16:59:51.404496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T16:59:51.404535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T16:59:51.407547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.407661Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T16:59:51.407726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T16:59:51.410048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.410111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.410155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:59:51.410219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T16:59:51.414503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T16:59:51.418154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T16:59:51.418493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T16:59:51.419743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T16:59:51.419935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T16:59:51.420007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:59:51.420348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T16:59:51.420420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T16:59:51.420708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T16:59:51.420791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T16:59:51.423476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T16:59:51.423523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T16:59:51.423730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T16:59:51.423802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T16:59:51.424447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T16:59:51.424519Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T16:59:51.424631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:59:51.424663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:59:51.424702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T16:59:51.424730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:59:51.424769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T16:59:51.424812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T16:59:51.424847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T16:59:51.424878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T16:59:51.424968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T16:59:51.425010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T16:59:51.425046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T16:59:51.427354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T16:59:51.427485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T16:59:51.427547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
57594046678944
2025-04-04T17:00:00.415745Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944
2025-04-04T17:00:00.415809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1
2025-04-04T17:00:00.415970Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:00:00.416791Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.416886Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.416916Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-04T17:00:00.416949Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10
2025-04-04T17:00:00.416983Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-04-04T17:00:00.423090Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.423200Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.423229Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-04T17:00:00.423263Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7
2025-04-04T17:00:00.423297Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-04T17:00:00.423393Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true
2025-04-04T17:00:00.424921Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete
2025-04-04T17:00:00.425214Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944
2025-04-04T17:00:00.425266Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true
2025-04-04T17:00:00.425317Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944
2025-04-04T17:00:00.431465Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816
2025-04-04T17:00:00.431616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545
2025-04-04T17:00:00.431892Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007
FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007
2025-04-04T17:00:00.432835Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:00:00.432987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:00:00.433050Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944
2025-04-04T17:00:00.433178Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944
2025-04-04T17:00:00.433251Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1
2025-04-04T17:00:00.433295Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-04-04T17:00:00.433346Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1
2025-04-04T17:00:00.433390Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-04-04T17:00:00.433462Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:00:00.433537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-04T17:00:00.433577Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false
2025-04-04T17:00:00.433634Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1
2025-04-04T17:00:00.433692Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0
2025-04-04T17:00:00.433734Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0
2025-04-04T17:00:00.433819Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-04T17:00:00.433876Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1
2025-04-04T17:00:00.433913Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11
2025-04-04T17:00:00.433954Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615
2025-04-04T17:00:00.434923Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
FAKE_COORDINATOR: Erasing txId 281474976710761
2025-04-04T17:00:00.443456Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:00:00.443509Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:00:00.443721Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-04T17:00:00.443856Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:00:00.443891Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1
2025-04-04T17:00:00.443929Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3
2025-04-04T17:00:00.444670Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.444762Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.444796Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-04T17:00:00.444851Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11
2025-04-04T17:00:00.444896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3
2025-04-04T17:00:00.445542Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.445626Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.445654Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761
2025-04-04T17:00:00.445684Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-04-04T17:00:00.445711Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-04T17:00:00.445781Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1
2025-04-04T17:00:00.445847Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:122:2148]
2025-04-04T17:00:00.453632Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.454168Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761
2025-04-04T17:00:00.454323Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761
2025-04-04T17:00:00.454411Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761
2025-04-04T17:00:00.454460Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute
2025-04-04T17:00:00.454505Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761
2025-04-04T17:00:00.454550Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295
2025-04-04T17:00:00.456302Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete
2025-04-04T17:00:00.456383Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-04T17:00:00.456439Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:605:2562]
TestWaitNotification: OK eventTxId 102
>> YdbYqlClient::BuildInfo
>> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48
>> TChargeBTreeIndex::FewNodes_Sticky [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups_History_Sticky
>> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD]
>> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD]
>> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut
|80.7%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut
>> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD]
>> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD]
>> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles
>> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD]
>> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot
>> BuildStatsHistogram::Single [GOOD]
>> BuildStatsHistogram::Single_Slices
>> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD]
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|80.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBase::DropModifiedTable [GOOD]
Test command err:
10 parts:
[0:0:1:0:0:0:0] 120000 rows, 5095 pages, 6 levels: (79984, 26669) (159964, 53329) (240169, 80064) (319996, 106673) (400177, 133400)
[0:0:2:0:0:0:0] 60000 rows, 2548 pages, 6 levels: (440041, 146688) (479950, 159991) (519868, 173297) (559849, 186624) (599833, 199952)
[0:0:3:0:0:0:0] 30000 rows, 1273 pages, 5 levels: (619663, 206562) (639565, 213196) (659563, 219862)
(679648, 226557) (699550, 233191) [0:0:4:0:0:0:0] 15000 rows, 634 pages, 5 levels: (709513, 236512) (719443, 239822) (729415, 243146) (739393, 246472) (749443, 249822) [0:0:5:0:0:0:0] 7500 rows, 317 pages, 4 levels: (754330, 251451) (759397, 253140) (764335, 254786) (769423, 256482) (774439, 258154) [0:0:6:0:0:0:0] 3750 rows, 159 pages, 4 levels: (776926, 258983) (779407, 259810) (781876, 260633) (784414, 261479) (786847, 262290) [0:0:7:0:0:0:0] 1875 rows, 80 pages, 3 levels: (788179, 262734) (789382, 263135) (790639, 263554) (791884, 263969) (793144, 264389) [0:0:8:0:0:0:0] 937 rows, 40 pages, 3 levels: (793732, 264585) (794365, 264796) (794989, 265004) (795610, 265211) (796246, 265423) [0:0:9:0:0:0:0] 468 rows, 21 pages, 2 levels: (796516, 265513) (796828, 265617) (797182, 265735) (797425, 265816) (797809, 265944) [0:0:10:0:0:0:0] 470 rows, 21 pages, 2 levels: (798118, 266047) (798412, 266145) (798733, 266252) (799042, 266355) (799339, 266454) Checking BTree: Touched 0% bytes, 5 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24511 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48568 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72759 - 0% error) 5% (actual 5%) key = (281245, 93756) value = 84339 (actual 84818 - 0% error) 10% (actual 10%) key = (361831, 120618) value = 108515 (actual 108996 - 0% error) 14% (actual 14%) key = (480337, 160120) value = 144115 (actual 144540 - 0% error) 5% (actual 5%) key = (520480, 173501) value = 156166 (actual 156594 - 0% error) 12% (actual 12%) key = (619723, 206582) value = 186013 (actual 186392 - 0% error) 9% (actual 9%) key = (699586, 233203) value = 210000 (actual 210331 - 0% error) 12% (actual 12%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2088769 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4138455 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6186028 - 0% error) 5% (actual 5%) key = (281245, 93756) value = 7170146 (actual 7210231 - 0% error) 10% (actual 10%) key = (361831, 120618) value = 9219080 (actual 9259232 - 0% error) 14% (actual 14%) key = (480337, 160120) value = 12241592 (actual 12277578 - 0% error) 5% (actual 5%) key = (520480, 173501) value = 13267167 (actual 13303212 - 0% error) 9% (actual 9%) key = (599860, 199961) value = 15290784 (actual 15322806 - 0% error) 12% (actual 12%) key = (699586, 233203) value = 17836473 (actual 17864572 - 0% error) 12% (actual 12%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24488 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48493 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72493 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96517 - 0% error) 10% (actual 9%) key = (400369, 133464) value = 120052 (actual 120477 - 0% error) 10% (actual 10%) key = (480187, 160070) value = 144069 (actual 144495 - 0% error) 10% (actual 10%) key = (560152, 186725) value = 168082 (actual 168505 - 0% error) 10% (actual 9%) key = (639976, 213333) value = 192097 (actual 192468 - 0% error) 10% (actual 9%) key = (719863, 239962) value = 216115 (actual 216440 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2078707 - 0% error) 10% (actual 10%) key = (159505, 53176) value = 4078259 (actual 4118206 - 0% error) 10% (actual 10%) key = 
(239938, 79987) value = 6115953 (actual 6156029 - 0% error) 10% (actual 10%) key = (319918, 106647) value = 8154975 (actual 8195015 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192596 (actual 10228980 - 0% error) 10% (actual 10%) key = (479950, 159991) value = 12231489 (actual 12267556 - 0% error) 10% (actual 10%) key = (559930, 186651) value = 14270004 (actual 14305948 - 0% error) 10% (actual 9%) key = (639565, 213196) value = 16308955 (actual 16341020 - 0% error) 10% (actual 9%) key = (719611, 239878) value = 18346862 (actual 18374842 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 45 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24002 (actual 10940 - 5% error) 10% (actual 10%) key = (115162, 38395) value = 48090 (actual 35028 - 5% error) 10% (actual 10%) key = (195502, 65175) value = 72183 (actual 59117 - 5% error) 10% (actual 10%) key = (276202, 92075) value = 96373 (actual 83308 - 5% error) 10% (actual 10%) key = (356899, 118974) value = 120561 (actual 107496 - 5% error) 10% (actual 11%) key = (445249, 148424) value = 145590 (actual 133979 - 4% error) 10% (actual 10%) key = (525553, 175192) value = 169735 (actual 158120 - 4% error) 10% (actual 11%) key = (614770, 204931) value = 195036 (actual 184889 - 4% error) 10% (actual 11%) key = (704617, 234880) value = 220528 (actual 211850 - 3% error) 8% (actual 11%) DataSizeHistogram: 10% (actual 4%) key = (34876, 11633) value = 2045575 (actual 936275 - 5% error) 10% (actual 10%) key = (115162, 38395) value = 4093628 (actual 2984377 - 5% error) 10% (actual 10%) key = (195502, 65175) value = 6144579 (actual 5035222 - 5% error) 10% (actual 10%) key = (276202, 92075) value = 8191333 (actual 7082004 - 5% error) 10% (actual 10%) key = (356899, 118974) value = 10240135 (actual 9130907 - 5% error) 10% (actual 11%) key = (445249, 148424) value = 12365629 (actual 11380607 - 4% error) 10% (actual 10%) key = (525553, 175192) value = 14416445 (actual 13431474 - 4% error) 10% (actual 11%) key = (614770, 204931) value = 16567743 (actual 15706949 - 4% error) 10% (actual 11%) key = (704617, 234880) value = 18729362 (actual 17992610 - 3% error) 8% (actual 11%) 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 2% (actual 0%) key = (10, 11) value = 12 (actual 1 - 2% error) 2% (actual 0%) key = (16, 13) value = 24 (actual 2 - 4% error) 5% (actual 11%) key = (199, 74) value = 49 (actual 61 - -2% error) 4% (actual 2%) key = (253, 92) value = 73 (actual 74 - 0% error) 4% (actual 2%) key = (286, 103) value = 97 (actual 84 - 2% error) 4% (actual 8%) key = (418, 147) value = 120 (actual 125 - -1% error) 4% (actual 5%) key = (514, 179) value = 144 (actual 154 - -2% error) 5% (actual 4%) key = (577, 200) value = 169 (actual 174 - -1% error) 4% (actual 1%) key = (607, 210) value = 192 (actual 183 - 1% error) 4% (actual 8%) key = (742, 255) value = 214 (actual 226 - -2% error) 5% (actual 1%) key = (769, 264) value = 239 (actual 235 - 0% error) 4% (actual 2%) key = (811, 278) value = 262 (actual 248 - 2% error) 4% (actual 9%) key = (958, 327) value = 286 (actual 293 - -1% error) 5% (actual 5%) key = (1054, 359) value = 311 (actual 322 - -2% error) 4% (actual 2%) key = (1087, 370) value = 334 (actual 
332 - 0% error) 4% (actual 4%) key = (1156, 393) value = 358 (actual 354 - 0% error) 4% (actual 8%) key = (1291, 438) value = 381 (actual 394 - -2% error) 4% (actual 1%) key = (1315, 446) value = 404 (actual 401 - 0% error) 4% (actual 3%) key = (1375, 466) value = 426 (actual 419 - 1% error) 4% (actual 10%) key = (1540, 521) value = 449 (actual 469 - -4% error) 3% (actual 1%) key = (1555, 526) value = 465 (actual 474 - -1% error) 3% (actual 2%) key = (1594, 539) value = 482 (actual 484 - 0% error) 1% (actual 2%) key = (1636, 553) value = 491 (actual 497 - -1% error) 1% (actual 0%) key = (1639, 554) value = 496 (actual 498 - 0% error) 0% (actual 0%) DataSizeHistogram: 2% (actual 13%) key = (10, 11) value = 950 (actual 5800 - -11% error) 2% (actual 0%) key = (16, 13) value = 1933 (actual 5800 - -9% error) 4% (actual 0%) key = (199, 74) value = 3866 (actual 5800 - -4% error) 4% (actual 9%) key = (253, 92) value = 5849 (actual 9821 - -9% error) 4% (actual 4%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 4% (actual 4%) key = (418, 147) value = 9825 (actual 13848 - -9% error) 4% (actual 4%) key = (514, 179) value = 11834 (actual 15888 - -9% error) 4% (actual 4%) key = (577, 200) value = 13865 (actual 17883 - -9% error) 4% (actual 4%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 4% (actual 4%) key = (742, 255) value = 17859 (actual 21881 - -9% error) 4% (actual 4%) key = (769, 264) value = 19882 (actual 23918 - -9% error) 4% (actual 0%) key = (811, 278) value = 21897 (actual 23918 - -4% error) 4% (actual 9%) key = (958, 327) value = 23894 (actual 27913 - -9% error) 4% (actual 4%) key = (1054, 359) value = 25915 (actual 29895 - -9% error) 4% (actual 0%) key = (1087, 370) value = 27901 (actual 29895 - -4% error) 4% (actual 4%) key = (1156, 393) value = 29881 (actual 31850 - -4% error) 4% (actual 4%) key = (1291, 438) value = 31821 (actual 33747 - -4% error) 4% (actual 4%) key = (1315, 446) value = 33794 (actual 35739 - -4% error) 4% (actual 9%) key = (1375, 466) value = 35737 (actual 39763 - -9% error) 4% (actual 4%) key = (1540, 521) value = 37749 (actual 41447 - -8% error) 3% (actual 0%) key = (1555, 526) value = 39198 (actual 41447 - -5% error) 3% (actual 1%) key = (1594, 539) value = 40605 (actual 42020 - -3% error) 1% (actual 0%) key = (1636, 553) value = 41344 (actual 42020 - -1% error) 0% (actual 0%) key = (1639, 554) value = 41733 (actual 42020 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 4% (actual 0%) key = (10, 11) value = 24 (actual 1 - 4% error) 5% (actual 0%) key = (16, 13) value = 49 (actual 2 - 9% error) 5% (actual 11%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (ac ... 
85 - 0% error)
4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error)
4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error)
4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error)
4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error)
4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error)
4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error)
4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error)
4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error)
4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error)
4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error)
4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error)
4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error)
4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error)
4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error)
0% (actual 0%)
Checking Flat: Touched 100% bytes, 3 pages
RowCountHistogram:
5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error)
5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error)
4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error)
4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error)
4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error)
5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error)
4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error)
4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error)
4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error)
3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error)
5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error)
4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error)
4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error)
4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error)
0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error)
4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error)
5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error)
4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error)
4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error)
4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error)
4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error)
5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error)
0% (actual 0%)
DataSizeHistogram:
4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error)
4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error)
4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error)
4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error)
4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error)
4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error)
4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error)
4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error)
4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error)
4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error)
4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error)
4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error)
4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error)
4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error)
0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error)
4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error)
4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error)
4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error)
4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error)
4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error)
4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error)
4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error)
0% (actual 0%)
Checking Mixed: Touched 100% bytes, 3 pages
RowCountHistogram:
14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error)
5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error)
4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error)
4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error)
4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error)
5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error)
4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error)
4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error)
3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error)
5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error)
4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error)
4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error)
4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error)
0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error)
5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error)
4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error)
4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error)
4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error)
4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error)
5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error)
0% (actual 0%)
DataSizeHistogram:
14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error)
4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error)
4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error)
4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error)
4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error)
4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error)
4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error)
4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error)
4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error)
4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error)
4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error)
4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error)
4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error)
0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error)
4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error)
4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error)
4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error)
4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error)
4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error)
4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error)
0% (actual 0%)
3 parts:
[0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () ()
[0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () ()
[0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () ()
Checking BTree: Touched 0% bytes, 0 pages
RowCountHistogram:
33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error)
33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error)
33% (actual 33%)
DataSizeHistogram:
32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error)
33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error)
33% (actual 33%)
Checking Flat: Touched 100% bytes, 3 pages
RowCountHistogram:
33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error)
33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error)
33% (actual 33%)
DataSizeHistogram:
32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error)
33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error)
33% (actual 33%)
Checking Mixed: Touched 0% bytes, 0 pages
RowCountHistogram:
100% (actual 100%)
DataSizeHistogram:
100% (actual 100%)
Got : 24000 2106439 49449 38 44
Expected: 24000 2106439 49449 38 44
{ [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) }
Got : 12816 1121048 49449 20 23
Expected: 12816 1121048 49449 20 23
Got : 24000 3547100 81694 64 44
Expected: 24000 3547100 81694 64 44
{ [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) }
Got : 9582 1425198 81694 26 17
Expected: 9582 1425198 81694 26 17
Got : 24000 2460139 23760 42 41
Expected: 24000 2460139 23760 42 41
{ [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) }
Got : 10440 1060798 23760 18 18
Expected: 10440 1060798 23760 18 18
Got : 24000 4054050 46562 68 43
Expected: 24000 4054050 46562 68 43
{ [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) }
Got : 13570 2277890 46562 38 24
Expected: 13570 2277890 46562 38 24
Got : 24000 2106459 49449 38 44
Expected: 24000 2106459 49449 38 44
Got : 24000 2460219 23555 41 41
Expected: 24000 2460219 23555 41 41
Got : 24000 4054270 46543 66 43
Expected: 24000 4054270 46543 66 43
Got : 24000 2106479 49555 38 44
Expected: 24000 2106479 49555 38 44
Got : 24000 2460259 23628 41 41
Expected: 24000 2460259 23628 41 41
Got : 24000 4054290 46640 65 43
Expected: 24000 4054290 46640 65 43
Got : 24000 2106439 66674 3 4
Expected: 24000 2106439 66674 3 4
{ [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) }
Got : 12816 1121048 66674 2 2
Expected: 12816 1121048 66674 2 2
Got : 24000 2460139 33541 4 4
Expected: 24000 2460139 33541 4 4
{ [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) }
Got : 10440 1060798 33541 1 1
Expected: 10440 1060798 33541 1 1
Got : 24000 4054050 64742 7 4
Expected: 24000 4054050 64742 7 4
{ [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) }
Got : 13570 2234982 64742 4 2
Expected: 13570 2234982 64742 4 2
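The bucket lines above all follow the same arithmetic: each histogram entry carries a cumulative estimate (value), the true cumulative count (actual), and an error percentage that, judging by the numbers, is the absolute difference taken against the table total (for the Mixed check above: |70 - 25| / 496 rows is about 9%). A minimal standalone sketch of that check in C++ follows; it is illustrative only, not YDB's actual checker, and the bucket values are copied from the log above:

```cpp
// Illustrative sketch (not YDB's checker): reproduce the
// "value = V (actual A - X% error)" arithmetic seen in the log,
// where a bucket's error appears to be |V - A| relative to the
// table total, e.g. |70 - 25| * 100 / 496 == 9.
#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <vector>

struct Bucket {
    long long value;   // cumulative estimate from the histogram
    long long actual;  // cumulative ground truth
};

int CheckHistogram(const std::vector<Bucket>& buckets, long long total) {
    int worst = 0;
    for (const Bucket& b : buckets) {
        int errPct = static_cast<int>(std::llabs(b.value - b.actual) * 100 / total);
        std::printf("value = %lld (actual %lld - %d%% error)\n",
                    b.value, b.actual, errPct);
        worst = std::max(worst, errPct);
    }
    return worst;
}

int main() {
    // First buckets of the "Checking Mixed" RowCountHistogram above.
    std::vector<Bucket> mixed = {{70, 25}, {95, 50}, {212, 191}};
    std::printf("worst error: %d%%\n", CheckHistogram(mixed, 496));
}
```

Run against the values above, it prints 9%, 9%, and 4%, matching the log.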
>> DemoTx::Scenario_4 [GOOD]
>> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD]
>> TFlatTableExecutor_Follower::FollowerAttachAfterLoan
>> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD]
>> TFlatTableExecutor_Gc::TestFailedGcAfterReboot
>> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD]
>> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex
>> YdbYqlClient::TestReadTableOneBatch
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T16:58:31.033656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:58:31.033769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:58:31.033814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:58:31.033850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:58:31.033901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:58:31.033930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:58:31.033985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:58:31.034079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T16:58:31.034539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:58:31.252587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T16:58:31.252649Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:58:31.297922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:58:31.298263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T16:58:31.298473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T16:58:31.310963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T16:58:31.311365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T16:58:31.312036Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:31.318566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:58:31.329672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:31.331395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:31.331478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:58:31.331675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:58:31.331732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:31.331770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:58:31.331907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.355100Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:58:31.511259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:58:31.511555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.511812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:58:31.512074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:58:31.512149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.519735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:31.519919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:58:31.520138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.520215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:58:31.520311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:58:31.520358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:58:31.527568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.527664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:58:31.527714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:58:31.534616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.534729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.534785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:31.534842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.539258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:58:31.543604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:58:31.543865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:58:31.545082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:58:31.545249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:58:31.545303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:31.545699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:58:31.545778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:58:31.545963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:58:31.546067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:58:31.555599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:58:31.555663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:58:31.555876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
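The "Change state for txid" lines above trace each suboperation through a numbered state machine, 2 -> 3 -> 128 -> 240, advancing on ProgressState calls and on the coordinator's TEvOperationPlan. A toy model of just the transitions visible in this log follows; the state names are assumptions added for readability, and the real schemeshard executor has many more states:

```cpp
// Toy model of the state progression shown in the log above
// (2 -> 3 -> 128 -> 240). Names are guesses for readability;
// only the transitions visible in this dump are modeled.
#include <cstdio>

enum class ETxState {
    CreateParts    = 2,    // "TCreateParts ... ProgressState"
    ConfigureParts = 3,    // "NSubDomainState::TConfigureParts"
    Propose        = 128,  // "NSubDomainState::TPropose" (waits for plan step)
    Done           = 240,  // "TDone opId# ... ProgressState"
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done; // after TEvOperationPlan
        case ETxState::Done:           return ETxState::Done;
    }
    return ETxState::Done;
}

int main() {
    ETxState s = ETxState::CreateParts;
    while (s != ETxState::Done) {
        ETxState n = Next(s);
        std::printf("Change state for txid 1:0 %d -> %d\n",
                    static_cast<int>(s), static_cast<int>(n));
        s = n;
    }
}
```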
2025-04-04T16:58:31.555929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:58:31.556295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:58:31.556361Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:58:31.556506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:31.556561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.556612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:58:31.556651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.556689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:58:31.556735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:58:31.556797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:58:31.556830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:58:31.556913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:58:31.556964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:58:31.557000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:58:31.560876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:31.561039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:58:31.561080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
::TEvMeasureSelfResponseTime 2025-04-04T17:00:00.390314Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:00.714690Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:00.714772Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:00.714867Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:00.714907Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:01.057532Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:01.057606Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:01.057692Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:01.057723Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:01.125660Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-04T17:00:01.125770Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-04T17:00:01.125873Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-04-04T17:00:01.125952Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:00:01.126011Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-04-04T17:00:01.126051Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-04-04T17:00:01.126086Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-04-04T17:00:01.126291Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:00:01.126636Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-04T17:00:01.127170Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:315:2301], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 158 Memory: 124260 Storage: 14054 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 263 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 440 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-04T17:00:01.127227Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:00:01.127283Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0158 2025-04-04T17:00:01.127423Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:00:01.127472Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T17:00:01.129747Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1063:3006], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-04T17:00:01.166361Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:00:01.166450Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:00:01.166494Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-04T17:00:01.166572Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:00:01.166610Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-04T17:00:01.166741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-04T17:00:01.166811Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T17:00:01.166860Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-04T17:00:01.166918Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-04-04T17:00:01.167031Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects 
ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:00:01.177554Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:00:01.177639Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:00:01.177675Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:00:01.418105Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:01.418199Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:01.418322Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:01.418360Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:01.712285Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:01.712370Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:01.712490Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:01.712531Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:02.008695Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:02.008777Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:02.008895Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:02.008939Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:02.300768Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:02.300845Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:02.300942Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:02.300976Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:00:02.618779Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:00:02.618871Z 
node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:00:02.618971Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:00:02.619009Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:00:02.664953Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
2025-04-04T17:00:02.933971Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:00:02.934049Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:00:02.934134Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:00:02.934169Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
>> Viewer::JsonStorageListingV1 [GOOD]
>> Viewer::JsonStorageListingV1GroupIdFilter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EnableChecksumsPersistance [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T16:59:52.399673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:59:52.399746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:59:52.399777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:59:52.399815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:59:52.399861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:59:52.399888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:59:52.399945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:59:52.400014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T16:59:52.400274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:59:52.481325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to
console configs 2025-04-04T16:59:52.481391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:52.493555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:52.493864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:59:52.494044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:52.507612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:52.507943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:52.508722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:52.509693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:52.515803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:52.517279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:52.517370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:52.517594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:52.517647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:52.517691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:52.517864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.525884Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:59:52.665025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:52.665295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.665568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:52.665828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:52.665891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.672551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:52.672724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
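The FAKE_COORDINATOR lines in these dumps suggest the shape of the propose/plan handshake: an accepted transaction is registered with the coordinator, assigned to the next plan step (5000001 above), and a plan is then sent to each affected tablet. A minimal sketch under those assumptions follows; the step granularity and single-tablet delivery are guesses for illustration, not YDB's coordinator:

```cpp
// Minimal sketch of the propose/plan handshake suggested by the
// FAKE_COORDINATOR lines: pending transactions are batched into a
// monotonically increasing plan step, then a plan is delivered to
// the affected tablet. Step size 5000000 is assumed so the first
// plan lands on step 5000001, as in the log.
#include <cstdint>
#include <cstdio>
#include <vector>

class FakeCoordinator {
public:
    void AddTransaction(uint64_t txId) { Pending.push_back(txId); }

    // Advance to the next plan step and deliver every pending tx.
    void AdvanceAndPlan(uint64_t tabletId) {
        CurrentStep += 5000000;  // assumed step granularity
        for (uint64_t txId : Pending) {
            std::printf("FAKE_COORDINATOR: Add transaction: %llu at step: %llu\n",
                        (unsigned long long)txId, (unsigned long long)CurrentStep);
            std::printf("FAKE_COORDINATOR: Send Plan to tablet %llu for txId: %llu at step: %llu\n",
                        (unsigned long long)tabletId, (unsigned long long)txId,
                        (unsigned long long)CurrentStep);
        }
        Pending.clear();
    }

private:
    uint64_t CurrentStep = 1;
    std::vector<uint64_t> Pending;
};

int main() {
    FakeCoordinator c;
    c.AddTransaction(1);
    c.AdvanceAndPlan(72057594046678944ULL);
}
```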
2025-04-04T16:59:52.672922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.673000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:52.673041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:52.673072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:52.679017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.679095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:52.679132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:52.687160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.687224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.687271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:52.687318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:52.697936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:52.701020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:52.701355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:52.702748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:52.702993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:52.703076Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:52.703459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:52.703547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:52.703776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:52.703874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:52.712095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:52.712163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:52.712387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:52.712455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:52.713220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:52.713312Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:52.713427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:52.713469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:52.713514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:52.713572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:52.713619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:52.713666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:52.713716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:52.713752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:52.713844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:52.713892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:52.713931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:52.716389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:52.716539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:52.716585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 Leader for TabletID 72057594046678944 is [4:564:2519] sender: [4:630:2058] recipient: [4:15:2062] 2025-04-04T17:00:01.702717Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:01.706413Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 17179871340 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:01.706563Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-04-04T17:00:01.706801Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-04-04T17:00:01.707019Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:15880 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B347F429-DA22-4831-9A0B-F0C505CC52DA amz-sdk-request: attempt=1 content-length: 73 content-md5: a9Su4FHJt26Hhw4HV0+Ocg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 REQUEST: PUT /metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:15880 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 446967BA-C12A-4404-AFC4-3D288916B575 amz-sdk-request: attempt=1 content-length: 78 content-md5: 5v+lOCwt7SV92xRPjSiuqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json.sha256 / / 78 2025-04-04T17:00:01.724822Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:01.724922Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:00:01.725292Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:01.725372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:612:2556], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-04-04T17:00:01.725550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T17:00:01.725622Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:01.734332Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 
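The PUT requests above show what EnableChecksumsPersistance exercises: every exported object (metadata.json, scheme.pb, data_00.csv) is uploaded with a base64 content-md5 header and followed by a small .sha256 sidecar. A sketch of producing both artifacts with OpenSSL follows; the exact sidecar layout is an assumption (the log only shows the sidecar's length), here taken to be the hex digest:

```cpp
// Sketch (assumes OpenSSL, link with -lcrypto): produce the two
// artifacts visible in the PUT requests above — the base64
// Content-MD5 header value for the object, and the body of the
// companion ".sha256" sidecar (layout assumed to be a hex digest).
#include <openssl/evp.h>
#include <cstdio>
#include <string>

std::string ContentMd5(const std::string& body) {
    unsigned char md[EVP_MAX_MD_SIZE];
    unsigned int len = 0;
    EVP_Digest(body.data(), body.size(), md, &len, EVP_md5(), nullptr);
    unsigned char b64[EVP_MAX_MD_SIZE * 2];
    int n = EVP_EncodeBlock(b64, md, static_cast<int>(len));
    return std::string(reinterpret_cast<char*>(b64), n);
}

std::string Sha256Hex(const std::string& body) {
    unsigned char md[EVP_MAX_MD_SIZE];
    unsigned int len = 0;
    EVP_Digest(body.data(), body.size(), md, &len, EVP_sha256(), nullptr);
    char hex[2 * EVP_MAX_MD_SIZE + 1];
    for (unsigned i = 0; i < len; ++i)
        std::snprintf(hex + 2 * i, 3, "%02x", md[i]);
    return std::string(hex, 2 * len);
}

int main() {
    std::string metadata = "{\"version\": 0}";  // stand-in export body
    std::printf("content-md5: %s\n", ContentMd5(metadata).c_str());  // object PUT header
    std::printf("%s\n", Sha256Hex(metadata).c_str());                // .sha256 PUT body
}
```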
2025-04-04T17:00:01.734549Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-04-04T17:00:01.734610Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-04-04T17:00:01.734689Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-04-04T17:00:01.734750Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:00:01.734898Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15880 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9B80AEF5-5BB0-47F5-9028-8A61EC9D0B00 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-04-04T17:00:01.743961Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:15880 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C17901EE-A85C-41E9-8333-C3EBBABF91E9 amz-sdk-request: attempt=1 content-length: 74 content-md5: NWNhlq1fHKxcSj+x5Xq9NQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb.sha256 / / 74 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:15880 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D772653A-9821-4D55-A345-87012541C878 amz-sdk-request: attempt=1 content-length: 30 content-md5: wztA6/fCcYCMKR0jw2GMNw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 30 REQUEST: PUT /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:15880 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E8EA31D3-975C-406A-A3D5-A1F7311916F9 amz-sdk-request: attempt=1 content-length: 76 content-md5: gmOXObjloPe2DGxtDsgfpg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.sha256 / / 76 2025-04-04T17:00:01.803866Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-04T17:00:01.803971Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-04T17:00:01.804185Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-04T17:00:01.804328Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-04-04T17:00:01.804412Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:01.804475Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T17:00:01.804522Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:00:01.804591Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-04T17:00:01.804832Z node 4 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:01.811466Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T17:00:01.812029Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T17:00:01.812110Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-04T17:00:01.812298Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T17:00:01.812357Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T17:00:01.812420Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T17:00:01.812470Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T17:00:01.812523Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-04-04T17:00:01.812633Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:564:2519] message: TxId: 281474976710759 2025-04-04T17:00:01.812708Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T17:00:01.812764Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-04T17:00:01.812810Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-04T17:00:01.812999Z node 4 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-04-04T17:00:01.819194Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759
2025-04-04T17:00:01.819347Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759
TestWaitNotification wait txId: 102
2025-04-04T17:00:01.854842Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-04T17:00:01.854941Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-04T17:00:01.855611Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944
2025-04-04T17:00:01.855689Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion, export is ready to notify, txId: 102, at schemeshard: 72057594046678944
2025-04-04T17:00:01.855847Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-04T17:00:01.855912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:668:2607]
TestWaitNotification: OK eventTxId 102
>> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex
>> TIterator::MixedReverse [GOOD]
>> TIterator::Serial
>> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD]
>> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts
>> YdbYqlClient::TestYqlWrongTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T16:59:26.500428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:59:26.500539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:59:26.500583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:59:26.500622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:59:26.500686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:59:26.500720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:59:26.500792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:59:26.500875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s,
IsManualStartup# false 2025-04-04T16:59:26.501286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:26.582183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:59:26.582291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:26.597527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:26.597856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:59:26.598067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:59:26.611272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:59:26.611618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:59:26.612511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:26.613621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:59:26.619945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:26.621566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:26.621657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:26.621867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:59:26.621930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:26.621972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:59:26.622126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.631321Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T16:59:26.794487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:59:26.794793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.795089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:59:26.795407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:59:26.795482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.798434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-04T16:59:26.798623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:59:26.798874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.798971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:59:26.799034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:59:26.799085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:59:26.801653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.801743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:59:26.801847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:59:26.806284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.806392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.806452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:26.806508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.810152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:59:26.816192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:59:26.816518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:59:26.817792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:59:26.817984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:59:26.818066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:26.818503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:59:26.818589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:59:26.818827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:59:26.818923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:59:26.824837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:59:26.824921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:59:26.825090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:59:26.825140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T16:59:26.825458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:59:26.825498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:59:26.825584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:26.825614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.825647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:59:26.825674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.825706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:59:26.825742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:59:26.825772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T16:59:26.825798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T16:59:26.825862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T16:59:26.825894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T16:59:26.825922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T16:59:26.828159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:26.828301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T16:59:26.828333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-04-04T16:59:56.894896Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-04-04T16:59:56.895015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T16:59:56.895050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-04-04T16:59:56.895076Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-04-04T16:59:56.905536Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T16:59:59.727158Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0018 2025-04-04T16:59:59.751225Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0017 2025-04-04T16:59:59.777290Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-04T16:59:59.777473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-04-04T16:59:59.777587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-04-04T16:59:59.777647Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-04-04T16:59:59.777783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T16:59:59.777828Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-04-04T16:59:59.777857Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-04-04T16:59:59.790768Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:00:02.699320Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0023 2025-04-04T17:00:02.723223Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0021 2025-04-04T17:00:02.750954Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-04T17:00:02.751182Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-04-04T17:00:02.751289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-04-04T17:00:02.751357Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-04-04T17:00:02.751497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T17:00:02.751579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-04-04T17:00:02.751627Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-04-04T17:00:02.762939Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:00:03.510141Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [3:577:2535], attempt# 1 2025-04-04T17:00:03.543736Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [3:576:2534] 2025-04-04T17:00:03.554037Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [3:577:2535], sender# [3:576:2534] 2025-04-04T17:00:03.554199Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [3:576:2534] 2025-04-04T17:00:03.554829Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [3:577:2535], sender# [3:576:2534], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-04-04T17:00:03.555128Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [3:577:2535], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:61334 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BE4AE3DB-6AA8-46BF-B392-6C66A74E14ED amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-04-04T17:00:03.561803Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [3:577:2535], result# 2025-04-04T17:00:03.562110Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [3:576:2534], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: 
} 2025-04-04T17:00:03.583641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 449 RawX2: 12884904306 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:00:03.583755Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-04-04T17:00:03.583999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 449 RawX2: 12884904306 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:00:03.584168Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 449 RawX2: 12884904306 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:00:03.584277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:03.584333Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T17:00:03.584375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:00:03.584442Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-04-04T17:00:03.584662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:03.593367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T17:00:03.594070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-04-04T17:00:03.594149Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-04-04T17:00:03.594337Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T17:00:03.594391Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T17:00:03.594444Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-04-04T17:00:03.594485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T17:00:03.594532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-04-04T17:00:03.594639Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:120:2146] message: TxId: 281474976710759 2025-04-04T17:00:03.594724Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-04-04T17:00:03.594776Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-04-04T17:00:03.594836Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-04-04T17:00:03.595037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:00:03.610872Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-04-04T17:00:03.611009Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-04-04T17:00:03.623543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:00:03.623646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:596:2551] TestWaitNotification: OK eventTxId 102 >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound >> DemoTx::Scenario_5 >> YdbYqlClient::CreateTableWithPartitionAtKeys |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.8%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> YdbYqlClient::TestDoubleKey >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |80.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |80.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> TTableProfileTests::OverwriteCachingPolicy >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex |80.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |80.8%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-04-04T16:59:56.526938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503825710150557:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:56.530962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017cc/r3tmp/tmpIjK3RU/pdisk_1.dat 2025-04-04T16:59:57.764582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:57.764699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:57.768550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:57.775214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:57.793441Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11331, node 1 2025-04-04T16:59:58.129537Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:58.129562Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:58.129604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:58.129742Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:59:58.512878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:01.528680Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503825710150557:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:01.528772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:01.816448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503847184988100:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:01.816586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:02.329189Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] Handle TEvProposeTransaction 2025-04-04T17:00:02.329225Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:00:02.329272Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489503851479955435:2649] 2025-04-04T17:00:02.470674Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-04T17:00:02.470738Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:00:02.471124Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:00:02.471193Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:00:02.471341Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:00:02.471480Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:00:02.471524Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:00:02.471679Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:00:02.473353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:02.480189Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-04T17:00:02.480254Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955435:2649] txid# 281474976710658 SEND to# [1:7489503851479955434:2345] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-04T17:00:02.790580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503851479955588:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:02.790697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:02.812132Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] Handle TEvProposeTransaction 2025-04-04T17:00:02.812167Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:00:02.812212Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489503851479955602:2774] 2025-04-04T17:00:02.814754Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "a" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-04-04T17:00:02.814801Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:00:02.814878Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:00:02.815165Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:00:02.815265Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:00:02.815303Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-04T17:00:02.815433Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 HANDLE EvClientConnected 2025-04-04T17:00:02.833778Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:00:02.833848Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503851479955602:2774] txid# 281474976710659 SEND to# [1:7489503851479955601:2360] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 53} 2025-04-04T17:00:03.014774Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7489503855774923086:2370] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T17:00:03.059923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503855774923174:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:03.060018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:03.084683Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] Handle TEvProposeTransaction 2025-04-04T17:00:03.084715Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] TxId# 281474976710660 ProcessProposeTransaction 2025-04-04T17:00:03.084756Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7489503855774923206:2985] 2025-04-04T17:00:03.087946Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503855774923206:2985] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "b" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } ... 2025-04-04T17:00:05.743500Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:05.743583Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:05.743597Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715766, id# 281474976710664 2025-04-04T17:00:05.743642Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateChangefeed propose: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715766 2025-04-04T17:00:05.743717Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:05.748518Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:05.748544Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715766, status# StatusAccepted 2025-04-04T17:00:05.748636Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715766 Issue: '' } 2025-04-04T17:00:05.760552Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:05.810817Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][1:7489503864364859986:2502] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:17:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T17:00:05.832117Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:05.832143Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715766 2025-04-04T17:00:05.832244Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-04T17:00:05.837339Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 
2025-04-04T17:00:05.837471Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:05.837489Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715767, id# 281474976710664 2025-04-04T17:00:05.837566Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateConsumers propose: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715767 2025-04-04T17:00:05.838078Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:05.838615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-04T17:00:05.840888Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:05.840911Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715767, status# StatusAccepted 2025-04-04T17:00:05.840987Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715767 Issue: '' } 2025-04-04T17:00:05.844640Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:05.875708Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:05.875736Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715767 2025-04-04T17:00:05.878186Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:06.077336Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7489503868659827465:2515] [0] Resolve database: name# /Root 2025-04-04T17:00:06.077776Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7489503868659827465:2515] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:00:06.077809Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7489503868659827465:2515] [0] Send request: schemeShardId# 72057594046644480 2025-04-04T17:00:06.082518Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7489503868659827465:2515] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:25465" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1743786004 } EndTime { seconds: 1743786005 } } 2025-04-04T17:00:06.089713Z node 1 :TX_PROXY DEBUG: actor# [1:7489503825710150782:2140] Handle 
TEvNavigate describe path /Root/table 2025-04-04T17:00:06.089767Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503868659827471:4871] HANDLE EvNavigateScheme /Root/table 2025-04-04T17:00:06.089999Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503868659827471:4871] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:00:06.090093Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503868659827471:4871] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-04-04T17:00:06.091323Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503868659827471:4871] Handle TEvDescribeSchemeResult Forward to# [1:7489503868659827469:2516] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1743786005185 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" 
SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 4 IsBackup: false CdcStreams { Name: "a" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 14 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "b" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 16 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "c" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 12 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TClockProCache::Touch [GOOD] >> TClockProCache::Lifecycle [GOOD] >> 
TClockProCache::EvictNext >> TClockProCache::EvictNext [GOOD] >> TClockProCache::UpdateLimit [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> YdbYqlClient::TestReadTableOneBatch [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> Describe::Basic >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-04-04T17:00:13.420243Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.420301Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.420327Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.421289Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-04T17:00:13.421348Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.421396Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.422926Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008396s 2025-04-04T17:00:13.423503Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:00:13.449158Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T17:00:13.454195Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.463192Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.463220Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.463242Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.463683Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T17:00:13.463731Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.463761Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.463829Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008017s 2025-04-04T17:00:13.464280Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:00:13.465316Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T17:00:13.465388Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.466701Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.466724Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.466744Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.482924Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-04T17:00:13.483021Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.483055Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.483135Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.292688s 2025-04-04T17:00:13.483906Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:00:13.484931Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T17:00:13.485011Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.486280Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.486302Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.486322Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.486619Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-04T17:00:13.486652Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.486679Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.486738Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.192028s 2025-04-04T17:00:13.487113Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:00:13.498491Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T17:00:13.498629Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.499842Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.499890Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.499912Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.500330Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:00:13.512297Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:00:13.557833Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.569799Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-04-04T17:00:13.569855Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.569884Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.569963Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.185891s 2025-04-04T17:00:13.570189Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-04T17:00:13.573991Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.574019Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.574040Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.574472Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:00:13.577102Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:00:13.577316Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.602825Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:00:13.714635Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.715320Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T17:00:13.715446Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:00:13.715537Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-04T17:00:13.715641Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T17:00:13.823109Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T17:00:13.823314Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-04T17:00:13.826178Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.826199Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.826217Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.834535Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:00:13.835162Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:00:13.835314Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.838837Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:00:13.941005Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.942480Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T17:00:13.942562Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:00:13.942635Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-04-04T17:00:13.942769Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-04T17:00:13.954493Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T17:00:13.954841Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-04T17:00:13.954942Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T17:00:13.955059Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TPart::MassCheck [GOOD] >> TPart::WreckPart >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TPart::WreckPart [GOOD] >> TPart::PageFailEnv >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestBusySession >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> TTableProfileTests::OverwriteCachingPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout >> TPart::ForwardEnv [GOOD] >> TPart::WreckPartColumnGroups >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2025-04-04T17:00:19.626290Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:00:19.659652Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2025-04-04T17:00:19.682443Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:19.682669Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2025-04-04T17:00:19.682727Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:19.682802Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 
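[Editor's note on the read-session trace above] The topic read-session log cycles through reconnects ("Reconnecting session to cluster cluster in 0.192028s", then "in 0.000000s"), and after every successful reconnect it re-sends the read request with the full budget restored ("After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0"). Below is a minimal standalone C++ sketch of that budget-plus-backoff bookkeeping. The name TReadSessionModel is hypothetical, the backoff formula is an assumption (the trace only shows the resulting delays, not the policy), and this illustrates the pattern visible in the log rather than the YDB SDK's actual implementation.

#include <algorithm>
#include <chrono>
#include <cstdint>
#include <iostream>
#include <random>

// Hypothetical model of the read-session state visible in the trace: a
// fixed read budget granted to the server and a failure counter that
// drives randomized backoff between reconnect attempts.
struct TReadSessionModel {
    int64_t ReadSizeBudget = 52428800;   // 50 MiB, the value in the trace
    int64_t ReadSizeServerDelta = 0;
    unsigned ConsecutiveFailures = 0;
    std::mt19937 Rng{42};

    void OnError() { ++ConsecutiveFailures; }  // e.g. TRANSPORT_UNAVAILABLE

    // Randomized, capped backoff; immediate when the previous attempt
    // succeeded. This policy is an assumption -- the trace only shows
    // delays such as 0.192028s and 0.000000s, not the exact formula.
    std::chrono::duration<double> NextReconnectDelay() {
        if (ConsecutiveFailures == 0) {
            return std::chrono::duration<double>(0.0);
        }
        double cap = std::min(
            0.1 * double(1u << std::min(ConsecutiveFailures, 8u)), 30.0);
        std::uniform_real_distribution<double> dist(0.0, cap);
        return std::chrono::duration<double>(dist(Rng));
    }

    // On (re)connect the full budget is granted to the server again,
    // matching "After sending read request: ReadSizeBudget = 52428800,
    // ReadSizeServerDelta = 0" after every reconnect in the trace.
    void OnConnected() {
        ConsecutiveFailures = 0;
        ReadSizeServerDelta = 0;
        std::cout << "After sending read request: ReadSizeBudget = "
                  << ReadSizeBudget << ", ReadSizeServerDelta = "
                  << ReadSizeServerDelta << "\n";
    }
};

int main() {
    TReadSessionModel session;
    session.OnConnected();                 // initial connect
    for (int i = 0; i < 3; ++i) {
        session.OnError();                 // transport error observed
        auto delay = session.NextReconnectDelay();
        std::cout << "Reconnecting session to cluster in "
                  << delay.count() << "s\n";
        session.OnConnected();             // backoff resets on success
    }
    return 0;
}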
2025-04-04T17:00:19.682944Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2025-04-04T17:00:19.682976Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:19.683045Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2025-04-04T17:00:19.683082Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:19.683129Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-04T17:00:19.683152Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:19.683190Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2025-04-04T17:00:19.683224Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:19.683255Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2025-04-04T17:00:19.683283Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:19.683547Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-04-04T17:00:19.683648Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-04T17:00:19.683709Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2025-04-04T17:00:19.683727Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:19.683751Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2025-04-04T17:00:19.683785Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:19.683853Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-04T17:00:19.683896Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:19.683922Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2025-04-04T17:00:19.683966Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2025-04-04T17:00:19.683984Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:19.694599Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2025-04-04T17:00:19.694692Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2025-04-04T17:00:19.694721Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-04-04T17:00:19.694810Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2025-04-04T17:00:19.694830Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2025-04-04T17:00:19.694848Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2025-04-04T17:00:19.694867Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-04-04T17:00:19.694915Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2025-04-04T17:00:19.694951Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 
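[Editor's note on the aggregation trace above] The ShouldBeCcorrectProcessingTabletTimeout output shows the timeout path: tablets 2, 4 and 6 never delivered a TEvStatisticsResponse, so each is reported with "No result was received from the tablet N" and marked "not local", after which (in the records that follow) the aggregate response is sent with the partial data and late events carrying a stale round number are skipped. Below is a minimal standalone C++ model of that bookkeeping; the names TAggregationRound and ETabletState are hypothetical, and this mirrors only the behavior visible in the log, not YDB's actor implementation.

#include <cstdint>
#include <iostream>
#include <map>

// Hypothetical per-tablet state, modeled after the log above: a tablet
// either answered with TEvStatisticsResponse or it did not.
enum class ETabletState { Pending, Answered };

struct TAggregationRound {
    uint64_t Round = 1;
    std::map<uint64_t, ETabletState> Tablets;  // tablet id -> state

    void OnResponse(uint64_t tabletId) {
        Tablets[tabletId] = ETabletState::Answered;
    }

    // Called when the request timeout fires: every tablet that has not
    // answered is reported as an error and treated as not local, then
    // the (possibly partial) aggregate response is sent anyway.
    void OnRequestTimeout() {
        for (const auto& [tabletId, state] : Tablets) {
            if (state == ETabletState::Pending) {
                std::cerr << "No result was received from the tablet "
                          << tabletId << "\n";
                std::cerr << "Tablet " << tabletId << " is not local.\n";
            }
        }
        std::cout << "Send aggregate statistics response\n";
    }

    // Stale events carry an old round number and are skipped, matching
    // "Event round 1 is different from the current 0" in the trace.
    bool AcceptsEvent(uint64_t eventRound) const {
        return eventRound == Round;
    }
};

int main() {
    TAggregationRound round;
    for (uint64_t id : {1, 2, 3, 4, 5, 6, 7}) {
        round.Tablets[id] = ETabletState::Pending;
    }
    for (uint64_t id : {1, 3, 5, 7}) {
        round.OnResponse(id);  // tablets 2, 4 and 6 stay silent
    }
    round.OnRequestTimeout();
    return 0;
}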
2025-04-04T17:00:19.694973Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:00:19.695068Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:00:19.695097Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-04T17:00:19.695144Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-04T17:00:19.695170Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:19.695203Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-04T17:00:19.695223Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:19.695251Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-04T17:00:19.695270Z node 1 :STATISTICS DEBUG: Skip EvClientConnected >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> YdbYqlClient::TraceId [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |80.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> 
TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |80.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert >> DemoTx::Scenario_5 [GOOD] >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 
Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 
39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : 
xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set ... 
xxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} 
Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 
Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} 
{Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::ForwardEnvColumnGroups >> Viewer::ServerlessWithExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPart::ManyVersions >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes >> TFstClassSrcIdPQTest::TestTableCreated >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> YdbYqlClient::TestExplicitPartitioning >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-04-04T17:00:27.093398Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:00:27.094175Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 
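[Editor's note on the part dumps above] The TRowVersionRangesTest output prints each B-tree index node as a list of child PageId entries with cumulative RowCount/DataSize, interleaved with separator keys such as "> {2, 7}": reading the root node "BTreeIndex{PageId: 29 ...}", child page 23 covers keys below {2, 7} and child page 28 covers the rest. Under that reading, routing a key through one node is a lower-bound scan over the separators. The C++ sketch below illustrates this for the root node from the dump; TBtreeNodeModel and TChildRef are hypothetical names, and the real iterator lives in ydb/core/tablet_flat.

#include <cstdint>
#include <iostream>
#include <utility>
#include <vector>

// One B-tree index node, modeled after the dump above: children carry
// cumulative RowCount, and each separator starts the next child's range.
struct TChildRef {
    uint32_t PageId;
    uint64_t RowCount;  // cumulative rows up to and including this child
};

struct TBtreeNodeModel {
    std::vector<TChildRef> Children;
    // Separators[i] is the first key of Children[i + 1]; keys are
    // (Uint32, Uint32) pairs as in the dump, e.g. "> {2, 7}".
    std::vector<std::pair<uint32_t, uint32_t>> Separators;

    // Pick the child whose range contains `key`: the first child whose
    // following separator is strictly greater than the key.
    uint32_t SelectChild(std::pair<uint32_t, uint32_t> key) const {
        size_t i = 0;
        while (i < Separators.size() && !(key < Separators[i])) {
            ++i;
        }
        return Children[i].PageId;
    }
};

int main() {
    // Root node from the dump: pages 23 and 28, split at separator {2, 7}.
    TBtreeNodeModel root;
    root.Children = {{23, 18}, {28, 40}};
    root.Separators = {{2, 7}};

    std::cout << "key {1, 6} -> page " << root.SelectChild({1, 6}) << "\n";  // 23
    std::cout << "key {4, 4} -> page " << root.SelectChild({4, 4}) << "\n";  // 28
    return 0;
}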
2025-04-04T17:00:27.094501Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:27.094628Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-04T17:00:27.103347Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:00:27.103591Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-04T17:00:27.103621Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:27.103715Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:00:27.103860Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 3, status = OK 2025-04-04T17:00:27.103938Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:27.104016Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-04T17:00:27.104060Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:00:27.104198Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-04T17:00:27.104270Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-04T17:00:27.104286Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:27.104376Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-04-04T17:00:27.104413Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:00:27.104489Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-04T17:00:27.113083Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-04T17:00:27.113265Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:00:27.113443Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-04T17:00:27.113472Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:00:27.113654Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-04T17:00:27.125062Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:00:27.125143Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:00:27.125256Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:00:27.125280Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:00:27.136584Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-04T17:00:27.136686Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-04-04T17:00:27.136735Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:00:27.136831Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:00:27.136865Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-04T17:00:27.136955Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:00:27.136994Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:00:27.137176Z 
node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:00:27.137231Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> YdbYqlClient::TestConstraintViolation [GOOD] >> Describe::Basic [GOOD] >> Describe::DescribePartitionPermissions >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics >> TIterator::Basics [GOOD] >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-04-04T17:00:03.312965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503855607389136:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:03.313013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001377/r3tmp/tmpiETeuv/pdisk_1.dat 2025-04-04T17:00:04.376441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:04.814584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:04.859563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:04.859654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:04.877767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20158, node 1 2025-04-04T17:00:05.255108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:05.255136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:05.255142Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:05.255245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:05.784463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:08.314725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503855607389136:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:08.314800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:08.690963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503877082226667:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:08.691168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:09.292949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:09.519763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503881377194154:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:09.519862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:09.520223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503881377194159:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:09.524471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:09.577950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503881377194161:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:00:09.683389Z node 1 :TX_PROXY ERROR: Actor# [1:7489503881377194238:2812] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:09.997592Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vxrxecravmz679xfa2pxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ3MDY4ZWEtOWJmMTFiMDgtNGZmZTlmOTctYzRhZmMzZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:10.223106Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vxsdf3hfwfjvyp7arssth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ3MDY4ZWEtOWJmMTFiMDgtNGZmZTlmOTctYzRhZmMzZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:12.048298Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503896326916970:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:12.048369Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001377/r3tmp/tmp3hQsPq/pdisk_1.dat 2025-04-04T17:00:12.370588Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:12.393503Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:12.393597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:12.397842Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22961, node 4 2025-04-04T17:00:12.554963Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:12.554987Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:12.554995Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:12.555154Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:12.919749Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:16.206956Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503913506787202:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:16.207072Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:16.208418Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503913506787214:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:16.213132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:00:16.252391Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489503913506787216:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:00:16.317803Z node 4 :TX_PROXY ERROR: Actor# [4:7489503913506787292:2684] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:18.823467Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489503920763152369:2076];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001377/r3tmp/tmpkoObsO/pdisk_1.dat 2025-04-04T17:00:18.964998Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:19.088161Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:19.151483Z node 7 :HIVE WARN: HI ... stId: 7 2025-04-04T17:00:22.932979Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489503937943022630:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:22.933057Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:22.965804Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=Y2VhYTQ5ZTctYzAzYTY4MmYtY2VkYTM1ZmMtOTY1NmNjOTk=, ActorId: [7:7489503937943022561:2335], ActorState: ExecuteState, TraceId: 01jr0vy6028284rmkwmtfzrdhn, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-04-04T17:00:22.965879Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=Y2VhYTQ5ZTctYzAzYTY4MmYtY2VkYTM1ZmMtOTY1NmNjOTk=, ActorId: [7:7489503937943022561:2335], ActorState: ExecuteState, TraceId: 01jr0vy6028284rmkwmtfzrdhn, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-04-04T17:00:22.965906Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=Y2VhYTQ5ZTctYzAzYTY4MmYtY2VkYTM1ZmMtOTY1NmNjOTk=, ActorId: [7:7489503937943022561:2335], ActorState: ExecuteState, TraceId: 01jr0vy6028284rmkwmtfzrdhn, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-04-04T17:00:22.967001Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=Y2VhYTQ5ZTctYzAzYTY4MmYtY2VkYTM1ZmMtOTY1NmNjOTk=, ActorId: [7:7489503937943022561:2335], ActorState: ExecuteState, TraceId: 01jr0vy6028284rmkwmtfzrdhn, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-04-04T17:00:22.970977Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=Y2VhYTQ5ZTctYzAzYTY4MmYtY2VkYTM1ZmMtOTY1NmNjOTk=, ActorId: [7:7489503937943022561:2335], ActorState: ExecuteState, TraceId: 01jr0vy6028284rmkwmtfzrdhn, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-04-04T17:00:22.988984Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489503937943022595:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:00:23.068390Z node 7 :TX_PROXY ERROR: Actor# [7:7489503942237990002:2692] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:25.035236Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489503948998150262:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:25.035965Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001377/r3tmp/tmpeL12qC/pdisk_1.dat 2025-04-04T17:00:25.216399Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:25.269564Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:25.269673Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:25.274466Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13232, node 10 2025-04-04T17:00:25.466948Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:25.466977Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:25.466991Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:25.467149Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14471 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:25.939839Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:28.896384Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503961883053061:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:28.896498Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:28.917069Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:29.115958Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503966178020527:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:29.116094Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:29.116357Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503966178020532:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:29.121374Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:29.210626Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489503966178020534:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:00:29.320100Z node 10 :TX_PROXY ERROR: Actor# [10:7489503966178020607:2805] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:29.958291Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0vyc1t61esp8kktgawcfm2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:30.028331Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489503948998150262:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:30.028431Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:30.067573Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0vyc1t61esp8kktgawcfm2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:30.074859Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0vyc1t61esp8kktgawcfm2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:30.447151Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0vyd0p5p36q1aag2acybrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:30.484138Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0vyd0p5p36q1aag2acybrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:30.605660Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7489503970472988062:2380], TxId: 281474976710665, task: 1. Ctx: { TraceId : 01jr0vyd0p5p36q1aag2acybrv. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:00:30.608538Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7489503970472988063:2381], TxId: 281474976710665, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=. TraceId : 01jr0vyd0p5p36q1aag2acybrv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [10:7489503970472988059:2335], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:00:30.609203Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=, ActorId: [10:7489503961883053058:2335], ActorState: ExecuteState, TraceId: 01jr0vyd0p5p36q1aag2acybrv, Create QueryResponse for error on request, msg: 2025-04-04T17:00:30.611602Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0vyd0p5p36q1aag2acybrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZWYzMTgxN2ItZjRmMzJmMjMtNjUyZTVkOTAtYmI1ZTQzMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |81.0%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] Test command err: 2025-04-04T17:00:07.055922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503871432395559:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:07.055969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00136d/r3tmp/tmpx8zwLA/pdisk_1.dat 2025-04-04T17:00:07.867486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:07.867553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:07.871398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12510, node 1 2025-04-04T17:00:08.072339Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:00:08.072361Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:00:08.074357Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:08.110511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:08.413888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:08.413925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:08.413937Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:08.414064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3573 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:09.302375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:12.066378Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503871432395559:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:12.066477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:12.132927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503892907233114:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:12.133050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:12.449953Z node 1 :TX_PROXY ERROR: Actor# [1:7489503892907233151:2652] txid# 281474976710658, issues: { message: "Column Key has wrong key type Json" severity: 1 } 2025-04-04T17:00:12.482856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503892907233161:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:12.482976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:12.503445Z node 1 :TX_PROXY ERROR: Actor# [1:7489503892907233168:2662] txid# 281474976710659, issues: { message: "Column Key has wrong key type Yson" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00136d/r3tmp/tmpdvtouk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26090, node 4 TClient is connected to server localhost:7207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00136d/r3tmp/tmpZ2yZYq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13968, node 7 TClient is connected to server localhost:5150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:00:27.422839Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489503958048609198:2140];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00136d/r3tmp/tmpcYunmL/pdisk_1.dat 2025-04-04T17:00:27.569766Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:27.672336Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:27.717114Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:27.717220Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:27.724253Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11404, node 10 2025-04-04T17:00:27.840436Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:27.840459Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:27.840470Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:27.840617Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17861 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:28.187683Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:31.503577Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503975228479356:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:31.503655Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503975228479347:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:31.503795Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:31.508664Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:00:31.545853Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489503975228479361:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:00:31.635304Z node 10 :TX_PROXY ERROR: Actor# [10:7489503975228479436:2682] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TIterator::Basics [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:45.953864Z 00000.008 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.010 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.015 II| TABLET_FLATBOOT: Leader{1:2:-} booting Deps{0:0 entries 0} {nil} 00000.015 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} loading { Alter 0, Turns 0, Loans 0, GCExt 0 } 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 2, has 4 jobs, Boot{ 5 que, 2 refs } 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 3, has 1 jobs, Boot{ 2 que, 2 refs } 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} redo log has 0 records, last before 0:0 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 4, has 1 jobs, Boot{ 2 que, 2 refs } 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} result: db change {1 -> 1} snap on 0 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 5, has 0 jobs, Boot{ 1 que, 2 refs } 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} booting completed, took 0.000s 00000.080 II| TABLET_FLATBOOT: Leader{1:3:-} booting Deps{2:1 entries 252} {nil} 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} snap in deps on 2:1, TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} process snap gc entry, + [ ], - [ ] 00000.080 DD| TABLET_FLATBOOT: Leader{1:3:-} Loading TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.081 II| TABLET_FLATBOOT: Leader{1:3:-} snap on 2:1 change 1, 25b, ABI 28 of [1, 28], GC{ +0 -0 } 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process gc snapshot, + [ ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:2:1:8192:209:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:3:1:24576:74:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:4:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:5:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:6:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:7:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:8:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:9:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:10:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, 
+ [ [1:2:11:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:12:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:13:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:14:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:15:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:16:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:17:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:18:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:19:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:20:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:21:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:22:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:23:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:24:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:25:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:26:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:27:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:28:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:29:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:30:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:31:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:32:1:24576:79:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:33:1:24576:83:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:34:1:24576:82:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:35:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:36:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:37:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:38:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:39:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:40:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:41:1:24576:81:0] ], - [ ] 00000.081 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:42:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:43:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:44:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:45:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} 
process log gc entry, + [ [1:2:46:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:47:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:48:1:24576:79:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:49:1:24576:79:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:50:1:24576:82:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:51:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:52:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:53:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:54:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:55:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:56:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:57:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:58:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:59:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:60:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:61:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:62:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:63:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:64:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:65:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:66:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:67:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:68:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:69:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:70:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:71:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:72:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:73:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:74:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:75:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:76:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:77:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:78:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:79:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:80:1:24576:81:0] ], - [ ] 00000.082 DD| 
TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:81:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:82:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:83:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:84:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:85:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:86:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:87:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:88:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:89:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:90:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:91:1:24576:81:0] ], - [ ] 00000.082 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:92:1:24576:81:0] ], - [ ] 00000.083 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:93:1:24576:81:0] ], - [ ] 00000.083 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:94:1:24576:81:0] ], - [ ] 00000.083 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:95:1:24576:81:0] ], - [ ] 00000.083 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:96:1:24576:81:0] ], - [ ] 00000.083 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:97:1:24576:81:0] ], - [ ] 00000.083 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:98:1:24576:81:0] ], - [ ] 00000.083 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:99:1:24576:81:0] ], - ... 
FAKE_ENV: DS.1 gone, left {143227b, 7}, put {157466b, 150} 00000.086 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.086 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.087 II| FAKE_ENV: All BS storage groups are stopped 00000.087 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.087 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:53.135113Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.011 II| TABLET_SAUSAGECACHE: Limit memory consumer with 0B 00000.078 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.079 NN| TABLET_SAUSAGECACHE: Poison cache serviced 8 reqs hit {0 0b} miss {32 149036b} 00000.079 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.079 II| FAKE_ENV: DS.1 gone, left {143227b, 7}, put {157466b, 150} 00000.079 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.079 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.079 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14313b, 148} 00000.079 II| FAKE_ENV: All BS storage groups are stopped 00000.079 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.080 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:53.229617Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.012 II| TABLET_SAUSAGECACHE: Limit memory consumer with 0B 00000.076 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.077 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {0 0b} miss {33 149660b} 00000.077 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.077 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14313b, 148} 00000.077 II| FAKE_ENV: DS.1 gone, left {143220b, 7}, put {157459b, 150} 00000.077 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.077 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.077 II| FAKE_ENV: All BS storage groups are stopped 00000.078 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.078 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:53.313730Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.011 II| TABLET_SAUSAGECACHE: Limit memory consumer with 0B 00000.079 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.080 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {6 536b} miss {28 140276b} 00000.080 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.080 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14470b, 149} 
00000.080 II| FAKE_ENV: DS.1 gone, left {143272b, 8}, put {157572b, 152} 00000.080 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.080 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.080 II| FAKE_ENV: All BS storage groups are stopped 00000.081 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.081 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:53.404663Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.012 II| TABLET_SAUSAGECACHE: Limit memory consumer with 0B 00000.084 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.084 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {32 141412b} miss {44 278804b} 00000.084 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.084 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14482b, 150} 00000.085 II| FAKE_ENV: DS.1 gone, left {142818b, 10}, put {156975b, 152} 00000.085 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.085 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.085 II| FAKE_ENV: All BS storage groups are stopped 00000.085 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.085 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:53.497180Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.012 II| TABLET_SAUSAGECACHE: Limit memory consumer with 0B 00000.086 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.087 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {8 1608b} miss {44 278804b} 00000.087 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.087 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.087 II| FAKE_ENV: DS.1 gone, left {142718b, 8}, put {156875b, 150} 00000.087 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.087 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.087 II| FAKE_ENV: All BS storage groups are stopped 00000.087 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.087 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:53.591295Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.046 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.047 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {6 691b} miss {0 0b} 00000.047 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.047 II| FAKE_ENV: DS.0 gone, left {1356b, 12}, put {1376b, 13} 00000.047 II| FAKE_ENV: DS.1 gone, left {6714b, 
23}, put {6714b, 23} 00000.047 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.047 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.047 II| FAKE_ENV: All BS storage groups are stopped 00000.047 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.047 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:53.645299Z 00000.015 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.016 II| FAKE_ENV: Starting storage for BS group 0 00000.016 II| FAKE_ENV: Starting storage for BS group 1 00000.017 II| FAKE_ENV: Starting storage for BS group 2 00000.017 II| FAKE_ENV: Starting storage for BS group 3 00000.520 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.520 NN| TABLET_SAUSAGECACHE: Poison cache serviced 10 reqs hit {860 5535466b} miss {0 0b} 00000.520 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.520 II| FAKE_ENV: DS.0 gone, left {1201b, 13}, put {1221b, 14} 00000.520 II| FAKE_ENV: DS.1 gone, left {6701898b, 17}, put {6701898b, 17} 00000.535 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.535 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.535 II| FAKE_ENV: All BS storage groups are stopped 00000.535 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.535 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:54.187663Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00021.113 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00021.113 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4109 reqs hit {2091 2366441b} miss {6144 6340608b} 00021.114 II| FAKE_ENV: Shut order, stopping 4 BS groups 00021.114 II| FAKE_ENV: DS.0 gone, left {1761b, 14}, put {1781b, 15} 00021.114 II| FAKE_ENV: DS.1 gone, left {6925275b, 27}, put {6925275b, 27} 00021.117 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00021.117 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00021.117 II| FAKE_ENV: All BS storage groups are stopped 00021.117 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00021.118 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:00:15.330937Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00014.824 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00014.824 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4106 reqs hit {43 252905b} miss {4096 4227072b} 00014.825 II| FAKE_ENV: Shut order, stopping 4 BS groups 00014.825 II| FAKE_ENV: DS.0 gone, left {44744b, 2}, put {164747b, 16} 00014.825 II| FAKE_ENV: DS.1 gone, left {2762170b, 2068}, put {2762170b, 2068} 00014.834 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00014.834 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 
00014.834 II| FAKE_ENV: All BS storage groups are stopped 00014.835 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00014.835 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:00:30.187070Z 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:00:30.220945Z 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:00:30.294013Z 00000.013 II| FAKE_ENV: Starting storage for BS group 0 00000.013 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting storage for BS group 2 00000.014 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:00:30.356092Z 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 >> TVersions::WreckHeadReverse [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> TVersions::Wreck2 >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> YdbYqlClient::TestReadTableSnapshot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2025-04-04T16:59:58.909880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503834466785767:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:58.914743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00137d/r3tmp/tmp1fjBIf/pdisk_1.dat 2025-04-04T16:59:59.954381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:00.172703Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24668, node 1 2025-04-04T17:00:00.574682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:00.574835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:00.628749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:00.676454Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:00.676482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:00.676489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:00.676636Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:01.184572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:01.369292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:01.511921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:00:01.697174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:00:01.712273Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:00:01.724319Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-04T17:00:07.812552Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503874745085505:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:07.812621Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00137d/r3tmp/tmpTfOB03/pdisk_1.dat 2025-04-04T17:00:08.225109Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:08.324858Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:08.324998Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:08.332168Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13878, node 4 2025-04-04T17:00:08.756894Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:08.756925Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-04T17:00:08.756932Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:08.757034Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:09.376824Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:13.931309Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489503897296030784:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:13.932581Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00137d/r3tmp/tmpHLT9fm/pdisk_1.dat 2025-04-04T17:00:14.224715Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19605, node 7 2025-04-04T17:00:14.398475Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:14.398570Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:14.442478Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:14.740079Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:14.740102Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:14.740110Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:14.740959Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17574 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:15.236031Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:15.367663Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:21.510732Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489503931850574592:2134];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00137d/r3tmp/tmp9SSlp4/pdisk_1.dat 2025-04-04T17:00:21.779024Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:22.011548Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:22.063573Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:22.063701Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:22.067955Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26948, node 10 2025-04-04T17:00:22.409003Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:22.409022Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:22.409031Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:22.409170Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4884 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:22.810326Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:22.938455Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:27.891192Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489503958252469520:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:27.891249Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00137d/r3tmp/tmpJtuHIs/pdisk_1.dat 2025-04-04T17:00:28.115641Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6711, node 13 2025-04-04T17:00:28.234136Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:28.236552Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:28.294755Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:28.376671Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:28.376700Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:28.376712Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:28.377235Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8818 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:28.792421Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:28.939839Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> Viewer::TenantInfo5kkTablets [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2025-04-04T17:00:06.027110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503868320479702:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:06.034695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001372/r3tmp/tmp19qQgh/pdisk_1.dat 2025-04-04T17:00:06.829146Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23830, node 1 2025-04-04T17:00:06.861832Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:00:06.861848Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:00:07.008382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:07.008489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:07.137968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:07.206973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:07.206998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:07.207007Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:07.207177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1333 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:07.658948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:07.740681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:00:10.867308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503885500349953:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:10.867430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:11.034371Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503868320479702:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:11.034436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:11.170427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:11.373924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503889795317440:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:11.373986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:11.374208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503889795317445:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:11.379786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:11.418569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503889795317447:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:00:11.499865Z node 1 :TX_PROXY ERROR: Actor# [1:7489503889795317528:2834] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:11.625028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0vxtqd92bfa15nn0hbxrd5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGEwZWU5ZjctNzcwODRkOGUtYmFjYWYyMzItMjhhMGRkZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:11.797879Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0vxv07epj1qt2dak9z2pd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGEwZWU5ZjctNzcwODRkOGUtYmFjYWYyMzItMjhhMGRkZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:11.823235Z node 1 :TX_PROXY ERROR: [ReadTable [1:7489503889795317591:2371] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-04-04T17:00:11.833482Z node 1 :TX_PROXY ERROR: [ReadTable [1:7489503889795317594:2372] TxId# 281474976710664] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-04-04T17:00:13.835957Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503900387356646:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:13.836051Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001372/r3tmp/tmp4bpcfG/pdisk_1.dat 2025-04-04T17:00:14.283522Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:14.356085Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:14.356172Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:14.363269Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61304, node 4 2025-04-04T17:00:14.642191Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:14.642215Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:14.642222Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:14.642374Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:15.047321Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:18.483680Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503921862194208:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:18.483771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:18.524755Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:18.707657Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503921862194383:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:18.707820Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503921862194388:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:18.707828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:18.712688Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:18.745835Z node 4 :KQP_WORKLOAD_SERVI ... hecking } 2025-04-04T17:00:26.203843Z node 7 :TX_PROXY ERROR: Actor# [7:7489503956114615057:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:26.282151Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vy93zf09weg60s2bf9qsj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjUxODFiMGEtN2EyYjdlODctYjk2ZTdmOTktMjRjZDEwZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:26.358330Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489503934639777285:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:26.358400Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:26.455315Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vy9aa2s6n7zad5y6jsf55, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjUxODFiMGEtN2EyYjdlODctYjk2ZTdmOTktMjRjZDEwZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:28.559136Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489503963963940818:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:28.559201Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001372/r3tmp/tmplhWbAK/pdisk_1.dat 2025-04-04T17:00:28.888598Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:28.955055Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:28.955184Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:28.959739Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21314, node 10 2025-04-04T17:00:29.227134Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:29.227167Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:29.227179Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:29.227369Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10758 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:29.691279Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:29.715482Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:00:29.798626Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jr0vycq22et6cd4ghhtp0075, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:55166, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.990667s 2025-04-04T17:00:29.814567Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr0vycqn9x2mwc9ywv7g1fba, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:55172, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:00:33.268492Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ExecuteSchemeQueryRequest, traceId# 01jr0vyg3m29k4k8htvqj487xb, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:32778, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:00:33.270695Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503985438778367:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:33.270814Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:33.292247Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:33.298929Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:00:33.299081Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:00:33.299095Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:00:33.299138Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:00:33.414583Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:00:33.414645Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:00:33.414761Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:00:33.414772Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:00:33.466113Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0vyg9qd4k322s7jcsejx5j, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:32786, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:00:33.506141Z node 10 :READ_TABLE_API DEBUG: [10:7489503985438778532:2347] Adding quota request to queue ShardId: 0, TxId: 281474976715659 2025-04-04T17:00:33.506197Z node 10 :READ_TABLE_API DEBUG: [10:7489503985438778532:2347] Assign stream quota to Shard 0, Quota 5, TxId 281474976715659 Reserved: 5 of 25, Queued: 0 2025-04-04T17:00:33.509451Z node 10 :READ_TABLE_API DEBUG: [10:7489503985438778532:2347] got stream part, size: 35, RU required: 128 rate limiter absent 2025-04-04T17:00:33.509895Z node 10 :READ_TABLE_API DEBUG: [10:7489503985438778532:2347] Starting inactivity timer for 600.000000s with tag 3 2025-04-04T17:00:33.509948Z node 10 :READ_TABLE_API NOTICE: [10:7489503985438778532:2347] Finish grpc stream, status: 400000 2025-04-04T17:00:33.516338Z node 10 :GRPC_SERVER DEBUG: [0x51a00007aa80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.516658Z node 10 :GRPC_SERVER DEBUG: [0x51a000123c80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.516903Z node 10 :GRPC_SERVER DEBUG: [0x51a000124880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.516923Z node 10 :GRPC_SERVER DEBUG: [0x51a00007b080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517100Z node 10 :GRPC_SERVER DEBUG: [0x51a00007a480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517224Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ea080] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517302Z node 10 :GRPC_SERVER DEBUG: [0x51a000124e80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517444Z node 10 :GRPC_SERVER DEBUG: [0x51a000124280] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517540Z node 10 :GRPC_SERVER DEBUG: [0x51a000123680] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517592Z node 10 :GRPC_SERVER DEBUG: [0x51a000153c80] received 
request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517771Z node 10 :GRPC_SERVER DEBUG: [0x51a000128a80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517786Z node 10 :GRPC_SERVER DEBUG: [0x51a000127e80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.517963Z node 10 :GRPC_SERVER DEBUG: [0x51b0000b1a80] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518025Z node 10 :GRPC_SERVER DEBUG: [0x51a0000eac80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518176Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ee880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518235Z node 10 :GRPC_SERVER DEBUG: [0x51a0000edc80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518423Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e8880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518439Z node 10 :GRPC_SERVER DEBUG: [0x51a000126680] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518653Z node 10 :GRPC_SERVER DEBUG: [0x51a000125a80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518679Z node 10 :GRPC_SERVER DEBUG: [0x51a000122480] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518868Z node 10 :GRPC_SERVER DEBUG: [0x51a0000eb880] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.518882Z node 10 :GRPC_SERVER DEBUG: [0x51a000127280] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-04T17:00:33.560660Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489503963963940818:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:33.560760Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> KqpPg::LongDomainName [GOOD] >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-04-04T16:59:57.092806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503831833398184:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:57.092854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017b9/r3tmp/tmpC8LbkQ/pdisk_1.dat 2025-04-04T16:59:58.107068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:58.345419Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:58.373956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:58.374072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:58.389242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16524, node 1 2025-04-04T16:59:58.682312Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:58.682332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:58.682339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:58.682940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4753 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:59:59.485499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:02.099799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503831833398184:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:02.099884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:03.366939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503857603202854:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:03.367065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:03.730592Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.258607s 2025-04-04T17:00:03.730627Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.258673s 2025-04-04T17:00:03.768441Z node 1 :TX_PROXY DEBUG: actor# [1:7489503831833398218:2140] Handle TEvProposeTransaction 2025-04-04T17:00:03.768479Z node 1 :TX_PROXY DEBUG: actor# [1:7489503831833398218:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:00:03.768537Z node 1 :TX_PROXY DEBUG: actor# [1:7489503831833398218:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489503857603202892:2649] 2025-04-04T17:00:03.913309Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-04T17:00:03.913360Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:00:03.913694Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:00:03.913758Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:00:03.913877Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:00:03.913998Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:00:03.914033Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:00:03.914149Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:00:03.915602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:03.932030Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-04T17:00:03.932085Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503857603202892:2649] txid# 281474976710658 SEND to# [1:7489503857603202891:2347] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-04T17:00:04.400878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503861898170343:2357], DatabaseId: 
/Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:04.400948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:04.401156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503861898170348:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:04.401547Z node 1 :TX_PROXY DEBUG: actor# [1:7489503831833398218:2140] Handle TEvProposeTransaction 2025-04-04T17:00:04.401571Z node 1 :TX_PROXY DEBUG: actor# [1:7489503831833398218:2140] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:00:04.401620Z node 1 :TX_PROXY DEBUG: actor# [1:7489503831833398218:2140] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489503861898170351:2773] 2025-04-04T17:00:04.406640Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-04-04T17:00:04.406724Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:00:04.406745Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-04-04T17:00:04.409148Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:00:04.409253Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:00:04.409504Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:00:04.409632Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:00:04.409672Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-04-04T17:00:04.409800Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 HANDLE EvClientConnected 2025-04-04T17:00:04.411167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:04.417140Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 
281474976710659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:00:04.417197Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503861898170351:2773] txid# 281474976710659 SEND to# [1 ... -amz-date, Signature=dff0222f0dc0a06f5b4934a9d49d393059d160b3a5de55921afe191a83d5a3cc content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250404T170034Z 2025-04-04T17:00:34.791516Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7489503988415720485:2198], result# No response body. REQUEST: HEAD /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:19470 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 05A72D0A-77DA-44A5-92A0-3B64F9660918 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250404/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=b32db2d6ddbdbfaee53e80bacf68518b73f62255eacfa39393234258e9b1346e content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250404T170034Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-04-04T17:00:34.795692Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7489503988415720485:2198], result# HeadObjectResult { ETag: 54623f53d68141118383b3390c4965d5 ContentLength: 165 } REQUEST: GET /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:19470 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 89FC4C86-6D4E-4134-B389-C5FC08777C45 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250404/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=75736ec30b0df4413fcf8e289fa4b228e661b4236bc037295a02669e6ad0bf62 content-type: application/xml range: bytes=0-164 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250404T170034Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-04-04T17:00:34.801761Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvGetObjectResponse: self# [10:7489503988415720485:2198], result# 54623f53d68141118383b3390c4965d5 REQUEST: HEAD /test_bucket/view/permissions.pb HTTP/1.1 HEADERS: Host: localhost:19470 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4864AFB8-5B02-4B25-B09D-12EB6DCC8435 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250404/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=c59d90f11238a0be149add850e566f0aa36d92835447b62734535a02c4357cb9 content-type: application/xml 
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250404T170034Z 2025-04-04T17:00:34.872673Z node 10 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [10:7489503988415720485:2198], result# No response body. 2025-04-04T17:00:34.920732Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503988415720511:2377] [0] Resolve database: name# /Root 2025-04-04T17:00:34.923925Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503988415720511:2377] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:00:34.924022Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503988415720511:2377] [0] Send request: schemeShardId# 72057594046644480 2025-04-04T17:00:34.930072Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503988415720511:2377] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_PREPARING ImportFromS3Settings { endpoint: "localhost:19470" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1743786034 } } REQUEST: GET /test_bucket?prefix=view HTTP/1.1 HEADERS: Host: localhost:19470 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E2912B97-C801-4C2B-B538-8CEA59609CBC amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250404/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=0f573aec98ad495cecc087e7bad5d4d1266c7c9725b05cef17fa1c2384a5115f content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250404T170034Z S3_MOCK::HttpServeList: view 2025-04-04T17:00:34.946109Z node 10 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [10:7489503988415720485:2198], result# ListObjectsResult { } 2025-04-04T17:00:34.946204Z node 10 :IMPORT INFO: Reply: self# [10:7489503988415720485:2198], success# 1, error# 2025-04-04T17:00:34.946394Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:34.946415Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976715664, itemIdx# 0, success# 1 2025-04-04T17:00:35.070215Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:35.098501Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor HandleCompileResponse, self: [10:7489503992710687814:2844], status: SUCCESS 2025-04-04T17:00:35.098591Z node 10 :IMPORT INFO: TSchemeQueryExecutor Reply, self: [10:7489503992710687814:2844], status: SUCCESS 
2025-04-04T17:00:35.098893Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor Reply, self: [10:7489503992710687814:2844], status: SUCCESS, prepared query: "WorkingDir: \"/Root\" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: false CreateView { Name: \"view\" QueryText: \"SELECT 1 AS Key UNION SELECT 2 AS Key UNION SELECT 3 AS Key\" CapturedContext { PathPrefix: \"/Root\" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: \"AnsiInForEmptyOrNullableItemsCollections\" Pragmas: \"AnsiLike\" Pragmas: \"FlexibleTypes\" Pragmas: \"AnsiCurrentRow\" Pragmas: \"WarnOnAnsiAliasShadowing\" Pragmas: \"AnsiOptionalAs\" Pragmas: \"EmitAggApply\" } }" 2025-04-04T17:00:35.099140Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:35.099168Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeQueryPreparation: id# 281474976715664, itemIdx# 0, status# SUCCESS, error# 2025-04-04T17:00:35.099351Z node 10 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-04T17:00:35.103479Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:35.103715Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:35.103739Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710758, id# 281474976715664 2025-04-04T17:00:35.103836Z node 10 :IMPORT INFO: TImport::TTxProgress: ExecutePreparedQuery: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710758 2025-04-04T17:00:35.103960Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:35.111193Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:35.111242Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710758, status# StatusAccepted 2025-04-04T17:00:35.111465Z node 10 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 8] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976710758 Issue: '' } 2025-04-04T17:00:35.119427Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:35.134957Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:00:35.134996Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-04-04T17:00:35.137913Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:00:35.353109Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503992710687856:2379] [0] Resolve database: name# /Root 2025-04-04T17:00:35.358527Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503992710687856:2379] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:00:35.358580Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503992710687856:2379] [0] Send request: schemeShardId# 72057594046644480 2025-04-04T17:00:35.364845Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7489503992710687856:2379] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:19470" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1743786034 } EndTime { seconds: 1743786035 } } 2025-04-04T17:00:35.533652Z node 10 :TX_PROXY DEBUG: actor# [10:7489503962645915337:2112] Handle TEvExecuteKqpTransaction 2025-04-04T17:00:35.533691Z node 10 :TX_PROXY DEBUG: actor# [10:7489503962645915337:2112] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-04T17:00:35.534071Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0vyj5j323820fc6cr7827f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmJkOTc0YTQtYzQwNzVhNDctNWU2NGMxNTktZTMxMTg2YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TProxyActorTest::TestDisconnectWhileAttaching >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 5869, MsgBus: 29908 2025-04-04T16:56:57.042954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503059080656695:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:57.051952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000be3/r3tmp/tmphoZLfz/pdisk_1.dat 2025-04-04T16:56:57.696535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:57.721327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:57.721422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:57.723170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5869, node 1 2025-04-04T16:56:57.916009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:57.916028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:57.916034Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:57.916149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29908 TClient is connected to server localhost:29908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:58.788941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:58.819178Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:01.137966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503076260526480:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.137966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503076260526488:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.138040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:01.141879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T16:57:01.155963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503076260526494:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T16:57:01.223327Z node 1 :TX_PROXY ERROR: Actor# [1:7489503076260526545:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15603, MsgBus: 23062 2025-04-04T16:57:02.544214Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503080573071694:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000be3/r3tmp/tmpR18UVz/pdisk_1.dat 2025-04-04T16:57:02.592677Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:57:02.717298Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:02.804581Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:02.804678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:02.806772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15603, node 2 2025-04-04T16:57:03.050826Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:03.050850Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:03.050858Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:03.050975Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23062 TClient is connected to server localhost:23062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:03.741887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:03.763305Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:57:07.308745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503102047908660:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:07.308854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503102047908666:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:07.308918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:07.319587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T16:57:07.343533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503102047908674:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T16:57:07.413869Z node 2 :TX_PROXY ERROR: Actor# [2:7489503102047908726:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:07.469607Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503080573071694:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:07.469691Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29409, MsgBus: 15141 2025-04-04T16:57:08.888483Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503104043218925:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:08.888537Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000be3/r3tmp/tmpUoudjZ/pdisk_1.dat 2025-04-04T16:57:09.242317Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:09.260802Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:09.266446Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:09.275262Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29409, node 3 2025-04-04T16:57:09.482965Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:09.482995Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:09.483005Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:09.483152Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15141 TClient is connected to server localhost:15141 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 720575940466444 ... entPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:18.285449Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:18.296927Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:00:21.389045Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489503912841380991:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:21.389157Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:24.286449Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503947201120027:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:24.286627Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:24.287073Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489503947201120054:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:24.293677Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:00:24.321700Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489503947201120056:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:00:24.413572Z node 10 :TX_PROXY ERROR: Actor# [10:7489503947201120109:2350] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:24.468918Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"0","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"0","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26992, MsgBus: 23577 
2025-04-04T17:00:29.407857Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489503968780611899:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:29.407987Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000be3/r3tmp/tmpKfRAoa/pdisk_1.dat 2025-04-04T17:00:29.839010Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:29.845498Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:29.845657Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:29.850377Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26992, node 11 2025-04-04T17:00:30.015175Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:30.015212Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:30.015230Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:30.015479Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23577 TClient is connected to server localhost:23577 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_D... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-04-04T17:00:31.201534Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:31.217472Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:00:34.410574Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489503968780611899:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:34.410725Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:36.660652Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503998845383648:2336], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:36.660653Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489503998845383659:2339], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:36.660796Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:36.666382Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:00:36.686572Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7489503998845383662:2340], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:00:36.764017Z node 11 :TX_PROXY ERROR: Actor# [11:7489503998845383713:2348] txid# 281474976710659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:36.802745Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> TReplicationTests::CreateSequential >> TReplicationTests::Create >> YdbYqlClient::CreateTableWithMESettings [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001373/r3tmp/tmpWTGRD2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30694, node 1 TClient is connected to server localhost:16755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:00:11.901569Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503892354159483:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:11.901634Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001373/r3tmp/tmprTv5gm/pdisk_1.dat 2025-04-04T17:00:12.097996Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:12.137410Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:12.137524Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:12.144845Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4674, node 4 2025-04-04T17:00:12.364194Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:12.364219Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:12.364228Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:12.364338Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:12.635721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:15.770992Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503909534029720:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:15.771117Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:16.216387Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:16.462412Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503913828997203:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:16.462532Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:16.462878Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503913828997208:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:16.466516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:16.514262Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489503913828997210:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:00:16.607891Z node 4 :TX_PROXY ERROR: Actor# [4:7489503913828997291:2810] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:16.761451Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vxzpbdxf5egtaath26m6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NWM3OTBlZWMtZDRlMTIxZjgtZWJmNjFkYjItNTIxMjg0N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:16.822743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:00:16.902738Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489503892354159483:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:16.902790Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:16.931245Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:00:19.470763Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489503927038379748:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:19.470837Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001373/r3tmp/tmpzjSPAM/pdisk_1.dat 2025-04-04T17:00:19.736936Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1148, node 7 2025-04-04T17:00:19.887743Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:19.887838Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:20.048978Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:20.099323Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:20.099352Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:20.099362Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:20.099531Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16494 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:20.517900Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:20.539228Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:00:23.925461Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:25.989538Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489503951385268687:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:25.995892Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001373/r3tmp/tmp1iZsMt/pdisk_1.dat 2025-04-04T17:00:26.186471Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5575, node 10 2025-04-04T17:00:26.315276Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:26.315431Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:26.332123Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:26.332153Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:26.332162Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:26.332321Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:00:26.372607Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30164 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:26.948019Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:30.981475Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489503951385268687:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:30.981546Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:31.303869Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:33.668869Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489503985725733403:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:33.669190Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001373/r3tmp/tmpzqnrKn/pdisk_1.dat 2025-04-04T17:00:33.947275Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:33.996492Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:33.996611Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:34.005509Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6684, node 13 2025-04-04T17:00:34.315125Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:34.315159Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:34.315166Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:34.315332Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20364 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:34.756146Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:38.274098Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2025-04-04T17:00:07.926789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503875516794518:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:07.926841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001367/r3tmp/tmpr4BTny/pdisk_1.dat 2025-04-04T17:00:08.855441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:08.905267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:08.905343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:08.938576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18142, node 1 2025-04-04T17:00:09.500626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:09.500649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:09.500656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:09.500802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:10.034545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:12.681050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:12.928968Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503875516794518:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:12.929082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:15.041816Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503904062932604:2229];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:15.041959Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001367/r3tmp/tmp1kqr1j/pdisk_1.dat 2025-04-04T17:00:15.304373Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:15.339967Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:15.340050Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:15.346727Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32076, node 4 2025-04-04T17:00:15.694866Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:15.694886Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:15.694892Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:15.695035Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:16.056064Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:19.286588Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:21.595929Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489503934827571089:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:21.596042Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001367/r3tmp/tmpFmZlq6/pdisk_1.dat 2025-04-04T17:00:21.860548Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:21.923388Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:21.923476Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:21.927475Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61604, node 7 2025-04-04T17:00:22.102949Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:22.102974Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:22.102980Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:22.103108Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2097 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:22.378378Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:25.115360Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:25.334479Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:00:25.429191Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:00:25.514878Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:00:27.624880Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489503960742216051:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:27.624947Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001367/r3tmp/tmpRYLgx4/pdisk_1.dat 2025-04-04T17:00:27.821031Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:27.870941Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:27.871042Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:27.874678Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3707, node 10 2025-04-04T17:00:28.027172Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:28.027198Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:28.027216Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:28.027408Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:13694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:28.435946Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:32.200001Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:32.352454Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:00:32.364278Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-04T17:00:32.364321Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-04T17:00:34.348076Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489503987360326974:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:34.348184Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001367/r3tmp/tmpJnCvjK/pdisk_1.dat 2025-04-04T17:00:34.815872Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:34.864633Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:34.864739Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:34.871650Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25419, node 13 2025-04-04T17:00:35.024771Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:35.024795Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:35.024804Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:35.024957Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24833 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:35.347456Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:35.375078Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:00:38.405707Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] >> TGRpcYdbTest::CreateTableBadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR ... waiting for blocked registrations (done) 2025-04-04T17:00:41.818520Z node 1 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 2 ... 
unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> TPQCachingProxyTest::MultipleSessions >> TPQCachingProxyTest::MultipleSessions [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyLevel >> TConsoleConfigTests::TestModifyConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-04-04T17:00:44.397449Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:00:44.397556Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:00:44.419827Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:00:44.419957Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-04T17:00:44.420079Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-04T17:00:44.420168Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2025-04-04T17:00:44.420225Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-04T17:00:44.420316Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-04-04T17:00:44.420397Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2025-04-04T17:00:44.420459Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2025-04-04T17:00:44.420502Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2, Generation: 2 >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyType >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> TGroupMapperTest::MakeDisksUnusable >> TGroupMapperTest::Mirror3dc >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TGroupMapperTest::Mirror3dc [GOOD] >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::Alter >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest 
>> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: 2025-04-04T16:59:07.171620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:07.172093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:07.172177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001318/r3tmp/tmpWx9CpW/pdisk_1.dat 2025-04-04T16:59:07.516336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.560353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:07.610439Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmMwODJjZTgtZmE4MzE1MzgtYzQyNjdiOTEtY2MwNDk0MmE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmMwODJjZTgtZmE4MzE1MzgtYzQyNjdiOTEtY2MwNDk0MmE= 2025-04-04T16:59:07.611148Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmMwODJjZTgtZmE4MzE1MzgtYzQyNjdiOTEtY2MwNDk0MmE=, ActorId: [1:618:2539], ActorState: unknown state, session actor bootstrapped 2025-04-04T16:59:07.611688Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmMwODJjZTgtZmE4MzE1MzgtYzQyNjdiOTEtY2MwNDk0MmE=, ActorId: [1:618:2539], ActorState: ReadyState, TraceId: 01jr0vvwevcedfxvqng9kpyaa7, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-04-04T16:59:07.830065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:622:2542], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:07.830267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:07.864830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:07.864970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:07.868037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.887761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:07.926057Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:683:2575], Recipient [1:688:2578]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:59:07.927182Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:683:2575], Recipient [1:688:2578]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:59:07.927651Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2578] 2025-04-04T16:59:07.927916Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:07.976639Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:683:2575], Recipient [1:688:2578]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:59:07.977439Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:07.977553Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:59:07.979266Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:59:07.979378Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:59:07.979429Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:59:07.979785Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:59:07.979920Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:59:07.980016Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:702:2578] in generation 1 2025-04-04T16:59:07.980426Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:59:08.041097Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:59:08.041309Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:59:08.041425Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:704:2587] 2025-04-04T16:59:08.041459Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:59:08.041495Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:59:08.041532Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:59:08.041736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:688:2578], Recipient [1:688:2578]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T16:59:08.041804Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:59:08.042024Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 
72075186224037888 2025-04-04T16:59:08.042135Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:59:08.042266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:59:08.042332Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:08.042402Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:59:08.042450Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:59:08.042486Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:59:08.042522Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:59:08.042570Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:59:08.075399Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:707:2589], Recipient [1:688:2578]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.075465Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.075525Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:679:2573], serverId# [1:707:2589], sessionId# [0:0:0] 2025-04-04T16:59:08.075655Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:707:2589] 2025-04-04T16:59:08.075700Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:59:08.075825Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:59:08.076045Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:59:08.076097Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:59:08.076203Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:59:08.076262Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:59:08.076314Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:59:08.076356Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T16:59:08.076408Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:08.076688Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:59:08.076736Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:59:08.076776Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:59:08.076806Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:59:08.076875Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:59:08.076901Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:59:08.076930Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:59:08.076976Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:59:08.077006Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:59:08.077831Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:59:08.077926Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:08.077959Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:59:08.078001Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T16:59:08.078068Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T16:59:08.080363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:708:2590], Recipient [1:688:2578]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T16:59:08.080419Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:59:08.124456Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:723:2599], Recipient [1:688:2578]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.124526Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.124564Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:721:2597], serverId# [1:723:2599], sessionId# [0:0:0] 2025-04-04T16:59:08.124974Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:688:2578]: {TEvPlanStep step# 300 MediatorId# 72057594046382081 Tabl ... node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-04T17:00:44.799712Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:00:44.799797Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-04T17:00:44.799891Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-04-04T17:00:44.800025Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:00:44.800991Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=OTU3NjZmOTMtYzdmNDkxYjYtYmE2MWY3ZTktYTg1MjYzYTM=, ActorId: [13:841:2686], ActorState: ExecuteState, TraceId: 01jr0vyv6f6cfb2s3b2ftevv5b, Create QueryResponse for error on request, msg: 2025-04-04T17:00:44.802196Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. 
Ctx: { TraceId: 01jr0vyv6f6cfb2s3b2ftevv5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTU3NjZmOTMtYzdmNDkxYjYtYmE2MWY3ZTktYTg1MjYzYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:44.802605Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:949:2686], Recipient [13:904:2732]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 949 RawX2: 55834577534 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-04-04T17:00:44.802676Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:00:44.802810Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:904:2732], Recipient [13:904:2732]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:00:44.802842Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:00:44.802910Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:00:44.803100Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-04T17:00:44.803201Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:00:44.803250Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:00:44.803284Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:00:44.803317Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:00:44.803349Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:00:44.803391Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-04-04T17:00:44.803446Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2025-04-04T17:00:44.803485Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:00:44.803511Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:00:44.803562Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:00:44.803590Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:00:44.803669Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-04-04T17:00:44.803826Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 
PathId: 2 HasWrites: false 2025-04-04T17:00:44.803940Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:00:44.804024Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:00:44.804060Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:00:44.804088Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:00:44.804117Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-04T17:00:44.804173Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:00:44.804284Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-04-04T17:00:44.804316Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:00:44.804347Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:00:44.804375Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:00:44.804423Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:00:44.804451Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:00:44.804478Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-04-04T17:00:44.804541Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:00:44.804576Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-04T17:00:44.804620Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:00:44.806448Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:61:2108], Recipient [13:904:2732]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2025-04-04T17:00:45.092643Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vyvcsex30xc1sat385309, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjI3YjRjZmQtZDA4ODVjNjMtNDdlYTk1MTctNTRhMTZjZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:00:45.095150Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T17:00:45.095347Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:00:45.095437Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-04-04T17:00:45.095525Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v500/18446744073709551615 2025-04-04T17:00:45.095638Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-04T17:00:45.095807Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:00:45.095891Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:00:45.095968Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:00:45.096032Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:00:45.096105Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-04T17:00:45.096174Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:00:45.096209Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:00:45.096239Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:00:45.096271Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:00:45.096456Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:00:45.096847Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:970:2776], 0} after executionsCount# 1 2025-04-04T17:00:45.096960Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:00:45.097080Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} finished in read 2025-04-04T17:00:45.097190Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:00:45.097219Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:00:45.097248Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:00:45.097277Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 
2025-04-04T17:00:45.097322Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:00:45.097342Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:00:45.097380Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-04T17:00:45.097444Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:00:45.097594Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:00:45.098904Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:00:45.099006Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> Describe::DescribePartitionPermissions [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> TNetClassifierTest::TestInitFromFile |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] >> TFstClassSrcIdPQTest::NoMapping >> TNetClassifierTest::TestInitFromRemoteSource >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-04-04T16:58:25.575038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503436695487230:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:25.585553Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:26.268809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:26.274810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:26.274933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:26.278610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65305, node 1 2025-04-04T16:58:26.473812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:26.473862Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:26.473869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:26.474007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:26.830201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:26.852276Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:58:26.865133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:26.868096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:29.818701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503453875357100:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:29.818884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:29.819305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503453875357120:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:29.830110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:58:29.857512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503453875357122:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:58:29.939776Z node 1 :TX_PROXY ERROR: Actor# [1:7489503453875357173:2361] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:32.655180Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503465897774636:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:32.655742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:33.036174Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:33.039562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:33.039692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:33.042896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6603, node 2 2025-04-04T16:58:33.290925Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:33.290954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:33.290963Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:33.291117Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62438 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:58:33.732336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:58:33.766756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T16:58:33.820841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T16:58:33.828625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:33.837116Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-04T16:58:37.652717Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503465897774636:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:37.652816Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:38.276659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503491667579115:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:38.276780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:38.277086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503491667579127:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:38.281843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-04T16:58:38.302019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503491667579129:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T16:58:38.389919Z node 2 :TX_PROXY ERROR: Actor# [2:7489503491667579180:2362] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:40.725118Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503499155093473:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:40.725195Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:58:41.144322Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:41.224880Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:41.225016Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:41.226904Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5475, node 3 2025-04-04T16:58: ... t proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:59:25.081135Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489503691987926674:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:59:25.160273Z node 7 :TX_PROXY ERROR: Actor# [7:7489503691987926726:2362] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:28.495808Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489503707090419405:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:28.495897Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:59:28.835709Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:28.837709Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:28.837843Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:28.843517Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18976, node 8 2025-04-04T16:59:28.930732Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:28.930769Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:28.930793Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:28.931025Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23544 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T16:59:29.410017Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:29.430832Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T16:59:29.438360Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:59:29.449199Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T16:59:33.502101Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7489503707090419405:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:33.502263Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:59:35.039724Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489503737155191197:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:35.039810Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489503737155191173:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:35.040347Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:59:35.046866Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T16:59:35.064612Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7489503737155191200:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T16:59:35.157011Z node 8 :TX_PROXY ERROR: Actor# [8:7489503737155191251:2365] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:51.492861Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:463:2425], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:51.493546Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:51.493799Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:59:52.221248Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:52.475915Z node 9 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T16:59:52.507885Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T16:59:53.701395Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 22651, node 9 TClient is connected to server localhost:63011 2025-04-04T16:59:54.890504Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:54.890649Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:54.890741Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:54.891785Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:00:15.581911Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:536:2426], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:00:15.582739Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:15.582975Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T17:00:16.492443Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:16.742194Z node 11 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T17:00:16.801840Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T17:00:19.113913Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 14966, node 11 TClient is connected to server localhost:16121 2025-04-04T17:00:20.617864Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:20.618011Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:20.618124Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:20.619437Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:00:40.263401Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:614:2427], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:00:40.264173Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:40.264401Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T17:00:40.894392Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:41.097373Z node 14 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T17:00:41.140762Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T17:00:42.369642Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20891, node 14 TClient is connected to server localhost:11452 2025-04-04T17:00:43.256477Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:43.256598Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:43.256688Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:43.257561Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] Test command err: 2025-04-04T16:51:28.114620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489501644826272941:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:28.114660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c2d/r3tmp/tmporJBSH/pdisk_1.dat 2025-04-04T16:51:28.626018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:28.626111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:28.639020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:28.695774Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28832, node 1 2025-04-04T16:51:28.762327Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:28.763992Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:51:28.844611Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:51:28.844632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:51:28.844639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:51:28.844735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:19237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:51:29.166497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.213303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.230545Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:51:29.310023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.310123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.328187Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T16:51:29.331207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.391029Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2025-04-04T16:51:29.391078Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2025-04-04T16:51:29.405359Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7489501646632208991:2074], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2025-04-04T16:51:29.461515Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-04T16:51:29.461616Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:29.461802Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7489501646632208991:2074], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1 2025-04-04T16:51:29.461986Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2025-04-04T16:51:29.462022Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2025-04-04T16:51:29.463886Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-04T16:51:29.464018Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2025-04-04T16:51:29.465792Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2025-04-04T16:51:29.468209Z node 3 
:SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2025-04-04T16:51:29.468258Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2025-04-04T16:51:29.468304Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2025-04-04T16:51:29.468342Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2025-04-04T16:51:29.468372Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2025-04-04T16:51:29.468403Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2025-04-04T16:51:29.468428Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2025-04-04T16:51:29.468466Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2025-04-04T16:51:29.468499Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2025-04-04T16:51:29.468539Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2025-04-04T16:51:29.468588Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2025-04-04T16:51:29.468658Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2025-04-04T16:51:29.468693Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2025-04-04T16:51:29.468725Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2025-04-04T16:51:29.468752Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2025-04-04T16:51:29.468892Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-04-04T16:51:29.000000Z 2025-04-04T16:51:29.469781Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7489501646632208991:2074], database# /Root/Database1, no sysview processor 2025-04-04T16:51:29.479757Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2025-04-04T16:51:29.483806Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1 2025-04-04T16:51:29.497662Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2025-04-04T16:51:29.497723Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2025-04-04T16:51:29.537665Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2025-04-04T16:51:29.537751Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2025-04-04T16:51:29.000000Z, query count# 0 2025-04-04T16:51:29.537778Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2025-04-04T16:51:29.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.537815Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2025-04-04T16:51:29.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.537833Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2025-04-04T16:51:29.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.537859Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 
2025-04-04T16:51:29.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.537877Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2025-04-04T17:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.537893Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-04-04T17:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.537907Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-04-04T17:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.537930Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-04-04T17:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T16:51:29.543362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:51:29.544553Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete waiting... 2025-04-04T16:51:29.580329Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489501649970849665:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:51:29.580390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; waiting... 2025-04-04T16:51:29.636878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:51:29.636958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:51:29.662126Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:51:29.679302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:51:29.836117Z node 2 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2025-04-04T16:51:29.847093Z node 2 :SYSTEM_VIEWS INFO: [72075186224037899] OnActivateExecutor 2025-04-04T16:51:29.847129Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Execute 2025-04-04T16:51:29.894790Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [2:7489501649970849803:2206], path id# [OwnerId: 72057594046644480, LocalPathId: 3], service# 2 2025-04-04T16:51:29.895122Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:51:29.902132Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInitSchema::Complete 2025-04-04T16:51:29.902211Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxInit::Execute 2025-04-04T16:51:29.902720Z node 2 :SYSTEM_VIEWS DEBUG: [72075186224037899] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2025-04-04T16:5 ... 
tionTopResults: table id# 18, partition interval end# 2025-04-04T18:00:00.000000Z, partition count# 0 2025-04-04T17:00:42.024869Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2025-04-04T17:00:42.000000Z 2025-04-04T17:00:42.030170Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxCollect::Complete 2025-04-04T17:00:42.510466Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Execute 2025-04-04T17:00:42.510542Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryResults: interval end# 2025-04-04T17:00:42.000000Z, query count# 0 2025-04-04T17:00:42.510586Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 8, interval end# 2025-04-04T17:00:42.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.510612Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 10, interval end# 2025-04-04T17:00:42.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.510645Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 12, interval end# 2025-04-04T17:00:42.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.510703Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 14, interval end# 2025-04-04T17:00:42.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.510730Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 9, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.510757Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 11, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.510793Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 13, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.510815Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 15, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:42.517102Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Complete 2025-04-04T17:00:42.586180Z node 18 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T17:00:43.022433Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxCollect::Execute 2025-04-04T17:00:43.022499Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistPartitionTopResults: table id# 17, partition interval end# 2025-04-04T17:00:43.000000Z, partition count# 0 2025-04-04T17:00:43.022537Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistPartitionTopResults: table id# 18, partition interval end# 2025-04-04T18:00:00.000000Z, partition count# 0 2025-04-04T17:00:43.022677Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2025-04-04T17:00:43.000000Z 2025-04-04T17:00:43.025985Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxCollect::Complete 2025-04-04T17:00:43.521266Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Execute 2025-04-04T17:00:43.521345Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryResults: interval end# 2025-04-04T17:00:43.000000Z, query count# 0 2025-04-04T17:00:43.521376Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 8, interval end# 2025-04-04T17:00:43.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.521404Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 
10, interval end# 2025-04-04T17:00:43.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.521432Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 12, interval end# 2025-04-04T17:00:43.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.521461Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 14, interval end# 2025-04-04T17:00:43.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.521491Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 9, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.521520Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 11, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.521545Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 13, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.521570Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 15, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:43.528253Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Complete 2025-04-04T17:00:43.589105Z node 18 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2025-04-04T17:00:44.007024Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxCollect::Execute 2025-04-04T17:00:44.007087Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistPartitionTopResults: table id# 17, partition interval end# 2025-04-04T17:00:44.000000Z, partition count# 0 2025-04-04T17:00:44.007110Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistPartitionTopResults: table id# 18, partition interval end# 2025-04-04T18:00:00.000000Z, partition count# 0 2025-04-04T17:00:44.007202Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2025-04-04T17:00:44.000000Z 2025-04-04T17:00:44.015491Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxCollect::Complete 2025-04-04T17:00:44.411058Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TEvApplyLabeledCounters: services count# 1 2025-04-04T17:00:44.505581Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TEvApplyCounters: services count# 1 2025-04-04T17:00:44.529314Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Execute 2025-04-04T17:00:44.529386Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryResults: interval end# 2025-04-04T17:00:44.000000Z, query count# 0 2025-04-04T17:00:44.529420Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 8, interval end# 2025-04-04T17:00:44.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:44.529449Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 10, interval end# 2025-04-04T17:00:44.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:44.529477Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 12, interval end# 2025-04-04T17:00:44.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:44.529506Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 14, interval end# 2025-04-04T17:00:44.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:44.529533Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 9, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 
2025-04-04T17:00:44.529564Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 11, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:44.529591Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 13, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:44.529632Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 15, interval end# 2025-04-04T18:00:00.000000Z, query count# 0, persisted# 0 2025-04-04T17:00:44.535346Z node 18 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Complete 2025-04-04T17:00:44.589634Z node 18 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables iteration 6
name=topic.partition.alive_count: 32
name=topic.partition.init_duration_milliseconds_max: 8
name=topic.partition.producers_count_max: 0
name=topic.partition.read.inflight_throttled_microseconds_max: 0
name=topic.partition.read.speed_limit_bytes_per_second: 0
name=topic.partition.read.throttled_microseconds_max: 0
name=topic.partition.read_without_consumer.speed_limit_bytes_per_second: 0
name=topic.partition.read_without_consumer.throttled_microseconds_max: 0
name=topic.partition.storage_bytes_max: 0
name=topic.partition.total_count: 32
name=topic.partition.uptime_milliseconds_min: 119615
name=topic.partition.write.bytes_per_day_max: 0
name=topic.partition.write.bytes_per_hour_max: 0
name=topic.partition.write.bytes_per_minute_max: 0
name=topic.partition.write.idle_milliseconds_max: 119711
name=topic.partition.write.lag_milliseconds_max: 0
name=topic.partition.write.speed_limit_bytes_per_second: 50000000
name=topic.partition.write.throttled_microseconds_max: 0
name=topic.producers_count: 0
name=topic.reserve.limit_bytes: 0
name=topic.reserve.used_bytes: 0
name=topic.storage_bytes: 0
consumer=user:
    name=topic.partition.alive_count: 32
    name=topic.partition.committed_end_to_end_lag_milliseconds_max: 9663
    name=topic.partition.committed_lag_messages_max: 0
    name=topic.partition.committed_read_lag_milliseconds_max: 9663
    name=topic.partition.end_to_end_lag_milliseconds_max: 110110
    name=topic.partition.read.idle_milliseconds_max: 119710
    name=topic.partition.read.lag_messages_max: 0
    name=topic.partition.read.lag_milliseconds_max: 0
    name=topic.partition.read.speed_limit_bytes_per_second: 0
    name=topic.partition.read.throttled_microseconds_max: 0
    name=topic.partition.write.lag_milliseconds_max: 0
    name=topic.read.lag_messages: 0
CHECK COUNTER topic.partition.alive_count wait 32 got 32 2025-04-04T17:00:44.777163Z node 16 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [16:7489503504677358682:2080], processor id# 72075186224037895, database# /Root/PQ 2025-04-04T17:00:44.777241Z node 16 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [16:7489503504677358682:2080], database# /Root/PQ, processor id# 72075186224037895 2025-04-04T17:00:44.780247Z node 16 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 17 2025-04-04T17:00:44.779779Z node 18 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [18:7489503516381404117:2075], processor id# 72075186224037895, database# /Root/PQ 2025-04-04T17:00:44.783929Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:00:44.784243Z node 16 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 18 2025-04-04T17:00:44.785040Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:00:44.783258Z node 18 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [18:7489503516381404117:2075], database# /Root/PQ, processor id# 72075186224037895 2025-04-04T17:00:45.629560Z node 17 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [17:7489503502234047968:2061] 2025-04-04T17:00:45.626365Z node 18 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [18:7489503516381404117:2075] 2025-04-04T17:00:45.669547Z node 17 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [17:7489503515118949937:2075] 2025-04-04T17:00:45.737035Z node 17 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [17:7489503502234047968:2061] 2025-04-04T17:00:45.762360Z node 18 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [18:7489503503496502153:2061] 2025-04-04T17:00:45.794757Z node 18 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [18:7489503503496502153:2061] 2025-04-04T17:00:46.008922Z node 17 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [17:7489503515118949937:2075] |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic >> TPQCachingProxyTest::TestPublishAndForget >> TNetClassifierTest::TestInitFromBadlyFormattedFile >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> TPQCachingProxyTest::TestDeregister >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TPQCachingProxyTest::TestPublishAndForget [GOOD] |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession |81.1%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> TPQCachingProxyTest::TestDeregister [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-04-04T17:00:52.057401Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:00:52.057495Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:00:52.083600Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:00:52.083764Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-04T17:00:52.083931Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-04T17:00:52.084035Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-04T17:00:52.084173Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-04-04T17:00:52.534107Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:00:52.534216Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:00:52.566014Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:00:52.566121Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-04T17:00:52.566176Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2025-04-04T17:00:52.566329Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 >> TNetClassifierTest::TestInitFromFile [GOOD] >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] Test command err: BASE_PERF = 3.615402155 2025-04-04T16:59:50.741719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1439:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:50.743553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:50.744145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:50.745180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1741:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:50.745382Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1445:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:50.746451Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:50.746739Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:50.746832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:50.746898Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:50.747454Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1448:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:50.748409Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:50.748606Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:50.751498Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1442:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:50.752260Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:50.752319Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:59:51.310133Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:51.540241Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T16:59:51.574379Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T16:59:52.234761Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20329, node 1 TClient is connected to server localhost:1714 2025-04-04T16:59:52.674864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:52.674945Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:52.674982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:52.675304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:00:36.722999Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489503997101049784:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:36.723117Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T17:00:36.967146Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:36.972572Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:36.972729Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:36.976096Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61040, node 6 2025-04-04T17:00:37.101582Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:37.101611Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:37.101625Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:37.101843Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25733 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:37.635606Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:37.683999Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:00:37.688971Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:37.710626Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:00:40.319349Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T17:00:40.319486Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T17:00:41.723522Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489503997101049784:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:41.723635Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:41.996268Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489504018575886966:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:41.996430Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:41.996531Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489504018575886978:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:42.004383Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:00:42.017564Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489504018575886980:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:00:42.110299Z node 6 :TX_PROXY ERROR: Actor# [6:7489504022870854327:2361] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:43.215813Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:00:43.575641Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T17:00:43.575684Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-04-04T17:00:44.406739Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-04-04T17:00:44.406788Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-04-04T17:00:52.875909Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:00:52.875999Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:00:52.901089Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:00:52.901232Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-04-04T17:00:52.901350Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-04T17:00:52.901403Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-04T17:00:52.901496Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-04-04T17:00:49.843690Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504051835797928:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:49.843724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/i5tq/000bfa/r3tmp/tmpgJjXFD/pdisk_1.dat 2025-04-04T17:00:50.354524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:50.371129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:50.371272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:50.377760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:50.431005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000bfa/r3tmp/yandexcfTUAf.tmp 2025-04-04T17:00:50.431038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000bfa/r3tmp/yandexcfTUAf.tmp 2025-04-04T17:00:50.431424Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000bfa/r3tmp/yandexcfTUAf.tmp 2025-04-04T17:00:50.431574Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-04-04T17:00:50.134104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504059659725974:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:50.135097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bec/r3tmp/tmpqAFoCA/pdisk_1.dat 2025-04-04T17:00:50.619054Z node 1 :HTTP ERROR: (#26,[::1]:62033) connection closed with error: Connection refused 2025-04-04T17:00:50.619443Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:00:50.620531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:50.620626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:50.625831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:50.677081Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:50.685552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:50.685574Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:50.685580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:50.685745Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> 
TPQCachingProxyTest::TestWrongSessionOrGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> Describe::DescribePartitionPermissions [GOOD] Test command err: 2025-04-04T16:55:59.018980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489502802544268994:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:55:59.019029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea8/r3tmp/tmpMycQdw/pdisk_1.dat 2025-04-04T16:55:59.466380Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T16:55:59.701123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:55:59.701222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:55:59.703193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:55:59.735675Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12179, node 1 2025-04-04T16:55:59.828921Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000ea8/r3tmp/yandexEHfP3I.tmp 2025-04-04T16:55:59.828955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ea8/r3tmp/yandexEHfP3I.tmp 2025-04-04T16:55:59.829192Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000ea8/r3tmp/yandexEHfP3I.tmp 2025-04-04T16:55:59.829381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:55:59.889083Z INFO: TTestServer started on Port 23923 GrpcPort 12179 TClient is connected to server localhost:23923 PQClient connected to localhost:12179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:00.252315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:00.267964Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:56:00.318655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:56:00.527493Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:00.539542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-04T16:56:02.729055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502819724139005:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:02.729618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489502819724138992:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:02.729743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:02.734032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T16:56:02.750306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489502819724139010:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T16:56:02.951893Z node 1 :TX_PROXY ERROR: Actor# [1:7489502819724139074:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:02.978792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:56:03.003519Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489502819724139083:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:56:03.010698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWU2NDI2YjktY2Y0ZWZkMGQtMzcxZjYwOTQtM2U0ZWFlOGQ=, ActorId: [1:7489502819724138978:2337], ActorState: ExecuteState, TraceId: 01jr0vp7ww4cpg1eeet7hnaj6y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:56:03.013280Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:56:03.051639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:56:03.153031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489502824019106670:2639] 2025-04-04T16:56:04.020669Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489502802544268994:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:04.021234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T16:56:08.696944Z :ConnectToYDB INFO: TTopicSdkTestSetup started 2025-04-04T16:56:08.744284Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T16:56:08.789750Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489502845493943423:2796] connected; active server actors: 1 2025-04-04T16:56:08.790030Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-04T16:56:08.793768Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T16:56:08.793884Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T16:56:08.806003Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T16:56:08.814418Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T16:56:08.814467Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T16:56:08.814821Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T16:56:08.815228Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T16:56:08.815250Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T16:56:08.815274Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T16:56:08.815301Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T16:56:08.815337Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T16:56:08.815385Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T16:56:08.816835Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T16:56:08.816888Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489502845493943454:2452], now have 1 active actors on pipe 2025-04-04T16:56:08.819101Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489502845493943422:2795], now have 1 active actors on pipe 2025-04-04T16:56:08.853491Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489502806839236723 RawX2: 4294969492 } TxId: 281474976710672 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } Expl ... 
ersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 6 EffectiveACLVersion: 14 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "test-topic" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 13 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [15:7489504046090017996:2482] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1743786047395 PathId: [OwnerId: 72057594046644480, LocalPathId: 13] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [15:7489504046090017996:2482] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1743786047395 PathId: [OwnerId: 72057594046644480, LocalPathId: 13] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-04-04T17:00:48.022373Z node 15 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-04T17:00:48.022550Z node 15 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request operation_params { } path: "test-topic" include_location: true 2025-04-04T17:00:48.022635Z node 15 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[15:7489504050384985596:2518]: Bootstrap 2025-04-04T17:00:48.022966Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [15:7489503981665507223:2113], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/test-topic TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList 
RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:00:48.023147Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7489504046090018034:2882][/Root/test-topic] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [15:7489503981665507223:2113], cookie# 13 2025-04-04T17:00:48.023241Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7489504046090018038:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /Root/test-topic }: sender# [15:7489504046090018035:2882], cookie# 13 2025-04-04T17:00:48.023271Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7489504046090018039:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /Root/test-topic }: sender# [15:7489504046090018036:2882], cookie# 13 2025-04-04T17:00:48.023299Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7489504046090018040:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /Root/test-topic }: sender# [15:7489504046090018037:2882], cookie# 13 2025-04-04T17:00:48.023390Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7489504046090018038:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7489503981665506907:2049], cookie# 13 2025-04-04T17:00:48.023419Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7489504046090018039:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7489503981665506910:2052], cookie# 13 2025-04-04T17:00:48.023446Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7489504046090018040:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7489503981665506913:2055], cookie# 13 2025-04-04T17:00:48.023491Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7489504046090018034:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7489504046090018035:2882], cookie# 13 2025-04-04T17:00:48.023519Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7489504046090018034:2882][/Root/test-topic] Sync is in progress: cookie# 13, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:00:48.023546Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7489504046090018034:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7489504046090018036:2882], cookie# 13 2025-04-04T17:00:48.023575Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7489504046090018034:2882][/Root/test-topic] Sync is done: cookie# 13, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:00:48.023634Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7489504046090018034:2882][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7489504046090018037:2882], cookie# 13 2025-04-04T17:00:48.023650Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7489504046090018034:2882][/Root/test-topic] Unexpected sync response: sender# [15:7489504046090018037:2882], cookie# 13 2025-04-04T17:00:48.023703Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [15:7489503981665507223:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/test-topic PathId: Partial: 0 } 2025-04-04T17:00:48.023790Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [15:7489503981665507223:2113], 
notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/test-topic PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [15:7489504046090018034:2882] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 13 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1743786047395 PathId: [OwnerId: 72057594046644480, LocalPathId: 13] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:00:48.023878Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [15:7489503981665507223:2113], cacheItem# { Subscriber: { Subscriber: [15:7489504046090018034:2882] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 13 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1743786047395 PathId: [OwnerId: 72057594046644480, LocalPathId: 13] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/test-topic TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 13 IsSync: true Partial: 0 } 2025-04-04T17:00:48.024166Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [15:7489504050384985597:3082], recipient# [15:7489504050384985596:2518], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/test-topic TableId: [72057594046644480:13:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:00:48.029565Z node 15 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [15:7489504050384985596:2518]: Request location 2025-04-04T17:00:48.030141Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [15:7489504050384985598:2519] connected; active server actors: 1 2025-04-04T17:00:48.030318Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 15, Generation 1 2025-04-04T17:00:48.030399Z node 15 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [15:7489504050384985596:2518]: Got location 2025-04-04T17:00:48.031210Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [15:7489504050384985598:2519] disconnected; active server actors: 1 2025-04-04T17:00:48.031251Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [15:7489504050384985598:2519] disconnected no session === status=SUCCESS issues= 2025-04-04T17:00:48.164674Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [15:7489503981665507223:2113], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 
2025-04-04T17:00:48.164889Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [15:7489503981665507223:2113], cacheItem# { Subscriber: { Subscriber: [15:7489504016025246135:2419] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:00:48.165099Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [15:7489504050384985604:3087], recipient# [15:7489504050384985603:2520], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:43.301133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:43.301298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:43.301348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:43.301412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:43.301455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:43.301486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:43.301544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:43.301616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:43.302016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:43.717966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:43.718058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:43.757894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:43.758196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:43.800050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:43.836090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:43.838447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:43.841461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:43.854528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:43.890826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:43.918270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:43.918359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:43.931310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:43.931384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:43.931450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:43.931641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:43.963534Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:44.146686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:44.158606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.166540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:44.166853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:44.166927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.169827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:44.178366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:44.178701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.178789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:44.178824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:44.178878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:44.181316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.181389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:44.181440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:44.183456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.183506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.191932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:44.192053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.204768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:44.206952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:44.216040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:44.233803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:44.233976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:44.234028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:44.234367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:44.234430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:44.242871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:44.243084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:44.256057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:44.256121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:44.256321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:44.256368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:44.264919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.264990Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:44.265111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:44.265149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.265203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:44.265237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.265273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:44.265311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.265350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:44.265382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:44.265451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:44.265486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:44.265516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:44.279855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:44.280002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:44.280044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
-04T17:00:54.865184Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:00:54.865279Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:00:54.884612Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:00:54.884737Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:00:54.887448Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1676 } } 2025-04-04T17:00:54.887514Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:00:54.887668Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1676 } } 2025-04-04T17:00:54.887789Z node 8 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1676 } } 2025-04-04T17:00:54.888945Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 409 RawX2: 34359740746 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:00:54.889020Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:00:54.889237Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 409 RawX2: 34359740746 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:00:54.889328Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:00:54.889472Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 409 RawX2: 34359740746 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:00:54.889571Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:54.889640Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-04T17:00:54.892930Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:00:54.894531Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:00:54.909476Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 34359740664 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:00:54.909556Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:00:54.909697Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 309 RawX2: 34359740664 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:00:54.909745Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:00:54.909837Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 309 RawX2: 34359740664 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:00:54.909901Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:54.909961Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:00:54.910014Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:00:54.910069Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:00:54.910102Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:00:54.913627Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:00:54.914370Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:00:54.914460Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-04T17:00:54.914544Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:00:54.914610Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-04T17:00:54.914772Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-04T17:00:54.914833Z node 8 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-04T17:00:54.917703Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:00:54.917782Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:00:54.917977Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:00:54.918046Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:00:54.918102Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:00:54.918150Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:00:54.918210Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:00:54.918350Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:335:2314] message: TxId: 102 2025-04-04T17:00:54.918440Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:00:54.918503Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:00:54.918565Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:00:54.918811Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:00:54.918867Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:00:54.921677Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:00:54.921880Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:439:2400] TestWaitNotification: OK eventTxId 102 2025-04-04T17:00:54.922698Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:00:54.923019Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 396us result status StatusSuccess 2025-04-04T17:00:54.923547Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-04-04T17:00:51.874907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504064282439679:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:51.874956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ba7/r3tmp/tmp7xsGuR/pdisk_1.dat 2025-04-04T17:00:52.390838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:52.391049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:52.391309Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:52.395840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:52.425317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000ba7/r3tmp/yandex4WASTC.tmp 2025-04-04T17:00:52.425349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ba7/r3tmp/yandex4WASTC.tmp 2025-04-04T17:00:52.425499Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2025-04-04T17:00:52.425532Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ba7/r3tmp/yandex4WASTC.tmp 2025-04-04T17:00:52.425685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-04-04T17:00:55.884927Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:00:55.885016Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:00:55.906180Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:00:55.906293Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2025-04-04T17:00:55.906386Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-04-04T17:00:55.906454Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-04-04T17:00:55.906538Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-04-04T17:00:55.906594Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored 2025-04-04T17:00:55.906635Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-04-04T17:00:55.906746Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL |81.2%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogWithUnionAllAscending >> TConsoleConfigTests::TestAutoOrder |81.2%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2025-04-04T17:00:09.190774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503880045601633:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:09.222960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001362/r3tmp/tmprGKFBB/pdisk_1.dat 2025-04-04T17:00:10.190209Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:10.193335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:10.193419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:10.258350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:10.302942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21912, node 1 2025-04-04T17:00:10.698278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:10.698297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:10.698303Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:10.698422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32322 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:11.077367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:32322 2025-04-04T17:00:11.480964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
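The TPQCachingProxyTest::TestWrongSessionOrGeneration dump above exercises generation fencing in the direct read cache: session1 is registered with generation 2, so a re-registration carrying the stale generation 1 is ignored and a read staged under generation 1 is dropped with "Data ignored". A rough sketch of such a check, assuming each session only needs to remember its newest generation (types and names are illustrative, not the cache's real interface):

```cpp
// Hedged sketch of generation fencing: operations tagged with an older
// generation than the registered one are rejected.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct TDirectReadCacheSketch {
    std::map<std::string, uint64_t> Sessions;  // session -> newest generation

    bool Register(const std::string& session, uint64_t gen) {
        auto [it, inserted] = Sessions.try_emplace(session, gen);
        if (!inserted && gen <= it->second) {
            std::cout << "stale generation " << gen << ", ignored\n";
            return false;
        }
        it->second = gen;
        return true;
    }

    bool StageRead(const std::string& session, uint64_t gen, uint64_t readId) {
        auto it = Sessions.find(session);
        if (it == Sessions.end() || gen < it->second) {
            std::cout << "read " << readId << " under generation " << gen
                      << ": data ignored\n";
            return false;
        }
        return true;
    }
};

int main() {
    TDirectReadCacheSketch cache;
    cache.Register("session1", 2);      // registered with generation 2
    cache.Register("session1", 1);      // stale generation 1, ignored
    cache.StageRead("session1", 1, 1);  // generation 1: data ignored
    cache.StageRead("session1", 2, 1);  // accepted
}
```

Fencing on a monotonically growing generation is the usual way to stop a stale, reconnecting client from clobbering the state of its successor.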
2025-04-04T17:00:11.520116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:12.164521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:12.165565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:12.199747Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:00:12.256415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32322 2025-04-04T17:00:12.731135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:32322 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743786013420 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-04T17:00:14.063872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:00:14.158929Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503880045601633:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:14.158982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:32322 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743786014730 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-04T17:00:15.296906Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-04T17:00:15.320014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:00:19.477646Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503927058486534:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:19.478183Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001362/r3tmp/tmpDyFgOd/pdisk_1.dat 2025-04-04T17:00:19.737720Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:19.831454Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:19.831587Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:19.844002Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2587, node 4 2025-04-04T17:00:20.218965Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:20.218992Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:20.219000Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:20.219166Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:20.626464Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:14173 2025-04-04T17:00:21.157474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
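Every node bring-up in these dumps prints the same HIVE VolatileState walk, Unknown -> Disconnected -> Connecting -> Connected, and a TTxStatus "killing node" drops the node straight back to Disconnected, as happens to node 3 above. Sketched as a plain transition function (the enum is an assumption, not the hive's actual type):

```cpp
// Assumed-name sketch of the VolatileState transitions printed by HIVE.
#include <iostream>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

EVolatileState Next(EVolatileState s, bool killed) {
    if (killed) {
        return EVolatileState::Disconnected;  // "killing node N"
    }
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected;
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s;
}

int main() {
    auto s = EVolatileState::Unknown;
    for (int i = 0; i < 3; ++i) {
        auto n = Next(s, /*killed=*/false);
        std::cout << Name(s) << " -> " << Name(n) << '\n';
        s = n;
    }
    std::cout << Name(s) << " -> " << Name(Next(s, /*killed=*/true)) << '\n';
}
```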
2025-04-04T17:00:21.207193Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:00:21.221663Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:21.751221Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489503934038431322:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:21.751310Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:21.899262Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:21.899363Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:21.914474Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-04T17:00:21.9165 ... 515]; 2025-04-04T17:00:35.625996Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:35.656069Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:37.607377Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504003124411926:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:37.607526Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001362/r3tmp/tmpsBFZ8E/pdisk_1.dat 2025-04-04T17:00:37.903985Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:37.963456Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:37.963566Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:37.966267Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27358, node 10 2025-04-04T17:00:38.174956Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:38.174984Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:38.174996Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:38.175190Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1902 WaitRootIsUp 'Root'... 
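The NET_CLASSIFIER lines right above follow a fixed fallback order: the distributable configuration is consulted first, then the file it names, and only when both fail does the classifier give up with "got bad distributable configuration". In the TestInitFromBadlyFormattedFile run earlier the file existed but held invalid NetData; here no usable file is configured at all. A sketch of that chain with hypothetical loaders (nothing below is a real YDB function):

```cpp
// Hypothetical fallback chain matching the order of the NET_CLASSIFIER
// warnings; the "NetData:" prefix check merely stands in for real parsing.
#include <fstream>
#include <iostream>
#include <iterator>
#include <optional>
#include <string>

std::optional<std::string> LoadDistributableConfig() {
    // "distributable config is empty, broken or outdated, will use file"
    return std::nullopt;
}

std::optional<std::string> LoadFromFile(const std::string& path) {
    std::ifstream in(path);
    std::string data((std::istreambuf_iterator<char>(in)),
                     std::istreambuf_iterator<char>());
    if (data.rfind("NetData:", 0) != 0) {  // assumed format probe
        std::cerr << "invalid NetData format\n";
        return std::nullopt;               // "failed to initialize from file"
    }
    return data;
}

int main() {
    auto net = LoadDistributableConfig();
    if (!net) {
        net = LoadFromFile("/tmp/netdata.tmp");  // illustrative path
    }
    if (!net) {
        std::cerr << "got bad distributable configuration\n";
        return 1;
    }
    return 0;
}
```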
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:38.651601Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:1902 2025-04-04T17:00:39.074986Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:39.121475Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:39.628699Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7489504010813891485:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:39.628848Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:39.679340Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:39.679447Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:39.691410Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-04T17:00:39.692476Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1902 2025-04-04T17:00:40.176027Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-04T17:00:40.176456Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:00:40.632716Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:41.633240Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:42.633797Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot 
detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:43.642603Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:46.319646Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504041243337572:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:46.319753Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001362/r3tmp/tmpOfMwwR/pdisk_1.dat 2025-04-04T17:00:46.806144Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:46.882346Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:46.882495Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:46.886293Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21614, node 13 2025-04-04T17:00:47.059795Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:47.059826Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:47.059841Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:47.060063Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:47.659986Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18793 2025-04-04T17:00:48.224477Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
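Node 12's METADATA_PROVIDER lines above tick once per second (17:00:40.63, 41.63, 42.63, 43.64), re-probing //Root/ydb_ut_tenant/.metadata/initialization/migrations and getting LookupError each time because the tenant node was killed. A toy fixed-interval retry loop in the same spirit, with the probe left as a stand-in callback:

```cpp
// Toy once-per-second retry, mirroring the LookupError cadence above.
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

void RetryUntilOk(const std::function<bool()>& probe,
                  std::chrono::milliseconds period, int maxAttempts) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        if (probe()) {
            return;  // path finally resolved
        }
        std::cout << "attempt " << attempt
                  << ": cannot detect path existence (LookupError)\n";
        std::this_thread::sleep_for(period);
    }
}

int main() {
    // The tenant node is gone in the log, so the probe never succeeds.
    RetryUntilOk([] { return false; }, std::chrono::milliseconds(1000), 4);
}
```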
2025-04-04T17:00:48.274906Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:48.790145Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7489504049481915945:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:48.790235Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:48.818297Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:48.818434Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:48.821740Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-04T17:00:48.823136Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18793 2025-04-04T17:00:49.502569Z node 13 :TX_PROXY ERROR: Actor# [13:7489504054128240896:2926] txid# 281474976715660, issues: { message: "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1" severity: 1 } 2025-04-04T17:00:49.509544Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-04T17:00:49.536368Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:00:49.794689Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:50.803537Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:51.804305Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:52.806297Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] 
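The ExplicitPartitionsWrongKeyType failure above, "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1", is a per-position type check: each explicit split boundary is a tuple with one value per key column, and every value must match that column's declared type. A hypothetical validator in that shape (made-up types, not the proxy's real code, and the 1-based position is an assumption read off the message):

```cpp
// Made-up boundary validator reproducing the error text above.
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>
#include <variant>
#include <vector>

enum class EType { Uint64, Utf8 };
using TValue = std::variant<uint64_t, std::string>;

const char* TypeName(EType t) { return t == EType::Uint64 ? "Uint64" : "Utf8"; }

EType TypeOf(const TValue& v) {
    return std::holds_alternative<uint64_t>(v) ? EType::Uint64 : EType::Utf8;
}

// Each boundary is a tuple of values, one per key column.
std::optional<std::string> ValidateBoundaries(
        const std::vector<EType>& keyTypes,
        const std::vector<std::vector<TValue>>& boundaries) {
    for (std::size_t b = 0; b < boundaries.size(); ++b) {
        for (std::size_t pos = 0; pos < boundaries[b].size(); ++pos) {
            if (TypeOf(boundaries[b][pos]) != keyTypes[pos]) {
                std::ostringstream err;
                err << "Error at split boundary " << b << ": Value of type "
                    << TypeName(keyTypes[pos])
                    << " expected in tuple at position " << pos + 1;
                return err.str();
            }
        }
    }
    return std::nullopt;
}

int main() {
    // Table keyed by one Uint64 column; the boundary wrongly carries Utf8.
    auto err = ValidateBoundaries({EType::Uint64},
                                  {{TValue{std::string{"oops"}}}});
    if (err) {
        std::cout << *err << '\n';
    }
}
```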
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:43.301118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:43.301287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:43.301367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:43.301415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:43.301459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:43.301505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:43.301582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:43.301652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:43.301996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:43.719463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:43.719560Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:43.759778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:43.760036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:43.801264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:43.836059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:43.837770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:43.841040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:43.854492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:43.878943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:43.918270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:43.918359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:43.930463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:43.930628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:43.930724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:43.930953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:43.956503Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:44.146712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:44.159494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.166489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:44.166857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:44.166940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.169912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:44.182344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:44.182635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.182760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:44.182810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:44.182866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:44.184967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.185027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:44.185085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:44.186951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.187003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.192013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:44.192125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.204719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:44.206959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:44.217069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:44.234791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:44.234957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:44.235015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:44.235335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:44.235404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:44.243618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:44.243761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:44.256040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:44.256117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:44.256312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:44.256355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:44.264676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:44.264748Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:44.264852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:44.264889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.264930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:44.264954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.264980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is 
published: false 2025-04-04T17:00:44.265011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:44.265045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:44.265083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:44.265144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:44.265172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:44.265202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:44.275625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:44.275762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:44.275798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... E: StateWork, received event# 2146435072, Sender [9:123:2149], Recipient [9:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:00:56.372991Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:00:56.373113Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:56.373145Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:56.373279Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:00:56.373448Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:56.373483Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:202:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T17:00:56.373523Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:202:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:00:56.373583Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:00:56.373633Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:00:56.373776Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:00:56.373812Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:00:56.373868Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:00:56.373928Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:00:56.373979Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:00:56.374027Z node 9 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T17:00:56.374093Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:00:56.374148Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:00:56.374193Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:00:56.374417Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:00:56.374482Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T17:00:56.374537Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:00:56.374587Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T17:00:56.376139Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:202:2204], Recipient [9:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-04-04T17:00:56.376191Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-04T17:00:56.376273Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:00:56.376356Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:00:56.376399Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:00:56.376452Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:00:56.376503Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:56.376616Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:00:56.377630Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:202:2204], Recipient [9:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-04-04T17:00:56.377678Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-04T17:00:56.377751Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:00:56.377833Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 
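The "Publication in-flight, count: N" / "Publication complete, notify & remove" lines above reflect simple ack bookkeeping: SchemeShard publishes each affected path at a specific version and keeps the transaction open until every publication is acknowledged via TEvUpdateAck. Below is a minimal sketch of just that counter logic, with hypothetical class and method names; it is not the SchemeShard code itself.

```cpp
// Minimal model of publication ack accounting, assuming a simplified
// (txId -> pending {pathId, version}) map. Values in main() are copied
// from the trace above (txId 102; path 2 dropped at version uint64-max).
#include <cstdint>
#include <iostream>
#include <map>
#include <set>
#include <utility>

using TTxId = uint64_t;
using TPathId = uint64_t;

class TPublicationTracker {
    std::map<TTxId, std::set<std::pair<TPathId, uint64_t>>> InFlight;

public:
    void StartPublish(TTxId tx, TPathId path, uint64_t version) {
        InFlight[tx].insert({path, version});
    }

    // Returns true when the last ack for the transaction arrives.
    bool AckPublish(TTxId tx, TPathId path, uint64_t version) {
        auto it = InFlight.find(tx);
        if (it == InFlight.end()) return false;
        it->second.erase({path, version});
        std::cout << "Publication in-flight, count: " << it->second.size()
                  << ", txId: " << tx << "\n";
        if (!it->second.empty()) return false;
        InFlight.erase(it);
        std::cout << "Publication complete, notify & remove, txId: " << tx << "\n";
        return true;
    }
};

int main() {
    TPublicationTracker tracker;
    tracker.StartPublish(102, 1, 7);
    tracker.StartPublish(102, 2, 18446744073709551615ull);
    tracker.AckPublish(102, 1, 7);                      // count drops to 1
    tracker.AckPublish(102, 2, 18446744073709551615ull); // completes tx 102
}
```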
2025-04-04T17:00:56.377864Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:00:56.377895Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:00:56.377928Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:00:56.378031Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:00:56.378083Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:00:56.380818Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [9:123:2149], Recipient [9:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-04T17:00:56.380879Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-04T17:00:56.380968Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:00:56.381033Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:00:56.381136Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:56.383682Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:00:56.384422Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:00:56.384460Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:00:56.386051Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:00:56.386090Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:00:56.386194Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:00:56.386488Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:00:56.386549Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:00:56.387082Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:447:2402], Recipient [9:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:00:56.387151Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:00:56.387202Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:00:56.387331Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [9:363:2342], Recipient [9:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-04-04T17:00:56.387366Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:00:56.387456Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:00:56.387632Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:00:56.387682Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:445:2400] 2025-04-04T17:00:56.387894Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:447:2402], Recipient [9:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:00:56.387940Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:00:56.387995Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-04T17:00:56.388488Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:448:2403], Recipient [9:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T17:00:56.388566Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:00:56.388742Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:00:56.388978Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 239us result status StatusPathDoesNotExist 2025-04-04T17:00:56.389155Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: 2025-04-04T16:59:10.567502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:10.568055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:10.568122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f7/r3tmp/tmpWqFWnC/pdisk_1.dat 2025-04-04T16:59:11.028073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:59:11.086544Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:11.125530Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:59:11.126477Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T16:59:11.126842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:11.126953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:11.139106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:11.225894Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T16:59:11.225970Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T16:59:11.226182Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T16:59:11.379533Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T16:59:11.379663Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:59:11.380287Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T16:59:11.380389Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:59:11.380779Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:11.381011Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T16:59:11.381107Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T16:59:11.381372Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T16:59:11.382785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:11.383869Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T16:59:11.383946Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T16:59:11.417721Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:59:11.422413Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:59:11.422979Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:59:11.423251Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:11.433968Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:59:11.464518Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:11.464670Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:59:11.466233Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:59:11.466351Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:59:11.466402Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:59:11.466771Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:59:11.466891Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:59:11.466980Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:59:11.467441Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:59:11.514548Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:59:11.514775Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:59:11.514899Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:59:11.514969Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:59:11.515017Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:59:11.515068Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:59:11.515307Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
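The datashard trace below walks a proposed scheme transaction through its execution units in order (CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan), with each unit reporting a status such as Executed, DelayComplete, or "is not ready to execute". The sketch that follows is an illustrative stand-in for that pipeline, assuming a deliberately simplified scheduler; unit names and statuses come from the log, everything else is hypothetical.

```cpp
// Minimal model of a datashard execution-unit pipeline. WaitForPlan parks
// the operation until the coordinator sends a plan step, so the loop stops
// there -- resumption via TEvPlanStep is not modeled.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, Waiting };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;
};

int main() {
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::Waiting; }},
    };
    for (const auto& unit : plan) {
        std::cout << "Trying to execute tx on unit " << unit.Name << "\n";
        if (unit.Execute() == EStatus::Waiting) {
            std::cout << "Operation is not ready to execute on unit "
                      << unit.Name << "\n";
            break; // waits for the coordinator's plan step
        }
        std::cout << "Advance execution plan past unit " << unit.Name << "\n";
    }
}
```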
2025-04-04T16:59:11.515376Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:59:11.515691Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:59:11.515791Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:59:11.515907Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:59:11.515981Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:11.516055Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:59:11.516107Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:59:11.516159Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:59:11.516196Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:59:11.516245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:59:11.516401Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:11.516441Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:11.516497Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:59:11.516577Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:59:11.516614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:59:11.516739Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:59:11.516961Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:59:11.517020Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:59:11.517122Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:59:11.517179Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:59:11.517217Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:59:11.517286Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T16:59:11.517331Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:11.517694Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:59:11.517735Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:59:11.517784Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:59:11.517821Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T16:59:11.517902Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:59:11.517932Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:59:11.517984Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:59:11.518020Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:59:11.518053Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:59:11.519415Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:59:11.519481Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:11.519516Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:59:11.519571Z node 1 :TX_DATASHARD TRACE: Prop ... 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:00:54.927592Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:00:54.927680Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-04T17:00:54.927817Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:00:54.927928Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-04T17:00:54.928054Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:665:2570], Recipient [13:755:2634]: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:00:54.928088Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:00:54.928122Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-04T17:00:54.928173Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:00:54.928215Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-04T17:00:54.928306Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:755:2634], Recipient [13:665:2570]: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:00:54.928345Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:00:54.928376Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 
dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-04-04T17:00:54.928421Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:00:54.928632Z node 13 :TX_DATASHARD DEBUG: Complete [3005 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:982:2781], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:00:54.929003Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:665:2570], Recipient [13:755:2634]: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:00:54.929043Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:00:54.929077Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-04T17:00:54.929132Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:00:54.929252Z node 13 :TX_DATASHARD DEBUG: Complete [3005 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:982:2781], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1857 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } 2025-04-04T17:00:54.930195Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:00:54.930679Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 981 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } 2025-04-04T17:00:54.931650Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:00:54.934998Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-04T17:00:54.935231Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:665:2570], Recipient [13:755:2634]: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-04T17:00:54.935286Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:00:54.935373Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-04-04T17:00:54.942265Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 
72075186224037889 2025-04-04T17:00:54.942629Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:755:2634], Recipient [13:665:2570]: {TEvReadSet step# 3005 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-04T17:00:54.942777Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:00:54.942882Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-04-04T17:00:55.206860Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T17:00:55.207020Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-04-04T17:00:55.208605Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0vz58z47skep3jm44d38vp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZTU1OGZjNzQtZTdjZGFiNmMtNjdmYTJhZjgtZDcxMjMxZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-04-04T17:00:55.213520Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1106:2901], Recipient [13:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T17:00:55.213726Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:00:55.213879Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3005/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2025-04-04T17:00:55.213978Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2025-04-04T17:00:55.214122Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-04T17:00:55.214383Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:00:55.214500Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:00:55.214607Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:00:55.214711Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:00:55.214803Z node 13 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-04T17:00:55.214889Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:00:55.214936Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:00:55.214979Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:00:55.215010Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 
72075186224037888 on unit ExecuteRead 2025-04-04T17:00:55.215210Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:00:55.215744Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:1106:2901], 0} after executionsCount# 1 2025-04-04T17:00:55.215869Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1106:2901], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:00:55.216058Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1106:2901], 0} finished in read 2025-04-04T17:00:55.216211Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:00:55.216253Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:00:55.216290Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:00:55.216328Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:00:55.216406Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:00:55.216447Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:00:55.216511Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-04T17:00:55.216613Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:00:55.216833Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:00:55.223335Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1106:2901], Recipient [13:665:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:00:55.223483Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |81.2%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] |81.2%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoSplit >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> TSchemeShardSubDomainTest::LS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:57.434718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:57.434834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:57.434923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:57.434969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:57.435051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:57.435097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:57.435220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:57.435315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:57.435797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:57.537582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:57.537653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:57.555548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:57.555890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:57.556096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:57.568583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:57.568866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:57.569441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:57.570332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:57.577597Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:57.579166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:57.579246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:57.579444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:57.579499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:57.579545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:57.579697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.588051Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:57.759591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:57.759878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.760055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:57.760256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:57.760318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.763105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:57.763304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:57.763526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.763608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:57.763648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:57.763696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:57.766038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.766124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:57.766179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:57.770537Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.770618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.770684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:57.770759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.784304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:57.786513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:57.786767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:57.788008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:57.788166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:57.788221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:57.788553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:57.788972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:57.789183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:57.789275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:57.791911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:57.791973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:57.794171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:57.794268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:57.794777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.794840Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:57.794988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:57.795036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.795078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:57.795127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.795189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:57.795243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.795283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:57.795321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:57.795404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:57.795452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:57.795508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:57.807721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:57.807944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:57.808008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:00:57.808079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:00:57.808128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:57.808277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:00:57.813882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:00:57.814421Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743786057.815516 267824 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-04-04T17:00:57.816024Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:00:57.834643Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:00:57.837666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { 
Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:57.838165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.838363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-04T17:00:57.839075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1743786057 seconds (20182 days, 55 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2025-04-04T17:00:57.840262Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:00:57.847305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1743786057 seconds (20182 days, 55 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:57.847528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1743786057 seconds (20182 days, 55 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-04T17:00:57.848133Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |81.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TGRpcYdbTest::CreateTableBadRequest3 [GOOD] >> TGRpcYdbTest::CreateTableWithIndex >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TSchemeShardSubDomainTest::Redefine >> TSchemeShardSubDomainTest::SimultaneousDeclare >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestValidation >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::LS [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] >> TSchemeShardSubDomainTest::ForceDropTwice >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:58.661517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:58.661606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:58.661670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:58.661708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:58.661775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:58.661807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:58.661909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:58.661981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:58.662375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:58.745375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:58.745441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:58.761118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:58.761419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:58.761606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:58.774386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-04T17:00:58.774767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:58.775576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:58.776659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:58.783073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.784629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:58.784715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.784930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:58.784989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:58.785034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:58.785184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.794543Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:58.947638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:58.947967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.948224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:58.948516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:58.948602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.951947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:58.952174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:58.952440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.952516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:58.952558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:58.952595Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:58.956438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.956572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:58.956621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:58.959339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.959411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.959467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:58.959584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.963723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:58.966262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:58.966484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:58.967745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:58.967907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:58.967964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:58.968292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:58.968352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:58.968549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:58.968651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:58.971558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:58.971598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-04T17:00:58.971792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.971829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:58.972093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.972145Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:58.972234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:58.972265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.972299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:58.972324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.972366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:58.972404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.972434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:58.972460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:58.972523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:58.972552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:58.972585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:58.976574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:58.976745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:58.976818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
44, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:00:59.789096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-04T17:00:59.789209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:00:59.789369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-04T17:00:59.789407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-04T17:00:59.789443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-04T17:00:59.789832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.789964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:59.790022Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-04T17:00:59.790073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-04T17:00:59.792527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.792591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-04T17:00:59.792677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:00:59.792715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:00:59.792754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:00:59.792812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:00:59.792860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-04T17:00:59.792933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:122:2148] message: TxId: 281474976710760 2025-04-04T17:00:59.792999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:00:59.793035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-04T17:00:59.793062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-04T17:00:59.793135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-04T17:00:59.795627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-04T17:00:59.795712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-04T17:00:59.795784Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-04T17:00:59.795872Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:00:59.798044Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:00:59.798161Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 
0, read bytes: 0 }} 2025-04-04T17:00:59.798219Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T17:00:59.800290Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:00:59.800400Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:00:59.800451Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-04T17:00:59.800574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:00:59.800613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:473:2434] TestWaitNotification: OK eventTxId 102 2025-04-04T17:00:59.801314Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:00:59.801662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 347us result status StatusSuccess 2025-04-04T17:00:59.802192Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 
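IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944

The BuildAsyncIndexShouldSucceed trace above drives the index builder through its transaction chain (lock 281474976710757, initiate 281474976710758, apply 281474976710759, unlock 281474976710760) until it reports "Change state from Unlocking to Done", leaving a ready EIndexTypeGlobalAsync index over the "value" column. A minimal YQL sketch of the equivalent user-facing operation (an assumption for illustration; the unit test talks to the schemeshard directly):

    ALTER TABLE TTLEnabledTable
        ADD INDEX UserDefinedIndexByValue GLOBAL ASYNC ON (value);

The Processed counters stay at zero (upload rows: 0, read rows: 0), consistent with the base table holding no rows when the build runs.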
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:59.572726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:59.572826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.572873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:59.572908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:59.572954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:59.572976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:59.573029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.573111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:59.573427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:59.665650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:59.665714Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:00:59.681571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:59.681844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:59.682004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:59.708610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:59.708981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:59.709703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.711787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:59.718220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.719759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:59.719844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.720068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:59.720122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:59.720162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:59.720320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.727423Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:59.866195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:59.866462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.866719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:59.866971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:59.867043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.872986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.873108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:59.873282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.873326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:59.873363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:59.873416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:59.878979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.879078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:59.879123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:59.881493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.881536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.881568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.881634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.884358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:59.886213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:59.886526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:59.887656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.887821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:59.887945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.888283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:59.888341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.888542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:59.888646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:59.891534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:59.891596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:59.891785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.891824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:59.892164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.892216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:59.892317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.892372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.892433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.892487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.892545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:59.892591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.892628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:59.892660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:59.892754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:59.892797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:59.892841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:59.894729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.894866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.894910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rom tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:01:00.026460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:01:00.026906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.027062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.027118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:00.027532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:01:00.027601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:00.027821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.027911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:01:00.027978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:00.033061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.033149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.033323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:00.033435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.033475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:01:00.033522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:01:00.033899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.033948Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:01:00.034059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:01:00.034100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:00.034163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 
2025-04-04T17:01:00.034203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:00.034269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:01:00.034338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:00.034403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:01:00.034450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:01:00.034680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:01:00.034733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-04T17:01:00.034774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:01:00.034805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:01:00.035547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.035624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.035650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:00.035693Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:01:00.035732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.036442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.036502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.036525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:00.036557Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:01:00.036580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:01:00.036630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-04T17:01:00.041031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:01:00.042392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-04T17:01:00.042704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:01:00.042763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-04T17:01:00.043326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:01:00.043462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:00.043508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:455:2409] TestWaitNotification: OK eventTxId 100 2025-04-04T17:01:00.044090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:00.044361Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 270us result status StatusSuccess 2025-04-04T17:01:00.044872Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.045547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:00.045748Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 242us result status StatusSuccess 2025-04-04T17:01:00.046135Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::Redefine [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:59.461817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:59.461932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.461992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:59.462028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:59.462073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:59.462100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:59.462155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.462278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:59.462684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:59.544223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-04T17:00:59.544293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:59.558390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:59.558693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:59.558859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:59.578060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:59.578490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:59.579290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.586387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:59.594062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.595536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:59.595619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.595806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:59.595859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:59.595897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:59.596045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.604416Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:59.765561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:59.765841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.766096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:59.766353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:59.766442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.773380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.773550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-04T17:00:59.773769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.773821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:59.773861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:59.773912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:59.776173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.776250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:59.776289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:59.778162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.778217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.778287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.778386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.781396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:59.783231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:59.783431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:59.784227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.784360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:59.784411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.784661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:59.784699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.784847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:59.784916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:59.786631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:59.786694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:59.786863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.786913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:59.787214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.787267Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:59.787348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.787392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.787438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.787477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.787540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:59.787585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.787615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:59.787646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:59.787718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:59.787760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:59.787789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:59.789235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.789318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.789349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:00:59.864624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:00:59.864668Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:00:59.864708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:59.867352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:00:59.867442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:00:59.867472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:00:59.867502Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:00:59.867533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-04T17:00:59.867621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-04-04T17:00:59.867671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:274:2265] 2025-04-04T17:00:59.869982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:00:59.870038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:00:59.870063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:00:59.870087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:00:59.870110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:00:59.870164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:00:59.871263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:00:59.879302Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2025-04-04T17:00:59.879588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-04T17:00:59.879974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:00:59.880542Z node 1 :HIVE INFO: 
[72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-04-04T17:00:59.880896Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-04-04T17:00:59.881071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.881370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:00:59.881645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:00:59.881917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:00:59.882148Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2025-04-04T17:00:59.882320Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-04-04T17:00:59.882532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:00:59.882761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:00:59.882800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:275:2266] 2025-04-04T17:00:59.882929Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-04-04T17:00:59.883207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T17:00:59.883432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:00:59.883844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:00:59.884007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:00:59.884443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:00:59.884593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:00:59.886093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:00:59.886168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
2025-04-04T17:00:59.886404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:00:59.886852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:00:59.886938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:00:59.887028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:59.888502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T17:00:59.890278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:00:59.890371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:00:59.893781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:00:59.893898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:00:59.894042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:00:59.894276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:00:59.894407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-04T17:00:59.895110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:00:59.895335Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 194us result status StatusPathDoesNotExist 2025-04-04T17:00:59.895546Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:00:59.896062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:00:59.896263Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 215us result status StatusSuccess 2025-04-04T17:00:59.896622Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:59.114285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:59.114413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.114480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:59.114521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:59.114572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:59.114606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:59.114681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.114774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:59.115128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:59.216349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-04-04T17:00:59.216424Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:59.229307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:59.229619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:59.229793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:59.241881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:59.242298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:59.243070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.244129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:59.250916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.252586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:59.252691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.252967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:59.253055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:59.253108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:59.253276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.263308Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:59.424151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:59.424496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.424829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:59.425055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:59.425116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.433710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.433883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-04-04T17:00:59.434122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.434181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:59.434227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:59.434320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:59.437202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.437279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:59.437324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:59.444360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.444483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.444544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.444638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.455592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:59.460225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:59.460545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:59.461703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.461875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:59.461943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.462327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:59.462423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.462624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:59.462722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:59.468212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:59.468288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:59.468523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.468574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:59.468975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.469033Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:59.469146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.469202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.469256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.469317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.469366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:59.469418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.469459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:59.469498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:59.469603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:59.469654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:59.469694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:59.472002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.472155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.472197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 250 OrderId: 107 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1530 } } 2025-04-04T17:01:00.261899Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 250 OrderId: 107 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1530 } } 2025-04-04T17:01:00.262359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:01:00.262432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:01:00.262462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-04T17:01:00.262487Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-04-04T17:01:00.262519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-04T17:01:00.262582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/3, is published: true 2025-04-04T17:01:00.263098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 608 RawX2: 4294969845 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-04T17:01:00.263135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2025-04-04T17:01:00.263247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 608 RawX2: 4294969845 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-04T17:01:00.263287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:01:00.263349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 608 RawX2: 4294969845 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-04-04T17:01:00.263431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.263458Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.263487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, 
datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-04T17:01:00.263518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-04T17:01:00.269324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-04T17:01:00.269435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:01:00.270028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:01:00.270266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-04T17:01:00.270915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:01:00.271971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-04-04T17:01:00.272031Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2025-04-04T17:01:00.272173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 2/3 2025-04-04T17:01:00.272208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-04-04T17:01:00.272249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 2/3 2025-04-04T17:01:00.272295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-04-04T17:01:00.272333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2025-04-04T17:01:00.273458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.273639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:01:00.273733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.273840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.273909Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2025-04-04T17:01:00.273988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 3/3 2025-04-04T17:01:00.274017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-04T17:01:00.274050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 3/3 2025-04-04T17:01:00.274074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-04T17:01:00.274103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2025-04-04T17:01:00.274189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:482:2431] message: TxId: 107 2025-04-04T17:01:00.274261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-04-04T17:01:00.274308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-04T17:01:00.274347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-04T17:01:00.274487Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:01:00.274535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2025-04-04T17:01:00.274556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2025-04-04T17:01:00.274695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T17:01:00.274738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2025-04-04T17:01:00.274762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2025-04-04T17:01:00.274812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-04T17:01:00.277963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-04T17:01:00.278022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:538:2487] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-04T17:01:00.282088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:00.282698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2025-04-04T17:01:00.282812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-04T17:01:00.282871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-04-04T17:01:00.285586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.285828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 
108, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-04T17:01:00.286331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-04T17:01:00.286396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-04T17:01:00.287069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-04T17:01:00.287181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-04T17:01:00.287218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:739:2657] TestWaitNotification: OK eventTxId 108 >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |81.3%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:00.633185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:00.633293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.633351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:00.633384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
using default configuration 2025-04-04T17:01:00.633430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:00.633458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:00.633519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.633604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:00.633904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:00.707789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:00.707849Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:00.717353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:00.717549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:00.717701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:00.726294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:00.726547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:00.727109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.728397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:00.733311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.734531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.734615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.734843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:00.734901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.734962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:00.735107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.742830Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:00.879933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:00.880128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.880314Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:00.880498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:00.880586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.883575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.883728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:00.883974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.884031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:00.884065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:00.884117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:00.886606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.886713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:00.886756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:00.889054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.889118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.889162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.889238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.893303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:00.898730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:00.898959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:00.900173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.900336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.900401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.900747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:00.900817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.901036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.901153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:00.903892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.903960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.904181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.904226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:00.904533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.904568Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:00.904668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.904710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.904750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.904792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.904834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:00.904883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.904921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:00.904954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:00.905029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.905077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:00.905107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:00.906855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-04T17:01:00.906959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.906986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:01:00.907018Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:01:00.907050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.907200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:01:00.910329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:01:00.910823Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-04T17:01:00.911373Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:01:00.927774Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:01:00.930704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:00.930994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.931086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.931818Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:01:00.935332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.935553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-04-04T17:01:00.936148Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-04T17:01:00.936381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:01:00.936432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-04T17:01:00.936917Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, 
unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:01:00.937035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:00.937074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 100 2025-04-04T17:01:00.937540Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:00.937685Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 160us result status StatusPathDoesNotExist 2025-04-04T17:01:00.937820Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |81.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2025-04-04T17:00:08.792607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503879589244068:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:08.792878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001363/r3tmp/tmp4pkffz/pdisk_1.dat 2025-04-04T17:00:09.671274Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26654, node 1 2025-04-04T17:00:09.768563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:09.768669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:09.803548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:10.012395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:10.012422Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:10.012431Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:10.012599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11865 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:10.350770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:12.942410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503896769114298:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:12.942533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:13.216014Z node 1 :TX_PROXY ERROR: Actor# [1:7489503901064081618:2641] txid# 281474976710658, issues: { message: "Column Key has wrong key type Double" severity: 1 } 2025-04-04T17:00:15.135056Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489503909102514612:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001363/r3tmp/tmp2x0GnG/pdisk_1.dat 2025-04-04T17:00:15.351896Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:15.541316Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:15.591597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:15.591690Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:15.607824Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19328, node 4 2025-04-04T17:00:15.958376Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:15.958401Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:15.958407Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:15.958558Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:16.260758Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:19.411054Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503926282384710:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.420969Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.449118Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503926282384733:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.449450Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.509049Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503926282384769:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.529279Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.541010Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:19.561094Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384786:2655] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.571425Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384812:2670] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.576154Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384817:2675] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.576405Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384821:2677] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.576493Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384822:2678] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.576559Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384814:2672] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.576647Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384813:2671] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.576898Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384816:2674] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.580170Z node 4 :TX_PROXY ERROR: Actor# [4:7489503926282384815:2673] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.811561Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489503926282385009:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.811662Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.812816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] Ac ... UTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0vy2z2b5xy11c24a5hbe1j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2E0M2RjOGQtOWRlMTMyYWQtOTlkZDNjMC0yYzVmMzg4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:20.102318Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489503909102514612:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:20.102409Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:22.145300Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489503939713864287:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:22.145361Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001363/r3tmp/tmpKxM7oc/pdisk_1.dat 2025-04-04T17:00:22.357677Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:22.396244Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:22.396334Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:22.400153Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16058, node 7 2025-04-04T17:00:22.485695Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:22.485720Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:22.485727Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:22.485857Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:00:22.753961Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:25.430491Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:25.570627Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489503952598767416:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:25.570919Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489503952598767428:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:25.571289Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:25.574055Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:25.602377Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489503952598767430:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:00:25.705856Z node 7 :TX_PROXY ERROR: Actor# [7:7489503952598767509:2849] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:25.792639Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vy8k00b78kxd3azg0118v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YjJiNmM4MjEtOTk0YjU1OTctYTg2NGNkODYtNzYyMzlmMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:27.823479Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489503961157725046:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:27.823597Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001363/r3tmp/tmpWhmSTo/pdisk_1.dat 2025-04-04T17:00:28.007204Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:28.034064Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:28.034185Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:28.040629Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2828, node 10 2025-04-04T17:00:28.126999Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:28.127029Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:28.127036Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:28.127204Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:28.816100Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:31.911122Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:32.806818Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489503961157725046:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:32.806892Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:42.975701Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:00:42.975735Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:58.090607Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504094301713549:2543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:58.090722Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504094301713541:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:58.091109Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:58.095201Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:00:58.162609Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504094301713555:2544], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:00:58.272305Z node 10 :TX_PROXY ERROR: Actor# [10:7489504094301713630:3193] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:58.413541Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0vz8b81hd0rpp6j3z4bd8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzI2NTg0OGUtZDQ0NWYwLWFmMTkwNTZkLTEwZWY3NmYw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:00:58.982428Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0vz8pv95f8ngjcj9b2s08h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzI2NTg0OGUtZDQ0NWYwLWFmMTkwNTZkLTEwZWY3NmYw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |81.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |81.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:59.994940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:59.995044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.995102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:59.995142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:59.995187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:59.995217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:59.995274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:59.995381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:59.995740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:00.086033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:00.086093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:00.100311Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:00.100601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:00.100769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:00.114582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:00.114976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:00.115721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.118024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:00.126583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.128167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.128274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.128514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:00.128576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.128619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:00.128755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.137128Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:00.289443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:00.289753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.290033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:00.290334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:00.290415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.293284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.293433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:00.293661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T17:01:00.293716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:00.293763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:00.293831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:00.296027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.296103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:00.296142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:00.298334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.298408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.298463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.298543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.309371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:00.311902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:00.312091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:00.313033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.313174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.313254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.313534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:00.313591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.313756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.313837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-04T17:01:00.315843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.315889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.316036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.316069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:00.316393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.316445Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:00.316551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.316605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.316664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.316708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.316744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:00.316789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.316847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:00.316881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:00.316963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.317008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:00.317034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:00.319057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.319182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.319221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:01:01.049491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:01:01.049836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.053036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.053453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.053511Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:01:01.053640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:01:01.053681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:01:01.053723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:01:01.053753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:01:01.053798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:01:01.053879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:489:2445] message: TxId: 103 2025-04-04T17:01:01.053928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:01:01.053966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:01:01.053995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:01:01.054158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T17:01:01.056786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:01:01.056882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:490:2446] TestWaitNotification: OK eventTxId 103 2025-04-04T17:01:01.057521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:01.057793Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 293us result status StatusSuccess 2025-04-04T17:01:01.058450Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:01.059156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:01.059435Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 272us result status StatusSuccess 2025-04-04T17:01:01.059948Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:01.063696Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:01.064019Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 331us result status StatusSuccess 2025-04-04T17:01:01.064477Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:01.065155Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:01.065411Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 270us result status StatusSuccess 2025-04-04T17:01:01.065856Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:00.194466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:00.194587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.194695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:00.194738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:00.194784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:00.194813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:00.194879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.194964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:00.195304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:00.280522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:00.280589Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:00.293161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-04T17:01:00.293459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:00.293617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:00.305288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:00.305635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:00.306341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.310741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:00.318202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.319751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.319868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.320075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:00.320131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.320172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:00.320309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.327731Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:00.477427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:00.477695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.477939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:00.478168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:00.478263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.480818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.480965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:00.481195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.481249Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:00.481292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:00.481341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:00.483770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.483838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:00.483876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:00.485912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.485986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.486035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.486109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.489890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:00.492730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:00.493001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:00.494094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.494295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.494360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.494682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:00.494738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.494959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.495061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:00.497628Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.497721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.497940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.497991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:00.498384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.498439Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:00.498537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.498589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.498647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.498710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.498754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:00.498797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.498850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:00.498888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:00.498994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.499040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:00.499075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:00.501220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.501370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.501410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
sReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:01:00.746072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T17:01:00.746107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:01:00.746139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:01:00.746174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:01:00.746368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:01:00.746418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-04T17:01:00.746459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:01:00.746500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T17:01:00.747339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:01:00.747422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:01:00.747451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:01:00.747515Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:01:00.747573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.748353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:01:00.748435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:01:00.748461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:01:00.748491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:01:00.748518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:01:00.748587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-04T17:01:00.751621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:00.751674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2025-04-04T17:01:00.751698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:00.752780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:01:00.753401Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:01:00.754258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.754688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-04T17:01:00.755415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:01:00.755679Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-04T17:01:00.755906Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-04T17:01:00.756437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:01:00.756708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-04T17:01:00.757781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:01:00.757959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:00.759282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:00.759360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:00.759505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:01:00.760174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:00.760232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:00.760325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.761954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:01:00.762012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:01:00.763761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:01:00.763810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:01:00.764295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:01:00.764353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:01:00.766024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:01:00.766128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:01:00.766547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:01:00.766594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:01:00.767125Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:01:00.767226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:01:00.767261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:576:2530] TestWaitNotification: OK eventTxId 104 2025-04-04T17:01:00.767943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:00.768139Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 218us result status StatusPathDoesNotExist 2025-04-04T17:01:00.768367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:01:00.768948Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:00.769170Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 202us result status StatusSuccess 2025-04-04T17:01:00.769559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:00.380317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:00.380452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.380518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:00.380554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:00.380604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:00.380632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:00.380693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.380787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:00.381137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:00.466766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:00.466833Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:01:00.479332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:00.479614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:00.479749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:00.490295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:00.490617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:00.491304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.492219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:00.497592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.499193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.499289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.499470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:00.499515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.499554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:00.499678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.506691Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:00.638627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:00.638902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.639168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:00.639445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:00.639514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.642208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.642408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:00.642635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.642718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:00.642758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:00.642809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:00.645030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.645097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:00.645135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:00.647243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.647298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.647339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.647422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.651150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:00.653391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:00.653635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:00.654773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.654935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.654999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.655313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:00.655373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.655555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.655656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:00.658178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.658235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.658469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.658511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:00.658861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.658908Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:00.659025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.659081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.659130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.659181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.659225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:00.659277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.659318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:00.659351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:00.659442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.659482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:00.659513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:00.661448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.661578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.661623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
g: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.700369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:00.700419Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:01:00.700454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:00.700520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-04T17:01:00.703587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.703654Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 100:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:00.703714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-04T17:01:00.704843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:01:00.705436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:01:00.707116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.707173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.707229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:00.707285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-04T17:01:00.707418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:00.710161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:01:00.710335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:01:00.710776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.710900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.710948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 
72057594046678944 2025-04-04T17:01:00.711176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:01:00.711221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:00.711381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.711434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:00.711492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:01:00.713633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.713669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.713821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:00.713946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.713991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:01:00.714035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-04T17:01:00.714491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.714543Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:01:00.714677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:01:00.714719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:00.714776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:01:00.714813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:00.714854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:01:00.714896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:00.714932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:01:00.714982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:01:00.715052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:00.715090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-04T17:01:00.715155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:01:00.715191Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:01:00.715908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.715990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.716031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:00.716070Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:01:00.716133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.716923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.716998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:00.717021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:00.717059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:01:00.717086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:00.717147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-04T17:01:00.717184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:273:2264] 2025-04-04T17:01:00.724076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:01:00.724680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:01:00.724795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:00.724831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:274:2265] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-04-04T17:01:00.725381Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:00.725576Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 211us result status StatusSuccess 
2025-04-04T17:01:00.726038Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD]
>> BuildStatsHistogram::Five_Five_Mixed [GOOD]
>> BuildStatsHistogram::Five_Five_Serial
>> TSchemeShardSubDomainTest::ForceDropTwice [GOOD]
>> TConsoleConfigTests::TestCheckConfigUpdates [GOOD]
>> TConsoleConfigTests::TestManageValidators
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD]
>> TSchemeShardSubDomainTest::DeclareAndDelete
>> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable
>> KqpPg::TableInsert-useSink [GOOD]
>> TSchemeShardSubDomainTest::CopyRejects
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets
>> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD]
>> TSchemeShardSubDomainTest::SetSchemeLimits
>> TSchemeShardSubDomainTest::SchemeQuotas
>> KqpPg::TempTablesSessionsIsolation
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex
>> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD]
>> TSchemeShardSubDomainTest::CreateAndWait
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD]
>> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD]
>> TConsoleConfigTests::TestManageValidators [GOOD]
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction
>> TVersions::Wreck2Reverse [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD]
>> TSchemeShardSubDomainTest::CopyRejects [GOOD]
>> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD]
>> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase
>> TConsoleConfigTests::TestDryRun
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14
>> TSchemeShardSubDomainTest::CreateAndWait [GOOD]
>> TSchemeShardSubDomainTest::ConsistentCopyRejects
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD]
>> TVersions::Wreck1
>> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers
>> TConsoleConfigTests::TestDryRun [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag
>> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD]
>> TStorageBalanceTest::TestScenario3 [GOOD]
>> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactUncommittedLongTx
>> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactedLongTxRestart
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:03.544181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:03.544321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.544379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:03.544432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:03.544501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:03.544543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:03.544615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.544720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:03.545108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:03.645059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:03.645120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:03.671847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:03.672168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:03.672372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:03.686437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:03.686859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:03.687645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.688725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at
schemeshard: 72057594046678944 2025-04-04T17:01:03.695417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.697075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:03.697158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.697362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:03.697417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.697457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:03.697581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.710123Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:03.896390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:03.896701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.896984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:03.897285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:03.897353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.902379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.902582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:03.902839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.902902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:03.902943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:03.903001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:03.905559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.905630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:03.905678Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2025-04-04T17:01:03.908187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.908252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.908297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:03.908383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.912657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:03.917801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:03.918029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:03.919375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.919590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:03.920731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:03.921105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:03.921179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:03.921406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:03.921513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:03.925567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:03.925638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.925875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.925927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:03.926296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-04T17:01:03.926352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:03.926470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:03.926531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.926601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:03.926651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.926718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:03.926771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.926812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:03.926851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:03.926964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:03.927015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:03.927053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:03.929218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:03.929677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:03.929736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 140 OrderId: 102 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1607 } } 2025-04-04T17:01:04.408413Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 140 OrderId: 102 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1607 } } 2025-04-04T17:01:04.409170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 506 RawX2: 4294969754 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:01:04.409235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-04T17:01:04.409434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 506 RawX2: 4294969754 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:01:04.409505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:01:04.409590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 506 RawX2: 4294969754 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:01:04.409654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.409697Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.409754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:01:04.409812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:01:04.410593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:01:04.410727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:01:04.410771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:01:04.410827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-04T17:01:04.410890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] 
was 6 2025-04-04T17:01:04.411191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:01:04.411266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:01:04.411338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:01:04.411373Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:01:04.411423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:01:04.411535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:01:04.417829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.417960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.418224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.418276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:01:04.418383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:01:04.418437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:01:04.418495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:01:04.418526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:01:04.418559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:01:04.418623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:308:2299] message: TxId: 102 2025-04-04T17:01:04.418685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:01:04.418718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:01:04.418745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:01:04.418859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:01:04.419129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:01:04.419237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:01:04.420667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:01:04.420705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:534:2478] TestWaitNotification: OK eventTxId 102 
2025-04-04T17:01:04.421133Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:04.421368Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 220us result status StatusSuccess 2025-04-04T17:01:04.421766Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:04.422408Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:04.422593Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 219us result status StatusSuccess 2025-04-04T17:01:04.422878Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:04.505617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:04.505756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.505817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:04.505861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:04.505912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:04.505943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:04.506005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.506111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:04.506520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:04.598830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:04.598906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:04.626350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:04.628798Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:04.629011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:04.648605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:04.649014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:04.649785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.650915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:04.657185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.658800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.658891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.659132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:04.659194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.659240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:04.659388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.671577Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:04.810654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:04.810936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.811235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:04.811469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:04.811535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.814315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.814515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:04.814782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.814846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:04.814894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:04.814956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:04.817977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.818046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:04.818090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:04.821002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.821068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.821130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.821225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.825139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:04.831515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:04.831785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:04.832932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.833102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:04.833167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.833487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:04.833565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.833773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:04.833869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:04.836590Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.836674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.836879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.836929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:04.837281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.837331Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:04.837431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.837481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.837530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.837578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.837631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:04.837680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.837722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:04.837756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:04.837843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:04.837891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:04.837928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:04.840168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.840294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.840335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:01:05.083452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:01:05.084263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:05.084411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:05.084470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:05.084791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:01:05.084854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:05.085033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:05.085117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:01:05.085178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:01:05.087265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:05.087351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:05.087507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:05.087615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:05.087651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:01:05.087686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-04T17:01:05.087731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.087763Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:01:05.087858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:01:05.087892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:05.087931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 
1/1 2025-04-04T17:01:05.087967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:05.088002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:01:05.088056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:05.088117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:01:05.088147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:01:05.088291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:01:05.088350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-04T17:01:05.088403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:01:05.088435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:01:05.089484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:05.089582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:05.089622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:05.089658Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:01:05.089692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:05.090640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:05.090757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:05.090801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:05.090837Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:01:05.090884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:01:05.090940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-04T17:01:05.094501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:01:05.095714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-04T17:01:05.095963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:01:05.096022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-04T17:01:05.096516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:01:05.096598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:05.096634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:483:2432] TestWaitNotification: OK eventTxId 100 2025-04-04T17:01:05.097113Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:05.097350Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 275us result status StatusSuccess 2025-04-04T17:01:05.097822Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:05.098438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:05.098716Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 216us result status StatusSuccess 2025-04-04T17:01:05.099145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD]
>> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD]
>> TFlatTableExecutor_LongTx::LongTxBorrow
>> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD]
>> TFlatTableExecutor_LongTx::CompactedTxIdReuse

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:03.303622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:03.303719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.303768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:03.303801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:03.303845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:03.303868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:03.303915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.303987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval#
604800.000000s, IsManualStartup# false 2025-04-04T17:01:03.304246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:03.382775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:03.382836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:03.397235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:03.397600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:03.397755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:03.417490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:03.417966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:03.418880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.425761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:03.432506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.434057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:03.434148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.434373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:03.434430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.434473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:03.434595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.443723Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:03.594219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:03.594525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.594845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:03.595137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:03.595206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.601630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.601817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:03.602092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.602166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:03.602210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:03.602294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:03.607779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.607899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:03.607953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:03.610832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.610905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.610974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:03.611084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.623469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:03.626479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:03.626816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:03.627696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.627840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:03.627936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:03.628200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:03.628248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:03.628422Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:03.628501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:03.630582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:03.630644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.630889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.630942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:03.631330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.631396Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:03.631515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:03.631594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.631655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:03.631703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.631753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:03.631821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:03.631871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:03.631914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:03.632017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:03.632073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:03.632112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:03.634330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:03.634463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:03.634505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
44, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:03.727168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:01:03.727204Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:01:03.727250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:03.727346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:01:03.729462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:01:03.729643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-04T17:01:03.730476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.730615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:03.730694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:01:03.730776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.730821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:03.730957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-04T17:01:03.731098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:03.731169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:03.731866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:01:03.734929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:01:03.735505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:03.735572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.735726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:03.735881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.735920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:01:03.736102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:01:03.736473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.736522Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-04T17:01:03.736650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:01:03.736689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:03.736747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:01:03.736799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:03.736840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:01:03.736878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:03.736913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:01:03.736950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:01:03.737078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:03.737121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:01:03.737163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:01:03.737197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T17:01:03.738090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:03.738180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:03.738221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:01:03.738297Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:01:03.738341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:03.739184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:03.739279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:03.739310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:01:03.739339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:01:03.739372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:03.739456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:01:03.740147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:03.740207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:03.740323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:01:03.741251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:03.741301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:03.741370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:03.743701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:01:03.746344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:01:03.746482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:01:03.746646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:01:03.746976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:01:03.747041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:01:03.747532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:01:03.747695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:01:03.747749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 101: satisfy waiter [1:340:2331] TestWaitNotification: OK eventTxId 101 2025-04-04T17:01:03.748361Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:03.748573Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 221us result status StatusPathDoesNotExist 2025-04-04T17:01:03.748776Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TFstClassSrcIdPQTest::NoMapping [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> TFstClassSrcIdPQTest::ProperPartitionSelected >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan >> TGRpcYdbTest::CreateDeleteYqlSession >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> DataShardSnapshots::ShardRestartAfterDropTable >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |81.4%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |81.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |81.4%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:01.426590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:01.426752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:01.426838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:01.426883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:01.426938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:01.426970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:01.427044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:01.427156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:01.427623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:01.513421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:01.513506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:01.535193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:01.535586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:01.535859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:01.557112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:01.557560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:01.558456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:01.559704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:01.570003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:01.571884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:01.571983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:01.572203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:01.572265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:01.572312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:01.572466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.581196Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:01.749443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:01.749688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.749951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:01.750203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:01.750323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.753316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:01.753497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:01.753719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.753781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:01.753816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:01.753955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:01.758847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.758917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-04-04T17:01:01.758955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:01.761563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.761634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.761698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:01.761776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:01.765341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:01.768996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:01.769210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:01.770291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:01.770436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:01.770492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:01.770774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:01.770830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:01.771018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:01.771139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:01.774745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:01.774800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:01.774993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:01.775046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:01.775336Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:01.775374Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:01.775451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:01.775501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:01.775539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:01.775572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:01.775605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:01.775640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:01.775671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:01.775698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:01.775764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:01.775800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:01.775823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:01.777360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:01.777474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:01.777524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8944 2025-04-04T17:01:02.087378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:02.087405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:02.087430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:02.089694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:01:02.093258Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-04T17:01:02.093756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-04T17:01:02.094153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:01:02.094836Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:01:02.095687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:02.095965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:01:02.096783Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-04-04T17:01:02.098512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:01:02.098776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:01:02.099269Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-04T17:01:02.100369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409548 2025-04-04T17:01:02.101098Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:01:02.101497Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-04T17:01:02.102002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T17:01:02.102264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting 
tablet 72075186233409551 2025-04-04T17:01:02.103594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:01:02.103844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:02.104937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:01:02.105130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-04-04T17:01:02.108588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:02.108746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:02.108931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:01:02.109659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:02.109715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:02.109800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:02.111085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T17:01:02.111143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-04T17:01:02.111322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:01:02.111350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:01:02.114786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:01:02.114849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:01:02.114966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:01:02.114995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-04T17:01:02.115082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:01:02.115118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:01:02.115369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:01:02.115419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:01:02.117665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at 
schemeshard: 72057594046678944 2025-04-04T17:01:02.117781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-04-04T17:01:02.118091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:01:02.118144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-04T17:01:02.118278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:01:02.118306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:01:02.118798Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:01:02.118959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:01:02.118997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:668:2571] 2025-04-04T17:01:02.119228Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:01:02.119306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:01:02.119331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:668:2571] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-04T17:01:02.119851Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:02.120063Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 235us result status StatusPathDoesNotExist 2025-04-04T17:01:02.120269Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:01:02.120740Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:02.120954Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 184us result status StatusSuccess 2025-04-04T17:01:02.121382Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IncrementalBackup::BackupRestore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:04.832892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:04.833005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.833065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:04.833105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:04.833152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:04.833182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:04.833248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.833332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:04.833680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:04.910832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:04.910891Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:04.921463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:04.921703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:04.921880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:04.931919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:04.932256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:04.932924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.933921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:04.942900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.944484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.944574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.944837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:04.944898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.944948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:04.945129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.957488Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:05.093191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:05.093438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.093667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:05.093909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:05.093978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.101770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:05.101938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:05.102189Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.102264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:05.102303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:05.102359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:05.105197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.105272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:05.105337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:05.108325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.108426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.108471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:05.108556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.123931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:05.126513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:05.126758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:05.127899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:05.128061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:05.128133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:05.128512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:05.128589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:05.128837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:05.128951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:05.131731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:05.131795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:05.132005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:05.132068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:05.132463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.132522Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:05.132627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:05.132680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.132748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:05.132798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.132842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:05.132895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.132934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:05.132969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:05.133061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:05.133109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:05.133145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:05.135344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:05.135483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:05.135523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
AckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:01:05.351186Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-04T17:01:05.351949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-04T17:01:05.352221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:01:05.352599Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:01:05.352880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:05.353072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:01:05.353454Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-04T17:01:05.353747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:01:05.353922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2025-04-04T17:01:05.355098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 Forgetting tablet 72075186233409548 2025-04-04T17:01:05.357222Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-04-04T17:01:05.357432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T17:01:05.357676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:01:05.358093Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:01:05.358686Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-04T17:01:05.359139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:01:05.359345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:05.359733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:01:05.359891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-04-04T17:01:05.361806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:05.361865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:05.362023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409549 2025-04-04T17:01:05.362878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:05.362923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:05.362991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:05.363819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T17:01:05.363896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-04T17:01:05.364500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-04T17:01:05.369802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:01:05.369866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:01:05.370052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:01:05.370083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:01:05.373332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:01:05.373384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-04T17:01:05.373475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:01:05.373490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:01:05.373537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-04T17:01:05.373576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:01:05.373603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:01:05.373736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:01:05.373790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at 
schemeshard: 72057594046678944 2025-04-04T17:01:05.373864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-04T17:01:05.374097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:01:05.374132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-04T17:01:05.374228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:01:05.374276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:01:05.374699Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:01:05.374870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:05.374912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:615:2518] 2025-04-04T17:01:05.375084Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:01:05.375152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:01:05.375169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:615:2518] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-04T17:01:05.375523Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:05.375715Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 167us result status StatusPathDoesNotExist 2025-04-04T17:01:05.375892Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:01:05.376228Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:05.376398Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 161us result status StatusSuccess 2025-04-04T17:01:05.376689Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:04.812455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:04.812566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.812619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:04.812661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:04.812707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:04.812735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:04.812802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.812944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:04.813345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:04.903240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:04.903306Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:01:04.918845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:04.919157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:04.919313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:04.931061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:04.931429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:04.932185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.933263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:04.939124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.940416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.940501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.940752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:04.940812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.940859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:04.941006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.947918Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:05.090577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:05.090863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.091110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:05.091369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:05.091451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.099392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:05.099574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:05.099782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.099922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:05.099967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:05.100023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:05.103225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.103294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:05.103334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:05.111155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.111223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.111263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:05.111340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.114950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:05.122306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:05.122517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:05.123594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:05.123730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:05.123803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:05.124166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:05.124221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:05.124418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:05.124523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:05.126783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:05.126844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:05.127032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:05.127073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:05.127383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.127431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:05.127531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:05.127593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.127646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:05.127685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.127724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:05.127770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:05.127804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:05.127849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:05.127948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:05.127991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:05.128022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:05.129934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:05.130058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:05.130096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ransactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:05.209767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:01:05.209987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:01:05.210040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:01:05.210204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:01:05.210304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:01:05.210349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:01:05.214377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:05.214425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:05.214554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:01:05.214697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:05.214771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:01:05.214819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-04T17:01:05.215209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.215253Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:01:05.215320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:01:05.215347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:05.215377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:01:05.215399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:05.215441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:01:05.215486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:05.215514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:01:05.215538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:01:05.215622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 3 2025-04-04T17:01:05.215663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:01:05.215688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:01:05.215731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:01:05.216545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:05.216644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:05.216676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:01:05.216705Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:01:05.216742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:05.217669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:05.217739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:01:05.217771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:01:05.217815Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:01:05.217841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:01:05.217924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:01:05.227423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:01:05.227906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-04T17:01:05.228115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:01:05.228150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-04T17:01:05.228314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:01:05.228351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:01:05.228677Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:01:05.228795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:05.228824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:329:2320] 2025-04-04T17:01:05.228987Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:01:05.229059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:01:05.229084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:329:2320] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-04T17:01:05.229493Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:05.229689Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 207us result status StatusSuccess 2025-04-04T17:01:05.230089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:05.230677Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:05.230884Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 226us result status StatusSuccess 2025-04-04T17:01:05.231221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:04.661848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:04.662002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.662063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:04.662141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:04.662199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:04.662232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:04.662322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:04.662425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:04.662872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:04.748749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:04.748811Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:01:04.761514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:04.761805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:04.761970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:04.773224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:04.773607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:04.774351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.775362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:04.781532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.783124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.783215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.783432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:04.783488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.783534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:04.783671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.791332Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:04.935296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:04.935558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.935817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:04.936079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:04.936150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.938653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.938852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:04.939111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.939170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:04.939206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:04.939287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:04.941512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.941579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:04.941622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:04.943651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.943706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.943751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.943840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.954153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:04.956431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:04.956642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:04.957763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.957925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:04.957992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.958399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:04.958472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.958700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:04.958810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:04.961436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.961498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.961703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.961761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:04.962120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.962168Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:04.962278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.962335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.962387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.962436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.962483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:04.962525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.962565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:04.962598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:04.962704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:04.962757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:04.962788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:04.964831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.964953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.965007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
bletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:07.090995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944 2025-04-04T17:01:07.091115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944 2025-04-04T17:01:07.091568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2025-04-04T17:01:07.091643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-04-04T17:01:07.091706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2025-04-04T17:01:07.091821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-04-04T17:01:07.091867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2 2025-04-04T17:01:07.092441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:07.092527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2025-04-04T17:01:07.092680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2025-04-04T17:01:07.092751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-04-04T17:01:07.095673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2025-04-04T17:01:07.095886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-04T17:01:07.096260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:07.096319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:07.096545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-04-04T17:01:07.096645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:07.096690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1044:2906], at schemeshard: 72057594046678944, txId: 137, path id: 2 2025-04-04T17:01:07.096751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:1044:2906], at schemeshard: 72057594046678944, txId: 137, path id: 10 2025-04-04T17:01:07.097259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-04T17:01:07.097332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-04-04T17:01:07.097599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-04T17:01:07.098643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-04T17:01:07.098774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-04-04T17:01:07.098814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-04T17:01:07.098859Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2025-04-04T17:01:07.098923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2025-04-04T17:01:07.099929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-04T17:01:07.100027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-04-04T17:01:07.100071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-04-04T17:01:07.100124Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2025-04-04T17:01:07.100164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-04T17:01:07.100262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-04-04T17:01:07.103138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2025-04-04T17:01:07.103312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-04-04T17:01:07.103362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-04T17:01:07.103954Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 
72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-04T17:01:07.104205Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2025-04-04T17:01:07.105016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-04T17:01:07.105076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-04-04T17:01:07.105203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-04T17:01:07.105255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-04T17:01:07.105328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-04-04T17:01:07.105461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3 2025-04-04T17:01:07.108289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-04T17:01:07.109770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-04-04T17:01:07.111951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-04T17:01:07.112211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-04-04T17:01:07.112272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2025-04-04T17:01:07.112353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-04-04T17:01:07.112781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 992 RawX2: 4294970161 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2025-04-04T17:01:07.117577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2025-04-04T17:01:07.117801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:58.850071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:58.850178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:58.850227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:58.850286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:58.850331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:58.850362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:58.850416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:58.850497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:58.850884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
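The TStoragePoolsQuotasTest output that starts here exercises a subdomain with both an overall quota and per-pool-kind quotas; the DescribeScheme result further down in this test's output shows DatabaseQuotas with data_size_hard_quota: 2800 / data_size_soft_quota: 2200, plus fast_kind (600/500) and large_kind (2200/1700) storage_quotas. As a rough, assumption-laden sketch of how such paired limits typically interact — exceeding the hard quota sets an "over quota" flag that clears only once usage drops back below the soft quota — consider the C++ model below; the threshold values come from the log, everything else (names, API shape) is illustrative and not YDB's code.

#include <cstdint>
#include <initializer_list>
#include <iostream>

// Hypothetical hard/soft quota hysteresis. Crossing HardBytes sets the
// "exceeded" flag; the flag clears only below SoftBytes, so usage sitting
// between the two thresholds keeps whatever state it already had.
struct TQuota {
    uint64_t HardBytes = 0; // writes are rejected at or above this
    uint64_t SoftBytes = 0; // flag clears only below this
};

class TQuotaTracker {
public:
    explicit TQuotaTracker(TQuota quota) : Quota(quota) {}

    // Feed the current usage; returns true while the subdomain is over quota.
    bool Update(uint64_t usedBytes) {
        if (usedBytes >= Quota.HardBytes) {
            Exceeded = true;      // crossed the hard limit
        } else if (usedBytes < Quota.SoftBytes) {
            Exceeded = false;     // fully recovered below the soft limit
        }                         // between soft and hard: keep current state
        return Exceeded;
    }

private:
    TQuota Quota;
    bool Exceeded = false;
};

int main() {
    // Overall quotas from this test's DescribeScheme result: 2800 hard, 2200 soft.
    TQuotaTracker tracker({ /*HardBytes=*/2800, /*SoftBytes=*/2200 });
    for (uint64_t used : {1000, 2900, 2500, 2100}) {
        std::cout << "used=" << used
                  << " exceeded=" << (tracker.Update(used) ? "yes" : "no") << "\n";
    }
    // Prints: no, yes, yes (still above the soft limit), no.
    return 0;
}

Running the same Update() logic once per unit_kind would model the per-pool storage_quotas independently of the database-wide pair, which appears to be the interaction this test name refers to.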
2025-04-04T17:00:58.925164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:58.925230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:58.955045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:58.955323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:58.955494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:58.973919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:58.974316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:58.974969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:58.976646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:58.982341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.983847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:58.983925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.984128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:58.984177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:58.984217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:58.984345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.991567Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:59.171080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:59.171389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.171673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:59.171935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:59.172011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.175760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.175943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:59.176182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.176247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:59.176288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:59.176343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:59.184065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.184158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:59.184206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:59.187206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.187293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.187340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.187440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.192440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:59.197408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:59.197698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:59.199001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:59.199190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:59.199264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.199659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:59.199730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:59.199947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-04T17:00:59.200074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:59.203072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:59.203140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:59.203361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:59.203414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:59.203824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:59.203929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:59.204042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.204099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.204153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:59.204198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.204243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:59.204309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:59.204361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:59.204403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:59.204501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:59.204550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:59.204592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:59.215273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.215461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:59.215500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
525 RawX2: 4294969768 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:01:02.471954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-04T17:01:02.472093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 525 RawX2: 4294969768 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:01:02.472148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-04T17:01:02.472708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:01:02.472776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-04T17:01:02.472840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:01:02.472889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-04T17:01:02.472965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-04T17:01:02.473085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-04T17:01:02.473281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-04T17:01:02.473364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-04T17:01:02.478058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:01:02.478510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:01:02.478987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:01:02.479032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:01:02.479196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-04T17:01:02.479376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:01:02.479468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-04T17:01:02.479517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-04T17:01:02.479772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:01:02.479840Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-04T17:01:02.479981Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:01:02.480026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-04T17:01:02.480075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-04T17:01:02.480960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:01:02.481106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:01:02.481147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-04T17:01:02.481176Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-04T17:01:02.481208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-04T17:01:02.510787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:01:02.510941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:01:02.510976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-04T17:01:02.511011Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:01:02.511047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-04T17:01:02.511156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T17:01:02.514576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:01:02.514651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-04T17:01:02.515092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-04T17:01:02.515306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:01:02.515348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:01:02.515394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:01:02.515426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:01:02.515461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-04T17:01:02.517890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:553:2492] message: TxId: 104 2025-04-04T17:01:02.518003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:01:02.518064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:01:02.518107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:01:02.518297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-04T17:01:02.521265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:01:02.521330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:01:02.522137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-04T17:01:02.525929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-04T17:01:02.529223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:01:02.529330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-04T17:01:02.529677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:01:02.529748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:823:2741] 2025-04-04T17:01:02.530517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-04T17:01:02.532381Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-04T17:01:02.532634Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 293us result status StatusSuccess 2025-04-04T17:01:02.533144Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 
ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:03.850285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:03.850412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.850456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:03.850491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:03.850554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:03.850585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:03.850640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.850766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:03.851092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:03.926008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:03.926066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:03.945646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:03.945944Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:03.946118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:03.959849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:03.960224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:03.960849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.965907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:03.972347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.973649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:03.973710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.973883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:03.973928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.973961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:03.974113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.979878Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:04.106287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:04.106528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.106734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:04.106910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:04.106964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.109280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.109452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:04.109690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.109747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:04.109780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:04.109838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:04.112149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.112217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:04.112250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:04.114418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.114477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.114557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.114634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.118638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:04.121006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:04.121235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:04.122375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.122540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:04.122617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.123002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:04.123062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.123277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:04.123379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:04.125828Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.125876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.126064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.126133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:04.126526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.126579Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:04.126735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.126782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.126828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.126872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.126908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:04.126963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.127002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:04.127029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:04.127095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:04.127128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:04.127160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:04.129057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.129171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.129206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
osedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:01:06.167730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 434 RawX2: 8589936989 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-04-04T17:01:06.167785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:06.167821Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:01:06.167865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-04T17:01:06.167921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-04T17:01:06.167949Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2025-04-04T17:01:06.170098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:01:06.170632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:01:06.170736Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2025-04-04T17:01:06.170801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:01:06.170845Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2025-04-04T17:01:06.170937Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-04T17:01:06.170977Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 2025-04-04T17:01:06.174571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:01:06.174646Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-04T17:01:06.174795Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:01:06.174836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:01:06.174886Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:01:06.174928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:01:06.174996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-04T17:01:06.175080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:642:2564] message: TxId: 106 2025-04-04T17:01:06.175146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 
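
The records above trace the schemeshard's sub-operation accounting: each part reports "Part operation is done ... progress is done/total", IsReadyToDone compares completed parts against the total, and only once scheme-board publication is acknowledged ("is published: true") does the operation notify its waiter with TEvNotifyTxCompletionResult. A minimal sketch of that bookkeeping follows; the names (Operation, PartDone, IsReadyToNotify) are illustrative assumptions, not YDB's actual classes.

    // Minimal sketch (hypothetical types, not the schemeshard's real code) of the
    // "ready parts" bookkeeping visible in the log: an operation with N parts
    // becomes ready to notify its waiter only when every part is done and all
    // scheme-board publications have been acknowledged.
    #include <cstdio>
    #include <cstddef>

    struct Operation {
        unsigned long long TxId;
        std::size_t TotalParts;
        std::size_t DoneParts = 0;
        bool Published = false;  // set once every TEvUpdateAck has arrived

        void PartDone() {
            ++DoneParts;
            std::printf("Part operation is done id#%llu progress is %zu/%zu\n",
                        TxId, DoneParts, TotalParts);
        }
        bool IsReadyToDone() const { return DoneParts == TotalParts; }
        bool IsReadyToNotify() const { return IsReadyToDone() && Published; }
    };

    int main() {
        Operation op{/*TxId=*/106, /*TotalParts=*/1};
        op.Published = true;   // "is published: true" in the log
        op.PartDone();         // prints progress 1/1
        if (op.IsReadyToNotify())
            std::printf("DoNotify send TEvNotifyTxCompletionResult, TxId: %llu\n",
                        op.TxId);
        return 0;
    }

Running the sketch reproduces the same ordering seen in the log: progress reaches total, publication is confirmed, and only then is the completion notification sent.
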
2025-04-04T17:01:06.175233Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T17:01:06.175265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-04T17:01:06.175428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-04T17:01:06.175470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:01:06.181425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:01:06.181551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:819:2717] TestWaitNotification: OK eventTxId 106 2025-04-04T17:01:06.182493Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:06.182809Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 351us result status StatusSuccess 2025-04-04T17:01:06.183266Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:06.184197Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:06.184471Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 301us result status StatusSuccess 2025-04-04T17:01:06.184878Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:06.185661Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:06.185865Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusSuccess 2025-04-04T17:01:06.186395Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } 
ChildrenExist: true } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:00.203571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:00.203689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.203755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:00.203820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:00.203875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:00.203903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:00.203970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:00.204055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:00.204417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:00.298308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:00.298393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:00.313078Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:00.313346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:00.313522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:00.324340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:00.324610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:00.325163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.326267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:00.331426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.332820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.332911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.333161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:00.333217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.333264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:00.333410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.342072Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:00.480582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:00.480832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.481097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:00.481356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:00.481441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.483824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.483969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:00.484153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T17:01:00.484233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:00.484274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:00.484329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:00.486376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.486437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:00.486482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:00.488796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.488848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.488891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.488979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.505990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:00.508767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:00.508983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:00.510062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:00.510217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:00.510328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.510698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:00.510759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:00.510958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:00.511070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-04T17:01:00.515876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:00.515967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:00.516177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:00.516228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:00.516608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:00.516664Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:00.516765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.516824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.516886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:00.516926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.516970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:00.517009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:00.517058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:00.517106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:00.517211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:00.517255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:00.517290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:00.519258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.519396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:00.519435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:39 2025-04-04T17:01:01.843720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:39 tabletId 72075186233409584 2025-04-04T17:01:01.843829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-04T17:01:01.843899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-04T17:01:01.844122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-04T17:01:01.844153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-04T17:01:01.844235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-04T17:01:01.844260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-04T17:01:01.844784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-04T17:01:01.844821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-04T17:01:01.845008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-04T17:01:01.845035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-04T17:01:01.845126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-04T17:01:01.845153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-04T17:01:01.845236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-04T17:01:01.845262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-04T17:01:01.845337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:01:01.845362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:01:01.845497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-04T17:01:01.845524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-04T17:01:01.845585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2025-04-04T17:01:01.845651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2025-04-04T17:01:01.845737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2025-04-04T17:01:01.845759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2025-04-04T17:01:01.850315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-04T17:01:01.850387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-04T17:01:01.850590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-04T17:01:01.850618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2025-04-04T17:01:01.850717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2025-04-04T17:01:01.850743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:25 tabletId 72075186233409570 2025-04-04T17:01:01.850830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:20 2025-04-04T17:01:01.850855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2025-04-04T17:01:01.851893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2025-04-04T17:01:01.851947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2025-04-04T17:01:01.852027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2025-04-04T17:01:01.852051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2025-04-04T17:01:01.852274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:01:01.852308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:01:01.854073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2025-04-04T17:01:01.854118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2025-04-04T17:01:01.857569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-04T17:01:01.857628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-04T17:01:01.857758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:01:01.857785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-04T17:01:01.857921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-04T17:01:01.857983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-04T17:01:01.858098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-04T17:01:01.858123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-04T17:01:01.858187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-04T17:01:01.858213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-04T17:01:01.858388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-04T17:01:01.858417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-04T17:01:01.858533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-04T17:01:01.858566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-04T17:01:01.858836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-04T17:01:01.858868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-04-04T17:01:01.859564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-04T17:01:01.859603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-04T17:01:01.859683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 
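
The paired "Deleted shardIdx" / "Close pipe to deleted shardIdx" records above show the cleanup loop that runs after a forced drop: each shard index owned by the dropped subdomain is removed from the schemeshard's bookkeeping, and the client pipe to the corresponding tablet is closed. A rough sketch of that pattern under assumed types (TShardIdx, TTabletId, and CleanupDroppedShards are hypothetical names, not the actual implementation):

    // Illustrative sketch of the drop-cleanup pattern in the log: erase each
    // owned shard index from the in-memory map and close its tablet pipe,
    // producing the paired "Deleted shardIdx" / "Close pipe" lines.
    #include <cstdio>
    #include <cstdint>
    #include <map>

    using TShardIdx = uint64_t;   // local shard index within the schemeshard
    using TTabletId = uint64_t;

    void CleanupDroppedShards(uint64_t schemeshardId,
                              std::map<TShardIdx, TTabletId>& shards) {
        for (auto it = shards.begin(); it != shards.end(); ) {
            std::printf("Deleted shardIdx %llu:%llu\n",
                        (unsigned long long)schemeshardId,
                        (unsigned long long)it->first);
            std::printf("Close pipe to deleted shardIdx %llu:%llu tabletId %llu\n",
                        (unsigned long long)schemeshardId,
                        (unsigned long long)it->first,
                        (unsigned long long)it->second);
            it = shards.erase(it);  // drop bookkeeping once the pipe is closed
        }
    }

    int main() {
        std::map<TShardIdx, TTabletId> shards{{1, 72075186233409546ULL},
                                              {2, 72075186233409547ULL}};
        CleanupDroppedShards(72057594046678944ULL, shards);
        return 0;
    }

Note the interleaving in the real log is not strictly ordered by shard index; deletions are acknowledged asynchronously, so the sketch's sequential loop is a simplification.
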
2025-04-04T17:01:01.859708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:01:01.859769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-04-04T17:01:01.859855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-04T17:01:01.859942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:01:01.860243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:01:01.860446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:01.860584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:01.860786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:01.864720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:01:01.865293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:01:01.865416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:01:01.866505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:01:01.866800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:01:01.866881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2060:3661] TestWaitNotification: OK eventTxId 103 2025-04-04T17:01:01.868078Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:01.868648Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 586us result status StatusPathDoesNotExist 2025-04-04T17:01:01.869053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:01:01.870200Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:01.870614Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 442us result status StatusPathDoesNotExist 2025-04-04T17:01:01.870899Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:58.050908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:58.051031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:58.051079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:58.051118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:58.051194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:58.051237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:58.051434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:58.051533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:58.051919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:58.141169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:58.141247Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:00:58.158318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:58.158690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:58.158879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:58.175700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:58.176046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:58.176926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:58.177975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:00:58.184983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.186478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:58.186557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.186790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:58.186847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:58.186890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:58.187026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.195136Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:58.374792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:58.375118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.375381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:58.375652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:58.375722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.379699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:58.379916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:58.380171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.380257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:58.380302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:58.380352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:58.388722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.388790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:58.388828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:00:58.391144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.391216Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.391277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:58.391348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.395551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:58.399234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:58.399481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:58.400634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:58.400787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:58.400843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:58.401159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:58.401223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:58.401428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:58.401514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:58.404547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:58.404604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:58.404825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:58.404880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:58.405298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:58.405360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:58.405465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:58.405499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.405541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:58.405570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.405633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:58.405691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:58.405726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:58.405756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:58.405839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:58.405882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:58.405926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:58.408068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:58.408217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:58.408257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
::TEvColumnShard::TEvProposeTransactionResult> complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.223178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.223261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.224342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.224614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.225266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.226371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.227664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.227779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.227855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.227944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.228056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.228148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.228238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.228309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.232116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.232292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.232340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:01:05.232468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:01:05.232523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:05.232565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:01:05.232600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:05.232636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:01:05.232734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2740:4005] message: TxId: 101 2025-04-04T17:01:05.232797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:01:05.232880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2025-04-04T17:01:05.232918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:01:05.234094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-04T17:01:05.234993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.237961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:01:05.238035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2741:4006] TestWaitNotification: OK eventTxId 101 2025-04-04T17:01:05.238741Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:05.239014Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 347us result status StatusSuccess 2025-04-04T17:01:05.239780Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { 
ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743786065.240473 268097 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-04-04T17:01:05.243476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:05.243718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:01:05.244211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-04-04T17:01:05.247195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:05.247418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:03.864801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:03.864903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.864956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:03.864989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:03.865030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:03.865056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:03.865116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:03.865201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:03.865528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:03.948602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:03.948675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:03.960919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:03.961218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:03.961375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:03.972905Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:03.973259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:03.973987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:03.975019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:03.986207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.987968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:03.988082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:03.988314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:03.988377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:03.988427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:03.988583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:03.997963Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:04.178453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:04.178841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.179124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:04.179364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:04.179443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.182303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.182481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:04.182730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.182797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:04.182838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-04-04T17:01:04.182893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:04.185333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.185401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:04.185439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:04.187809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.187883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.187933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.188007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.191866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:04.194453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:04.194757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:04.195884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.196045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:04.196114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.196431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:04.196501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:04.196698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:04.196825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:04.199599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.199676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.199883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.199929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:04.200298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.200359Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:04.200466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.200520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.200578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:04.200620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.200680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:04.200731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:04.200769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:04.200805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:04.200908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:04.200960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:04.201007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:04.203295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.203433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:04.203475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:01:04.267642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:01:04.268006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:04.268166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:04.268238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:04.268483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:01:04.268549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:04.268735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:04.268808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:04.268871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:04.271510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:04.271566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:04.271722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:04.271845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:04.271881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:01:04.271933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:01:04.272281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:04.272344Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:01:04.272459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:01:04.272499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 
2025-04-04T17:01:04.272541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:01:04.272572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:04.272608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:01:04.272655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:04.272693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:01:04.272731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:01:04.272806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:04.272870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-04T17:01:04.272921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:01:04.272954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:01:04.273653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:04.273750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:04.273782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:04.273827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:01:04.273879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:04.275520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:04.275599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:04.275628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:04.275658Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:01:04.275703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:04.275790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-04T17:01:04.278191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 
100 2025-04-04T17:01:04.279581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-04T17:01:04.279812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:01:04.279866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-04T17:01:04.280314Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:01:04.280427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:04.280474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:309:2300] TestWaitNotification: OK eventTxId 100 2025-04-04T17:01:04.280952Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:04.281194Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 236us result status StatusSuccess 2025-04-04T17:01:04.281613Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:04.282073Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:04.282224Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 151us result status StatusSuccess 2025-04-04T17:01:04.282599Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: c[def1] ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) 2025-04-04T16:58:02.371019Z node 5 :BS_NODE 
DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:58:02.374766Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:02.374987Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:58:02.375927Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:284:2079] ControllerId# 72057594037932033 2025-04-04T16:58:02.375973Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:58:02.376085Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:58:02.376411Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:58:02.377604Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:58:02.377654Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:58:02.379733Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:283:2078] Create Queue# [5:290:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.379885Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:283:2078] Create Queue# [5:291:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.380049Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:283:2078] Create Queue# [5:292:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.380201Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:283:2078] Create Queue# [5:293:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.380337Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:283:2078] Create Queue# [5:294:2087] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.380492Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:283:2078] Create Queue# [5:295:2088] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.380624Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:283:2078] Create Queue# [5:296:2089] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.380655Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:58:02.380721Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:284:2079] 2025-04-04T16:58:02.380760Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:284:2079] 2025-04-04T16:58:02.380801Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:58:02.380839Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:58:02.381361Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:58:02.381498Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:58:02.384147Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { 
NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:02.384259Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:58:02.385043Z node 6 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:303:2080] ControllerId# 72057594037932033 2025-04-04T16:58:02.385093Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:58:02.385154Z node 6 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:58:02.385320Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:58:02.386126Z node 6 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:58:02.386164Z node 6 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:58:02.388648Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:302:2079] Create Queue# [6:309:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.388815Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:302:2079] Create Queue# [6:310:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.388946Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:302:2079] Create Queue# [6:311:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.389114Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:302:2079] Create Queue# [6:312:2087] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.389245Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:302:2079] Create Queue# [6:313:2088] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.389412Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:302:2079] Create Queue# [6:314:2089] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.389531Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:302:2079] Create Queue# [6:315:2090] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.389564Z node 6 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:58:02.389631Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [6:303:2080] 2025-04-04T16:58:02.389672Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [6:303:2080] 2025-04-04T16:58:02.389712Z node 6 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:58:02.389750Z node 6 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:58:02.390145Z node 6 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:58:02.390303Z node 7 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:58:02.392532Z node 7 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:02.392639Z node 7 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:58:02.393185Z node 7 
:BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [7:324:2080] ControllerId# 72057594037932033 2025-04-04T16:58:02.393207Z node 7 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:58:02.393251Z node 7 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:58:02.393390Z node 7 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:58:02.394161Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:58:02.396954Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:02.397162Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:58:02.397770Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:58:02.399046Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:58:02.399117Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:58:02.400145Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:335:2083] ControllerId# 72057594037932033 2025-04-04T16:58:02.400184Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:58:02.400263Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:58:02.400496Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:58:02.424426Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:58:02.424479Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:58:02.426290Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:334:2082] Create Queue# [1:343:2088] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:02.426489Z node 1 :BS_PROXY DEBUG: Group# 0 Acto ... 
de 11 :BS_PROXY_PUT DEBUG: [4f1778a7321dabfc] Result# TEvPutResult {Id# [72057594037927937:2:493:0:0:244:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12
2025-04-04T17:01:03.393005Z node 11 :BS_PROXY_PUT INFO: [4f1778a7321dabfc] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:493:0:0:244:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21
2025-04-04T17:01:03.393128Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.247 sample PartId# [72057594037927937:2:493:0:0:244:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 10.323 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] }
2025-04-04T17:01:03.393999Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:493:0:0:244:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955}
2025-04-04T17:01:03.399383Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} committed cookie 1 for step 493
2025-04-04T17:01:03.401173Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1495, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups
2025-04-04T17:01:03.401269Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1495, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0}
2025-04-04T17:01:03.401524Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1495, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{998, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb}
2025-04-04T17:01:03.401634Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:494} Tx{1495, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0}
2025-04-04T17:01:03.401780Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1287:2641]
2025-04-04T17:01:03.401822Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [11:1287:2641]
2025-04-04T17:01:03.401882Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1229:2603] EventType# 268637702
c[def1] (per-node usage bar chart, 11 rows; fill fractions: 0.046, 0.064, 0.066, 0.054, 0.046, 0.056, 0.058, 0.06, 0.046, 0.052, 0.052)
2025-04-04T17:01:03.506572Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1496, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups
2025-04-04T17:01:03.506682Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1496, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0}
2025-04-04T17:01:03.506810Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005526432}: tablet 72075186224037959 wasn't changed
2025-04-04T17:01:03.506848Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005526432}: tablet 72075186224037959 skipped channel 0
2025-04-04T17:01:03.506936Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005526432}: tablet 72075186224037959 skipped channel 1
2025-04-04T17:01:03.506972Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005526432}: tablet 72075186224037959 skipped channel 2
2025-04-04T17:01:03.507044Z node 11 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005526432}(72075186224037959)::Execute - TryToBoot was not successful
2025-04-04T17:01:03.507118Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1496, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{999, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb}
2025-04-04T17:01:03.507169Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1496, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0}
2025-04-04T17:01:03.526830Z node 11 :BS_PROXY_PUT INFO: [409dbd935e372960] bootstrap ActorId# [11:11652:6241] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:494:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13
2025-04-04T17:01:03.526985Z node 11 :BS_PROXY_PUT DEBUG: [409dbd935e372960] Id# [72057594037927937:2:494:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51
2025-04-04T17:01:03.527032Z node 11 :BS_PROXY_PUT DEBUG: [409dbd935e372960] restore Id# [72057594037927937:2:494:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55
2025-04-04T17:01:03.527081Z node 11 :BS_PROXY_PUT DEBUG: [409dbd935e372960] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:494:0:0:246:1] Marker# BPG33
2025-04-04T17:01:03.527115Z node 11 :BS_PROXY_PUT DEBUG: [409dbd935e372960] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:494:0:0:246:1] Marker# BPG32
2025-04-04T17:01:03.527255Z node 11 :BS_PROXY DEBUG: Send to queueActorId# [11:452:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:494:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0
2025-04-04T17:01:03.535574Z node 11 :BS_PROXY_PUT DEBUG: [409dbd935e372960] received {EvVPutResult Status# OK ID# [72057594037927937:2:494:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 511 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 512 }}}} from# [0:1:0:0:0] Marker# BPP01
2025-04-04T17:01:03.535731Z node 11 :BS_PROXY_PUT DEBUG: [409dbd935e372960] Result# TEvPutResult {Id# [72057594037927937:2:494:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12
2025-04-04T17:01:03.535788Z node 11 :BS_PROXY_PUT INFO: [409dbd935e372960] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:494:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21
2025-04-04T17:01:03.535908Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.831 sample PartId# [72057594037927937:2:494:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 9.212 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] }
2025-04-04T17:01:03.536684Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:494:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955}
2025-04-04T17:01:03.537072Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} committed cookie 1 for step 494
2025-04-04T17:01:03.538162Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1497, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups
2025-04-04T17:01:03.538205Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1497, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0}
2025-04-04T17:01:03.538763Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1497, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{1000, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb}
2025-04-04T17:01:03.538826Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:495} Tx{1497, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0}
2025-04-04T17:01:03.538959Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1287:2641]
2025-04-04T17:01:03.539013Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [11:1287:2641]
2025-04-04T17:01:03.539078Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1229:2603] EventType# 268637702
c[def1] (per-node usage bar chart, 11 rows; identical fill fractions: 0.046, 0.064, 0.066, 0.054, 0.046, 0.056, 0.058, 0.06, 0.046, 0.052, 0.052)
2025-04-04T17:01:03.641900Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1498, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups
2025-04-04T17:01:03.642024Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1498, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0}
2025-04-04T17:01:03.642157Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005527328}: tablet 72075186224037967 wasn't changed
2025-04-04T17:01:03.642202Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005527328}: tablet 72075186224037967 skipped channel 0
2025-04-04T17:01:03.642310Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005527328}: tablet 72075186224037967 skipped channel 1
2025-04-04T17:01:03.642348Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005527328}: tablet 72075186224037967 skipped channel 2
2025-04-04T17:01:03.642416Z node 11 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005527328}(72075186224037967)::Execute - TryToBoot was not successful
2025-04-04T17:01:03.645320Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1498, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{1001, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb}
2025-04-04T17:01:03.645393Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1498, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0}
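Editor's note: the BS_PROXY_PUT markers above trace one blob put end to end (BPP13 bootstrap, BPG32 send TEvVPut, BPP01 receive TEvVPutResult, BPP12/BPP21 reply), and the BPP72 "Query history" records send/receive offsets in milliseconds. A minimal sketch of reading that history follows; the struct and names are invented for illustration, not YDB's real classes.

    // Computes the VDisk round trip from a BPP72-style history, e.g.
    // TEvVPut at 1.247 ms and TEvVPutResult at 10.323 ms for blob
    // [72057594037927937:2:493:0:0:244:0] give roughly 9.1 ms on disk.
    #include <iostream>
    #include <vector>

    struct THistoryEntry {
        double TimestampMs;  // offset from put bootstrap ("TimestampMs# 1.247")
        bool IsResult;       // false = TEvVPut sent, true = TEvVPutResult received
    };

    double VDiskRoundTripMs(const std::vector<THistoryEntry>& history) {
        double sent = 0, got = 0;
        for (const auto& e : history)
            (e.IsResult ? got : sent) = e.TimestampMs;  // keep last of each kind
        return got - sent;
    }

    int main() {
        std::vector<THistoryEntry> history{{1.247, false}, {10.323, true}};
        std::cout << VDiskRoundTripMs(history) << " ms\n";  // prints 9.076
    }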
>> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD]
>> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull
>> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:01:05.063869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:01:05.063956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:01:05.063997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:01:05.064132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:01:05.064171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:01:05.064193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:01:05.064239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:01:05.064317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:01:05.064659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:01:05.140117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:01:05.140164Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:01:05.156453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:01:05.156742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:01:05.156914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:01:05.167303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:01:05.167638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:01:05.168193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:01:05.169182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:01:05.175272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:01:05.176464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:01:05.176521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:01:05.176701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:01:05.176748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:01:05.176798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:01:05.176929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.183648Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:01:05.313063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:01:05.313298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.313604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:01:05.313851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:01:05.313925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.316962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:01:05.317146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:01:05.317369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.317434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:01:05.317469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:01:05.317521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:01:05.319678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.319747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:01:05.319783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:01:05.321600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.321637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.321677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:01:05.321742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:01:05.325381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:01:05.329921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:01:05.330150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:01:05.331254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:01:05.331394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:01:05.331457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:01:05.331784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:01:05.331845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:01:05.332044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:01:05.332148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:01:05.334428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:01:05.334478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:01:05.334745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:01:05.334802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:01:05.335145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:01:05.335196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:01:05.335292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:01:05.335340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:01:05.335379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:01:05.335424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:01:05.335462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:01:05.335506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:01:05.335542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:01:05.335572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:01:05.335659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:01:05.335718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:01:05.335760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:01:05.337685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:01:05.337817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:01:05.337855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ...
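Editor's note: the "Change state for txid 1:0 2 -> 3 -> 128 -> 240" lines above are a suboperation walking schemeshard's per-part state machine. A rough model follows, inferred only from these log lines; the enum names are guesses tied to the ProgressState handlers that log each transition, not YDB's real definitions.

    // Numeric states taken from the log: 2 -> 3 -> 128 -> 240 for the
    // AlterSubDomain op, and 137 -> 129 -> 240 for the drop-table path.
    #include <cstdio>

    enum class ESubOpState : int {
        CreateParts       = 2,    // TCreateParts ProgressState
        ConfigureParts    = 3,    // NSubDomainState::TConfigureParts
        Propose           = 128,  // NSubDomainState::TPropose, waits for the plan step
        ProposedWaitParts = 129,  // NTableState::TProposedWaitParts
        Done              = 240,  // TDone: "Part operation is done", parts 1/1
    };

    void LogTransition(int txId, ESubOpState from, ESubOpState to) {
        std::printf("Change state for txid %d:0 %d -> %d\n",
                    txId, static_cast<int>(from), static_cast<int>(to));
    }

    int main() {
        LogTransition(1, ESubOpState::CreateParts, ESubOpState::ConfigureParts);
        LogTransition(1, ESubOpState::ConfigureParts, ESubOpState::Propose);
        LogTransition(1, ESubOpState::Propose, ESubOpState::Done);
    }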
186233409548 State: 5 TxId: 103 Step: 0 Generation: 2
2025-04-04T17:01:08.257859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0
2025-04-04T17:01:08.258006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2
2025-04-04T17:01:08.258057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944
2025-04-04T17:01:08.261840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:01:08.261966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944
2025-04-04T17:01:08.262036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1
2025-04-04T17:01:08.262078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1
2025-04-04T17:01:08.262163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944
2025-04-04T17:01:08.262325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129
2025-04-04T17:01:08.262481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-04T17:01:08.262571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-04T17:01:08.263837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:01:08.263992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:01:08.265580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:01:08.265653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:01:08.265859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-04T17:01:08.266049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:01:08.266088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2
2025-04-04T17:01:08.266141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3
2025-04-04T17:01:08.266534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:01:08.266590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944
2025-04-04T17:01:08.266708Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:01:08.266753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944
2025-04-04T17:01:08.266810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240
2025-04-04T17:01:08.268161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:01:08.268277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:01:08.268310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-04T17:01:08.268346Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8
2025-04-04T17:01:08.268442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-04-04T17:01:08.269701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:01:08.269827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:01:08.269867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-04T17:01:08.269894Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-04-04T17:01:08.269921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-04-04T17:01:08.269985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true
2025-04-04T17:01:08.274541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:01:08.274617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:01:08.275037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-04T17:01:08.275298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-04-04T17:01:08.275365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:01:08.275410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-04-04T17:01:08.275437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:01:08.275474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true
2025-04-04T17:01:08.275548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103
2025-04-04T17:01:08.275606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:01:08.275640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0
2025-04-04T17:01:08.275671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0
2025-04-04T17:01:08.275757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-04T17:01:08.276177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:01:08.276221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:01:08.277758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-04T17:01:08.278336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-04T17:01:08.280386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:01:08.280439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2
2025-04-04T17:01:08.280582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-04-04T17:01:08.280640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:735:2670]
2025-04-04T17:01:08.281560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0
TestWaitNotification: OK eventTxId 103
2025-04-04T17:01:08.283302Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:01:08.283576Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 244us result status StatusSuccess
2025-04-04T17:01:08.284051Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
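Editor's note: the DatabaseQuotas block above pairs an overall hard/soft quota (2800/2200) with per-pool-kind quotas (fast_kind 600/500, large_kind 2200/1700). A simplified sketch of that hard/soft pattern follows; the types and function are invented for illustration, and YDB's real enforcement lives in schemeshard, not here. The usual reading of such pairs: crossing a hard quota rejects writes, and the soft quota is the hysteresis level below which a frozen database is released.

    #include <cstdint>
    #include <map>
    #include <string>

    struct TQuota { uint64_t Hard = 0, Soft = 0; };  // bytes; 0 = unlimited

    struct TSubdomainQuotas {
        TQuota Total;                               // 2800 / 2200 in the log
        std::map<std::string, TQuota> PerPoolKind;  // "fast_kind": 600/500, etc.
    };

    // Returns true if new writes should be rejected for the given usage.
    bool ShouldReject(const TSubdomainQuotas& q,
                      uint64_t totalUsed,
                      const std::map<std::string, uint64_t>& usedPerKind) {
        if (q.Total.Hard && totalUsed >= q.Total.Hard)
            return true;
        for (const auto& [kind, used] : usedPerKind) {
            auto it = q.PerPoolKind.find(kind);
            if (it != q.PerPoolKind.end() && it->second.Hard && used >= it->second.Hard)
                return true;  // one pool kind over its hard quota is enough
        }
        return false;
    }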
|81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
|81.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
|81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD]
Test command err:
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:47.252733Z
00000.020 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap
00000.023 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.024 II| FAKE_ENV: Starting storage for BS group 0
00000.024 II| FAKE_ENV: Starting storage for BS group 1
00000.025 II| FAKE_ENV: Starting storage for BS group 2
00000.025 II| FAKE_ENV: Starting storage for BS group 3
00000.051 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:30:2062]) priority=200 resources={1, 0}
00000.051 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction
00000.051 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:30:2062]) from queue queue_background_compaction
00000.051 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction
00000.051 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:30:2062]))
00000.058 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:30:2062]) (release resources {1, 0})
00000.058 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:30:2062]))
00000.060 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors
00000.060 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b}
00000.060 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.060 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10}
00000.060 II| FAKE_ENV: DS.1 gone, left {1327b, 10}, put {1327b, 10}
00000.060 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.060 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.060 II| FAKE_ENV: All BS storage groups are stopped
00000.060 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.060 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:47.357574Z
00000.019 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap
00000.021 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.021 II| FAKE_ENV: Starting storage for BS group 0
00000.022 II| FAKE_ENV: Starting storage for BS group 1
00000.022 II| FAKE_ENV: Starting storage for BS group 2
00000.022 II| FAKE_ENV: Starting storage for BS group 3
00000.030 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [2:8:2055]) priority=0 resources={1, 0}
00000.030 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction
00000.030 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [2:8:2055]) from queue queue_background_compaction
00000.030 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction
00000.030 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [2:8:2055]))
00000.032 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [2:30:2062]) priority=200 resources={1, 0}
00000.032 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_background_compaction
00000.032 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits
00000.039 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [2:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0)
00000.039 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0
00000.040 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [2:30:2062]) from queue queue_compaction_gen0
00000.040 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0
00000.040 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [2:30:2062]))
00000.043 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [2:30:2062]) (release resources {1, 0})
00000.043 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [2:30:2062]))
00000.044 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors
00000.044 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b}
00000.044 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.044 II| FAKE_ENV: DS.0 gone, left {1262b, 14}, put {1282b, 15}
00000.044 II| FAKE_ENV: DS.1 gone, left {1868b, 15}, put {1868b, 15}
00000.044 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.044 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.045 II| FAKE_ENV: All BS storage groups are stopped
00000.045 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.045 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 31}, stopped
00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:47.423504Z
00000.011 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap
00000.013 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144
00000.014 II| FAKE_ENV: Starting storage for BS group 0
00000.014 II| FAKE_ENV: Starting storage for BS group 1
00000.014 II| FAKE_ENV: Starting storage for BS group 2
00000.014 II| FAKE_ENV: Starting storage for BS group 3
00000.022 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [3:8:2055]) priority=0 resources={1, 0}
00000.022 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction
00000.022 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [3:8:2055]) from queue queue_background_compaction
00000.022 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction
00000.022 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [3:8:2055]))
00000.029 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [3:30:2062]) priority=200 resources={1, 0}
00000.029 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_background_compaction
00000.029 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits
00000.030 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0)
00000.030 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0
00000.030 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [3:30:2062]) from queue queue_compaction_gen0
00000.030 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0
00000.030 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [3:30:2062]))
00000.035 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [3:30:2062]) (release resources {1, 0})
00000.035 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [3:30:2062]))
00000.038 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (2 by [3:30:2062]) priority=200 resources={1, 0}
00000.038 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_background_compaction
00000.038 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits
00000.039 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (2 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0)
00000.039 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0
00000.039 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [3:30:2062]) from queue queue_compaction_gen0
00000.039 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0
00000.039 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.750000 (insert task gen0-table-101-tablet-1 (2 by [3:30:2062]))
00000.065 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [3:30:2062]) (release resources {1, 0})
00000.065 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.750000 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [3:30:2062]))
00000.067 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (3 by [3:30:2062]) priority=200 resources={1, 0}
00000.067 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_background_compaction
00000.067 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits
00000.068 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (3 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0)
00000.068 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0
00000.068 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (3 by [3:30:2062]) from queue queue_compaction_gen0
00000.068 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0
00000.068 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.500000 (insert task gen0-table-101-tablet-1 (3 by [3:30:2062]))
00000.074 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [3:30:2062]) (release resources {1, 0})
00000.074 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.500000 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [3:30:2062]))
00000.077 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (4 by [3:30:2062]) priority=200 resources={1, 0}
00000.077 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_background_compaction
00000.077 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits
00000.078 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (4 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0)
00000.078 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0
00000.078 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (4 by [3:30:2062]) from queue queue_compaction_gen0
00000.078 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0
00000.078 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.250000 (insert task gen0-table-101-tablet-1 (4 by [3:30:2062]))
00000.082 DD| R ...
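Editor's note: the RESOURCE_BROKER lines above repeat one pattern: a gen0 compaction is first submitted to queue_background_compaction at priority 200, the queue is skipped "due to exceeded limits" (the long-lived bckg-block task holds 300 units of planned usage), and the task is then resubmitted as type compaction_gen0 at priority 5, where it runs immediately. A toy version of that dance, with invented types, is sketched below.

    #include <iostream>
    #include <string>

    struct TTask {
        std::string Queue;
        int Priority;
    };

    // Mirrors "Update task ... (priority=5 type=compaction_gen0 resubmit=0)":
    // starved background work is promoted to the dedicated gen0 queue.
    void ResubmitIfStarved(TTask& t, bool backgroundQueueSaturated) {
        if (t.Queue == "queue_background_compaction" && backgroundQueueSaturated) {
            t.Queue = "queue_compaction_gen0";
            t.Priority = 5;  // was 200; lower value = more urgent in this log
        }
    }

    int main() {
        TTask gen0{"queue_background_compaction", 200};
        ResubmitIfStarved(gen0, /*backgroundQueueSaturated=*/true);
        std::cout << gen0.Queue << " prio " << gen0.Priority << "\n";
    }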
eader{1:2:97} starting compaction
00000.062 II| TABLET_EXECUTOR: Leader{1:2:98} starting Scan{15 on 101, Compact{1.2.97, eph 8}}
00000.062 II| TABLET_EXECUTOR: Leader{1:2:98} started compaction 15
00000.062 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} begin on TSubset{head 9, 1m 1p 0c}
00000.064 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} end=0, 80r seen, TFwd{fetch=1.99KiB,saved=1.99KiB,usage=1.99KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.065 II| TABLET_EXECUTOR: Leader{1:2:99} Compact 15 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 97, product {1 parts epoch 9} done
00000.069 II| TABLET_EXECUTOR: Leader{1:2:109} starting compaction
00000.069 II| TABLET_EXECUTOR: Leader{1:2:110} starting Scan{17 on 101, Compact{1.2.109, eph 9}}
00000.069 II| TABLET_EXECUTOR: Leader{1:2:110} started compaction 17
00000.069 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} begin on TSubset{head 10, 1m 1p 0c}
00000.071 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} end=0, 90r seen, TFwd{fetch=2.27KiB,saved=2.27KiB,usage=2.27KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.072 II| TABLET_EXECUTOR: Leader{1:2:111} Compact 17 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 109, product {1 parts epoch 10} done
00000.076 II| TABLET_EXECUTOR: Leader{1:2:121} starting compaction
00000.076 II| TABLET_EXECUTOR: Leader{1:2:122} starting Scan{19 on 101, Compact{1.2.121, eph 10}}
00000.076 II| TABLET_EXECUTOR: Leader{1:2:122} started compaction 19
00000.076 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} begin on TSubset{head 11, 1m 1p 0c}
00000.078 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} end=0, 100r seen, TFwd{fetch=2.54KiB,saved=2.54KiB,usage=2.54KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.079 II| TABLET_EXECUTOR: Leader{1:2:123} Compact 19 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 121, product {1 parts epoch 11} done
00000.083 II| TABLET_EXECUTOR: Leader{1:2:133} starting compaction
00000.083 II| TABLET_EXECUTOR: Leader{1:2:134} starting Scan{21 on 101, Compact{1.2.133, eph 11}}
00000.083 II| TABLET_EXECUTOR: Leader{1:2:134} started compaction 21
00000.083 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} begin on TSubset{head 12, 1m 1p 0c}
00000.085 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} end=0, 110r seen, TFwd{fetch=2.81KiB,saved=2.81KiB,usage=2.81KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.090 II| TABLET_EXECUTOR: Leader{1:2:135} Compact 21 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 133, product {1 parts epoch 12} done
00000.092 II| TABLET_EXECUTOR: Leader{1:2:137} starting Scan{24 on 101, DummyScan}
00000.092 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} begin on TSubset{head 12, 1m 1p 0c}
00000.095 II| TABLET_EXECUTOR: Leader{1:2:146} starting compaction
00000.095 II| TABLET_EXECUTOR: Leader{1:2:147} starting Scan{25 on 101, Compact{1.2.146, eph 12}}
00000.095 II| TABLET_EXECUTOR: Leader{1:2:147} started compaction 25
00000.095 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} begin on TSubset{head 13, 1m 1p 0c}
00000.097 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} end=0, 120r seen, TFwd{fetch=3.09KiB,saved=3.09KiB,usage=3.09KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.098 II| TABLET_EXECUTOR: Leader{1:2:148} Compact 25 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 146, product {1 parts epoch 13} done
00000.104 II| TABLET_EXECUTOR: Leader{1:2:158} starting compaction
00000.104 II| TABLET_EXECUTOR: Leader{1:2:159} starting Scan{27 on 101, Compact{1.2.158, eph 13}}
00000.104 II| TABLET_EXECUTOR: Leader{1:2:159} started compaction 27
00000.104 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} begin on TSubset{head 14, 1m 1p 0c}
00000.106 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} end=0, 130r seen, TFwd{fetch=3.5KiB,saved=3.5KiB,usage=3.5KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.108 II| TABLET_EXECUTOR: Leader{1:2:160} Compact 27 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 158, product {1 parts epoch 14} done
00000.111 II| TABLET_EXECUTOR: Leader{1:2:170} starting compaction
00000.111 II| TABLET_EXECUTOR: Leader{1:2:171} starting Scan{29 on 101, Compact{1.2.170, eph 14}}
00000.111 II| TABLET_EXECUTOR: Leader{1:2:171} started compaction 29
00000.112 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} begin on TSubset{head 15, 1m 1p 0c}
00000.114 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} end=0, 140r seen, TFwd{fetch=3.93KiB,saved=3.93KiB,usage=3.93KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.115 II| TABLET_EXECUTOR: Leader{1:2:172} Compact 29 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 170, product {1 parts epoch 15} done
00000.119 II| TABLET_EXECUTOR: Leader{1:2:182} starting compaction
00000.119 II| TABLET_EXECUTOR: Leader{1:2:183} starting Scan{31 on 101, Compact{1.2.182, eph 15}}
00000.119 II| TABLET_EXECUTOR: Leader{1:2:183} started compaction 31
00000.119 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} begin on TSubset{head 16, 1m 1p 0c}
00000.121 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} end=0, 150r seen, TFwd{fetch=4.36KiB,saved=4.36KiB,usage=4.36KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.127 II| TABLET_EXECUTOR: Leader{1:2:184} Compact 31 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 182, product {1 parts epoch 16} done
00000.130 II| TABLET_EXECUTOR: Leader{1:2:194} starting compaction
00000.130 II| TABLET_EXECUTOR: Leader{1:2:195} starting Scan{33 on 101, Compact{1.2.194, eph 16}}
00000.130 II| TABLET_EXECUTOR: Leader{1:2:195} started compaction 33
00000.130 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} begin on TSubset{head 17, 1m 1p 0c}
00000.132 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} end=0, 160r seen, TFwd{fetch=4.79KiB,saved=4.79KiB,usage=4.79KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.134 II| TABLET_EXECUTOR: Leader{1:2:195} Compact 33 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 194, product {1 parts epoch 17} done
00000.138 II| TABLET_EXECUTOR: Leader{1:2:206} starting compaction
00000.139 II| TABLET_EXECUTOR: Leader{1:2:207} starting Scan{35 on 101, Compact{1.2.206, eph 17}}
00000.139 II| TABLET_EXECUTOR: Leader{1:2:207} started compaction 35
00000.139 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} begin on TSubset{head 18, 1m 1p 0c}
00000.141 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} end=0, 170r seen, TFwd{fetch=5.22KiB,saved=5.22KiB,usage=5.22KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.143 II| TABLET_EXECUTOR: Leader{1:2:208} Compact 35 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 206, product {1 parts epoch 18} done
00000.146 II| TABLET_EXECUTOR: Leader{1:2:218} starting compaction
00000.146 II| TABLET_EXECUTOR: Leader{1:2:219} starting Scan{37 on 101, Compact{1.2.218, eph 18}}
00000.146 II| TABLET_EXECUTOR: Leader{1:2:219} started compaction 37
00000.146 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} begin on TSubset{head 19, 1m 1p 0c}
00000.149 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} end=0, 180r seen, TFwd{fetch=5.65KiB,saved=5.65KiB,usage=5.65KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.150 II| TABLET_EXECUTOR: Leader{1:2:219} Compact 37 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 218, product {1 parts epoch 19} done
00000.154 II| TABLET_EXECUTOR: Leader{1:2:230} starting compaction
00000.154 II| TABLET_EXECUTOR: Leader{1:2:231} starting Scan{39 on 101, Compact{1.2.230, eph 19}}
00000.154 II| TABLET_EXECUTOR: Leader{1:2:231} started compaction 39
00000.154 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} begin on TSubset{head 20, 1m 1p 0c}
00000.157 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} end=0, 190r seen, TFwd{fetch=6.08KiB,saved=6.08KiB,usage=6.08KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.166 II| TABLET_EXECUTOR: Leader{1:2:231} Compact 39 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 230, product {1 parts epoch 20} done
00000.170 II| TABLET_EXECUTOR: Leader{1:2:242} starting compaction
00000.170 II| TABLET_EXECUTOR: Leader{1:2:243} starting Scan{41 on 101, Compact{1.2.242, eph 20}}
00000.170 II| TABLET_EXECUTOR: Leader{1:2:243} started compaction 41
00000.170 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} begin on TSubset{head 21, 1m 1p 0c}
00000.172 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} end=0, 200r seen, TFwd{fetch=6.51KiB,saved=6.51KiB,usage=6.51KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.174 II| TABLET_EXECUTOR: Leader{1:2:244} Compact 41 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 242, product {1 parts epoch 21} done
00000.178 II| TABLET_EXECUTOR: Leader{1:2:254} starting compaction
00000.178 II| TABLET_EXECUTOR: Leader{1:2:255} starting Scan{43 on 101, Compact{1.2.254, eph 21}}
00000.178 II| TABLET_EXECUTOR: Leader{1:2:255} started compaction 43
00000.178 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} begin on TSubset{head 22, 1m 1p 0c}
00000.181 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} end=0, 210r seen, TFwd{fetch=6.94KiB,saved=6.94KiB,usage=6.94KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.182 II| TABLET_EXECUTOR: Leader{1:2:256} Compact 43 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 254, product {1 parts epoch 22} done
00000.186 II| TABLET_EXECUTOR: Leader{1:2:266} starting compaction
00000.186 II| TABLET_EXECUTOR: Leader{1:2:267} starting Scan{45 on 101, Compact{1.2.266, eph 22}}
00000.186 II| TABLET_EXECUTOR: Leader{1:2:267} started compaction 45
00000.186 II| TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} begin on TSubset{head 23, 1m 1p 0c}
00000.188 II| TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} end=0, 220r seen, TFwd{fetch=7.41KiB,saved=7.41KiB,usage=7.41KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p
00000.190 II| TABLET_EXECUTOR: Leader{1:2:268} Compact 45 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 266, product {1 parts epoch 23} done
00000.192 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} end=0, 111r seen, TFwd{fetch=3.09KiB,saved=3.09KiB,usage=3.09KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}
00000.193 II| TABLET_EXECUTOR: Leader{1:2:270} suiciding, Waste{2:0, 8733b +(262, 99237b), 269 trc, -99237b acc}
00000.196 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors
00000.196 NN| TABLET_SAUSAGECACHE: Poison cache serviced 44 reqs hit {45 78249b} miss {0 0b}
00000.196 II| FAKE_ENV: Shut order, stopping 4 BS groups
00000.196 II| FAKE_ENV: DS.0 gone, left {27018b, 269}, put {27038b, 270}
00000.197 II| FAKE_ENV: DS.1 gone, left {110468b, 290}, put {110468b, 290}
00000.198 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0}
00000.198 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0}
00000.198 II| FAKE_ENV: All BS storage groups are stopped
00000.198 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s
00000.199 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 153}, stopped
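Editor's note: the compaction trace above follows one invariant: each gen0 compaction at epoch E merges the memtable with the single existing part and emits "product {1 parts epoch E+1}", with the row count growing by one 10-row batch per epoch (80r at eph 8 through 220r at eph 22). A tiny model of that progression follows; the names are illustrative, not the executor's real types.

    #include <cassert>

    struct TPart { int Epoch; int Rows; };

    // "Compact ... {101: gen 0 epoch +inf, 1 parts} ... product {1 parts epoch E+1}"
    TPart CompactGen0(const TPart& existing, int memRows) {
        return TPart{existing.Epoch + 1, existing.Rows + memRows};
    }

    int main() {
        TPart part{8, 80};                 // matches "eph 8 ... 80r seen"
        for (int i = 0; i < 14; ++i)
            part = CompactGen0(part, 10);  // one 10-row memtable per step
        assert(part.Epoch == 22 && part.Rows == 220);  // "eph 22 ... 220r seen"
    }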
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:08.892103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:08.892405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:08.892573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:08.913297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:08.913716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:01:08.914524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:08.915712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:08.922737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:08.924363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:08.924456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:08.924684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:08.924749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:08.924793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:08.924931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:08.932701Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:09.063058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:09.063345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.063907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:09.064174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:09.064240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.073378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:09.073553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:09.073792Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.073849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:09.073894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:09.073951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:09.080822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.080907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:09.080948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:09.083291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.083352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.083403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:09.083486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:09.094542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:09.100401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:09.100673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:09.101938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:09.102126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:09.102202Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:09.102558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:09.102632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:09.102883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:09.103002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:09.111800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:09.111886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:09.112146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:09.112203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:09.118691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.118806Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:09.118947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:09.119019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:09.119128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:09.119189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:09.119238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:09.119304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:09.119354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:09.119398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:09.119540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:09.119596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:09.119638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:09.122270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:09.122463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:09.122516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
m tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:01:09.240292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:01:09.246620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:09.246861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:09.246935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:09.247257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:01:09.247325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:01:09.247577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:09.247680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:09.247782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:01:09.251254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:09.251315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:09.251615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:01:09.251771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:09.251829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:01:09.251884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-04T17:01:09.252250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:01:09.252306Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:01:09.252468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:01:09.252558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:09.252610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 
2025-04-04T17:01:09.252650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:09.252703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:01:09.252759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:01:09.252822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:01:09.252862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:01:09.252978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:09.253050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-04T17:01:09.253095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:01:09.253130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:01:09.259498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:09.259641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:09.259685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:09.259740Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:01:09.259820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:09.261224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:09.261376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:01:09.261420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:01:09.261459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:01:09.261502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:01:09.261608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-04T17:01:09.267326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:01:09.268939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-04T17:01:09.269314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:01:09.269371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-04T17:01:09.269513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:01:09.269536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:01:09.270119Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:01:09.270261Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:01:09.270334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:01:09.270385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2302] 2025-04-04T17:01:09.270539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:01:09.270566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:311:2302] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-04-04T17:01:09.271063Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:09.271331Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 263us result status StatusSuccess 2025-04-04T17:01:09.271886Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-04T17:01:09.272430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:09.272682Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 274us result status StatusPathDoesNotExist 2025-04-04T17:01:09.272858Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, b} | 2 6 86b {2, NULL} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, b} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, NULL} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, baaaa} | 2 6 86b {2, aaa} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, baaaa} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, aaa} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 
RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0, 2), [2, 4), [4, 6), [6, 7), [7, 8), [8, 9), [9, 9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, 
aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 
0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 10 ... {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 
ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 
40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 
1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} >> TSchemeShardSubDomainTest::CreateDropNbs >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> IncrementalBackup::SimpleBackup >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> KqpSysColV1::StreamInnerJoinTables >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 >> KqpSysColV0::InnerJoinTables |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |81.5%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> Worker::Basic >> KqpSysColV1::StreamInnerJoinSelect |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> 
TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> IncrementalBackup::SimpleRestore >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] >> TDataShardLocksTest::Points_OneTx |81.5%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:01:11.883819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:01:11.883986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:11.884048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:01:11.884099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:01:11.884148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:01:11.884175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:01:11.884234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:01:11.884318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:01:11.884670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:11.973625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:11.973709Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:11.989300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:11.989562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:01:11.989730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:01:12.005062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:01:12.005451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-04T17:01:12.006189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:12.008007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:01:12.016567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:12.018128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:12.018224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:01:12.018480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:01:12.018531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:12.018572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:01:12.018805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.026800Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:01:12.212841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:01:12.213141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.213426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:01:12.213654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:01:12.213721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.216917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:12.217134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:01:12.217370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.217430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:01:12.217472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:12.217528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:12.220131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.220211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:01:12.220266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:12.222485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.222537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.222573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:12.222635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:12.230155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:12.232892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:01:12.233175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:01:12.234488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:12.234699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:12.234780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:12.235091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:01:12.235149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:01:12.235351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:12.235462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:01:12.247784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:01:12.247854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:01:12.248056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-04T17:01:12.248101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:01:12.248454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:01:12.248510Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:01:12.248611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:12.248671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:12.248733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:01:12.248774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:12.248815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:01:12.248856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:01:12.248911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:01:12.248950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:01:12.249050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:01:12.249093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:01:12.249139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:01:12.251435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:12.251590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:01:12.251625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-04-04T17:01:12.550709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:01:12.550737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:01:12.550768Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:01:12.550803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:01:12.551962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:01:12.552062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:01:12.552104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:01:12.552132Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:01:12.552159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:01:12.552222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:01:12.557446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:12.557521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:12.557548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:12.557574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:01:12.558787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:01:12.560782Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:01:12.561732Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-04T17:01:12.561945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:01:12.562292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-04T17:01:12.563040Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:01:12.563315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2025-04-04T17:01:12.564116Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:01:12.564282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:01:12.564447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:01:12.565162Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-04T17:01:12.565407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:01:12.565555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2025-04-04T17:01:12.579131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:12.579221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:01:12.579316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-04T17:01:12.580572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:01:12.587021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:12.587100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:12.587252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:01:12.588325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:01:12.593154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:01:12.593230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:01:12.593329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:01:12.593360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:01:12.593452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:2 2025-04-04T17:01:12.593478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:01:12.594096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:01:12.594155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:01:12.595980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:01:12.596104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:01:12.596146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:01:12.596241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:01:12.596555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:01:12.598698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:01:12.598984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:01:12.599026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:01:12.599654Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:01:12.599783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:01:12.599819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:536:2491] TestWaitNotification: OK eventTxId 102 2025-04-04T17:01:12.616154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:12.616437Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 308us result status StatusPathDoesNotExist 2025-04-04T17:01:12.616631Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:01:12.617366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:01:12.617571Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 209us result status StatusPathDoesNotExist 2025-04-04T17:01:12.617716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] Test command err: 2025-04-04T17:00:44.019939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504033701392224:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:44.025672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/0012fe/r3tmp/tmp5HrnaC/pdisk_1.dat 2025-04-04T17:00:44.465225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:44.465376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:44.468230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:44.472324Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8228, node 1 2025-04-04T17:00:44.582853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:44.582875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:44.582879Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:44.582982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25665 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:44.898812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:48.913579Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504049786013175:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:48.914130Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012fe/r3tmp/tmpP7df6V/pdisk_1.dat 2025-04-04T17:00:49.202505Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:49.249891Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:49.249973Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:49.253025Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29590, node 4 2025-04-04T17:00:49.395074Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:49.395105Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:49.395112Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:49.395280Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:49.756260Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:49.877237Z node 4 :TX_PROXY ERROR: Actor# [4:7489504054080981364:2609] txid# 281474976715658, issues: { message: "Unknown column \'BlaBla\' specified in key column list" severity: 1 } 2025-04-04T17:00:54.210151Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504077046873861:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:54.210340Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012fe/r3tmp/tmpWQjrky/pdisk_1.dat 2025-04-04T17:00:54.412361Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:54.452741Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:54.452878Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:54.461403Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17743, node 7 2025-04-04T17:00:54.622365Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:54.622400Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:54.622409Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:54.622622Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:54.971977Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:59.810032Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504097184854311:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:59.810086Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012fe/r3tmp/tmpuqi8v6/pdisk_1.dat 2025-04-04T17:01:00.044768Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61763, node 10 2025-04-04T17:01:00.144467Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:00.144570Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:00.157433Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:00.214280Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:00.214329Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:00.214348Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:00.214547Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:00.553943Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:00.634142Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:00.886319Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:01:03.952725Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504114364725009:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:03.952725Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504114364725017:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:03.952812Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:03.958530Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T17:01:03.987823Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504114364725023:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:01:04.074183Z node 10 :TX_PROXY ERROR: Actor# [10:7489504118659692396:3078] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:04.810716Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504097184854311:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:04.810799Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:05.216792Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0vze2e024qe4w6bekp3mc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGIwNWIzM2QtZmExZjM4MjAtYmRkYjA1YTUtMTdlYmI5YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:01:05.253852Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0vze2e024qe4w6bekp3mc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGIwNWIzM2QtZmExZjM4MjAtYmRkYjA1YTUtMTdlYmI5YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:01:05.259648Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0vze2e024qe4w6bekp3mc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZGIwNWIzM2QtZmExZjM4MjAtYmRkYjA1YTUtMTdlYmI5YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:01:05.406150Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0vzfc361tpf4p9w9z1xx9q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODMyYjkzYzQtN2RhNmVjNzgtYTczMzlkY2YtYjdhNDM5YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:01:07.050592Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504132082894580:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:07.050648Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012fe/r3tmp/tmprvAIMM/pdisk_1.dat 2025-04-04T17:01:07.183016Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:07.216135Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:07.216207Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:07.218842Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63429, node 13 2025-04-04T17:01:07.296003Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:07.296031Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:07.296038Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:07.296197Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:07.607834Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:00:57.057437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:00:57.057532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:57.057582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:00:57.057617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:00:57.057674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:00:57.057704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:00:57.057802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:00:57.057869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:00:57.058212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:00:57.152430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:57.152494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:57.183546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:00:57.183860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:00:57.184048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:00:57.216226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:00:57.216618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:00:57.217373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:57.223848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-04-04T17:00:57.244211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:57.245705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:57.245775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:57.245982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:00:57.246032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:57.246073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:00:57.246277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.260720Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:00:57.407912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:00:57.408225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.408466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:00:57.408752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:00:57.408812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.413414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:57.413632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:00:57.413854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.413920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:00:57.413970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:00:57.414004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:00:57.416747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.416844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:00:57.416901Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2025-04-04T17:00:57.419252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.419320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:00:57.419361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:57.419450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.423344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:00:57.425741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:00:57.425968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:00:57.427181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:00:57.427331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:00:57.427382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:57.427667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:00:57.427716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:00:57.427905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:00:57.428019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:00:57.430779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:00:57.430837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:00:57.431045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:00:57.431088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:00:57.431482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-04T17:00:57.431551Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:00:57.431660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:57.431698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.431748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:00:57.431784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.431846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:00:57.431901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:00:57.431942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:00:57.431979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:00:57.432068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:00:57.432103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:00:57.432144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:00:57.434391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:57.434538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:00:57.434581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944 2025-04-04T17:00:57.897523Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:00:57.897640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:00:57.897683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:00:57.897742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:00:57.897777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:00:57.897829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:00:57.897922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2347] message: TxId: 103 2025-04-04T17:00:57.897969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:00:57.898009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:00:57.898038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:00:57.898144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:00:57.900179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:00:57.900239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:506:2467] TestWaitNotification: OK eventTxId 103 2025-04-04T17:01:02.212165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:01:02.212266Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:03.575926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0443 2025-04-04T17:01:03.587271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0897 2025-04-04T17:01:03.619901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-04T17:01:03.620088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T17:01:03.620200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-04-04T17:01:03.620248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-04T17:01:03.620359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-04T17:01:03.620413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-04T17:01:03.620444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:01:03.630968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:01:06.217602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0122 2025-04-04T17:01:06.228394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0237 2025-04-04T17:01:06.259404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-04T17:01:06.259631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T17:01:06.259719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-04-04T17:01:06.259760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-04T17:01:06.259857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-04T17:01:06.259905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-04T17:01:06.259932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:01:06.272527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:01:08.821617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0122 2025-04-04T17:01:08.834842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0237 2025-04-04T17:01:08.866882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-04T17:01:08.867076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T17:01:08.867176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 
2025-04-04T17:01:08.867237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-04T17:01:08.867340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-04T17:01:08.867395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-04T17:01:08.867448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:01:08.877974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:01:11.601668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0046 2025-04-04T17:01:11.612328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0075 2025-04-04T17:01:11.651656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-04-04T17:01:11.651850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T17:01:11.651927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-04-04T17:01:11.651970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-04-04T17:01:11.652087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-04T17:01:11.652142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-04-04T17:01:11.652173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:01:11.665396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:01:14.442541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-04T17:01:14.442709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:01:14.442975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T17:01:14.443238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 
WallClockTimestamp: 60024000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-04T17:01:14.443854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:01:14.444636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:01:14.444710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:01:14.455696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:01:14.455990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:01:14.456045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.024000Z, at schemeshard: 72057594046678944 2025-04-04T17:01:14.456114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] Test command err: 2025-04-04T16:59:06.703656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:06.704252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:06.704314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001322/r3tmp/tmpPJ8k1s/pdisk_1.dat 2025-04-04T16:59:07.098780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.148322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:07.192154Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:59:07.193207Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T16:59:07.193547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:07.193686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:07.205427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:07.291588Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T16:59:07.291694Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T16:59:07.291914Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T16:59:07.454602Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T16:59:07.454735Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:59:07.455376Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T16:59:07.455490Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:59:07.455878Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:07.456115Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T16:59:07.456225Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T16:59:07.456482Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T16:59:07.458151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.459244Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T16:59:07.459344Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T16:59:07.495930Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:59:07.497113Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:59:07.497651Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:59:07.497934Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:07.512615Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:59:07.575675Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:07.575789Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:59:07.577096Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:59:07.577172Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:59:07.577224Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:59:07.577508Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:59:07.577626Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:59:07.577711Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:59:07.588582Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:59:07.625654Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:59:07.625871Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:59:07.626000Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:59:07.626040Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:59:07.626090Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:59:07.626132Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:59:07.626363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T16:59:07.626418Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:59:07.626729Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:59:07.626816Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:59:07.626938Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:59:07.626995Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:07.627051Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:59:07.627093Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:59:07.627120Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:59:07.627148Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:59:07.627191Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:59:07.627289Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:07.627316Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:07.627386Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:59:07.627742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:59:07.627783Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:59:07.627888Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:59:07.628116Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:59:07.628169Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:59:07.628246Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:59:07.628291Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:59:07.628332Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:59:07.628376Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T16:59:07.628408Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:07.628660Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:59:07.628688Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:59:07.628731Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:59:07.628756Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T16:59:07.628813Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:59:07.628853Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:59:07.628897Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:59:07.628921Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:59:07.628940Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:59:07.630145Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T16:59:07.630187Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T16:59:07.640983Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... e 12 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[12:1156:2915], 1} finished in read 2025-04-04T17:01:15.135246Z node 12 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T17:01:15.135287Z node 12 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T17:01:15.135320Z node 12 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:01:15.135355Z node 12 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:01:15.135411Z node 12 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T17:01:15.135441Z node 12 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:01:15.135470Z node 12 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-04T17:01:15.135508Z node 12 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T17:01:15.135614Z node 12 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T17:01:15.136683Z node 12 :KQP_EXECUTER TRACE: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [12:1157:2916], finished: 1 2025-04-04T17:01:15.136777Z node 12 :KQP_EXECUTER TRACE: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Send ack to channelId: 2, seqNo: 2, to: [12:1157:2916] 2025-04-04T17:01:15.137988Z node 12 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [12:1156:2915], Recipient [12:980:2788]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-04T17:01:15.138058Z node 12 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } 2025-04-04T17:01:15.138433Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [12:1153:2915], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2259 DurationUs: 13000 Tasks { TaskId: 1 CpuTimeUs: 475 FinishTimeMs: 1743786075136 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 2 } IngressRows: 3 ComputeCpuTimeUs: 300 BuildCpuTimeUs: 175 WaitInputTimeUs: 11650 HostName: "ghrun-mup5cka3jq" NodeId: 12 StartTimeMs: 1743786075123 CreateTimeMs: 1743786075112 } MaxMemoryUsage: 1048576 } 2025-04-04T17:01:15.138695Z node 12 :KQP_EXECUTER INFO: TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [12:1153:2915] 2025-04-04T17:01:15.138834Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [12:1154:2916], 2025-04-04T17:01:15.138893Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [12:1154:2916], 2025-04-04T17:01:15.139110Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [12:1154:2916], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1044 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 498 FinishTimeMs: 1743786075137 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 387 BuildCpuTimeUs: 111 HostName: "ghrun-mup5cka3jq" NodeId: 12 CreateTimeMs: 1743786075113 } MaxMemoryUsage: 1048576 } 2025-04-04T17:01:15.139190Z node 12 :KQP_EXECUTER INFO: TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [12:1154:2916] 2025-04-04T17:01:15.141765Z node 12 :KQP_EXECUTER INFO: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 9050 DurationUs: 1743786070589338 Tables { TablePath: "/Root/table" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 2 } ExecuterCpuTimeUs: 5747 StartTimeMs: 4550 FinishTimeMs: 1743786075139 Stages { StageId: 1 StageGuid: "2bae26eb-3bdd3f52-a0ea666-67525515" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" ComputeActors { CpuTimeUs: 1044 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 498 FinishTimeMs: 1743786075137 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 387 BuildCpuTimeUs: 111 HostName: "ghrun-mup5cka3jq" NodeId: 12 CreateTimeMs: 1743786075113 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786075123 } Stages { StageGuid: "827051b3-c258f3c1-ac67d85e-ba03c3fc" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743786075123 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":6,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":5,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TableFullScan\",\"Operators\":[{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/table\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRanges\":[\"key (-∞, +∞)\"],\"ReadRangesPointPrefixLen\":\"0\",\"Scan\":\"Sequential\",\"Table\":\"table\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table\"]}],\"StageGuid\":\"827051b3-c258f3c1-ac67d85e-ba03c3fc\",\"Stats\":{\"BaseTimeMs\":1743786075123,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"2bae26eb-3bdd3f52-a0ea666-67525515\",\"Stats\":{\"BaseTimeMs\":1743786075123,\"ComputeNodes\":[{\"CpuTimeUs\":1044,\"Tasks\":[{\"ComputeTimeUs\":387,\"FinishTimeMs\":1743786075137,\"Host\":\"ghrun-mup5cka3jq\",\"InputBytes\":12,\"InputRows\":3,\"NodeId\":12,\"OutputBytes\":12,\"OutputRows\":3,\"ResultBytes\":12,\"ResultRows\":3,\"TaskId\":2}]}],\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1429 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\013\010\224\010\020\323\021\030\347\031 \002" } } 2025-04-04T17:01:15.141919Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:1143:2898] TxId: 281474976715669. 
Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:01:15.142025Z node 12 :KQP_EXECUTER TRACE: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-04T17:01:15.142137Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:1143:2898] TxId: 281474976715669. Ctx: { TraceId: 01jr0vzrpeacgdtchkcg94xagd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.003303s ReadRows: 3 ReadBytes: 24 ru: 3 rate limiter was not found force flag: 1 2025-04-04T17:01:15.144506Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, ActorId: [12:1133:2898], ActorState: ExecuteState, TraceId: 01jr0vzrpeacgdtchkcg94xagd, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-04T17:01:15.144847Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, ActorId: [12:1133:2898], ActorState: ExecuteState, TraceId: 01jr0vzrpeacgdtchkcg94xagd, txInfo Status: Committed Kind: ReadOnly TotalDuration: 63.39 ServerDuration: 63.289 QueriesCount: 2 2025-04-04T17:01:15.145050Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, ActorId: [12:1133:2898], ActorState: ExecuteState, TraceId: 01jr0vzrpeacgdtchkcg94xagd, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:01:15.145711Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, ActorId: [12:1133:2898], ActorState: ExecuteState, TraceId: 01jr0vzrpeacgdtchkcg94xagd, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:01:15.145828Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, ActorId: [12:1133:2898], ActorState: ExecuteState, TraceId: 01jr0vzrpeacgdtchkcg94xagd, EndCleanup, isFinal: 0 2025-04-04T17:01:15.145967Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OGJiZjFlZTQtNDcyZDAyZGItY2M3OWNhNTEtOTRiMGUxNGE=, ActorId: [12:1133:2898], ActorState: ExecuteState, TraceId: 01jr0vzrpeacgdtchkcg94xagd, Sent query response back to proxy, proxyRequestId: 11, proxyId: [12:57:2104]
{ items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 5 } items { uint32_value: 50 } }, { items { uint32_value: 6 } items { uint32_value: 60 } }
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD]
>> TColumnShardTestReadWrite::ReadGroupBy
>> DataShardSnapshots::ShardRestartAfterDropTable [GOOD]
>> IncrementalBackup::BackupRestore [GOOD]
>> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental
>> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD]
>> DataShardSnapshots::BrokenLockChangesDontLeak
>> YdbYqlClient::TestYqlIssues
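The KQP_EXECUTER/KQP_SESSION traces above arrive flattened in the CI artifact, with many records run together on a single line, which makes them hard to scan. Below is a small, self-contained C++ sketch — not part of the test output, and the record-header layout (`<timestamp> node <N> :<COMPONENT> <SEVERITY>:`) is only inferred from the lines above — that splits such a flattened stream back into individual records:

```cpp
// Hypothetical log-splitting helper: cuts a flattened ya-make test log into
// records at each "<ISO8601 timestamp> node <N> :<COMPONENT> <SEVERITY>:" header.
#include <iostream>
#include <iterator>
#include <regex>
#include <string>
#include <vector>

struct LogRecord {
    std::string timestamp;   // e.g. "2025-04-04T17:01:15.144506Z"
    int node = 0;            // e.g. 12
    std::string component;   // e.g. "KQP_SESSION"
    std::string severity;    // e.g. "DEBUG"
    std::string message;     // everything up to the next record header
};

std::vector<LogRecord> SplitRecords(const std::string& flat) {
    // One record header: timestamp, "node N", ":COMPONENT SEVERITY:".
    static const std::regex header(
        R"((\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) node (\d+) :([A-Z0-9_]+) (TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):)");
    std::vector<LogRecord> out;
    const auto end = std::sregex_iterator();
    for (auto it = std::sregex_iterator(flat.begin(), flat.end(), header); it != end; ++it) {
        LogRecord rec;
        rec.timestamp = (*it)[1];
        rec.node = std::stoi((*it)[2]);
        rec.component = (*it)[3];
        rec.severity = (*it)[4];
        // The message runs from the end of this header to the start of the next one.
        const size_t msgBegin = static_cast<size_t>(it->position() + it->length());
        const auto next = std::next(it);
        const size_t msgEnd = (next == end) ? flat.size()
                                            : static_cast<size_t>(next->position());
        rec.message = flat.substr(msgBegin, msgEnd - msgBegin);
        out.push_back(std::move(rec));
    }
    return out;
}
```

Feeding it the raw stderr payload of one test yields one LogRecord per header, which can then be filtered by component (say, only KQP_SESSION) or by severity when triaging a failure, instead of reading the interleaved TRACE stream end to end.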
>> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD]
>> KqpSysColV1::StreamInnerJoinTables [GOOD]
>> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull
>> KqpSystemView::QueryStatsScan [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD]
Test command err: 2025-04-04T16:59:07.704568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:07.705134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:07.705193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00130e/r3tmp/tmpJSQ6tE/pdisk_1.dat 2025-04-04T16:59:08.103804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:59:08.151831Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:08.191523Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:59:08.192419Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T16:59:08.192758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:08.192877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:08.204526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:08.285149Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T16:59:08.285221Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T16:59:08.286023Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T16:59:08.442025Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T16:59:08.442153Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:59:08.442920Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T16:59:08.443026Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:59:08.443378Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:08.443641Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T16:59:08.443741Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T16:59:08.444035Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T16:59:08.445660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:08.446857Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T16:59:08.446936Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T16:59:08.479902Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:59:08.481063Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:59:08.481582Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:59:08.481929Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:08.492892Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:59:08.523438Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:08.523688Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:59:08.525747Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:59:08.525863Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:59:08.525920Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:59:08.526880Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:59:08.527075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:59:08.527210Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:59:08.527808Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:59:08.559462Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:59:08.559657Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:59:08.559771Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:59:08.559807Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:59:08.559841Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:59:08.559877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:59:08.560087Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T16:59:08.560134Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:59:08.560367Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:59:08.560467Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:59:08.560551Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:59:08.560593Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:08.560643Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:59:08.560683Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:59:08.560721Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:59:08.560752Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:59:08.560794Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:59:08.560896Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.560923Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.560964Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:59:08.561024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:59:08.561072Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:59:08.561146Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:59:08.561339Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:59:08.561396Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:59:08.561471Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:59:08.561514Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:59:08.561545Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:59:08.561589Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T16:59:08.561617Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:08.561892Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:59:08.561923Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:59:08.561948Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:59:08.561972Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T16:59:08.562020Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:59:08.562055Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:59:08.562087Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:59:08.562118Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:59:08.562164Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:59:08.563499Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:59:08.563559Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:08.563596Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:59:08.563650Z node 1 :TX_DATASHARD TRACE: Prop ... ablet: 72075186224037890 2025-04-04T17:01:14.681933Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037890 2025-04-04T17:01:14.682746Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 278593539, Sender [13:831:2692], Recipient [13:665:2570]: NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3,4] } 2025-04-04T17:01:14.682889Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435091, Sender [13:665:2570], Recipient [13:665:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRemoveChangeRecords 2025-04-04T17:01:14.682947Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037888 2025-04-04T17:01:14.682984Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-04-04T17:01:14.683072Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037888 2025-04-04T17:01:14.683120Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-04-04T17:01:14.683420Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037890 2025-04-04T17:01:14.683807Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-04-04T17:01:14.950817Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1077:2892]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:01:15.854676Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T17:01:15.854805Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715671 ProcessProposeKqpTransaction 2025-04-04T17:01:15.855982Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0vzrv88e6k68wzkka4x6sy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzM1MTkzMzUtZWMyYWFlOWEtOWJlMWQ3YzEtNjE0MWY1YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-04-04T17:01:15.859226Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1705:3415], Recipient [13:794:2663]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-04-04T17:01:15.859435Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-04T17:01:15.859545Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8007/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:01:15.859635Z node 13 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-04-04T17:01:15.859755Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-04-04T17:01:15.859990Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-04T17:01:15.860106Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-04-04T17:01:15.860210Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:01:15.860278Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:01:15.860363Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-04-04T17:01:15.860435Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-04T17:01:15.860471Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:01:15.860500Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-04-04T17:01:15.860528Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-04-04T17:01:15.860710Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-04T17:01:15.861218Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[13:1705:3415], 0} after executionsCount# 1 2025-04-04T17:01:15.861348Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1705:3415], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-04-04T17:01:15.861512Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1705:3415], 0} finished in read 2025-04-04T17:01:15.861655Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-04T17:01:15.861692Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T17:01:15.861724Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:01:15.861756Z node 13 
:TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:01:15.861824Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-04-04T17:01:15.861850Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:01:15.861895Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-04-04T17:01:15.861978Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T17:01:15.862205Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T17:01:15.865846Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1705:3415], Recipient [13:794:2663]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:01:15.865979Z node 13 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2025-04-04T17:01:17.180487Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T17:01:17.180621Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715672 ProcessProposeKqpTransaction 2025-04-04T17:01:17.182229Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0vzsqqadn46t7g4f2srmcw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NDM1N2M1NTYtMmNkOGJiZmUtN2RlMWQzMzQtZTAyYjgxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-04-04T17:01:17.185661Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1736:3440], Recipient [13:1077:2892]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-04-04T17:01:17.185891Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-04T17:01:17.185990Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v8007/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:01:17.186081Z node 13 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-04-04T17:01:17.186223Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2025-04-04T17:01:17.186506Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-04T17:01:17.186596Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2025-04-04T17:01:17.186716Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-04T17:01:17.186792Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2025-04-04T17:01:17.186876Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037891 2025-04-04T17:01:17.186962Z node 13 :TX_DATASHARD 
TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-04T17:01:17.187003Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-04T17:01:17.187032Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2025-04-04T17:01:17.187061Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2025-04-04T17:01:17.187273Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-04T17:01:17.187855Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[13:1736:3440], 0} after executionsCount# 1 2025-04-04T17:01:17.187996Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1736:3440], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-04-04T17:01:17.188178Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1736:3440], 0} finished in read 2025-04-04T17:01:17.188331Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-04T17:01:17.188369Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2025-04-04T17:01:17.188405Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2025-04-04T17:01:17.188438Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2025-04-04T17:01:17.188513Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-04-04T17:01:17.188539Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2025-04-04T17:01:17.188585Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037891 has finished 2025-04-04T17:01:17.188673Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-04T17:01:17.188900Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2025-04-04T17:01:17.190766Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1736:3440], Recipient [13:1077:2892]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:01:17.190888Z node 13 :TX_DATASHARD TRACE: 72075186224037891 ReadCancel: { ReadId: 0 }
{ items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } }
>> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD]
>> IncrementalBackup::SimpleRestore [GOOD]
>> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental
>> KqpPg::TempTablesDrop [FAIL]
>> KqpPg::TempTablesWithCache
>> IncrementalBackup::SimpleBackup [GOOD]
>> IncrementalBackup::MultiRestore
>> KqpSysColV0::InnerJoinTables [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD]
Test command err: 2025-04-04T16:59:07.856468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:07.856975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:07.857032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00130c/r3tmp/tmpe3y2uJ/pdisk_1.dat 2025-04-04T16:59:08.297773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:59:08.365507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:08.412766Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:59:08.413736Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T16:59:08.414179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:08.414317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:08.426893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:08.508814Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T16:59:08.508896Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T16:59:08.509088Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T16:59:08.644041Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T16:59:08.644167Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:59:08.644790Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T16:59:08.644899Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:59:08.645214Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:08.645399Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T16:59:08.645485Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T16:59:08.645724Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T16:59:08.647212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:08.648212Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T16:59:08.648302Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T16:59:08.679562Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:59:08.680668Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:59:08.681090Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:59:08.681321Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:08.691645Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:59:08.720866Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:08.721025Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:59:08.723046Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:59:08.723156Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:59:08.723233Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:59:08.723720Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:59:08.723901Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:59:08.724021Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:59:08.724575Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:59:08.751303Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:59:08.751537Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:59:08.751660Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:59:08.751706Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:59:08.751746Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:59:08.751808Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:59:08.752061Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T16:59:08.752124Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:59:08.752472Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:59:08.752569Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:59:08.752676Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:59:08.752735Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:08.752804Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:59:08.752858Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:59:08.752916Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:59:08.752958Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:59:08.753012Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:59:08.753192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.753233Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:08.753293Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:59:08.753370Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:59:08.753423Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:59:08.753534Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:59:08.753787Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:59:08.753862Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:59:08.753969Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:59:08.754024Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:59:08.754065Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:59:08.754123Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T16:59:08.754162Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:08.754569Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:59:08.754617Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:59:08.754682Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:59:08.754725Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T16:59:08.754799Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:59:08.754833Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:59:08.754891Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:59:08.754928Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:59:08.754955Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:59:08.756189Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:59:08.756249Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:08.756285Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:59:08.756340Z node 1 :TX_DATASHARD TRACE: Prop ... ete at 72075186224037888 2025-04-04T17:01:17.233247Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:17.233340Z node 14 :TX_DATASHARD TRACE: Complete execution for [600:281474976715664] at 72075186224037888 on unit DropTable 2025-04-04T17:01:17.233405Z node 14 :TX_DATASHARD TRACE: Complete execution for [600:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:01:17.233491Z node 14 :TX_DATASHARD DEBUG: Complete [600 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [14:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:01:17.233607Z node 14 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715664 state PreOffline TxInFly 0 2025-04-04T17:01:17.233815Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:17.238491Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [14:683:2575], Recipient [14:688:2578]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:01:17.239386Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [14:977:2799], Recipient [14:688:2578]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [14:980:2802] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:01:17.239451Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:01:17.254117Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [14:408:2403], Recipient [14:688:2578]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715664 2025-04-04T17:01:17.254203Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:01:17.254706Z node 14 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state PreOffline 2025-04-04T17:01:17.254839Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:01:17.256985Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [14:708:2590], Recipient [14:688:2578]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 
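[Editor's note] The trace above walks tx 281474976715657 through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), where each unit returns a status: ExecutedNoMoreRestarts advances the plan immediately, DelayComplete defers finishing work to the transaction's Complete phase, and "is not ready to execute" parks the operation until a later event. A compact C++ sketch of that dispatch loop follows; unit names are taken from the log, the logic is a hedged reconstruction, not the real datashard pipeline.

// Minimal sketch of an execution-unit pipeline under assumed semantics.
#include <cstdio>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    EStatus (*Execute)(); // captureless lambdas convert to this pointer type
};

int main() {
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }}, // finished in Complete phase
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},      // parks until the plan step arrives
    };
    std::vector<std::string> delayed;
    for (const auto& unit : plan) {
        EStatus st = unit.Execute();
        if (st == EStatus::NotReady) {
            std::printf("operation is not ready to execute on unit %s\n", unit.Name.c_str());
            break; // stays attached to the unit; re-driven by a later event
        }
        if (st == EStatus::DelayComplete)
            delayed.push_back(unit.Name); // produces "Complete execution for ... on unit X" later
    }
    for (const auto& name : delayed)
        std::printf("Complete execution on unit %s\n", name.c_str());
}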
2025-04-04T17:01:17.257066Z node 14 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:01:17.257298Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [14:708:2590], Recipient [14:898:2730]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:01:17.257339Z node 14 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:01:17.268532Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:01:17.269171Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [14:977:2799], Recipient [14:688:2578]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [14:977:2799] ServerId: [14:980:2802] } 2025-04-04T17:01:17.269249Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:01:17.269793Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [14:592:2517], Recipient [14:688:2578]: NActors::TEvents::TEvPoison 2025-04-04T17:01:17.270791Z node 14 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-04-04T17:01:17.271118Z node 14 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-04T17:01:17.296915Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [14:988:2810], Recipient [14:990:2811]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:01:17.313098Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [14:988:2810], Recipient [14:990:2811]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:01:17.313323Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [14:988:2810], Recipient [14:990:2811]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:01:17.318363Z node 14 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:990:2811] 2025-04-04T17:01:17.318864Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:17.330881Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:17.332974Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:01:17.336731Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:01:17.336893Z node 14 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:01:17.337003Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:01:17.337818Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:01:17.338472Z node 14 :TX_DATASHARD TRACE: StateInactive, received event# 275709965, Sender [14:61:2108], Recipient [14:990:2811]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_SUBSCRIBED 2025-04-04T17:01:17.338720Z node 14 :TX_DATASHARD WARN: TDataShard::StateInactive unhandled event type: 275709965 event: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_SUBSCRIBED 2025-04-04T17:01:17.339069Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:01:17.339218Z node 14 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id 
[14:1005:2811] in generation 2 2025-04-04T17:01:17.340701Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:01:17.340825Z node 14 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2025-04-04T17:01:17.341022Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:01:17.341107Z node 14 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:01:17.341305Z node 14 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [14:1007:2819] 2025-04-04T17:01:17.341385Z node 14 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:01:17.341463Z node 14 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-04-04T17:01:17.341539Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:17.341901Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:990:2811], Recipient [14:990:2811]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:01:17.341958Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:01:17.342230Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435075, Sender [14:990:2811], Recipient [14:990:2811]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-04-04T17:01:17.342307Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-04-04T17:01:17.343038Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:990:2811]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 600} 2025-04-04T17:01:17.343099Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:01:17.343179Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 600 2025-04-04T17:01:17.343259Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:17.344545Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:01:17.344637Z node 14 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2025-04-04T17:01:17.345081Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:17.345358Z node 14 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-04T17:01:17.345455Z node 14 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-04-04T17:01:17.345565Z node 14 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-04-04T17:01:17.346315Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:990:2811], Recipient [14:898:2730]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 
Flags# 0} 2025-04-04T17:01:17.346376Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:01:17.346467Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-04-04T17:01:17.346607Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-04-04T17:01:17.346722Z node 14 :TX_DATASHARD NOTICE: Outdated readset for 500:281474976715663 at 72075186224037889 2025-04-04T17:01:17.346814Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-04T17:01:17.346929Z node 14 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-04-04T17:01:17.347114Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [14:898:2730], Recipient [14:990:2811]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-04T17:01:17.347172Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:01:17.347247Z node 14 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-04-04T17:01:17.347371Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:01:17.347597Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [14:24:2071], Recipient [14:990:2811]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 400 NextReadStep# 600 ReadStep# 600 } 2025-04-04T17:01:17.347656Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-04T17:01:17.347740Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 400 next step 600 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] Test command err: 2025-04-04T17:00:47.532511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:47.532613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:47.651671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:00:50.076897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:50.076975Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:50.118427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 
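[Editor's note] The readset exchange above (Resend RS, Receive RS, "Outdated readset", Send RS Ack, Receive RS Ack, all carrying Seqno# 1) is an at-least-once delivery protocol: after the tablet reboot the sender re-sends every unacknowledged readset, and the receiver acknowledges even a readset whose target transaction already finished, otherwise the sender would resend it forever and its OutReadSets count would never drop to 0. A toy C++ model of that handshake, under those assumed semantics:

// Toy model only; field names mimic the log, the behavior is an assumption.
#include <cstdint>
#include <cstdio>
#include <set>

struct TReadSet { uint64_t Step, TxId, Seqno; };

struct TReceiver {
    std::set<uint64_t> CompletedTxs{281474976715663ULL}; // tx already done here

    // Returns the seqno to ack back to the sender.
    uint64_t Handle(const TReadSet& rs) {
        if (CompletedTxs.count(rs.TxId))
            std::printf("Outdated readset for %lu:%lu\n",
                        (unsigned long)rs.Step, (unsigned long)rs.TxId);
        return rs.Seqno; // ack regardless, so the sender stops resending
    }
};

struct TSender {
    std::set<uint64_t> Unacked{1}; // outgoing readsets keyed by seqno
    void OnAck(uint64_t seqno) { Unacked.erase(seqno); } // OutReadSets count drops to 0
};

int main() {
    TSender sender;
    TReceiver receiver;
    sender.OnAck(receiver.Handle({500, 281474976715663ULL, 1}));
    return sender.Unacked.empty() ? 0 : 1;
}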
2025-04-04T17:00:51.253226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:51.253310Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:51.300399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:00:52.400576Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:52.400656Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:52.455202Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:00:53.668794Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:53.668865Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:53.720974Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:00:55.044683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:55.044783Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:55.096743Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:00:56.387277Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:56.387376Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:56.443815Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:00:57.745476Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:57.745557Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:57.799611Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:00:59.126359Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:00:59.126442Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:59.195971Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:00.535327Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:00.535432Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:00.601774Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:01.987889Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:01.988002Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:02.046480Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 
2025-04-04T17:01:03.194013Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:03.194108Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:03.257407Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:05.012192Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:05.012261Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:05.069095Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:06.215019Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:06.215120Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:06.273901Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:08.062169Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:08.062271Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:08.121881Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:09.960862Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:09.960960Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:10.023049Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:12.036667Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:12.036768Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:12.104395Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:14.076398Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:14.076505Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:14.135605Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:15.632567Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:01:15.632684Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:15.716103Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:01:16.491891Z node 23 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum 
values: ? true selector_config: [] >> KqpSysColV1::StreamInnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 23169, MsgBus: 4977 2025-04-04T17:01:11.690899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504146789585501:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:11.690949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011c5/r3tmp/tmpMrJbAZ/pdisk_1.dat 2025-04-04T17:01:12.494921Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:12.509341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:12.509482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:12.512512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23169, node 1 2025-04-04T17:01:12.710929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:12.710953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:12.710962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:12.711065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4977 TClient is connected to server localhost:4977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:13.736193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:13.822495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.081610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
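[Editor's note] The BSC26 failure in the console test output above ("failed to parse config obtained from Console ... Condition violated: `config.HasDomainsConfig()'") shows a validate-before-apply pattern: a config pushed from Console is parsed and checked for required sections before it can replace the running one, and a config missing its domains section is rejected with the violated precondition in the error text. A hedged C++ sketch of that idea; the field and type names are illustrative, not the real NKikimrConfig schema.

// Sketch under assumed semantics: only a fully validated config is applied.
#include <cstdio>
#include <optional>
#include <string>

struct TDomainsConfig { std::string Domain; };

struct TAppConfig {
    std::optional<TDomainsConfig> DomainsConfig;
    bool HasDomainsConfig() const { return DomainsConfig.has_value(); }
};

// Returns error text on rejection; std::nullopt means the config was applied.
std::optional<std::string> TryApply(const TAppConfig& parsed, TAppConfig& running) {
    if (!parsed.HasDomainsConfig())
        return "Condition violated: `config.HasDomainsConfig()'";
    running = parsed; // the running config is replaced atomically, never partially
    return std::nullopt;
}

int main() {
    TAppConfig running{TDomainsConfig{"Root"}};
    TAppConfig fromConsole; // has a log_config only, no domains section
    if (auto err = TryApply(fromConsole, running))
        std::printf("failed to parse config obtained from Console: %s\n", err->c_str());
    return 0;
}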
2025-04-04T17:01:14.369294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.467990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:16.694477Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504146789585501:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:16.694571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:16.883078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504168264423747:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.883224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.149681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.203633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.247625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.286947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.339543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.450869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.544849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504172559391564:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.544999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.545280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504172559391569:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.551684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:01:17.567946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504172559391571:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:01:17.668264Z node 1 :TX_PROXY ERROR: Actor# [1:7489504172559391626:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:19.597520Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786079595, txId: 281474976710671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 4349, MsgBus: 7351 2025-04-04T17:01:11.391325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504146995205220:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:11.391853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012fb/r3tmp/tmp1qFF91/pdisk_1.dat 2025-04-04T17:01:12.001956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:12.002067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:12.010768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:12.086584Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4349, node 1 2025-04-04T17:01:12.338894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:12.338935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:12.338951Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:12.339082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7351 TClient is connected to server localhost:7351 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
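[Editor's note] The warning cluster above repeats a create-if-missing race: fetching the "default" resource pool returns NOT_FOUND, a creator transaction is scheduled, a concurrent creator loses with "path exist, request accepts it" (treated as success, severity 1, not a failure), and the winner "doublechecks" that the pool is really visible before proceeding. A small C++ sketch of that idempotent-create pattern, with invented names rather than the KQP workload-service API:

// Sketch under assumed semantics: racing creators, "already exists" == success.
#include <atomic>
#include <cstdio>

std::atomic<bool> PoolExists{false};

enum class EResult { Created, AlreadyExists };

EResult CreateDefaultPool() {
    bool expected = false;
    // Only one creator wins; the rest see "path exist, request accepts it".
    if (PoolExists.compare_exchange_strong(expected, true))
        return EResult::Created;
    return EResult::AlreadyExists;
}

bool EnsureDefaultPool() {
    if (PoolExists.load())
        return true;                     // fetch succeeded, nothing to do
    EResult r = CreateDefaultPool();     // NOT_FOUND path: try to create it
    if (r == EResult::AlreadyExists)
        std::printf("path exist, request accepts it\n");
    return PoolExists.load();            // the "doublechecking" step after create
}

int main() {
    EnsureDefaultPool();
    EnsureDefaultPool(); // a second caller loses the race but still succeeds
    return PoolExists ? 0 : 1;
}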
2025-04-04T17:01:13.361381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:13.442373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:13.696901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:13.926452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.063117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:16.296233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504168470043318:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.296343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.386389Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504146995205220:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:16.386469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:16.630641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:16.674868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:01:16.713619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:01:16.752147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:01:16.792554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:01:16.850208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:01:16.944830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504168470043833:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.944952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.945147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504168470043838:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.949557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:01:16.973176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504168470043840:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:01:17.035177Z node 1 :TX_PROXY ERROR: Actor# [1:7489504172765011192:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:19.365996Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786078860, txId: 281474976710671] shutting down 2025-04-04T17:01:19.643943Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786079616, txId: 281474976710674] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-04-04T16:55:46.348383Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:46.483503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:46.503377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:46.503625Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:46.510552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:46.510734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:46.510948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:46.511023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:46.511105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:46.511188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:46.511253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:46.511350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:46.511425Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:46.511515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:46.511601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:46.511666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:46.547015Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:46.547319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:46.547401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:46.547610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:46.547765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:46.547825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:46.547859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:46.547921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:46.547974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:46.548008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:46.548029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:46.548161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:46.548217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:46.548246Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:46.548265Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:46.548339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:46.548395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:46.548438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:46.548467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:46.548518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:46.548546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:46.548565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:46.548868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:46.548910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:46.548943Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:46.549314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-04T16:55:46.549428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=60; 2025-04-04T16:55:46.549603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-04T16:55:46.549678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-04T16:55:46.549858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:46.549930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:46.549964Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:46.550189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:46.550263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:46.550302Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:46.550484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:46.550526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:46.550555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:46.550745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:46.550784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:46.550813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:46.550985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:46.551026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:46.551087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
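[Editor's note] The columnshard boot trace above runs a fixed chain of schema normalizers during TTxUpdateSchema (Granules, Chunks, TablesCleaner, CleanGranuleId, ..., RestoreV2Chunks), each registered with an id, switched to in order, and reporting "0 chunks found" when there is nothing to migrate before "normalization_finished". Below is an illustrative C++ sketch of such an ordered, idempotent migration chain; the ids and names mimic the log, the logic is invented.

// Illustrative only; each stage is idempotent and reports how much it fixed.
#include <cstdio>
#include <functional>
#include <string>
#include <vector>

struct TNormalizer {
    int Id;
    std::string Name;
    std::function<size_t()> Run; // returns the number of chunks it had to fix
};

int main() {
    std::vector<TNormalizer> chain = {
        {1,  "Granules",        [] { return size_t(0); }},
        {2,  "Chunks",          [] { return size_t(0); }},
        {4,  "TablesCleaner",   [] { return size_t(0); }},
        {6,  "CleanGranuleId",  [] { return size_t(0); }},
        {16, "RestoreV2Chunks", [] { return size_t(0); }},
    };
    for (const auto& n : chain) {
        size_t fixed = n.Run(); // 0 means this migration has nothing left to do
        std::printf("normalizer_finished;description=CLASS_NAME=%s;id=%d; %zu chunks found\n",
                    n.Name.c_str(), n.Id, fixed);
    }
    std::printf("normalization_finished\n");
}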
LOB:0:2696];; ... (TRUNCATED: repetitive per-chunk blob_range map elided — column_ids 8, 9, 7, 5; all entries [NO_BLOB:0:2664..9456]) ... ;;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_
size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-04T17:01:18.593343Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11457:13084];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:01:18.596062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11457:13084];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 5977, MsgBus: 2492 2025-04-04T17:01:11.747496Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504148989277965:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:11.747648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011cc/r3tmp/tmpeQ9v4G/pdisk_1.dat 2025-04-04T17:01:12.451513Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:12.456572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:12.456696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:12.465787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5977, node 1 2025-04-04T17:01:12.658036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:12.658061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:12.658069Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:12.658211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2492 TClient is connected to server localhost:2492 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:13.619656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:13.658969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:13.981884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.222013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.335807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:16.452785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504170464116213:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.452914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:16.754392Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504148989277965:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:16.754462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:16.876214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:16.929277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.009892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.073640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.156536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.211326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.291260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504174759084031:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.291353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.291950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504174759084036:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.304118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:01:17.321872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504174759084038:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:01:17.398794Z node 1 :TX_PROXY ERROR: Actor# [1:7489504174759084093:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> Worker::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 17209, MsgBus: 5206 2025-04-04T17:01:12.201290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504152766894316:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:12.201816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011c2/r3tmp/tmpf81Bjs/pdisk_1.dat 2025-04-04T17:01:12.737280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:12.737461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:12.771697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17209, node 1 2025-04-04T17:01:12.846851Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:01:12.846876Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:01:12.983071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:13.054941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:13.054964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:13.054974Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:13.055115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5206 TClient is connected to server localhost:5206 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:14.149634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.194562Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:01:14.205875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.515658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.825708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:14.923153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:17.073031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504174241732438:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.073141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.186631Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504152766894316:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:17.186713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:17.341210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.387807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.442512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.517702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.618331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.731966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.842863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504174241732961:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.842971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.843466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504174241732966:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:17.849118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:01:17.872336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504174241732968:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:01:17.974608Z node 1 :TX_PROXY ERROR: Actor# [1:7489504174241733024:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:20.811498Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786080806, txId: 281474976710671] shutting down >> Backup::ProposeBackup |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |81.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-04-04T16:55:43.449869Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:43.557998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:43.578931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:43.579248Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:43.588795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:43.589047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:43.589330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:43.589471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:43.589607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:43.589764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:43.589874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:43.590007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 
2025-04-04T16:55:43.590124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:43.590277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.590429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:43.590569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:43.616728Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:43.616925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:43.616985Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:43.617146Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.617296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:43.617368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:43.617418Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:43.617488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:43.617558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:43.617589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:43.617623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:43.617778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:43.617831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:43.617863Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:43.617884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:43.617950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:43.618015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:43.618060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:43.618081Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:43.618136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:43.618168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:43.618197Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:43.618269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:43.618321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:43.618344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:43.618719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-04T16:55:43.618816Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-04T16:55:43.618880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-04-04T16:55:43.618981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-04T16:55:43.619139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:43.619199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:43.619231Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:43.619389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:43.619435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.619474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:43.619611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:43.619643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:43.619668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:43.619813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:43.619860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:43.619891Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:43.620043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:43.620102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:43.620162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
B:0:2688];; ... (TRUNCATED: repetitive per-chunk blob_range map elided — column_ids 8, 9, 7, 5; all entries [NO_BLOB:0:2664..10208]) ... ;;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-04T17:01:19.957116Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11539:13166];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:01:19.960438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11539:13166];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> YdbQueryService::TestCreateAndAttachSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] Test command err: 2025-04-04T17:01:12.022103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:12.022733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:12.022818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164c/r3tmp/tmpD1lXuy/pdisk_1.dat 2025-04-04T17:01:12.485347Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:595:2519], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.485415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.485465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:12.485596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:592:2517], Recipient [1:408:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-04T17:01:12.485635Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:01:12.633381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-04T17:01:12.633779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.634190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:01:12.634501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:01:12.634592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.634756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.635844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:12.636057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:01:12.636121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.636162Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:12.636439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.636485Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.636559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.636626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:01:12.636668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:12.636739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:12.636881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.637451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.637508Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:12.637664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.637710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.637770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.637851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:01:12.637911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:12.638012Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.638482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.638537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:12.638685Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.638718Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.638756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.638804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.638858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:01:12.638888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.638925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:12.643939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:12.644819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.644876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-04T17:01:12.645101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:01:12.646609Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:600:2524], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2525] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:01:12.646720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:01:12.646775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-04T17:01:12.646934Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-04T17:01:12.647430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:604:2527], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.647490Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.647555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:12.647743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:592:2517], Recipient [1:408:2403]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-04T17:01:12.647780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:01:12.647855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:12.647899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-04T17:01:12.647940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:12.705877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:408:2403]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-04T17:01:12.705989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T17:01:12.706034Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:12.706096Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:01:12.706168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-04T17:01:12.775722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:12.775906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:12.790028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:12.878099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-04-04T17:01:12.878947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:635:2543], 
Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.878995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.879030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:12.879165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:408:2403]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-04T17:01:12.879202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:01:12.879307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:12.879473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:01:12.879548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationP ... 6644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:22.368122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-04T17:01:22.368169Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:22.368418Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2500 2025-04-04T17:01:22.368680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:22.368704Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:22.368958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:22.368991Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:22.379682Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:22.379765Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-04T17:01:22.379829Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:22.379865Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:22.380089Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [2:1002:2796], Recipient [2:417:2411]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:22.380122Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:22.380142Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:22.380265Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [2:738:2609], Recipient [2:417:2411]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 738 RawX2: 
8589937201 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:22.380296Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-04T17:01:22.380347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 738 RawX2: 8589937201 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:22.380372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 0 2025-04-04T17:01:22.380466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480, message: Source { RawX1: 738 RawX2: 8589937201 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:22.380498Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-04T17:01:22.380547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 738 RawX2: 8589937201 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:22.380582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:22.380610Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:22.380650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-04T17:01:22.380687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-04T17:01:22.380710Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 129 -> 240 2025-04-04T17:01:22.380820Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:22.381203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:22.381228Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:22.381255Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-04-04T17:01:22.381359Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:949:2752] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-04T17:01:22.381402Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:738:2609] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-04T17:01:22.381479Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-04-04T17:01:22.381550Z node 2 :TX_DATASHARD 
DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T17:01:22.381715Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:417:2411], Recipient [2:417:2411]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:22.381742Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:22.381778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:22.381828Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0ProgressState, operation type TxCopyTable 2025-04-04T17:01:22.381864Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:22.381898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:01:22.381967Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 0, blocked: 1 2025-04-04T17:01:22.382049Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-04T17:01:22.382085Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 240 -> 240 2025-04-04T17:01:22.382445Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-04T17:01:22.382487Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:01:22.382801Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:22.382837Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-04-04T17:01:22.382932Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:417:2411], Recipient [2:417:2411]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:22.382956Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:22.382985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:22.383017Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:0 ProgressState 2025-04-04T17:01:22.383116Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:22.383152Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-04T17:01:22.383182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-04T17:01:22.383220Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-04-04T17:01:22.383259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-04T17:01:22.383292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: true 2025-04-04T17:01:22.383368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:929:2736] message: TxId: 281474976715662 2025-04-04T17:01:22.383418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-04T17:01:22.383456Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-04T17:01:22.383485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-04T17:01:22.383617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-04T17:01:22.383648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-04-04T17:01:22.384018Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:22.384083Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:929:2736] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-04T17:01:22.384414Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [2:937:2743], Recipient [2:417:2411]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:22.384455Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:22.384488Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-04T17:01:22.512707Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [2:1029:2813], serverId# [2:1030:2814], sessionId# [0:0:0] 2025-04-04T17:01:22.512928Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0w002nf3qvg6j48kdwpsfy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWNjZGM2MWEtYjA4ZWIwMi04NjNjNmJhZS1hYzk0MzBkNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-04-04T17:01:12.304664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504153514865361:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:12.305179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001b46/r3tmp/tmp06hxCi/pdisk_1.dat 2025-04-04T17:01:12.838949Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:12.849028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:12.849140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:12.851746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28897 TServer::EnableGrpc on GrpcPort 3722, node 1 2025-04-04T17:01:13.161199Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:13.161232Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:13.161241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:13.161397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:13.592773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:01:13.801220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786073911 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-04-04T17:01:13.939658Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handshake: worker# [1:7489504157809833347:2422] 2025-04-04T17:01:13.939737Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handshake: worker# [1:7489504157809833347:2422] 2025-04-04T17:01:13.942397Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:01:13.942879Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:01:13.942956Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Send handshake: worker# [1:7489504157809833347:2422] 2025-04-04T17:01:13.943026Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-04T17:01:13.943053Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7489504157809833347:2422] 
Handshake with writer: sender# [1:7489504157809833349:2422] 2025-04-04T17:01:13.945670Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Create read session: session# [1:7489504157809833352:2293] 2025-04-04T17:01:13.945755Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-04T17:01:13.945770Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7489504157809833347:2422] Handshake with reader: sender# [1:7489504157809833348:2422] 2025-04-04T17:01:13.945811Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:01:14.010944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-04T17:01:15.948290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504166399768108:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:15.948809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:15.954209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504166399768140:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:15.954334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504166399768141:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:15.959411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-04-04T17:01:15.964263Z node 1 :TX_PROXY ERROR: Actor# [1:7489504166399768147:2512] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:01:15.980927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504166399768145:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:01:15.980975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504166399768146:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:01:16.057429Z node 1 :TX_PROXY ERROR: Actor# [1:7489504170694735490:2543] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:16.081216Z node 1 :TX_PROXY ERROR: Actor# [1:7489504170694735508:2551] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:17.165014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.303726Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504153514865361:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:17.303815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:17.677607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:01:18.615818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:01:19.379016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-04T17:01:20.220178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-04T17:01:21.647488Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 0 Data: 36b Codec: RAW }] } } 2025-04-04T17:01:21.647609Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2025-04-04T17:01:21Z }] } 2025-04-04T17:01:21.647699Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2025-04-04T17:01:21Z }] } 2025-04-04T17:01:21.647878Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-04-04T17:01:21.648049Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7489504192169572804:2422] Handle 
NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T17:01:21.648094Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-04T17:01:21.648174Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7489504192169572804:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-04T17:01:21.652123Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7489504192169572804:2422] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T17:01:21.652221Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-04T17:01:21.652291Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-04-04T17:01:21.652375Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:01:21.652417Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:01:21.980039Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 36b Codec: RAW }] } } 2025-04-04T17:01:21.980131Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2025-04-04T17:01:21Z }] } 2025-04-04T17:01:21.980199Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2025-04-04T17:01:21Z }] } 2025-04-04T17:01:21.980299Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-04-04T17:01:21.980398Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7489504192169572804:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-04T17:01:21.988698Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7489504192169572804:2422] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T17:01:21.988795Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { 
PartitionId: 72075186224037890 } 2025-04-04T17:01:21.988848Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-04-04T17:01:21.988904Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:01:21.988956Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:01:22.146658Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 2 Data: 36b Codec: RAW }] } } 2025-04-04T17:01:22.146771Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2025-04-04T17:01:22Z }] } 2025-04-04T17:01:22.146847Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2025-04-04T17:01:22Z }] } 2025-04-04T17:01:22.146967Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-04-04T17:01:22.147072Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7489504192169572804:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-04-04T17:01:22.149163Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7489504192169572804:2422] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-04-04T17:01:22.149238Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-04-04T17:01:22.149284Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7489504157809833349:2422] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2025-04-04T17:01:22.149341Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:01:22.149380Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:01:22.374959Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-04-04T17:01:22.374993Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7489504157809833348:2422] Leave 2025-04-04T17:01:22.375060Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7489504157809833347:2422] Reader has gone: sender# [1:7489504157809833348:2422] 2025-04-04T17:01:22.375153Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504196464540278:2422] Handshake: worker# [1:7489504157809833347:2422] 2025-04-04T17:01:22.376485Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504196464540278:2422] Create read session: session# [1:7489504196464540279:2293] 2025-04-04T17:01:22.376542Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7489504157809833347:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-04-04T17:01:22.376558Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7489504157809833347:2422] Handshake with reader: sender# [1:7489504196464540278:2422] 2025-04-04T17:01:22.376620Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489504196464540278:2422] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll |81.7%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbOlapStore::LogLast50 >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> Backup::ProposeBackup [GOOD] |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |81.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-04-04T16:55:44.604114Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:44.737330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:44.762675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:44.763009Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:44.771767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:44.772025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:44.772267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:44.772380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:44.772541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:44.772688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:44.772796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:44.772921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:44.773046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:44.773171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.773298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:44.773420Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:44.810310Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:44.810590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:44.810723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:44.810939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:44.811135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:44.811213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:44.811261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:44.811355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:44.811419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:44.811463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:44.811493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:44.811727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:44.811804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:44.811847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:44.811881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:44.811974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:44.812047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:44.812110Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:44.812148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:44.812238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:44.812285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:44.812319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:44.812373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:44.812413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:44.812439Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:44.812890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-04T16:55:44.812999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-04T16:55:44.813102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=59; 2025-04-04T16:55:44.813197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-04T16:55:44.813387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:44.813446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:44.813481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:44.813699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:44.813772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.813818Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:44.813985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:44.814029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:44.814058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:44.814275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:44.814320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:44.814350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:44.814496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:44.814539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:44.814606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
B:0:2688];;column_id:8;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB
:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:
0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-04-04T17:01:23.211319Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11538:13165];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:01:23.215359Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11538:13165];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Backup::ProposeBackup [GOOD] Test command err: 2025-04-04T17:01:24.422734Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:01:24.737955Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:01:24.789355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:01:24.789713Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:01:24.814973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:24.815444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:24.815776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:01:24.815913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:01:24.816048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:01:24.816155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:01:24.816256Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:24.816388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:24.816517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:01:24.816845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:01:24.816970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:01:24.817077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:01:24.882780Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:01:24.883653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:01:24.883745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:01:24.883943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:01:24.884146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:01:24.884227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:01:24.884280Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:01:24.884414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:01:24.884492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:01:24.884552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:01:24.884595Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:01:24.884811Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:01:24.884889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:01:24.884932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:01:24.884966Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:01:24.885080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:01:24.885158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:01:24.885225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:01:24.885274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:01:24.885377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:01:24.885430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:01:24.885463Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:01:24.885514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:01:24.885552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:01:24.885584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:01:24.886062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-04T17:01:24.886187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=70; 2025-04-04T17:01:24.886311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=69; 2025-04-04T17:01:24.886398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-04T17:01:24.886631Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:01:24.886716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:01:24.886758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:01:24.886975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:01:24.887024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:01:24.887063Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:01:24.887238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:01:24.887282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:01:24.887322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:01:24.887502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:01:24.887538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:01:24.887569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:01:24.887714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:01:24.887770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:01:24.887831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
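The TTxUpdateSchema trace above follows a fixed normalizer lifecycle: each normalizer is announced with event=normalizer_register, started with event=normalizer_init (carrying a seq_id and type), reported done with event=normalizer_finished, and the controller then moves to the next one with event=normalizer_switched. Below is a minimal sketch of how that order could be recovered from a raw dump like this one; it is an illustrative helper, not part of YDB or ya tooling, and it assumes the key=value; record layout shown above, with each record introduced by an ISO-8601 timestamp.

import re
import sys
from collections import OrderedDict

# Records in this dump are introduced by an ISO-8601 timestamp and carry
# ';'-separated key=value pairs.
TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")
LIFECYCLE = {"normalizer_register", "normalizer_init",
             "normalizer_switched", "normalizer_finished"}

def fields(record):
    # "k=v;k=v;..." -> dict. Splitting on the first '=' keeps values such
    # as "CLASS_NAME=Granules" intact.
    out = {}
    for chunk in record.split(";"):
        if "=" in chunk:
            key, value = chunk.split("=", 1)
            out[key.strip()] = value.strip()
    return out

def normalizer_states(text):
    # normalizer name -> last lifecycle event seen for it, in first-seen order.
    # Reading the whole text and splitting on timestamps tolerates the line
    # wrapping in this dump, as long as a break does not fall inside a
    # key=value pair.
    states = OrderedDict()
    for record in TS.split(text):
        f = fields(record)
        event = f.get("event")
        if event not in LIFECYCLE:
            continue
        name = f.get("type") or f.get("description", "").removeprefix("CLASS_NAME=")
        if name:
            states[name] = event
    return states

if __name__ == "__main__":
    for name, state in normalizer_states(sys.stdin.read()).items():
        print(f"{name}: {state}")

Fed this section on stdin, it would end with every normalizer name mapped to normalizer_finished, mirroring the register, init, finished, switched progression from Granules through RestoreV2Chunks seen above.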
4967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.223457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:01:26.223607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;);columns=7;rows=100; 2025-04-04T17:01:26.223703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=4813;num_rows=100;batch_columns=key1,key2,field,_yql_plan_step,_yql_tx_id,_yql_write_id,_yql_delete_flag; 2025-04-04T17:01:26.223937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:293:2311];bytes=4813;rows=100;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string _yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64 _yql_delete_flag: bool; 2025-04-04T17:01:26.224089Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.224241Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.224442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.225493Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:0:0:1:3:2752:0]; 2025-04-04T17:01:26.247020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:01:26.247260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.247413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.247471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:299:2317] finished for tablet 9437184 2025-04-04T17:01:26.248126Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:293:2311];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.022},{"events":["l_bootstrap"],"t":0.025},{"events":["f_processing","f_task_result"],"t":0.033},{"events":["f_ack"],"t":0.083},{"events":["l_task_result"],"t":0.127},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.22}],"full":{"a":1743786086026737,"name":"_full_task","f":1743786086026737,"d_finished":0,"c":0,"l":1743786086247575,"d":220838},"events":[{"name":"bootstrap","f":1743786086027232,"d_finished":25398,"c":1,"l":1743786086052630,"d":25398},{"a":1743786086246981,"name":"ack","f":1743786086110530,"d_finished":4244,"c":3,"l":1743786086224520,"d":4838},{"a":1743786086246958,"name":"processing","f":1743786086060428,"d_finished":24410,"c":27,"l":1743786086224524,"d":25027},{"name":"ProduceResults","f":1743786086049459,"d_finished":12973,"c":32,"l":1743786086247448,"d":12973},{"a":1743786086247454,"name":"Finish","f":1743786086247454,"d_finished":0,"c":0,"l":1743786086247575,"d":121},{"name":"task_result","f":1743786086060454,"d_finished":19467,"c":24,"l":1743786086154082,"d":19467}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.248232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:293:2311];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:01:26.248817Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:293:2311];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.022},{"events":["l_bootstrap"],"t":0.025},{"events":["f_processing","f_task_result"],"t":0.033},{"events":["f_ack"],"t":0.083},{"events":["l_task_result"],"t":0.127},{"events":["l_ProduceResults","f_Finish"],"t":0.22},{"events":["l_ack","l_processing","l_Finish"],"t":0.221}],"full":{"a":1743786086026737,"name":"_full_task","f":1743786086026737,"d_finished":0,"c":0,"l":1743786086248281,"d":221544},"events":[{"name":"bootstrap","f":1743786086027232,"d_finished":25398,"c":1,"l":1743786086052630,"d":25398},{"a":1743786086246981,"name":"ack","f":1743786086110530,"d_finished":4244,"c":3,"l":1743786086224520,"d":5544},{"a":1743786086246958,"name":"processing","f":1743786086060428,"d_finished":24410,"c":27,"l":1743786086224524,"d":25733},{"name":"ProduceResults","f":1743786086049459,"d_finished":12973,"c":32,"l":1743786086247448,"d":12973},{"a":1743786086247454,"name":"Finish","f":1743786086247454,"d_finished":0,"c":0,"l":1743786086248281,"d":827},{"name":"task_result","f":1743786086060454,"d_finished":19467,"c":24,"l":1743786086154082,"d":19467}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-04-04T17:01:26.248955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:01:26.025355Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13880;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13880;selected_rows=0; 2025-04-04T17:01:26.249011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:01:26.249318Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:299:2317];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;; 2025-04-04T17:01:26.249845Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2025-04-04T17:01:26.308628Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:242:2260];
2025-04-04T17:01:26.308715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=115;
>> YdbYqlClient::TestYqlIssues [GOOD]
>> YdbYqlClient::TestYqlSessionClosed
>> TExecutorDb::MultiPage [GOOD]
>> TExecutorDb::EncodedPage
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch]
>> TExecutorDb::EncodedPage [GOOD]
>> TFlatCxxDatabaseTest::BasicSchemaTest
>> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD]
>> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD]
>> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD]
>> TFlatDatabaseDecimal::UpdateRead [GOOD]
>> TFlatEraseCacheTest::BasicUsage [GOOD]
>> TFlatEraseCacheTest::BasicUsageReverse
>> TFlatEraseCacheTest::BasicUsageReverse [GOOD]
>> TFlatEraseCacheTest::CacheEviction [GOOD]
>> TFlatEraseCacheTest::StressGarbageCollection
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0]
>> TFlatEraseCacheTest::StressGarbageCollection [GOOD]
>> TFlatEraseCacheTest::StressGarbageCollectionWithStrings
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch]
>> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD]
>> TFlatExecutorLeases::Basics
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std]
|81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|81.7%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo]
>> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD]
>> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility]
>> TDataShardLocksTest::UseLocksCache [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo]
>> IncrementalBackup::MultiRestore [GOOD]
>> IncrementalBackup::E2EBackupCollection
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo]
>> YdbQueryService::TestCreateAndAttachSession [GOOD]
>> YdbQueryService::TestForbidExecuteWithoutAttach
>> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD]
Test command err:
2025-04-04T17:01:18.140730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:18.141268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:18.141342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001be2/r3tmp/tmpDfJQzl/pdisk_1.dat 2025-04-04T17:01:18.574788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:18.623412Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:18.667953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:18.668155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:18.683687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:18.776473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:01:18.820928Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:671:2572]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:01:18.822685Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:671:2572]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:01:18.823251Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:671:2572] 2025-04-04T17:01:18.823559Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:18.902095Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:671:2572]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:01:18.905305Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:01:18.906711Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:18.906967Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:01:18.908987Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:01:18.909089Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:01:18.909170Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:01:18.909558Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:01:18.909670Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:01:18.910173Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:673:2574] 2025-04-04T17:01:18.914401Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:01:18.924223Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:01:18.924334Z node 1 
:TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2572] in generation 1 2025-04-04T17:01:18.924561Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:01:18.925916Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:01:18.934896Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:01:18.936627Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:01:18.936716Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T17:01:18.936793Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:01:18.937132Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:01:18.937295Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:01:18.937373Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:705:2574] in generation 1 2025-04-04T17:01:18.951111Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:01:18.997716Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:01:18.997981Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:01:18.998127Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:708:2593] 2025-04-04T17:01:18.998168Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:01:18.999981Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:01:19.000111Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:19.000499Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:671:2572], Recipient [1:671:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:01:19.000572Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:01:19.000687Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:01:19.000733Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:01:19.000816Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:01:19.000895Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:709:2594] 2025-04-04T17:01:19.000954Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:01:19.000995Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:01:19.001023Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:01:19.001432Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:673:2574], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:01:19.001476Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
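The TX_DATASHARD trace around this point walks each operation through the datashard execution-unit pipeline: an operation is added to a unit ("Add [op] ... to execution unit <Unit>"), attempted on it ("Trying to execute [op] at <tablet> on unit <Unit>"), reports a status ("Execution status ... is Executed/DelayComplete/..."), and advances until "Execution plan ... has finished". A sketch that recovers the per-operation unit order from such a trace follows; again an illustrative helper rather than YDB tooling, with the regular expression keyed to the exact phrases visible here.

import re
import sys
from collections import defaultdict

# Keyed to the trace phrase:
#   "Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx"
TRY_RE = re.compile(r"Trying to execute \[([\d:]+)\] at (\d+) on unit (\w+)")

def unit_order(text):
    # (tablet, operation) -> execution units in the order they were attempted.
    order = defaultdict(list)
    for op, tablet, unit in TRY_RE.findall(text):
        order[(tablet, op)].append(unit)
    return order

if __name__ == "__main__":
    for (tablet, op), units in unit_order(sys.stdin.read()).items():
        print(f"[{op}] @ {tablet}: {' -> '.join(units)}")

On the ut_locks output above this would report, for example, [0:281474976715657] @ 72075186224037888: CheckSchemeTx -> StoreSchemeTx -> FinishPropose, matching the unit transitions in the trace.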
2025-04-04T17:01:19.001674Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:01:19.001782Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:01:19.002398Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:01:19.002465Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:01:19.002517Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:01:19.002570Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:01:19.002616Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:01:19.002651Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:01:19.002739Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:19.002812Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:01:19.002891Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:01:19.003019Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:688:2582], Recipient [1:671:2572]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:19.003060Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:19.003108Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:688:2582], sessionId# [0:0:0] 2025-04-04T17:01:19.003155Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:01:19.003201Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:01:19.003234Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2025-04-04T17:01:19.003263Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T17:01:19.003287Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T17:01:19.003310Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T17:01:19.003345Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:01:19.003494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:688:2582] 2025-04-04T17:01:19.003545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:01:19.003699Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:01:19.003935Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:01:19.003996Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:01:19.004121Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:01:19.004202Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:01:19.004248Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:01:19.004292Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:01:19.004331Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:01:19.004651Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:01:19.004692Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:01:19.004733Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:01:19.004789Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:01:19.004844Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:01:19.004873Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 720751 ... 1474976715663] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:01:29.696839Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:01:29.696892Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-04-04T17:01:29.696916Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:01:29.696939Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2025-04-04T17:01:29.711020Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:29.711102Z node 2 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715662] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:01:29.711184Z node 2 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [2:937:2726], exec latency: 8 ms, propose latency: 9 ms 2025-04-04T17:01:29.711269Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-04T17:01:29.711309Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:29.711350Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:01:29.711382Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-04-04T17:01:29.711421Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:01:29.711502Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:29.713055Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:977:2783], Recipient [2:674:2574]: {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 
72075186224037888 Flags# 0 Seqno# 1} 2025-04-04T17:01:29.713124Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:01:29.713161Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-04-04T17:01:29.714879Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:61:2108], Recipient [2:977:2783]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-04-04T17:01:29.715061Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:61:2108], Recipient [2:674:2574]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-04-04T17:01:30.181260Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0w077z2eqadkszyqyptkxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTM1NmQ0ZDItYjIxNjZiNjUtYzcxYTNiMS1mYWFjZTQ3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:01:30.184547Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1020:2809], Recipient [2:977:2783]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T17:01:30.184694Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:01:30.184780Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-04T17:01:30.184880Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:01:30.184940Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:01:30.184982Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:01:30.185018Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:01:30.185078Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-04T17:01:30.185130Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:01:30.185170Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:01:30.185198Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:01:30.185227Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:01:30.185350Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:01:30.185630Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-04T17:01:30.185702Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# 
{[2:1020:2809], 0} after executionsCount# 1 2025-04-04T17:01:30.185754Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1020:2809], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:01:30.185863Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1020:2809], 0} finished in read 2025-04-04T17:01:30.185937Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:01:30.185965Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:01:30.185990Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:01:30.186019Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:01:30.186068Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:01:30.186089Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:01:30.186114Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-04T17:01:30.186170Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:01:30.190482Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:01:30.191599Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1020:2809], Recipient [2:977:2783]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:01:30.191676Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-04T17:01:30.193285Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1020:2809], Recipient [2:674:2574]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-04-04T17:01:30.193396Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-04T17:01:30.193549Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-04-04T17:01:30.193631Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-04T17:01:30.193702Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-04-04T17:01:30.193743Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:01:30.193772Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:01:30.193840Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2025-04-04T17:01:30.193873Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-04T17:01:30.193899Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:01:30.193954Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-04-04T17:01:30.193984Z node 
2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-04-04T17:01:30.194112Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-04-04T17:01:30.194410Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-04T17:01:30.194465Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1020:2809], 1} after executionsCount# 1 2025-04-04T17:01:30.194503Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1020:2809], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:01:30.194575Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1020:2809], 1} finished in read 2025-04-04T17:01:30.194634Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-04T17:01:30.194679Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T17:01:30.194704Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:01:30.194730Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:01:30.194781Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-04T17:01:30.194827Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:01:30.194859Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-04T17:01:30.194897Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T17:01:30.194996Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T17:01:30.201287Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1020:2809], Recipient [2:674:2574]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-04T17:01:30.201362Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] |81.7%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> KqpPg::TempTablesWithCache [FAIL] >> KqpPg::TableDeleteWhere+useSink |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log}
|81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init
>> TFlatExecutorLeases::Basics [GOOD]
>> TFlatExecutorLeases::BasicsLeaseTimeout
>> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD]
>> TGRpcYdbTest::AlterTableAddIndexBadRequest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD]
Test command err:
2025-04-04T16:59:06.666319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:06.666916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:06.666974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00132b/r3tmp/tmpVCCw3x/pdisk_1.dat 2025-04-04T16:59:07.059082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.103892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:07.142807Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T16:59:07.143678Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T16:59:07.144022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:07.144132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:07.156288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:07.238678Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T16:59:07.238748Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T16:59:07.238928Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T16:59:07.374926Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T16:59:07.375033Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:59:07.375581Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T16:59:07.375671Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:59:07.375946Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:07.376102Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T16:59:07.376185Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T16:59:07.376389Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T16:59:07.377734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:07.378635Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T16:59:07.378724Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T16:59:07.410321Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T16:59:07.411378Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T16:59:07.411834Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T16:59:07.412131Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:59:07.423566Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T16:59:07.457981Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:59:07.458146Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T16:59:07.460031Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T16:59:07.460133Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T16:59:07.460198Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T16:59:07.460628Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T16:59:07.460774Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T16:59:07.460875Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T16:59:07.461269Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T16:59:07.485466Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T16:59:07.485691Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T16:59:07.485812Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T16:59:07.485851Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T16:59:07.485888Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T16:59:07.485925Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T16:59:07.486148Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T16:59:07.486203Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T16:59:07.486575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T16:59:07.486714Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T16:59:07.486817Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T16:59:07.486875Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T16:59:07.486940Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T16:59:07.487016Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T16:59:07.487056Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T16:59:07.487089Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T16:59:07.487147Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T16:59:07.487298Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:07.487335Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T16:59:07.487385Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T16:59:07.487466Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T16:59:07.487535Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T16:59:07.487672Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T16:59:07.487914Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T16:59:07.487967Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T16:59:07.488098Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T16:59:07.488155Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T16:59:07.488203Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T16:59:07.488251Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T16:59:07.488300Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:07.488728Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T16:59:07.488781Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T16:59:07.488820Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T16:59:07.488856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T16:59:07.488951Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T16:59:07.488985Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T16:59:07.489027Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T16:59:07.489064Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T16:59:07.489118Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T16:59:07.490445Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T16:59:07.490521Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T16:59:07.490563Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T16:59:07.490604Z node 1 :TX_DATASHARD TRACE: Prop ... rd::TEvProposeTransaction 2025-04-04T17:01:31.429886Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:689:2579], Recipient [16:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:01:31.429956Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:01:31.430117Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:01:31.431082Z node 16 :TX_DATASHARD TRACE: TxId: 281474976715663, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-04-04T17:01:31.431272Z node 16 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, task: 1, write point (Uint32 : 3) 2025-04-04T17:01:31.431422Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 3) table: [72057594046644480:2:1] 2025-04-04T17:01:31.432101Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:01:31.432286Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-04-04T17:01:31.432417Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:01:31.432516Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:01:31.432593Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:01:31.432718Z node 16 :TX_DATASHARD TRACE: Activated operation [0:281474976715663] at 72075186224037888 2025-04-04T17:01:31.432811Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-04-04T17:01:31.432845Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit 
BuildAndWaitDependencies 2025-04-04T17:01:31.432909Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:01:31.432948Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:01:31.433061Z node 16 :TX_DATASHARD TRACE: Operation [0:281474976715663] (execute_kqp_data_tx) at 72075186224037888 aborting because it cannot acquire locks 2025-04-04T17:01:31.433180Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-04-04T17:01:31.433241Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:01:31.433277Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:01:31.433307Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-04-04T17:01:31.433350Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is DelayComplete 2025-04-04T17:01:31.433427Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:01:31.433510Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:01:31.433589Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:01:31.433701Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-04-04T17:01:31.433739Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:01:31.433791Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2025-04-04T17:01:31.433943Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:01:31.434042Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-04-04T17:01:31.434151Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-04-04T17:01:31.434350Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:31.435600Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YzIxYmM3ZWQtMThkZDQwYzUtMjVlNTQxNTEtMmQzMTY2MjI=, ActorId: [16:837:2682], ActorState: ExecuteState, TraceId: 01jr0w08gs6awpnt4k7243na5q, Create QueryResponse for error on request, msg: 2025-04-04T17:01:31.437100Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0w08gs6awpnt4k7243na5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YzIxYmM3ZWQtMThkZDQwYzUtMjVlNTQxNTEtMmQzMTY2MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:01:31.437632Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [16:895:2682], Recipient [16:689:2579]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 895 RawX2: 68719479418 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715664 ExecLevel: 0 Flags: 8 2025-04-04T17:01:31.437699Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:01:31.437893Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:689:2579], Recipient [16:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:01:31.437947Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:01:31.438034Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:01:31.438366Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-04T17:01:31.438540Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:01:31.438603Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-04T17:01:31.438640Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:01:31.438693Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:01:31.438728Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:01:31.438795Z node 16 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-04-04T17:01:31.438897Z node 16 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2025-04-04T17:01:31.438939Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-04T17:01:31.438971Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:01:31.439000Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:01:31.439031Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:01:31.439147Z node 16 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-04-04T17:01:31.439355Z node 16 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-04T17:01:31.439554Z node 16 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:01:31.439667Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-04T17:01:31.439719Z 
node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:01:31.439770Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:01:31.439806Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-04T17:01:31.439880Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:01:31.440032Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-04-04T17:01:31.440069Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:01:31.440102Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:01:31.440152Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:01:31.440235Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-04T17:01:31.440268Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:01:31.440299Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2025-04-04T17:01:31.440375Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:01:31.440412Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-04T17:01:31.440459Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:31.442637Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [16:61:2108], Recipient [16:689:2579]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2025-04-04T17:01:31.448767Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:904:2735], Recipient [16:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:31.448914Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:31.449020Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:903:2734], serverId# [16:904:2735], sessionId# [0:0:0] 2025-04-04T17:01:31.449249Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [16:593:2518], Recipient [16:689:2579]: NKikimr::TEvDataShard::TEvGetOpenTxs |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |81.8%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.8%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |81.8%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |81.8%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2025-04-04T16:59:09.462895Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503624452556317:2275];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:09.462959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162a/r3tmp/tmpwNIEZs/pdisk_1.dat 2025-04-04T16:59:09.949910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:09.950103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:09.956392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:09.994813Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2579 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T16:59:10.250526Z node 1 :TX_PROXY DEBUG: actor# [1:7489503624452556341:2115] Handle TEvNavigate describe path dc-1 2025-04-04T16:59:10.250596Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503628747524114:2441] HANDLE EvNavigateScheme dc-1 2025-04-04T16:59:10.250756Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489503624452556383:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T16:59:10.250797Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489503624452556383:2131], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T16:59:10.251032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T16:59:10.253198Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556022:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503628747524119:2442] 2025-04-04T16:59:10.253276Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503624452556022:2050] Subscribe: subscriber# [1:7489503628747524119:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.253353Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556025:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503628747524120:2442] 2025-04-04T16:59:10.253373Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503624452556025:2053] Subscribe: subscriber# [1:7489503628747524120:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.253437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556028:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489503628747524121:2442] 2025-04-04T16:59:10.253454Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489503624452556028:2056] Subscribe: subscriber# [1:7489503628747524121:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T16:59:10.253509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524119:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624452556022:2050] 2025-04-04T16:59:10.253533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524120:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624452556025:2053] 2025-04-04T16:59:10.253550Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524121:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503624452556028:2056] 2025-04-04T16:59:10.253610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503628747524116:2442] 2025-04-04T16:59:10.253683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7489503628747524115:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503628747524117:2442] 2025-04-04T16:59:10.253768Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489503628747524115:2442][/dc-1] Set up state: owner# [1:7489503624452556383:2131], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:10.253908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489503628747524118:2442] 2025-04-04T16:59:10.253964Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489503628747524115:2442][/dc-1] Path was already updated: owner# [1:7489503624452556383:2131], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T16:59:10.254022Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524119:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503628747524116:2442], cookie# 1 2025-04-04T16:59:10.254046Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524120:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503628747524117:2442], cookie# 1 2025-04-04T16:59:10.254072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524121:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503628747524118:2442], cookie# 1 2025-04-04T16:59:10.254137Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556022:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503628747524119:2442] 2025-04-04T16:59:10.254167Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556022:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503628747524119:2442], cookie# 1 2025-04-04T16:59:10.254201Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556025:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503628747524120:2442] 2025-04-04T16:59:10.254224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556025:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503628747524120:2442], cookie# 1 2025-04-04T16:59:10.254475Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556028:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489503628747524121:2442] 2025-04-04T16:59:10.254508Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489503624452556028:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489503628747524121:2442], cookie# 1 2025-04-04T16:59:10.254549Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524119:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624452556022:2050], cookie# 1 2025-04-04T16:59:10.254603Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524120:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624452556025:2053], cookie# 1 2025-04-04T16:59:10.254626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489503628747524121:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503624452556028:2056], cookie# 1 2025-04-04T16:59:10.254696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503628747524116:2442], cookie# 1 2025-04-04T16:59:10.254730Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T16:59:10.254750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503628747524117:2442], cookie# 1 2025-04-04T16:59:10.254779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T16:59:10.254819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489503628747524118:2442], cookie# 1 2025-04-04T16:59:10.254842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489503628747524115:2442][/dc-1] Unexpected sync response: sender# [1:7489503628747524118:2442], cookie# 1 2025-04-04T16:59:10.327943Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489503624452556383:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T16:59:10.328252Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489503624452556383:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... 5:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:33.493631Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489503648995586764:2153], cacheItem# { Subscriber: { Subscriber: [3:7489503653290554646:2565] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:01:33.493725Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489504241701077005:4084], recipient# [3:7489504241701077004:2715], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } IsActive: /dc-1/USER_0 -- 2 -- 2 2025-04-04T17:01:33.639406Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489503648995586380:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7489503658148041301:2105] 2025-04-04T17:01:33.639455Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489503648995586380:2051] Unsubscribe: subscriber# [5:7489503658148041301:2105], path# /dc-1/USER_0 2025-04-04T17:01:33.639504Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489503648995586383:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7489503658148041302:2105] 2025-04-04T17:01:33.639522Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489503648995586383:2054] Unsubscribe: subscriber# [5:7489503658148041302:2105], path# /dc-1/USER_0 2025-04-04T17:01:33.639555Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489503648995586386:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [5:7489503658148041303:2105] 2025-04-04T17:01:33.639573Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489503648995586386:2057] Unsubscribe: subscriber# [5:7489503658148041303:2105], path# /dc-1/USER_0 2025-04-04T17:01:33.639898Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2025-04-04T17:01:33.641173Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:01:33.689219Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7489503658148041307:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:33.689388Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7489503658148041307:2107], cacheItem# { Subscriber: { Subscriber: [5:7489503658148041322:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:01:33.689496Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7489504242263595180:3245], recipient# [5:7489504242263595179:2600], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:34.132040Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7489503658148041307:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:34.132221Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7489503658148041307:2107], cacheItem# { Subscriber: { Subscriber: [5:7489503671032943657:2411] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:01:34.132326Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7489504246558562482:3246], recipient# [5:7489504246558562481:2601], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:34.227547Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489503659480493178:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: 
OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:34.227724Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489503659480493178:2107], cacheItem# { Subscriber: { Subscriber: [4:7489503672365395402:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:01:34.227849Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489504247891013657:2587], recipient# [4:7489504247891013656:2610], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:34.478735Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489503659480493178:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:34.478892Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489503659480493178:2107], cacheItem# { Subscriber: { Subscriber: [4:7489503659480493488:2300] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:01:34.478989Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489504247891013659:2588], recipient# [4:7489504247891013658:2611], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:35.233213Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489503659480493178:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:01:35.233390Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489503659480493178:2107], cacheItem# { Subscriber: { Subscriber: [4:7489503672365395402:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:01:35.233498Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489504252185980957:2589], recipient# [4:7489504252185980956:2612], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableDatetime::TestDate >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> YdbQueryService::TestCreateDropAttachSession |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |81.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |81.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] Test command err: 2025-04-04T17:01:16.828848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:16.829525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:16.829619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163b/r3tmp/tmpq35xUJ/pdisk_1.dat 2025-04-04T17:01:17.308430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:595:2519], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:17.308506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:17.308562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:17.308724Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:592:2517], Recipient [1:408:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-04T17:01:17.308764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:01:17.525793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-04T17:01:17.526135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.526465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:01:17.526809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:01:17.526951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.527079Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:17.528177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:17.528431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:01:17.528501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:17.528551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:17.528800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:17.528857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:17.528938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.528994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:01:17.529039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:17.529105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:17.529257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:17.529865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:17.529935Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:17.530107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:17.530157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:17.530226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.530333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:01:17.530392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:17.530492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:17.530964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:17.531014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:17.531153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:17.531193Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:17.531233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.531282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:17.531333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:01:17.531378Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:17.531424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:17.545683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:17.547064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:17.547143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-04T17:01:17.547376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:01:17.549016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:600:2524], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2525] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:01:17.549088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:01:17.549139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-04T17:01:17.549283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-04T17:01:17.549774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:604:2527], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:17.549831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:17.549901Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:17.550069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:592:2517], Recipient [1:408:2403]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-04T17:01:17.550110Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:01:17.550184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:17.550273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-04T17:01:17.550344Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:17.617299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:408:2403]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-04T17:01:17.617422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T17:01:17.617506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:17.617591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:01:17.617664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-04T17:01:17.660909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:17.661095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:17.675463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:17.759803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-04-04T17:01:17.760676Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:635:2543], 
Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:17.760730Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:17.760770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:17.760940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:408:2403]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-04T17:01:17.760978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:01:17.761082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:17.761249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:01:17.761317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationP ... 76715662:1, at schemeshard: 72057594046644480 2025-04-04T17:01:38.872785Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:38.873249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-04T17:01:38.873307Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:38.887232Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:38.887328Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-04-04T17:01:38.887424Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:01:38.887472Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:01:38.887904Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:993:2793], Recipient [3:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:38.887963Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:38.888001Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:38.888164Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:665:2569], Recipient [3:408:2403]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 665 RawX2: 12884904457 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:38.888204Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-04T17:01:38.888288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 665 RawX2: 12884904457 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:38.888334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 1 2025-04-04T17:01:38.888506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 
281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 665 RawX2: 12884904457 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:38.888570Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-04T17:01:38.888653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 665 RawX2: 12884904457 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-04-04T17:01:38.888718Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:38.888768Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-04T17:01:38.888833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-04T17:01:38.888886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-04T17:01:38.888932Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 129 -> 240 2025-04-04T17:01:38.889082Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:38.889673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-04T17:01:38.889709Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:38.889749Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-04-04T17:01:38.889866Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:934:2743] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-04T17:01:38.889936Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:665:2569] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-04T17:01:38.890062Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-04-04T17:01:38.890150Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:01:38.894877Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-04-04T17:01:38.894995Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T17:01:38.895282Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:408:2403], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:38.895329Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:38.895392Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-04T17:01:38.895482Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1ProgressState, operation type TxCopyTable 2025-04-04T17:01:38.895555Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:38.895610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:1, name: CopyTableBarrier, done: 1, blocked: 1, parts count: 2 2025-04-04T17:01:38.895666Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 1, blocked: 1 2025-04-04T17:01:38.895786Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-04-04T17:01:38.895843Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 240 -> 240 2025-04-04T17:01:38.896645Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:38.896697Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-04-04T17:01:38.896852Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:408:2403], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:38.896889Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:38.896966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-04T17:01:38.897037Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:1 ProgressState 2025-04-04T17:01:38.897193Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:38.897235Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-04-04T17:01:38.897292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-04T17:01:38.897349Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-04-04T17:01:38.897395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-04T17:01:38.897448Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 2/2, is published: true 2025-04-04T17:01:38.897553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:904:2723] message: TxId: 281474976715662 2025-04-04T17:01:38.897619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-04-04T17:01:38.897676Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-04T17:01:38.897725Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-04T17:01:38.897827Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-04-04T17:01:38.897872Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 
2025-04-04T17:01:38.897896Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-04-04T17:01:38.897995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-04T17:01:38.898032Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T17:01:38.901438Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:38.901589Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:904:2723] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-04-04T17:01:38.902075Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:919:2731], Recipient [3:408:2403]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:38.902124Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:38.902158Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-04T17:01:39.235071Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [3:1020:2810], serverId# [3:1021:2811], sessionId# [0:0:0] 2025-04-04T17:01:39.235301Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0w0g6y8ahyb18c87f0hknc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTEzMTVlZjAtNmYyZDBlZDktYjNkZTFmNDktZDEyNmM1NGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-04-04T17:01:39.631690Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0w0gj5aky93zrmq197xt97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjI3MjFhM2ItNDkzMmZmZGMtNjRhZTJmNGMtOGI5YjU2ZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18
>> KqpPg::TableDeleteAllData-useSink [GOOD]
>> KqpPg::PgUpdateCompoundKey+useSink
>> IncrementalBackup::E2EBackupCollection [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD]
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False
>> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD]
>> TGRpcYdbTest::CreateAlterCopyAndDropTable
>> TFlatTest::Mix_DML_DDL
>> KqpLimits::CancelAfterRwTx-useSink [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful
>> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD]
>> YdbYqlClient::TestYqlLongSessionMultipleErrors
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD]
>> TVersions::Wreck1 [GOOD]
>> TVersions::Wreck1Reverse

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::E2EBackupCollection [GOOD]
Test command err: 2025-04-04T17:01:15.006988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:15.007615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:15.007685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163e/r3tmp/tmppIznvg/pdisk_1.dat 2025-04-04T17:01:15.437106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:595:2519], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:15.437184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:15.437246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:15.437428Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:592:2517], Recipient [1:408:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-04T17:01:15.437466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:01:15.696591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-04T17:01:15.696946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:15.697243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:01:15.697528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:01:15.697606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:15.697725Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:15.700632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:15.700836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:01:15.700892Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:15.700931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:15.701155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:15.701220Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:15.701300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:15.701351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:01:15.701389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:15.701441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:15.701599Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:15.702180Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:15.702222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:15.702406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:15.702467Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:15.702536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:15.702608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:01:15.702645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:15.702748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:15.703165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:15.703202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:15.703295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:15.703329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:15.703365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:15.703413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:15.703466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:01:15.703497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:15.703537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:15.707263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:15.707975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:15.708025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-04T17:01:15.708175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:01:15.709156Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:600:2524], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2525] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:01:15.709207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:01:15.709243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-04T17:01:15.709341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-04T17:01:15.709664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:604:2527], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:15.709698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:15.709739Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:15.709852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:592:2517], Recipient [1:408:2403]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-04T17:01:15.709878Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:01:15.709934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:15.709976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-04T17:01:15.710007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:15.778854Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:408:2403]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-04T17:01:15.778990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T17:01:15.779040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:15.779105Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:01:15.779185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-04T17:01:15.828838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:15.829004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:15.845200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:15.935795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-04-04T17:01:15.936556Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:635:2543], 
Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:15.936608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:15.936648Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:15.936784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:408:2403]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-04T17:01:15.936818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:01:15.936905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:15.937072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:01:15.937147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationP ... ode 3 :TX_DATASHARD INFO: 72075186224037892 Sending notify to schemeshard 72057594046644480 txId 281474976715668 state Ready TxInFly 0 2025-04-04T17:01:43.707191Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:01:43.707523Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:1552:3215], Recipient [3:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:43.707567Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:43.707598Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:43.707782Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:1210:2936], Recipient [3:408:2403]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1210 RawX2: 12884904824 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-04T17:01:43.707813Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-04T17:01:43.707886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 1210 RawX2: 12884904824 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-04T17:01:43.707926Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715668, tablet: 72075186224037892, partId: 1 2025-04-04T17:01:43.708048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480, message: Source { RawX1: 1210 RawX2: 12884904824 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-04-04T17:01:43.708089Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-04T17:01:43.708193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 1210 RawX2: 12884904824 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 
2025-04-04T17:01:43.708245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715668:1, shardIdx: 72057594046644480:5, datashard: 72075186224037892, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:43.708277Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-04T17:01:43.708311Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715668:1, datashard: 72075186224037892, at schemeshard: 72057594046644480 2025-04-04T17:01:43.708348Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 129 -> 240 2025-04-04T17:01:43.708552Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 Constructed op# SrcTablePaths: "/Root/.backups/collections/MyCollection/19700101000002Z_incremental/Table" DstTablePath: "/Root/Table" SrcPathIds { OwnerId: 72057594046644480 LocalId: 15 } 2025-04-04T17:01:43.708715Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:43.709262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-04T17:01:43.709298Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:43.709330Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:1 2025-04-04T17:01:43.709401Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1210:2936] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-04T17:01:43.709527Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715668 datashard 72075186224037892 state Ready 2025-04-04T17:01:43.709583Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-04-04T17:01:43.709783Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:408:2403], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:43.709823Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:43.709871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-04T17:01:43.709933Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 ProgressState 2025-04-04T17:01:43.710086Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:43.710131Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-04-04T17:01:43.710174Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-04-04T17:01:43.710228Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715668, done: 1, blocked: 1 2025-04-04T17:01:43.710791Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715668:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: CopyTableBarrier }, 
at tablet# 72057594046644480 2025-04-04T17:01:43.710847Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 240 -> 240 2025-04-04T17:01:43.711052Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-04-04T17:01:43.711101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-04-04T17:01:43.711157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 1/2, is published: true 2025-04-04T17:01:43.711660Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:43.711723Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:0 2025-04-04T17:01:43.711877Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:408:2403], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:43.711914Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:43.711963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-04T17:01:43.712012Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:0 ProgressState 2025-04-04T17:01:43.712176Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:43.712209Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-04-04T17:01:43.712239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-04T17:01:43.712279Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-04-04T17:01:43.712305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-04T17:01:43.712334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 2/2, is published: true 2025-04-04T17:01:43.712411Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1428:3112] message: TxId: 281474976715668 2025-04-04T17:01:43.712471Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-04-04T17:01:43.712526Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-04-04T17:01:43.712570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-04-04T17:01:43.712727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 4 2025-04-04T17:01:43.712774Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-04-04T17:01:43.712821Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-04-04T17:01:43.712841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-04-04T17:01:43.712889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 3 2025-04-04T17:01:43.712913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-04-04T17:01:43.713462Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:43.713569Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1428:3112] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-04T17:01:43.713995Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1436:3119], Recipient [3:408:2403]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:43.714036Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:43.714066Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-04T17:01:43.739448Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1552:3215], Recipient [3:408:2403]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:43.739543Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:01:43.739590Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-04T17:01:43.842614Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:01:43.842735Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:01:43.842898Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:408:2403], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:01:43.842941Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:01:44.309763Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr0w0n6d3j5fy36f3kc18cwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzBjYTU1YmUtZmRlMDI2ODctNTQxMWI1MzgtYmNiNjJjMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }
>> YdbQueryService::TestCreateDropAttachSession [GOOD]
>> YdbQueryService::TestCreateAttachAndDropAttachedSession
>> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD]
>> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1]

------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD]
Test command err: Trying to start YDB, gRPC: 30140, MsgBus: 28554
2025-04-04T16:57:23.946831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503170762476762:2196];send_to=[0:7307199536658146131:7762515];
2025-04-04T16:57:23.947386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001639/r3tmp/tmp56lUWd/pdisk_1.dat
2025-04-04T16:57:25.027948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T16:57:25.028060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T16:57:25.029665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T16:57:25.053440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:57:25.093048Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 30140, node 1
2025-04-04T16:57:25.348681Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:57:25.348704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:57:25.348715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:57:25.348839Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28554
TClient is connected to server localhost:28554
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T16:57:26.280103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:26.293277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:26.318006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:28.931758Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503170762476762:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:28.931846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:57:29.264958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503196532281471:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.265093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.265671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503196532281483:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:57:29.270202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:57:29.303623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503196532281485:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:57:29.383019Z node 1 :TX_PROXY ERROR: Actor# [1:7489503196532281536:2613] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:57:30.112667Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-04-04T16:57:30.112714Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-04-04T16:57:30.156023Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503200827248880:2373], TxId: 281474976710661, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=Y2FkZDE1MWQtMmZlZGNhZmItYzFkM2Y3ZDQtNDdlZjNiMTA=. TraceId : 01jr0vrwdae788995bexkjftkc. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-mup5cka3jq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-04T16:57:30.108516Z }, code: 2029 }. 2025-04-04T16:57:30.159606Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489503200827248879:2372], TxId: 281474976710661, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0vrwdae788995bexkjftkc. SessionId : ydb://session/3?node_id=1&id=Y2FkZDE1MWQtMmZlZGNhZmItYzFkM2Y3ZDQtNDdlZjNiMTA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489503200827248868:2355], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-04T16:57:30.164173Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2FkZDE1MWQtMmZlZGNhZmItYzFkM2Y3ZDQtNDdlZjNiMTA=, ActorId: [1:7489503196532281443:2355], ActorState: ExecuteState, TraceId: 01jr0vrwdae788995bexkjftkc, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-mup5cka3jq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-04-04T16:57:30.108516Z } , code: 2029 Trying to start YDB, gRPC: 7176, MsgBus: 21020 2025-04-04T16:57:31.360711Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503201807626111:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001639/r3tmp/tmpDxXSYX/pdisk_1.dat 2025-04-04T16:57:31.487035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:57:31.691054Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:57:31.724874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:57:31.724978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:57:31.732010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7176, node 2 2025-04-04T16:57:31.950150Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:57:31.950176Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:57:31.950185Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:57:31.950339Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21020 TClient is connected to server localhost:21020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:57:33.011074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T16:57:33.022882Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:57:33.039473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:57:36.342399Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503201807626111:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:57:36.342476Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/ ... son: {
: Error: Terminate execution } 2025-04-04T17:01:31.995922Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504235972086811:5679], TxId: 281474976716033, task: 8. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. TraceId : 01jr0w08zqbk61x55d48dehx5b. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489504235972086797:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:31.996200Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504235972086806:5674], TxId: 281474976716033, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr0w08zqbk61x55d48dehx5b. SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489504235972086797:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:31.996544Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504235972086807:5675], TxId: 281474976716033, task: 4. Ctx: { TraceId : 01jr0w08zqbk61x55d48dehx5b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489504235972086797:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:31.996797Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504235972086804:5672], TxId: 281474976716033, task: 1. Ctx: { TraceId : 01jr0w08zqbk61x55d48dehx5b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489504235972086797:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:31.997002Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504235972086808:5676], TxId: 281474976716033, task: 5. Ctx: { TraceId : 01jr0w08zqbk61x55d48dehx5b. SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489504235972086797:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:31.997220Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504235972086809:5677], TxId: 281474976716033, task: 6. Ctx: { CustomerSuppliedId : . TraceId : 01jr0w08zqbk61x55d48dehx5b. SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489504235972086797:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:31.997425Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504235972086812:5680], TxId: 281474976716033, task: 9. Ctx: { SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CustomerSuppliedId : . TraceId : 01jr0w08zqbk61x55d48dehx5b. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489504235972086797:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:31.998383Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w08zqbk61x55d48dehx5b, Create QueryResponse for error on request, msg: 2025-04-04T17:01:32.490562Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w09fc0gehk7bpjh707mjj, Create QueryResponse for error on request, msg: 2025-04-04T17:01:33.077644Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0a1tbjfswthagz5tzge9, Create QueryResponse for error on request, msg: 2025-04-04T17:01:33.574794Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0ahb41062zejy0ey0crb, Create QueryResponse for error on request, msg: 2025-04-04T17:01:34.505494Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0be84akyemr5kv1svbe5, Create QueryResponse for error on request, msg: 2025-04-04T17:01:35.404399Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489504253151956382:2496] TxId: 281474976716041. Ctx: { TraceId: 01jr0w0cab302veqrzgba4cnd3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 479ms } {
: Error: Cancelling after 479ms during execution } ] 2025-04-04T17:01:35.404619Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504253151956390:5744], TxId: 281474976716041, task: 2. Ctx: { TraceId : 01jr0w0cab302veqrzgba4cnd3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489504253151956382:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:35.405034Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504253151956389:5743], TxId: 281474976716041, task: 1. Ctx: { TraceId : 01jr0w0cab302veqrzgba4cnd3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489504253151956382:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:35.405297Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504253151956396:5750], TxId: 281474976716041, task: 8. Ctx: { CustomerSuppliedId : . TraceId : 01jr0w0cab302veqrzgba4cnd3. SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489504253151956382:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:35.405386Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504253151956397:5751], TxId: 281474976716041, task: 9. Ctx: { SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CustomerSuppliedId : . TraceId : 01jr0w0cab302veqrzgba4cnd3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489504253151956382:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:35.405695Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489504253151956391:5745], TxId: 281474976716041, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr0w0cab302veqrzgba4cnd3. SessionId : ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489504253151956382:2496], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:01:35.405994Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489504253151956382:2496] TxId: 281474976716041. Ctx: { TraceId: 01jr0w0cab302veqrzgba4cnd3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-04-04T17:01:35.409055Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0cab302veqrzgba4cnd3, Create QueryResponse for error on request, msg: 2025-04-04T17:01:35.910904Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0ct5bm8tz4nts1eykaxx, Create QueryResponse for error on request, msg: 2025-04-04T17:01:36.718793Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0dk8fc0p71gryx6zhfwx, Create QueryResponse for error on request, msg: 2025-04-04T17:01:37.394719Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0e8gcadhrw4a7e42xfm3, Create QueryResponse for error on request, msg: 2025-04-04T17:01:38.058851Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0ex4dw1gx3y58e6pab1g, Create QueryResponse for error on request, msg: 2025-04-04T17:01:38.792040Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0fk96mj76c8ymd3404vm, Create QueryResponse for error on request, msg: 2025-04-04T17:01:39.482532Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0g939vann2303mv0pkhm, Create QueryResponse for error on request, msg: 2025-04-04T17:01:40.342712Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0h496tqnkm2s8h67149m, Create QueryResponse for error on request, msg: 2025-04-04T17:01:41.008853Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0hs0ac7qpdf5pafvdywv, Create QueryResponse for error on request, msg: 2025-04-04T17:01:41.907897Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0jn5f6vxbetcb97rtby1, Create QueryResponse for error on request, msg: 2025-04-04T17:01:42.561367Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, 
ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0k98aa2qbr4pfn4wmfma, Create QueryResponse for error on request, msg: 2025-04-04T17:01:43.234541Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTc3ODkyYjEtZjU2ZDA5OTEtNzNlYTMxZS1hMzMyYzk1Mw==, ActorId: [4:7489503793590439551:2496], ActorState: ExecuteState, TraceId: 01jr0w0kycf2crrr4p82gpwfd5, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b {0, 1} | 0 39 2050b {5, 7} + BTreeIndex{Empty, PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 
String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 
String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 8474b 40r} data 6832b + FlatIndex{15} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 7 0 1036b {0, 1} | 7 39 1036b {5, 7} + BTreeIndex{Empty, PageId: 7 RowCount: 40 DataSize: 1036 GroupDataSize: 7438 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{7} Label{74 rev 1, 1036b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | 
ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 ... ut Result: TEvPutResult {Id# [9437184:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:01:38.908261Z node 13 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:2:3:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:01:38.908445Z node 13 :TABLET_EXECUTOR DEBUG: Leader{9437184:2:4} commited cookie 1 for step 3 2025-04-04T17:01:38.909720Z node 14 :TABLET_MAIN DEBUG: Tablet: 9437184 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-04-04T17:01:38.929077Z node 14 :TABLET_MAIN DEBUG: TabletId# 9437184 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [9437184:2:3:0:0:69:0] Snap: 2:1 for 9437184 Marker# TRRH04 2025-04-04T17:01:38.929198Z node 14 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 9437184, id [9437184:2:3:0:0:69:0], refs: [[9437184:2:3:1:24576:72:0],] for 9437184 2025-04-04T17:01:38.951268Z node 14 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 9437184, id [9437184:2:1:0:0:72:0], refs: [[9437184:2:1:1:28672:35:0],] for 9437184 2025-04-04T17:01:38.951400Z node 14 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 9437184, id [9437184:2:2:0:0:71:0], refs: [[9437184:2:2:1:8192:58:0],] for 9437184 2025-04-04T17:01:38.952666Z node 14 :TABLET_MAIN DEBUG: TabletId# 9437184 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 2 from 1 with 3 steps Marker# TRRH09 2025-04-04T17:01:38.952736Z node 14 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[9437184:2:1:1:28672:35:0],] for 9437184 2025-04-04T17:01:38.952785Z node 14 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[9437184:2:2:1:8192:58:0],] for 9437184 2025-04-04T17:01:38.952841Z node 14 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[9437184:2:3:1:24576:72:0],] for 9437184, Gc+: [[9437184:2:3:1:24576:72:0],] 2025-04-04T17:01:38.953111Z node 14 :TABLET_MAIN DEBUG: Tablet: 9437184 TTablet::WriteZeroEntry. 
logid# [9437184:3:0:0:0:0:0] Marker# TSYS01 2025-04-04T17:01:38.955345Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [9437184:2:1:1:28672:35:0] } 2025-04-04T17:01:38.958737Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:-} sending TEvGet batch 58 bytes, 58 total, blobs: { [9437184:2:2:1:8192:58:0] } 2025-04-04T17:01:38.960051Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-04T17:01:38.962118Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:-} sending TEvGet batch 72 bytes, 72 total, blobs: { [9437184:2:3:1:24576:72:0] } 2025-04-04T17:01:39.174630Z node 13 :BS_VDISK_PUT ERROR: VDISK[0:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [9437184:2:4:0:0:41:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:01:39.174954Z node 13 :BS_PROXY_PUT ERROR: [681a4b8bba80a2fa] Result# TEvPutResult {Id# [9437184:2:4:0:0:41:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [0:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 0 Marker# BPP12 2025-04-04T17:01:39.175365Z node 13 :TABLET_MAIN ERROR: Tablet: 9437184 Type: Dummy, EReason: ReasonDemotedByBlobStorage, SuggestedGeneration: 0, KnownGeneration: 2, Details: Got VPutResult status# BLOCKED from VDiskId# [0:1:0:0:0] Marker# TSYS31 2025-04-04T17:01:39.175529Z node 13 :TABLET_EXECUTOR INFO: Leader{9437184:2:5} suiciding, Waste{2:0, 130b +(0, 0b), 4 trc, -0b acc} 2025-04-04T17:01:39.318714Z node 14 :TABLET_EXECUTOR INFO: Leader{9437184:3:0} activating executor 2025-04-04T17:01:39.319237Z node 14 :TABLET_EXECUTOR INFO: LSnap{9437184:3, on 3:1, 94b, wait} done, Waste{2:0, 130b +(0, 0b), 3 trc} 2025-04-04T17:01:39.319437Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema 2025-04-04T17:01:39.319503Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:01:39.319698Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit 2025-04-04T17:01:39.319764Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} took 4194304b of static mem, Memory{8388608 dyn 0} 2025-04-04T17:01:39.319867Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:01:39.319942Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInitSchema} release 4194304b of static, Memory{4194304 dyn 0} 2025-04-04T17:01:39.341399Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:01:39.341513Z node 14 :TABLET_EXECUTOR DEBUG: 
Leader{9437184:3:2} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatExecutorLeases::TLeasesTablet::TTxInit} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:01:39.341771Z node 14 :TABLET_MAIN INFO: Tablet: 9437184 Active! Generation: 3, Type: Dummy started in 501msec Marker# TSYS24 2025-04-04T17:01:39.345667Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:1:0:0:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:01:39.345886Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:1:1:28672:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:01:39.346012Z node 14 :TABLET_MAIN DEBUG: Tablet: 9437184 GcCollect 0 channel, tablet:gen:step => 3:0 Marker# TSYS28 2025-04-04T17:01:39.346128Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:2} commited cookie 2 for step 1 2025-04-04T17:01:39.429110Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:2:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-04T17:01:39.429352Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:3} commited cookie 8 for step 2 2025-04-04T17:01:39.503074Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:3:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-04T17:01:39.503290Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:4} commited cookie 8 for step 3 2025-04-04T17:01:39.657019Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:4:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-04T17:01:39.657264Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:5} commited cookie 8 for step 4 2025-04-04T17:01:39.716149Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:5:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-04T17:01:39.716414Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:6} commited cookie 8 for step 5 2025-04-04T17:01:39.779288Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:6:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-04T17:01:39.779479Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:7} commited cookie 8 for step 6 2025-04-04T17:01:39.851522Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [9437184:3:7:0:0:41:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999018} 2025-04-04T17:01:39.851789Z node 14 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:8} commited cookie 8 for step 7 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:01:40.422457Z 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.013 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting storage for BS group 2 00000.013 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:01:40.512491Z 00000.011 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.012 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.013 II| FAKE_ENV: Starting storage for BS group 2 00000.013 II| FAKE_ENV: Starting storage for BS group 3 00003.216 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00003.218 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00003.218 II| FAKE_ENV: Shut order, stopping 4 BS 
groups 00003.218 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00003.219 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211300b, 2026} 00003.219 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00003.219 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00003.219 II| FAKE_ENV: All BS storage groups are stopped 00003.219 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00003.219 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:01:43.745797Z 00000.011 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00001.965 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.966 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00001.967 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.967 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00001.967 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211282b, 2026} 00001.967 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.967 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.968 II| FAKE_ENV: All BS storage groups are stopped 00001.968 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.968 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-04-04T17:01:45.728761Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00002.371 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00002.373 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00002.373 II| FAKE_ENV: Shut order, stopping 4 BS groups 00002.373 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199545b, 2023} 00002.373 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211282b, 2026} 00002.374 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00002.374 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00002.374 II| FAKE_ENV: All BS storage groups are stopped 00002.374 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00002.374 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> TColumnShardTestReadWrite::WriteReadStandalone ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 28071, MsgBus: 19786 2025-04-04T17:00:45.419974Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504034994588105:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:45.423559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00156a/r3tmp/tmpWTDelE/pdisk_1.dat 2025-04-04T17:00:45.919208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:45.923746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:45.923863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:45.931095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28071, node 1 2025-04-04T17:00:46.086969Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:46.087001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:46.087008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:46.087135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19786 TClient is connected to server localhost:19786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:46.706022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:46.723808Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:00:46.735786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:46.927087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:47.139218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:47.237564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:49.014775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504052174458925:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.014898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.537242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.604956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.643727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.678078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.729185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.801463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.907750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504052174459444:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.907844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.908369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504052174459449:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.913398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:00:49.930549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504052174459451:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:00:50.039786Z node 1 :TX_PROXY ERROR: Actor# [1:7489504056469426807:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:50.414714Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504034994588105:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:50.414808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:51.653980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.753901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2457, MsgBus: 61088 2025-04-04T17:00:54.322769Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489504076548551234:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:54.322826Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00156a/r3tmp/tmpHzJiY0/pdisk_1.dat 2025-04-04T17:00:54.442187Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:54.495697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:54.497209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:54.499353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2457, node 2 2025-04-04T17:00:54.574956Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:54.574990Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:54.575003Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:54.575162Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61088 TClient is connected to server localhost:61088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:55.195221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:55.216547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:55.313610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497 ... -04-04T17:01:28.727827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:28.841239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:01:28.997979Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:01:29.216902Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:01:29.395390Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:01:29.525193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:01:29.672503Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489504226764767378:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:29.672696Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:29.673194Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489504226764767384:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:29.679202Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:01:29.707344Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489504226764767386:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:01:29.809871Z node 5 :TX_PROXY ERROR: Actor# [5:7489504226764767443:3482] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:32.282200Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:01:32.410911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23571, MsgBus: 17323 2025-04-04T17:01:36.884538Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489504254672746975:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00156a/r3tmp/tmpuEqhrq/pdisk_1.dat 2025-04-04T17:01:37.199517Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:01:37.293223Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:37.341188Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:37.346562Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:37.348479Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23571, node 6 2025-04-04T17:01:37.577738Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:37.577767Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:37.577779Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:37.578106Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17323 TClient is connected to server localhost:17323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:01:39.296256Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:39.330351Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:39.486099Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:01:39.983460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:01:40.189971Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:41.840586Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489504254672746975:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:41.840693Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:45.453015Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489504293327454270:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:45.453346Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:45.507291Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:45.600485Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:01:45.717660Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:01:45.783940Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:01:45.886082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:01:46.006137Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:01:46.147058Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489504297622422101:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:46.147170Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:46.147256Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489504297622422106:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:46.153903Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:01:46.185389Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489504297622422108:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:01:46.243863Z node 6 :TX_PROXY ERROR: Actor# [6:7489504297622422163:3471] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:49.123625Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:01:49.270647Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |81.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> YdbS3Internal::BadRequests >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command 
err: 2025-04-04T17:01:47.967343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504303389344500:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:47.967400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e90/r3tmp/tmpGWIRwy/pdisk_1.dat 2025-04-04T17:01:49.110437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:49.195139Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:49.263019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:49.266436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:49.274967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21802 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:01:50.020383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:50.087373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:50.714441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 waiting... 2025-04-04T17:01:50.774807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:50.840552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:50.892588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2025-04-04T17:01:20.173795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504187047332086:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:20.173984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ed/r3tmp/tmpJRSzs2/pdisk_1.dat 2025-04-04T17:01:20.800129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:20.800220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:20.815712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:20.874448Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13989, node 1 2025-04-04T17:01:21.273548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:21.273570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:21.273576Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:21.273690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:21.951578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
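Two of the YQL issues in the TFlatTest::OutOfDiskSpace output above are worth unpacking. Code 200400 is a resolve-time failure: the query names a column, value, that /dc-1/Table does not carry. Code 2028 is a SelectRow point read that binds only one of the table's two primary-key columns. A minimal YQL sketch of the first failure follows, under the assumption (not shown in the log) that the table's actual columns are Key1, Key2 and payload:

-- Naming a column the table does not have fails at resolve time,
-- matching "column 'value' not exist, code: 200400" above:
SELECT value FROM `/dc-1/Table`;

-- Selecting a column that does exist compiles and runs:
SELECT payload FROM `/dc-1/Table`;

-- The code 2028 counterpart is a point read that binds only Key1 where
-- the primary key is (Key1, Key2); binding the full key avoids it.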
2025-04-04T17:01:25.167462Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504187047332086:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:25.167530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:25.454286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504208522169469:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:25.454435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:25.749044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:26.021524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504212817136935:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:26.021616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:26.021862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504212817136940:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:26.025014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:01:26.063493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504212817136942:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:01:26.150895Z node 1 :TX_PROXY ERROR: Actor# [1:7489504212817137013:2820] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:26.191728Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489504212817137024:2357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:43: Error: Failed to convert type: Struct<'Key':String,'Value':String> to Struct<'Key':Uint32?,'Value':String?>
:2:43: Error: Failed to convert 'Key': String to Optional
:2:43: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:01:26.193214Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmE4ZTBlNTgtMjExYjYyYmQtOWVlYTI3MC02YzU2ZGQyMw==, ActorId: [1:7489504208522169444:2337], ActorState: ExecuteState, TraceId: 01jr0w03m4420qw8gpcbm924rm, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:01:28.618691Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504221972223148:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:28.618742Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ed/r3tmp/tmpdjq5lf/pdisk_1.dat 2025-04-04T17:01:29.107068Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:29.165824Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:29.165921Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:29.181633Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24108, node 4 2025-04-04T17:01:29.433094Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:29.433116Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:29.433130Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:29.433284Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:30.000907Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
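The compilation failure above (code 2031) is a type-annotation error out of KiWriteTable!: the statement supplies Struct<'Key':String,'Value':String> where the table schema expects Struct<'Key':Uint32?,'Value':String?>, and the inner step that actually fails is converting 'Key' from String to Optional<Uint32>. A minimal YQL sketch of the failing and the corrected write, assuming a hypothetical table /Root/Test declared with Key Uint32 and Value String:

-- Fails: "1" is a String literal, and String does not convert to Uint32?
-- ("Failed to convert input columns types to scheme types, code: 2031"):
UPSERT INTO `/Root/Test` (Key, Value) VALUES ("1", "one");

-- Compiles: 1u is a Uint32 literal, matching the scheme type of Key:
UPSERT INTO `/Root/Test` (Key, Value) VALUES (1u, "one");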
2025-04-04T17:01:33.610860Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489504221972223148:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:33.610915Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:35.421614Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504252036995313:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:35.421711Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:37.442789Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504259651880081:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:37.451009Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ed/r3tmp/tmpwwZ8Eu/pdisk_1.dat 2025-04-04T17:01:37.916557Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:38.033321Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:38.033402Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:38.056122Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23363, node 7 2025-04-04T17:01:38.339677Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:38.339702Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:38.339714Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:38.339855Z node 7 :NET_CLASSIFIER ERROR: got bad distributable con ... p:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489504259651880081:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:42.426946Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:43.898942Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504285421684952:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:43.899050Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:43.905783Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:44.103141Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504289716652416:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:44.103243Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:44.103689Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504289716652421:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:44.108023Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:01:44.186462Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489504289716652423:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:01:44.251632Z node 7 :TX_PROXY ERROR: Actor# [7:7489504289716652498:2814] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:44.461639Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w0n92cefvrv5kffdkz95c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTFhY2IyYjUtYzViOTZkOGMtMmFlNThiMS02MzQyZTk1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:01:44.552189Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7489504289716652561:2371], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:01:44.553697Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZTFhY2IyYjUtYzViOTZkOGMtMmFlNThiMS02MzQyZTk1Mw==, ActorId: [7:7489504285421684934:2339], ActorState: ExecuteState, TraceId: 01jr0w0npkb1qhs0jvsm58q7tg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:01:44.724426Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0w0nqy61zafpfd6gcey1ng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTFhY2IyYjUtYzViOTZkOGMtMmFlNThiMS02MzQyZTk1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:01:45.036441Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0w0nxbdn0mvtbrntjpxs67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTFhY2IyYjUtYzViOTZkOGMtMmFlNThiMS02MzQyZTk1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:01:47.389532Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504303610430284:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:47.389604Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ed/r3tmp/tmp3aCjH2/pdisk_1.dat 2025-04-04T17:01:47.808404Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:47.941681Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:47.941772Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:47.962793Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32495, node 10 2025-04-04T17:01:48.145945Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:48.145982Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:48.145991Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:48.146132Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20795 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:48.784660Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:52.392936Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504303610430284:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:52.393022Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:53.407049Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504329380235146:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:53.407183Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:53.430233Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:53.607932Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504329380235310:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:53.608026Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:53.608313Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504329380235315:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:53.613157Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:01:53.699851Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504329380235317:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:01:53.769237Z node 10 :TX_PROXY ERROR: Actor# [10:7489504329380235394:2810] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:53.811086Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7489504329380235405:2359], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:01:53.812953Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=OGE0MTc1NmYtNzc0ZDJjN2MtNWNlYjUxMzUtMzhkN2FmOWE=, ActorId: [10:7489504329380235136:2337], ActorState: ExecuteState, TraceId: 01jr0w0yj6frjx0yav8gmy3aqk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:01:53.868779Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7489504329380235430:2365], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:01:53.870645Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=OGE0MTc1NmYtNzc0ZDJjN2MtNWNlYjUxMzUtMzhkN2FmOWE=, ActorId: [10:7489504329380235136:2337], ActorState: ExecuteState, TraceId: 01jr0w0ysmfptd7h25683pz6r8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> TPQCompatTest::DiscoverTopics [GOOD] >> TPQCompatTest::SetupLockSession >> YdbOlapStore::LogLast50 [GOOD] >> YdbOlapStore::LogLast50ByResource >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> YdbLogStore::LogStore >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] 
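Both SCHEME_ERROR traces above (code 2003) come from statements that reference paths that were never created: the compiler resolves Root/BadTable1 and Root/BadTable2 before type-checking the write and stops as soon as the path lookup fails, which is exactly what a multiple-errors-per-session test wants to provoke. A sketch of the statement shape that produces it, with table names taken from the log and column names that are purely illustrative:

-- Neither path exists, so each statement fails at compile time with
-- SCHEME_ERROR / code 2003 and never reaches execution:
UPSERT INTO `/Root/BadTable1` (Key, Value) VALUES (1u, "x");
UPSERT INTO `/Root/BadTable2` (Key, Value) VALUES (2u, "y");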
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField >> YdbS3Internal::BadRequests [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |81.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbS3Internal::BadRequests [GOOD] Test command err: 2025-04-04T17:01:25.225472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504209824989057:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:25.225645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012e4/r3tmp/tmpFOwaEi/pdisk_1.dat 2025-04-04T17:01:26.315807Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:26.330761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:26.355366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:26.355467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:26.367797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32681, node 1 2025-04-04T17:01:26.886360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:26.886385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-04T17:01:26.886392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:26.886511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:27.555803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:30.363094Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504209824989057:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:30.363188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:30.954124Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson 2025-04-04T17:01:33.027171Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504243841948733:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:33.027536Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012e4/r3tmp/tmpPRDVzl/pdisk_1.dat 2025-04-04T17:01:33.708226Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:33.813166Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:33.813260Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:33.860360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16325, node 4 2025-04-04T17:01:34.231394Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:34.231421Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:34.231428Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:34.231572Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61981 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:35.531912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:38.028975Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489504243841948733:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:38.029061Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:41.458765Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504276194674434:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:41.459770Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012e4/r3tmp/tmpDACxKB/pdisk_1.dat 2025-04-04T17:01:41.717650Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:41.767219Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:41.767298Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:41.770551Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25907, node 7 2025-04-04T17:01:42.042838Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:42.042863Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:42.042873Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:42.043051Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4028 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:42.444041Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:48.422628Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504307163849902:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:48.430733Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012e4/r3tmp/tmpLpRh6u/pdisk_1.dat 2025-04-04T17:01:48.695461Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:48.776724Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:48.776847Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:48.780405Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24485, node 10 2025-04-04T17:01:49.010284Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:49.010311Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:49.010321Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:49.010479Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62229 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:49.427728Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:53.420255Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504307163849902:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:53.420355Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:55.603891Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:01:55.614234Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-04T17:01:55.617441Z node 10 :KQP_PROXY DEBUG: TraceId: "01jr0w0tp22p88hvxm7ev8km85", Request has 18445000287593.934230s seconds to be completed 2025-04-04T17:01:55.625892Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ= 2025-04-04T17:01:55.626021Z node 10 :KQP_PROXY DEBUG: TraceId: "01jr0w0tp22p88hvxm7ev8km85", Created new session, sessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, workerId: [10:7489504337228622067:2338], database: , longSession: 1, local sessions count: 1 2025-04-04T17:01:55.626072Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-04T17:01:55.626303Z node 10 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr0w0tp22p88hvxm7ev8km85 2025-04-04T17:01:55.627094Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, ActorId: [10:7489504337228622067:2338], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:01:55.627212Z node 10 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:01:55.627250Z node 10 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-04T17:01:55.627284Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:01:55.627338Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-04T17:01:55.627427Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:01:55.627492Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:01:55.641601Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:01:55.641667Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:01:55.641700Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:01:55.667190Z node 10 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, rpc ctrl: [10:7489504337228622083:2339], sameNode: 1, trace_id: 2025-04-04T17:01:55.667244Z node 10 :KQP_PROXY TRACE: Attach local session: [10:7489504337228622067:2338] to rpc: [10:7489504337228622083:2339] on same node 2025-04-04T17:01:55.698999Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, ActorId: [10:7489504337228622067:2338], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:01:55.699054Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, ActorId: [10:7489504337228622067:2338], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:01:55.699091Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, ActorId: [10:7489504337228622067:2338], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:01:55.699121Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, ActorId: [10:7489504337228622067:2338], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:01:55.699203Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, ActorId: [10:7489504337228622067:2338], ActorState: unknown state, Session actor destroyed 2025-04-04T17:01:55.699546Z node 10 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ=, workerId: [10:7489504337228622067:2338], local sessions count: 0 2025-04-04T17:01:55.711803Z node 10 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [10:7489504337228622086:2341], trace_id: 2025-04-04T17:01:55.711959Z node 10 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=10&id=NzZmMjM2MmQtZDk4MWJiMzktZTY5YWE0MGYtYTlkNDNiOGQ= 2025-04-04T17:01:55.712081Z node 10 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [10:7489504337228622086:2341], selfId: [10:7489504307163850122:2280], source: [10:7489504307163850122:2280] 2025-04-04T17:01:57.588395Z node 13 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504347236586001:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:57.588461Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012e4/r3tmp/tmpsQDUxl/pdisk_1.dat 2025-04-04T17:01:58.151706Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:58.221680Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:58.221791Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:58.226581Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13869, node 13 2025-04-04T17:01:58.602947Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:58.602975Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:58.602985Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:58.603146Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:58.904542Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:02.590453Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504347236586001:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:02.590551Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:02.746342Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:03.455013Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504373006392697:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:03.455103Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:03.456244Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504373006392709:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:03.461555Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:02:03.513922Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504373006392711:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:02:03.585358Z node 13 :TX_PROXY ERROR: Actor# [13:7489504373006392823:4257] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:04.283469Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w185wc447dhm0epjfy6s3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NzQ3M2FlNC01ZGRiYjMzYS1hYThhMjgxNS1mMWE2NWNiZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T16:55:19.748255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:19.748346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:19.748465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:19.748502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:19.748543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:19.748584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:19.748660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-04-04T16:55:19.748760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:19.749074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:19.828119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:19.828181Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:19.843084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:19.843533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:19.843688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:19.851484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:19.851675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:19.852359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:19.853062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:19.856597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:19.857949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:19.858043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:19.858282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:19.858352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:19.858403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:19.858547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:19.867285Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:20.043568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:20.043792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.044028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:20.044261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:20.044319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.046881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.047036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:20.047266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.047338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:20.047379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:20.047424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:20.049649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.049710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:20.049784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:20.051881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.051934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.051977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.052048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.055812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:20.057810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:20.058014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:20.059111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:20.059265Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:20.059311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.059621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:20.059672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:20.059847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:20.059920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:20.062208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:20.062288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:20.062475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:20.062520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:20.062960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:20.063008Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:20.063149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:20.063206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.063257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:20.063286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.063320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:20.063362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:20.063398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
tionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 
72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:58.047030Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:01:58.047486Z node 118 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 506us result status StatusSuccess 2025-04-04T17:01:58.048594Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 
1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:01:58.077774Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1088:2877] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T17:01:58.077906Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1047:2877] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T17:01:58.078093Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1088:2877] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743786117926010 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743786117926010 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743786117926010 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] }
2025-04-04T17:01:58.087966Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1088:2877] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5
2025-04-04T17:01:58.088152Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1047:2877] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD]
Test command err:
2025-04-04T17:01:55.233959Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-04T17:01:55.644507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-04T17:01:55.687030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-04T17:01:55.687353Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-04T17:01:55.711206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-04T17:01:55.711475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-04T17:01:55.712051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-04T17:01:55.712190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-04T17:01:55.712322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-04T17:01:55.712447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-04T17:01:55.712555Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:55.712687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:55.712807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:01:55.712916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:01:55.713038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:01:55.713136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:01:55.760716Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:01:55.760952Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:01:55.761029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:01:55.761214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:01:55.761421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:01:55.761484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:01:55.761533Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:01:55.761623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:01:55.761674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:01:55.761705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:01:55.761729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:01:55.761876Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:01:55.761922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:01:55.761948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:01:55.761967Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:01:55.762044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:01:55.762103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:01:55.762147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:01:55.762165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:01:55.762227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:01:55.762313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:01:55.762349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:01:55.762409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:01:55.762452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:01:55.762485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:01:55.763041Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2025-04-04T17:01:55.763167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T17:01:55.763246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-04T17:01:55.763341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-04T17:01:55.763570Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:01:55.763645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:01:55.763693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:01:55.763936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:01:55.763998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:01:55.764056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:01:55.764211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:01:55.764251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:01:55.764288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:01:55.764473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:01:55.764519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:01:55.764556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:01:55.764710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:01:55.764768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:01:55.764825Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.689447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:02:07.689617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T17:02:07.689714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T17:02:07.689882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-04T17:02:07.690085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.690228Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.691662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.692038Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:02:07.692201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.692350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.692433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:431:2446] finished for tablet 9437184 2025-04-04T17:02:07.692957Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:430:2445];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.026},{"events":["l_ProduceResults","f_Finish"],"t":0.029},{"events":["l_ack","l_processing","l_Finish"],"t":0.03}],"full":{"a":1743786127662432,"name":"_full_task","f":1743786127662432,"d_finished":0,"c":0,"l":1743786127692511,"d":30079},"events":[{"name":"bootstrap","f":1743786127662727,"d_finished":4108,"c":1,"l":1743786127666835,"d":4108},{"a":1743786127692005,"name":"ack","f":1743786127689067,"d_finished":2639,"c":1,"l":1743786127691706,"d":3145},{"a":1743786127691988,"name":"processing","f":1743786127666933,"d_finished":17760,"c":10,"l":1743786127691709,"d":18283},{"name":"ProduceResults","f":1743786127664858,"d_finished":13708,"c":13,"l":1743786127692390,"d":13708},{"a":1743786127692416,"name":"Finish","f":1743786127692416,"d_finished":0,"c":0,"l":1743786127692511,"d":95},{"name":"task_result","f":1743786127666954,"d_finished":14933,"c":9,"l":1743786127688859,"d":14933}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.693048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:02:07.693504Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:430:2445];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.026},{"events":["l_ProduceResults","f_Finish"],"t":0.029},{"events":["l_ack","l_processing","l_Finish"],"t":0.03}],"full":{"a":1743786127662432,"name":"_full_task","f":1743786127662432,"d_finished":0,"c":0,"l":1743786127693103,"d":30671},"events":[{"name":"bootstrap","f":1743786127662727,"d_finished":4108,"c":1,"l":1743786127666835,"d":4108},{"a":1743786127692005,"name":"ack","f":1743786127689067,"d_finished":2639,"c":1,"l":1743786127691706,"d":3737},{"a":1743786127691988,"name":"processing","f":1743786127666933,"d_finished":17760,"c":10,"l":1743786127691709,"d":18875},{"name":"ProduceResults","f":1743786127664858,"d_finished":13708,"c":13,"l":1743786127692390,"d":13708},{"a":1743786127692416,"name":"Finish","f":1743786127692416,"d_finished":0,"c":0,"l":1743786127693103,"d":687},{"name":"task_result","f":1743786127666954,"d_finished":14933,"c":9,"l":1743786127688859,"d":14933}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:02:07.693628Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:02:07.661735Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T17:02:07.693677Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:02:07.694034Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |81.9%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo]
>> TGRpcYdbTest::BeginTxRequestError [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD]
Test command err:
2025-04-04T17:01:19.048499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504184359241216:2205];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:01:19.048558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f1/r3tmp/tmpiHxI7s/pdisk_1.dat
2025-04-04T17:01:20.038764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:01:20.039346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:01:20.049137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:01:20.054691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:01:20.185755Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20411, node 1
2025-04-04T17:01:20.330795Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:01:20.330829Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:01:20.671110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:01:20.671138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:01:20.671145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:01:20.671282Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2358
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:21.411516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:21.569814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:25.844215Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504208265007240:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:25.854363Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f1/r3tmp/tmpST6PbF/pdisk_1.dat 2025-04-04T17:01:26.290543Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:26.390782Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:26.390880Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:26.404572Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23684, node 4 2025-04-04T17:01:26.782697Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:26.782733Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:26.782742Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:26.782901Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28143 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:27.262542Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:27.528311Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:27.704396Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:01:27.915879Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:01:28.076280Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:01:28.231969Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:01:28.866983Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:01:28.938980Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:01:29.015500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:01:35.698934Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504250852259575:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:35.699024Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f1/r3tmp/tmp53VawG/pdisk_1.dat 2025-04-04T17:01:36.698722Z node 7 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:01:36.744266Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:36.753141Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:36.753268Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:36.759547Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14731, node 7 2025-04-04T17:01:37.171149Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:37.171178Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:37.171185Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:37.171363Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:37.947844Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:01:38.148522Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:47.449519Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504301025815018:2176];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f1/r3tmp/tmp7gb7wp/pdisk_1.dat 2025-04-04T17:01:47.664015Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:01:47.945988Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:48.036344Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:48.036484Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:48.048326Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26433, node 10 2025-04-04T17:01:48.454690Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:48.454718Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:48.454730Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:48.454928Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22888 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:49.636354Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:01:49.788289Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:50.070855Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:01:50.821125Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-04-04T17:01:58.587146Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504350430616301:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:58.588971Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f1/r3tmp/tmpffK9th/pdisk_1.dat 2025-04-04T17:01:59.121643Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:59.199096Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:59.199220Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:59.208070Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2474, node 13 2025-04-04T17:01:59.503665Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:59.503693Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:59.503701Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:59.503848Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:00.495756Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:02:03.586868Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504350430616301:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:03.586948Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:08.540372Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504393380290474:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:08.540538Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:08.540891Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504393380290486:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:08.545828Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:02:08.597814Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504393380290488:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:02:08.660022Z node 13 :TX_PROXY ERROR: Actor# [13:7489504393380290587:2729] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:08.661550Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MWFlYWIzZWYtOTFjMjViMDEtYmI5NDYzYTEtMzkzMmU2YTk=, ActorId: [13:7489504393380290462:2341], ActorState: ExecuteState, TraceId: 01jr0w1d4m8sabrtpxqqf3ydp4, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-04T17:02:08.685526Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MWFlYWIzZWYtOTFjMjViMDEtYmI5NDYzYTEtMzkzMmU2YTk=, ActorId: [13:7489504393380290462:2341], ActorState: ExecuteState, TraceId: 01jr0w1d9bcz1y6wwzah1hqhqy, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-04-04T17:02:08.693225Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MWFlYWIzZWYtOTFjMjViMDEtYmI5NDYzYTEtMzkzMmU2YTk=, ActorId: [13:7489504393380290462:2341], ActorState: ExecuteState, TraceId: 01jr0w1d9m10rtdta7xgywthce, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> TLocksTest::CK_Range_BrokenLock >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> TColumnShardTestReadWrite::WriteExoticTypes >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |82.0%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> TTableProfileTests::UseTableProfilePreset >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug1 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> THeavyPerfTest::TTestLoadEverything [GOOD] >> THiveImplTest::BootQueueSpeed >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 7248, MsgBus: 13215 2025-04-04T16:56:53.280380Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503041661373059:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:53.280428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c45/r3tmp/tmpjbD9bB/pdisk_1.dat 2025-04-04T16:56:53.622218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:53.622350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:53.624405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7248, node 1 2025-04-04T16:56:53.655391Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:53.655566Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:56:53.659109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:56:53.734921Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:53.734955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:53.734963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:53.735103Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13215 TClient is connected to server localhost:13215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:56:54.467793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:54.510750Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2025-04-04T16:56:57.163085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-04-04T16:56:57.354885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503058841242788:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:57.355026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:57.355505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503058841242800:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:57.360008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:56:57.373282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503058841242802:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:56:57.471935Z node 1 :TX_PROXY ERROR: Actor# [1:7489503058841242854:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2025-04-04T16:56:57.915191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:56:57.980538Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 2025-04-04T16:56:58.290991Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503041661373059:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:58.291395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 21 2025-04-04T16:56:58.505834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-04T16:56:58.573415Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2025-04-04T16:56:59.013951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2025-04-04T16:56:59.744977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T16:56:59.797590Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2025-04-04T16:57:00.337124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 
72057594046644480 2025-04-04T16:57:00.408760Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2025-04-04T16:57:01.209103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-04T16:57:01.285268Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float8, '0.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float8, '1.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float8, '2.5'::float8] ); 25 2025-04-04T16:57:01.941364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-04T16:57:02.070623Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '0'::int2, ARRAY ['text 0'::text, 'text 0'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '1'::int2, ARRAY ['text 1'::text, 'text 1'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '2'::int2, ARRAY ['text 2'::text, 'text 2'::text] ); 1042 2025-04-04T16:57:02.828486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-04T16:57:02.928238Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '0'::int2, ARRAY ['bpchar 0'::bpchar, 'bpchar 0'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '1'::int2, ARRAY ['bpchar 1'::bpchar, 'bpchar 1'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '2'::int2, ARRAY ['bpchar 2'::bpchar, 'bpchar 2'::bpchar] ); 1043 2025-04-04T16:57:03.603106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is ... rsion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:46.972891Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:01:50.413446Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7489504294013855170:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:50.413550Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:01:56.224589Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7489504341258495974:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:56.224751Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:56.261480Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:01:56.473851Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7489504341258496080:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:56.474032Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:56.482750Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7489504341258496085:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:01:56.490057Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:01:56.585726Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7489504341258496087:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:01:56.666809Z node 9 :TX_PROXY ERROR: Actor# [9:7489504341258496142:2419] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:01:56.958207Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7489504341258496188:2368], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-04-04T17:01:56.961196Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YWQ3NjA0NzQtNjI1MTUzOTQtNTFmOTliNWEtYzI2YTAyMzY=, ActorId: [9:7489504341258496181:2364], ActorState: ExecuteState, TraceId: 01jr0w11s4d732cxme85wntagh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:01:57.019964Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 1995, MsgBus: 5166 2025-04-04T17:02:02.836097Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504368234361595:2173];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:03.007979Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c45/r3tmp/tmpXP24r9/pdisk_1.dat 2025-04-04T17:02:03.525717Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:03.716862Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:03.717042Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:03.721943Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1995, node 10 2025-04-04T17:02:04.127164Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:04.127201Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:04.127234Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:04.127469Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5166 TClient is connected to server localhost:5166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:05.504418Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:02:05.540211Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:02:07.813351Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504368234361595:2173];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:07.813459Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:14.947537Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504419773969723:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:14.947718Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:15.052287Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:15.277668Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504424068937126:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:15.277895Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:15.278626Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504424068937131:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:15.297147Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:02:15.379604Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504424068937133:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:02:15.456635Z node 10 :TX_PROXY ERROR: Actor# [10:7489504424068937185:2425] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:16.407819Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7489504428363904557:2384], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-04-04T17:02:16.409723Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZTUyOTk2NGYtN2Q0NTY1MGYtYWNkZjAxYWQtMWFmYWE4NDg=, ActorId: [10:7489504428363904550:2380], ActorState: ExecuteState, TraceId: 01jr0w1ms6ber45qvmpap3f3t5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:02:16.440926Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:18.520225Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:02:18.520280Z node 10 :IMPORT WARN: Table profiles were not loaded >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-04-04T17:02:15.583186Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:02:15.785007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:02:15.865934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:02:15.866224Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:02:15.874742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:02:15.875006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:02:15.875329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:02:15.875464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:02:15.875587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:02:15.875701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:02:15.875816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:02:15.875947Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:02:15.876091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:02:15.876226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:02:15.876326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:02:15.876425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:02:15.906140Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:02:15.906435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:02:15.906509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:02:15.906712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:15.906916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:02:15.906987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:02:15.907046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:02:15.907139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:02:15.907196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:02:15.907234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:02:15.907270Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:02:15.907459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:15.907515Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:02:15.907555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:02:15.907583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:02:15.907669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:02:15.907741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:02:15.907787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:02:15.907828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:02:15.907900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:02:15.907938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:02:15.907984Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:02:15.908030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:02:15.908069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:02:15.908106Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:02:15.908516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-04T17:02:15.908620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-04-04T17:02:15.908705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-04T17:02:15.908774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-04T17:02:15.908973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:02:15.909039Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:02:15.909077Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:02:15.909274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:02:15.909323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:02:15.909359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:02:15.909508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:02:15.909548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:02:15.909585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:02:15.909774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:02:15.909821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:02:15.909865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:02:15.909976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:02:15.910017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:02:15.910060Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
:[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-04T17:02:23.330050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> TColumnShardTestReadWrite::CompactionInGranule_PKString |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |82.0%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> YdbScripting::Params |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |82.0%| [LD] 
{RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut >> BuildStatsHistogram::Many_Serial [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> TTableProfileTests::UseTableProfilePreset [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> YdbTableBulkUpsertOlap::UpsertCsvBug >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> TPQCompatTest::SetupLockSession [GOOD] >> TPQCompatTest::BadTopics >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] 
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> TFlatTest::MiniKQLRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 2106459 49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000 3547100 49916 64 44 Expected: 24000 3547100 49916 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 Expected: 24000 4054050 29361 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: 
(159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = (400354, 133459) value = 120047 (actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 (actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 
(actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% 
(actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) ... 140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) 
(318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) [0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) 
(328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) (330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = 
(33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::DataValidation >> YdbYqlClient::TestDecimal1 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug2 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbOlapStore::LogGrepNonExisting >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |82.1%| [LD] {RESULT} 
$(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> TFlatTest::MergeEmptyAndWrite [GOOD] >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::WrongTableProfile |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |82.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbTableBulkUpsert::DecimalPK >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |82.1%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> PgCatalog::CheckSetConfig [FAIL] >> PgCatalog::PgDatabase+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-04-04T17:02:32.960677Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504495885575234:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:32.961046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8e/r3tmp/tmpSsTdjp/pdisk_1.dat 2025-04-04T17:02:33.882883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:33.882995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:33.887615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:02:33.945256Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:33.962182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:8654 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:34.362346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:02:34.417096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:38.601114Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489504522914188753:2124];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:38.601152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8e/r3tmp/tmpc3nWIT/pdisk_1.dat 2025-04-04T17:02:39.007544Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:39.020522Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:39.020631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:39.027132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25094 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:39.493464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:39.514325Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:39.535248Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:02:39.554818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:39.780368Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:02:39.780988Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:02:39.828741Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:02:39.881924Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786159752 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-04-04T17:02:39.920884Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:02:39.986002Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:39.986381Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:02:39.988388Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:39.989338Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:02:39.990177Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-04T17:02:39.991310Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:39.991507Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:02:39.992103Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-04T17:02:39.993005Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:39.996519Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:02:39.997223Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-04T17:02:39.998172Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:39.998426Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:02:39.999066Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-04T17:02:39.999976Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:40.000191Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:02:40.000722Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-04T17:02:40.001520Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:40.002786Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:02:40.003399Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-04T17:02:40.004238Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:40.004458Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:02:40.005216Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-04T17:02:40.006992Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:40.007201Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:02:40.007834Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-04T17:02:40.008661Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:40.009532Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:02:40.010175Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-04T17:02:40.011102Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:40.011261Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:02:40.011837Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037889 restored its data 2025-04-04T17:02:40.013538Z node 2 
:TX_DATASHARD DEBUG: tx 281474976710676 released its data 2025-04-04T17:02:40.013720Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:02:40.014329Z node 2 :TX_DATASHARD DEBUG: tx 281474976710676 at 72075186224037888 restored its data 2025-04-04T17:02:40.015248Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:02:40.015287Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:02:40.015540Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:02:40.016023Z n ... ate 0 planned 0 2025-04-04T17:02:40.563212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710687 2025-04-04T17:02:40.563218Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710687 2025-04-04T17:02:40.563228Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710687, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:02:40.563260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:02:40.563298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710687, ready parts: 0/1, is published: true 2025-04-04T17:02:40.564168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710687 2025-04-04T17:02:40.564240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710687 2025-04-04T17:02:40.564273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710687 2025-04-04T17:02:40.564700Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1743786160606} 2025-04-04T17:02:40.564740Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:02:40.576140Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T17:02:40.576309Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:02:40.576372Z node 2 :TX_DATASHARD DEBUG: Complete [1743786160606 : 281474976710687] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7489504522914189027:2140], exec latency: 0 ms, propose latency: 13 ms 2025-04-04T17:02:40.576413Z node 2 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976710687 state PreOffline TxInFly 0 2025-04-04T17:02:40.576472Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:02:40.576857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1743786160606 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 13 DomainCoordinators: 72057594046316545 TxStats { 
PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 1045 } } 2025-04-04T17:02:40.576901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710687, tablet: 72075186224037890, partId: 0 2025-04-04T17:02:40.577129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1743786160606 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 13 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 1045 } } 2025-04-04T17:02:40.577205Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1743786160606 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 13 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 1045 } } 2025-04-04T17:02:40.577883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7489504531504124217 RawX2: 4503608217307446 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2025-04-04T17:02:40.577902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710687, tablet: 72075186224037890, partId: 0 2025-04-04T17:02:40.578024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7489504531504124217 RawX2: 4503608217307446 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2025-04-04T17:02:40.578050Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-04T17:02:40.578120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976710687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7489504531504124217 RawX2: 4503608217307446 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2025-04-04T17:02:40.578158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-04T17:02:40.578175Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:02:40.578190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-04-04T17:02:40.578211Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710687:0 129 -> 240 2025-04-04T17:02:40.578525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:02:40.578877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:02:40.579003Z 
node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710687 datashard 72075186224037890 state PreOffline 2025-04-04T17:02:40.579006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:02:40.579036Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710687:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:02:40.579055Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-04T17:02:40.579347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:02:40.579476Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710687:0 progress is 1/1 2025-04-04T17:02:40.579493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2025-04-04T17:02:40.579513Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710687:0 progress is 1/1 2025-04-04T17:02:40.579522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2025-04-04T17:02:40.579536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710687, ready parts: 1/1, is published: true 2025-04-04T17:02:40.579586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7489504531504124460:2394] message: TxId: 281474976710687 2025-04-04T17:02:40.579622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2025-04-04T17:02:40.579640Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710687:0 2025-04-04T17:02:40.579662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710687:0 2025-04-04T17:02:40.579729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:02:40.580501Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:02:40.580613Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-04T17:02:40.582114Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-04T17:02:40.582545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489504531504124217 RawX2: 4503608217307446 } TabletId: 72075186224037890 State: 4 2025-04-04T17:02:40.582601Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:02:40.582896Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-04T17:02:40.582912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: 2025-04-04T17:02:40.586741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 
ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-04T17:02:40.586977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:02:40.587205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:02:40.588968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:02:40.589081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:02:40.589623Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-04T17:02:40.595081Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-04T17:02:40.597498Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T17:02:40.597601Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-04T17:02:40.597984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:02:40.598042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:02:40.598138Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.1%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |82.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> EvWrite::WriteWithSplit >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD] >> YdbYqlClient::SimpleColumnFamilies |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |82.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> YdbYqlClient::RetryOperationAsync >> YdbTableBulkUpsert::DecimalPK [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |82.2%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> TTableProfileTests::WrongTableProfile [GOOD] >> TYqlDateTimeTests::DateKey >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::DescribeTableOptions >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.3%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> YdbTableBulkUpsertOlap::UpsertMixed [GOOD] >> YdbYqlClient::AlterTableAddIndex >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug3 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore >> TPQCompatTest::BadTopics [GOOD] >> TPQCompatTest::CommitOffsets |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> YdbYqlClient::TestDecimalFullStack [GOOD] >> YdbYqlClient::TestDescribeDirectory |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |82.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2025-04-04T17:02:29.282640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504482702187441:2279];send_to=[0:7307199536658146131:7762515]; 
2025-04-04T17:02:29.282797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c5/r3tmp/tmpGRoYXo/pdisk_1.dat 2025-04-04T17:02:30.251066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:30.251194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:30.256802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:02:30.326518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:02:30.361417Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31616, node 1 2025-04-04T17:02:30.392644Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:02:30.406744Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:02:30.714464Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:30.714491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:30.714499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:30.714632Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:31.060277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:34.270360Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504482702187441:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:34.270414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:34.383957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504504177024775:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:34.384068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:34.850085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:35.022907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504508471992261:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:35.023101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:35.023696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504508471992266:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:35.028448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:02:35.062967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504508471992268:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:02:35.173471Z node 1 :TX_PROXY ERROR: Actor# [1:7489504508471992343:2826] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:35.298682Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w239fe32t9qt64y1a6hqn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQyNDdhNzQtZmI4ODE4NTktOWI1MmY5OWMtYjUzMjM1YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:02:35.494735Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0w27bsc9ha2qyxdsd78ypr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGFlNTFhODItNTY0YTVkZTAtYzU1MmRkM2EtOGQ3Y2IzMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:02:35.516069Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786155531, txId: 281474976710662] shutting down 2025-04-04T17:02:37.383705Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504515833984683:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:37.383761Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c5/r3tmp/tmpFuIRqW/pdisk_1.dat 2025-04-04T17:02:37.764202Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:37.780346Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:37.780457Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:37.793053Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31426, node 4 2025-04-04T17:02:38.001209Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:38.001236Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:38.001246Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:38.001405Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8998 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:38.384395Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:41.793776Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:31426 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid DyNumber string representation 2025-04-04T17:02:42.382484Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489504515833984683:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:42.382555Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c5/r3tmp/tmp1RzK3X/pdisk_1.dat 2025-04-04T17:02:44.687325Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:02:44.813793Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:44.864498Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:44.864590Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:44.876549Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13069, node 7 2025-04-04T17:02:44.983818Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:44.983844Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:44.983850Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:44.984002Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:45.432093Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:49.105031Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:49.360735Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504569652159904:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:49.360864Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:49.361355Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504569652159916:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:49.370402Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:02:49.436050Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489504569652159918:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:02:49.534537Z node 7 :TX_PROXY ERROR: Actor# [7:7489504569652159992:2803] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:49.962689Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w2n0ebkq4g1jccmrakpqv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZjUxYjhlYTYtOTMwYzRjZWYtYzg3NzJiYzAtMzlhNTliZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:02:52.262471Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504582131907725:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:52.262580Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c5/r3tmp/tmpelGIWv/pdisk_1.dat 2025-04-04T17:02:52.604046Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:52.657377Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:52.657494Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:52.663814Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1196, node 10 2025-04-04T17:02:52.801541Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:52.801598Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:52.801615Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:52.801786Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:53.135378Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:02:56.353771Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8'Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable'unknown table 2025-04-04T17:02:58.396949Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504607236381627:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:58.397504Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c5/r3tmp/tmp28tFcS/pdisk_1.dat 2025-04-04T17:02:58.751591Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:58.772175Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:58.772279Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:58.782054Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23061, node 13 2025-04-04T17:02:59.058988Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:59.059012Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:59.059021Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:59.059199Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:59.340059Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:02.421703Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:02.764824Z node 13 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable'unknown table >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2025-04-04T17:01:56.669315Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504341162138671:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:56.669363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012db/r3tmp/tmp3EP62A/pdisk_1.dat 2025-04-04T17:01:57.752027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:57.776009Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:57.791522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:57.791630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:57.795069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31893, node 1 2025-04-04T17:01:58.188426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:58.188449Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:58.188458Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:58.188566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:59.083410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:26095 2025-04-04T17:01:59.522735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:59.699416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:01:59.746491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:01:59.746813Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-04-04T17:01:59.870870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:59.871176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:59.871566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:01:59.871719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:01:59.871834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:01:59.871968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:01:59.872083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:59.872191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:59.872949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:01:59.873201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:01:59.873329Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:01:59.873434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504354047041443:2321];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:01:59.889294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:01:59.927128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:01:59.927309Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-04-04T17:01:59.939960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:59.940058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:59.940396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:01:59.940529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:01:59.940663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:01:59.940810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:01:59.940949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:59.941097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:59.941225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:01:59.941331Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:01:59.941438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:01:59.941564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504354047041465:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:01:59.944995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7489504354047041445:2323];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:01:59.981630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7489504354047041445:2323];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:01:59.981822Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-04-04T17:01:59.991011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504354047041445:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:59.991092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504354047041445:2323];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:59.991420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504354047041445:2323];tablet_ ... 
2057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:03:00.248923Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:03:00.248951Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:03:00.249082Z node 13 :TX_DATASHARD DEBUG: Found ready operation [1743786180276:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-04T17:03:00.249449Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743786180276:281474976710658 keys extracted: 0 2025-04-04T17:03:00.249625Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:03:00.249790Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:03:00.249862Z node 13 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:03:00.250685Z node 13 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:03:00.251156Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:03:00.253000Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743786180275 2025-04-04T17:03:00.253028Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:03:00.254326Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743786180283 2025-04-04T17:03:00.255020Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743786180276} 2025-04-04T17:03:00.255085Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:03:00.255151Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:03:00.255180Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:03:00.255210Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:03:00.255280Z node 13 :TX_DATASHARD DEBUG: Complete [1743786180276 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7489504583613620181:2206], exec latency: 1 ms, propose latency: 5 ms 2025-04-04T17:03:00.255319Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710658 state Ready TxInFly 0 2025-04-04T17:03:00.255378Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:03:00.260473Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710658 datashard 72075186224037888 state Ready 2025-04-04T17:03:00.260658Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-04-04T17:03:00.295867Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# 
[13:7489504617973359473:2765], serverId# [13:7489504617973359474:2766], sessionId# [0:0:0] 2025-04-04T17:03:00.296020Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-04T17:03:00.307657Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-04T17:03:00.307724Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 SUCCESS Upsert done: 0.036557s 2025-04-04T17:03:00.326474Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504617973359482:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:00.326622Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:00.326960Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504617973359493:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:00.331630Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:00.347411Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:03:00.373210Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:03:00.381444Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504617973359496:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:00.451559Z node 13 :TX_PROXY ERROR: Actor# [13:7489504617973359571:2830] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:00.594028Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:03:00.594165Z node 13 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710661 at tablet 72075186224037888 2025-04-04T17:03:00.603337Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:03:00.608745Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710661 at step 1743786180647 at tablet 72075186224037888 { Transactions { TxId: 281474976710661 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786180647 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:03:00.608781Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:03:00.608937Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:03:00.608958Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:03:00.608984Z node 13 :TX_DATASHARD DEBUG: Found ready operation [1743786180647:281474976710661] in PlanQueue unit at 72075186224037888 2025-04-04T17:03:00.609169Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743786180647:281474976710661 keys extracted: 0 2025-04-04T17:03:00.609519Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:03:00.617463Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743786180647} 2025-04-04T17:03:00.617532Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:03:00.617589Z node 13 :TX_DATASHARD DEBUG: Complete [1743786180647 : 281474976710661] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7489504617973359598:2844], exec latency: 0 ms, propose latency: 8 ms 2025-04-04T17:03:00.617616Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:03:00.621993Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0w2zq03pmhf0wqjnp53x66, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ODE3OWEyOWQtOWNjNjcwZTQtM2ZmOTZmZGUtMmNmNjE4YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:00.635521Z node 13 :TX_DATASHARD INFO: Start scan, at: [13:7489504617973359628:2185], tablet: [13:7489504617973359368:2347], scanId: 4, table: /Root/LogsX, gen: 1, deadline: 2025-04-04T17:13:00.635147Z 2025-04-04T17:03:00.637344Z node 13 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [13:7489504617973359628:2185], scanId: 4, table: /Root/LogsX, gen: 1, tablet: [13:7489504617973359368:2347], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2025-04-04T17:03:00.637373Z node 13 :TX_DATASHARD DEBUG: Wakeup driver at: [13:7489504617973359628:2185] 2025-04-04T17:03:00.638864Z node 13 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. 
table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2025-04-04T17:03:00.638894Z node 13 :TX_DATASHARD DEBUG: TableRanges is over, at: [13:7489504617973359628:2185], scanId: 4, table: /Root/LogsX 2025-04-04T17:03:00.638941Z node 13 :TX_DATASHARD DEBUG: Finish scan, at: [13:7489504617973359628:2185], scanId: 4, table: /Root/LogsX, reason: 0, abortEvent: 2025-04-04T17:03:00.638983Z node 13 :TX_DATASHARD DEBUG: Send ScanData, from: [13:7489504617973359628:2185], to: [13:7489504617973359624:2366], scanId: 4, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2025-04-04T17:03:00.639754Z node 13 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:03:00.639875Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:03:00.639896Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:03:00.639922Z node 13 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:03:00.639988Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:03:00.654952Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786180647, txId: 281474976710661] shutting down 2025-04-04T17:03:02.034887Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0w302861qz4n386eje86fh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTk4YjhjMGItZjZlYmZmOWYtOWIwNTY5NTUtM2YyNzNjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8016;columns=9; 2025-04-04T17:03:02.171148Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-04-04T17:03:02.208612Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::QueryLimits >> TTableProfileTests::DescribeTableOptions [GOOD] |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> YdbTableBulkUpsert::ValidRetry >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD] Test command err: 2025-04-04T17:02:25.336581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504467838762513:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:25.336647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012cf/r3tmp/tmpOZwESK/pdisk_1.dat 2025-04-04T17:02:26.155111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:26.160657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:26.160775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:26.172433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23362, node 1 2025-04-04T17:02:26.496357Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:26.496389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:26.496401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:26.496528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:26.944856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743793346052678 Nodes { NodeId: 1024 Host: "localhost" Port: 16700 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743793346052678 } Nodes { NodeId: 1 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 2 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 3 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-04T17:02:32.185597Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504497928641036:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:32.222405Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012cf/r3tmp/tmpv5RKpD/pdisk_1.dat 2025-04-04T17:02:32.523995Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:32.563201Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:32.563286Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:32.566967Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11524, node 4 2025-04-04T17:02:32.880580Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:32.880600Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:32.880605Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:32.880719Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:4128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:33.175693Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743793352508772 Nodes { NodeId: 1024 Host: "localhost" Port: 8773 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743793352508772 } Nodes { NodeId: 4 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 5 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 6 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-04T17:02:39.330603Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504525232957593:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:39.330651Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012cf/r3tmp/tmpg8XZEw/pdisk_1.dat 2025-04-04T17:02:39.684925Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:39.720459Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:39.720564Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:39.731525Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13055, node 7 2025-04-04T17:02:40.008671Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:40.008712Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:40.008718Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:40.008851Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:13687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:40.415354Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:13687 2025-04-04T17:02:40.828493Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:40.885746Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:41.397630Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489504535087465619:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:41.398627Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:02:41.460658Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:41.460744Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:41.471124Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-04T17:02:41.475182Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13687 2025-04-04T17:02:43.804061Z node 7 :FLAT_TX_SCHEMESHARD WARN: ... napshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:46.461514Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:7489504556562303232:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:02:47.955566Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504560620039211:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:47.955680Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012cf/r3tmp/tmpEQV2hx/pdisk_1.dat 2025-04-04T17:02:48.154622Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:48.204113Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:48.204245Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:48.209688Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20328, node 10 2025-04-04T17:02:48.371836Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:48.371865Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:48.371876Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:48.372034Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:48.804611Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:2547 2025-04-04T17:02:49.705830Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:49.779679Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
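The KQP_WORKLOAD_SERVICE warnings in this and the previous test block ("Resource pool default not found", "Retry LookupError for table .metadata/workload_manager/delayed_requests") are bring-up noise: the workload manager creates its metadata tables and the default resource pool lazily on first use and retries until they exist. A minimal client-side sketch of the same retry pattern, assuming the official ydb Python SDK and placeholder connection settings (none of these names come from the log):

    import ydb

    # Placeholder endpoint/database -- illustrative only.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def first_query(session):
        # Early queries can race with lazy creation of the workload-manager
        # metadata; the SDK retry helper re-runs the callee on transient
        # statuses (UNAVAILABLE, OVERLOADED, ABORTED, ...).
        return session.transaction().execute("SELECT 1;", commit_tx=True)

    result = pool.retry_operation_sync(first_query)
    pool.stop()
    driver.stop()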
2025-04-04T17:02:50.310866Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7489504573878646060:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:50.311005Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:02:50.408703Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:50.408831Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:50.412519Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-04T17:02:50.419128Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2547 2025-04-04T17:02:51.250507Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2547 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743786171590 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ke... 
(TRUNCATED) 2025-04-04T17:02:52.148143Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-04T17:02:52.149154Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:02:55.310919Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7489504573878646060:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:55.311007Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:58.361696Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504607719910244:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:58.361744Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012cf/r3tmp/tmpePgoHN/pdisk_1.dat 2025-04-04T17:02:58.578864Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:58.638974Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:58.639093Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:58.656028Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30927, node 13 2025-04-04T17:02:58.947150Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:58.947191Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:58.947205Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:58.947438Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:59.624825Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
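The (TRUNCATED) Ls dump above is the schema entry that TTableProfileTests::DescribeTableOptions inspects for table-1. Roughly the same information can be fetched through the ydb Python SDK's describe_table call; a minimal sketch with a placeholder endpoint (the database/table paths mirror the log, the endpoint does not):

    import ydb

    # Placeholder endpoint; paths echo the test above.
    driver = ydb.Driver(endpoint="grpc://localhost:2136",
                        database="/Root/ydb_ut_tenant")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def describe(session):
        desc = session.describe_table("/Root/ydb_ut_tenant/table-1")
        for column in desc.columns:
            print(column.name, column.type)  # e.g. key1 Utf8?, key2 Uint64?
        print("primary key:", desc.primary_key)
        return desc

    pool.retry_operation_sync(describe)
    pool.stop()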
TClient is connected to server localhost:22054 2025-04-04T17:03:00.137529Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:00.186076Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:00.692542Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7489504618570998306:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:00.692605Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:03:00.732442Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:00.732554Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:00.754549Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-04T17:03:00.769448Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22054 2025-04-04T17:03:01.735497Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-04T17:03:01.736040Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:03:05.693800Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7489504618570998306:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:05.693909Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> TYqlDateTimeTests::DateKey [GOOD] >> TYqlDateTimeTests::DatetimeKey |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |82.4%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index >> TResourceBroker::TestRealUsage >> TLocksTest::CK_Range_BrokenLockInf [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> YdbYqlClient::TestDescribeDirectory [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> YdbLogStore::LogStore [GOOD] >> YdbLogStore::LogStoreNegative >> Normalizers::ColumnChunkNormalizer >> TResourceBroker::TestRandomQueue [GOOD] >> TFlatTest::LargeProxyReply >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-04-04T17:03:16.205481Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-8 (8 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.205600Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-11 (11 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.205640Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-12 (12 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.205779Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-17 (17 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.205938Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-25 (25 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206085Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-32 (32 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206160Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-35 (35 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206226Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-37 (37 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206327Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-39 (39 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206407Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-42 (42 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206445Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-43 (43 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206690Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-53 (53 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.206758Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-55 (55 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.208551Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-56 (56 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.208721Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-63 (63 by [2:98:2133])' of 
unknown type 'wrong' to default queue 2025-04-04T17:03:16.208907Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-71 (71 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209002Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-75 (75 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209088Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-78 (78 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209148Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-80 (80 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209258Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-84 (84 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209297Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-85 (85 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209402Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-90 (90 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209447Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-91 (91 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209541Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-94 (94 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209596Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-96 (96 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209732Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-102 (102 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209776Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-103 (103 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.209956Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-109 (109 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210043Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-111 (111 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210181Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-118 (118 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210219Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-119 (119 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210325Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-120 (120 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210458Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-126 (126 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210600Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-133 (133 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210705Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-135 (135 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210817Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-138 (138 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.210955Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-145 (145 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211017Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-147 (147 by [2:98:2133])' of unknown 
type 'wrong' to default queue 2025-04-04T17:03:16.211068Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-148 (148 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211173Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-152 (152 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211320Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-158 (158 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211372Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-159 (159 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211424Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-160 (160 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211587Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-167 (167 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211629Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-168 (168 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211672Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-169 (169 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211708Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-170 (170 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211750Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-171 (171 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211837Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-175 (175 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211878Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-176 (176 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.211962Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-180 (180 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212024Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-181 (181 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212068Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-182 (182 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212145Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-185 (185 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212284Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-192 (192 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212504Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-202 (202 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212714Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-213 (213 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212819Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-217 (217 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212876Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-219 (219 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.212954Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-222 (222 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213028Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-225 (225 by [2:98:2133])' 
of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213141Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-230 (230 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213201Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-232 (232 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213274Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-235 (235 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213436Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-243 (243 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213475Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-244 (244 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213598Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-250 (250 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213745Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-254 (254 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213891Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-261 (261 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.213948Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-262 (262 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.214041Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-264 (264 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.214126Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-267 (267 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.214179Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-268 (268 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.214338Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-272 (272 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.214548Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-281 (281 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.214605Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-282 (282 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.214906Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-291 (291 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.215075Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-298 (298 by [2:98:2133])' of ... 
25-04-04T17:03:16.272697Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-332 (332 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.272799Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-412 (412 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.272889Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-417 (417 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.272976Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-426 (426 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273017Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-429 (429 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273081Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-447 (447 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273133Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-448 (448 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273175Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-471 (471 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273202Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-473 (473 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273324Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-519 (519 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273415Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-537 (537 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273473Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-549 (549 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273567Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-557 (557 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273664Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-580 (580 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273779Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-674 (674 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273842Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-715 (715 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273873Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-723 (723 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273928Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-726 (726 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.273990Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-727 (727 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274070Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-759 (759 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274117Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-770 (770 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274156Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-802 (802 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274183Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-810 (810 by [2:98:2133])' of unknown type 'wrong' to default queue 
2025-04-04T17:03:16.274279Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-841 (841 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274381Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-871 (871 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274457Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-884 (884 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274503Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-921 (921 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274545Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-933 (933 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274609Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-959 (959 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274653Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-978 (978 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274736Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-990 (990 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274799Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-11 (11 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274840Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-12 (12 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.274915Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-39 (39 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.275011Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-80 (80 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.275117Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-84 (84 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.275157Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-102 (102 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.275201Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-118 (118 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276454Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-145 (145 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276524Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-167 (167 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276553Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-168 (168 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276581Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-169 (169 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276648Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-171 (171 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276763Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-217 (217 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276823Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-222 (222 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.276863Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-244 (244 by [2:98:2133])' of unknown type 'wrong' to default queue 
2025-04-04T17:03:16.276956Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-254 (254 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277032Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-267 (267 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277138Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-344 (344 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277194Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-349 (349 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277255Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-364 (364 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277327Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-368 (368 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277444Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-451 (451 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277485Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-454 (454 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277526Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-457 (457 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277605Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-489 (489 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277702Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-528 (528 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277761Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-539 (539 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277790Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-558 (558 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277863Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-560 (560 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.277956Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-566 (566 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278006Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-569 (569 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278043Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-585 (585 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278107Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-615 (615 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278149Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-631 (631 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278226Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-643 (643 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278369Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-655 (655 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278481Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-693 (693 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278595Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-729 (729 by [2:98:2133])' of unknown type 'wrong' to default queue 
2025-04-04T17:03:16.278696Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-763 (763 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278755Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-777 (777 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278797Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-781 (781 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278868Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-825 (825 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.278925Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-848 (848 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.279045Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-868 (868 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.279092Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-874 (874 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.279143Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-899 (899 by [2:98:2133])' of unknown type 'wrong' to default queue 2025-04-04T17:03:16.279226Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-923 (923 by [2:98:2133])' of unknown type 'wrong' to default queue |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2025-04-04T17:02:16.535641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504428809697208:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:16.536177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8f/r3tmp/tmpym084A/pdisk_1.dat 2025-04-04T17:02:17.492754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:17.517225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:17.518295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:17.525965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14659 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:18.102106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:02:18.157524Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:02:18.193353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:18.452022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:18.530223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:21.134999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489504450062785168:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8f/r3tmp/tmpNPf7so/pdisk_1.dat 2025-04-04T17:02:21.233518Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:02:21.371957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:21.372053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:21.405975Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:21.424992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5045 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:21.935968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:02:21.977761Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:02:21.983234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:22.101106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:22.184856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:25.673973Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489504466022204556:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:25.797917Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8f/r3tmp/tmpynkHgj/pdisk_1.dat 2025-04-04T17:02:26.124513Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:26.138441Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:26.138555Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:26.151865Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27195 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:26.575186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:02:26.657483Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:02:26.661824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:26.867499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:26.948833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8f/r3tmp/tmpCU25XK/pdisk_1.dat 2025-04-04T17:02:32.274560Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:02:32.277496Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:32.324599Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:32.324709Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:32.331248Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22001 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:32.607753Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshar ... SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:47.729684Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:02:47.768760Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:47.886464Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:47.973026Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:02:53.033942Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489504584273945403:2149];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:53.047319Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8f/r3tmp/tmpdXcncu/pdisk_1.dat 2025-04-04T17:02:53.488340Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:53.489283Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:53.489395Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:53.492400Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17829 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:53.834202Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:53.850559Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:53.879443Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:02:53.967788Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:54.066173Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:54.150579Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:02:58.293402Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489504605800169259:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:58.366504Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8f/r3tmp/tmp9ZOx3C/pdisk_1.dat 2025-04-04T17:02:58.727857Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:58.727985Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:58.732450Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:58.775931Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22901 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:02:59.134032Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:02:59.162786Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:59.329550Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:59.447554Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:05.054721Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504639052751063:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:05.054787Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8f/r3tmp/tmpCCVG10/pdisk_1.dat 2025-04-04T17:03:05.567267Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:05.693376Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:05.693541Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:05.698045Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3756 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:06.784477Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:06.800254Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:06.818432Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:03:06.825483Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:07.044815Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:07.218804Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDescribeDirectory [GOOD] Test command err: 2025-04-04T17:02:38.030530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504521490674078:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:38.030624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012b9/r3tmp/tmpUZ2t8q/pdisk_1.dat 2025-04-04T17:02:39.078892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:02:39.093132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:39.093244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:39.093780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:39.120616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4990, node 1 2025-04-04T17:02:39.437885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:39.437912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:39.437927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:39.438057Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:39.881647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:02:43.031962Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504521490674078:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:43.032067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:44.251786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504547260478943:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:44.251912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:44.252478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504547260478955:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:44.258468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:02:44.316458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504547260478957:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:02:44.406625Z node 1 :TX_PROXY ERROR: Actor# [1:7489504547260479035:2693] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:47.338859Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504562596327146:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:47.338923Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012b9/r3tmp/tmpLWW7Ue/pdisk_1.dat 2025-04-04T17:02:47.755988Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:47.788531Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:47.788651Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:47.804821Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12060, node 4 2025-04-04T17:02:47.974951Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:47.974976Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:47.974987Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:47.975118Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:48.500748Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:02:52.342537Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489504562596327146:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:52.342600Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:54.330645Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504592661099358:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:54.330749Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:54.331175Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504592661099370:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:54.335163Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:02:54.391732Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489504592661099372:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:02:54.481686Z node 4 :TX_PROXY ERROR: Actor# [4:7489504592661099445:2709] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:56.787266Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504599271684309:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:56.787373Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012b9/r3tmp/tmpYQGeVo/pdisk_1.dat 2025-04-04T17:02:57.009894Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:57.048820Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:57.048894Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:57.054615Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28713, node 7 2025-04-04T17:02:57.206563Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:57.206595Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:57.206606Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:57.206819Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:57.876187Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:01.223773Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:01.484680Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504620746522023:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:01.484801Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:01.485241Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504620746522035:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:01.489277Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:01.542554Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489504620746522037:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:01.639610Z node 7 :TX_PROXY ERROR: Actor# [7:7489504620746522108:2788] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:01.787783Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489504599271684309:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:01.787863Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:01.831079Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w30va5ekdk1esk5d9gcj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDMzZGJjOGEtNzhkYjI2NmYtMjQ3MzdmMDAtYThjZDBmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:02.015082Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0w3176cqygzaggx8xeph0z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDMzZGJjOGEtNzhkYjI2NmYtMjQ3MzdmMDAtYThjZDBmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:02.245141Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0w31dn5avz1wffn3c1ygnc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDMzZGJjOGEtNzhkYjI2NmYtMjQ3MzdmMDAtYThjZDBmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:02.433013Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0w31kx53rxg3z55p2dddt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDMzZGJjOGEtNzhkYjI2NmYtMjQ3MzdmMDAtYThjZDBmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:02.846229Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0w31ta69qvmv68p4reby5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDMzZGJjOGEtNzhkYjI2NmYtMjQ3MzdmMDAtYThjZDBmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:04.577826Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0w327nfamhhhaypkfzfydr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDMzZGJjOGEtNzhkYjI2NmYtMjQ3MzdmMDAtYThjZDBmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:04.607466Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0w327nfamhhhaypkfzfydr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDMzZGJjOGEtNzhkYjI2NmYtMjQ3MzdmMDAtYThjZDBmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:03:07.527814Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504646274365679:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:07.528250Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012b9/r3tmp/tmptGe4lM/pdisk_1.dat 2025-04-04T17:03:08.235808Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:08.359771Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:08.359891Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:08.378716Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3201, node 10 2025-04-04T17:03:08.668836Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:08.668867Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:08.668881Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:08.669764Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:09.423249Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:12.390888Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504646274365679:2228];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:12.390959Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:14.127762Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504676339137748:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.127866Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.192585Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] Test command err: 2025-04-04T17:02:30.785183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504488699293931:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:30.785241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c2/r3tmp/tmpeOpRED/pdisk_1.dat 2025-04-04T17:02:31.741043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:31.752831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:31.752914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:31.777489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8199, node 1 2025-04-04T17:02:32.127052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:32.127086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:32.127093Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:32.127233Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23344 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:32.802515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:35.747268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:02:35.785342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504488699293931:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:35.785405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; SUCCESS 2025-04-04T17:02:36.070025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504514469098947:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:36.070136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:36.074051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504514469098959:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:36.086001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:02:36.122218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504514469098961:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:02:36.215476Z node 1 :TX_PROXY ERROR: Actor# [1:7489504514469099043:2821] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:02:36.865784Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0w2810e068x9d4v2hjgj3k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y0MjRlNDctMTBiNjQ0OGItMTc2NmM5YTktNmY2ZmVkYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:02:36.884609Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786156896, txId: 281474976710661] shutting down 2025-04-04T17:02:36.967039Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:02:36.974100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 BAD_REQUEST 2025-04-04T17:02:37.209723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:02:37.227817Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found SUCCESS 2025-04-04T17:02:37.506138Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:02:39.862441Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504527039372423:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:39.862492Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c2/r3tmp/tmpf8vDQl/pdisk_1.dat 2025-04-04T17:02:40.227161Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:40.299516Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:40.299599Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:40.305073Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10307, node 4 2025-04-04T17:02:40.478882Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:40.478911Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:40.478919Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:40.479067Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7864 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:41.034208Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:44.854338Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489504527039372423:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:44.854425Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:46.477626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:46.520552Z node 4 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [4:7489504557104144673:2345] 2025-04-04T17:02:46.520741Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:02:46.543747Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:02:46.543846Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:02:46.546034Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:02:46.546077Z node 4 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:02:46.546120Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:02:46.546569Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:02:46.546634Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:02:46.546683Z node 4 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [4:7489504557104144696:2345] in generation 1 2025-04-04T17:02:46.559992Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:02:46.560045Z node 4 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:02:46.560124Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:02:46.560163Z node 4 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [4:7489504557104144700:2346] 2025-04-04T17:02:46.560174Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:02:46.560193Z node 4 :TX_DATASHARD INFO: Cannot activate 
change send ... 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:00.365735Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:03.732331Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504629645102920:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:03.732471Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:03.767895Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:04.083106Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504633940070393:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:04.083248Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:04.083581Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504633940070398:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:04.092029Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:04.182900Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504633940070400:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:03:04.270331Z node 10 :TX_PROXY ERROR: Actor# [10:7489504633940070474:2799] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:04.278837Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504612465232682:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:04.278895Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:04.519681Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w33cj058q0gjx0464q8e6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWQ3OThlYzgtZjAwMTdhODctMTI5MDgyZmItNDNkZDYwNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:04.674223Z node 10 :TX_PROXY WARN: [AlterTableAddIndex [10:7489504633940070528:2372] TxId# 281474976715663] Access check failed 2025-04-04T17:03:04.758152Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:03:05.081418Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:03:05.613560Z node 10 :TX_PROXY ERROR: [AlterTableAddIndex [10:7489504638235038237:2391] TxId# 281474976715665] Unable to navigate: Root/WrongPath status: PathErrorUnknown 2025-04-04T17:03:05.841368Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-04-04T17:03:08.903145Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504652164399426:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:08.903404Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012c2/r3tmp/tmpOMBrsw/pdisk_1.dat 2025-04-04T17:03:09.605498Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32662, node 13 2025-04-04T17:03:09.804233Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:09.804361Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:10.089420Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:10.159128Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:10.159158Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:10.159169Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T17:03:10.159357Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:10.675691Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:13.906486Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504652164399426:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:13.906594Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:14.334405Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504677934204288:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.334519Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.350878Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:14.548534Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504677934204448:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.548660Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.548970Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504677934204453:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.554796Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:14.598572Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504677934204455:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:14.697171Z node 13 :TX_PROXY ERROR: Actor# [13:7489504677934204531:2813] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:14.900311Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w3dkk1k7xjerbg8fpfjeb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjBmY2U4NDAtYzFmOWNlNzctOTAxOThjZjUtZmY2OGExOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:14.957691Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:03:15.092075Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:03:15.207132Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> YdbYqlClient::DiscoveryLocationOverride >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> YdbOlapStore::LogNonExistingRequest >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Types |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts >> TYqlDateTimeTests::SimpleUpsertSelect >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram >> TFlatTest::AutoSplitBySize_BTreeIndex >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting >> TYqlDateTimeTests::DatetimeKey [GOOD] >> Normalizers::ColumnChunkNormalizer [GOOD] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ColumnChunkNormalizer [GOOD] Test command err: 2025-04-04T17:03:16.878462Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 
InFlightDataSize: 0 2025-04-04T17:03:17.031475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:03:17.084557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:03:17.084870Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:03:17.096400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-04-04T17:03:17.096673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:17.096926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:17.097045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:17.097157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:17.097282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:17.097382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:17.097479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:17.097655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:17.097792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:17.097908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:17.098033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:17.158416Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:03:17.159178Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:03:17.159255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-04-04T17:03:17.159474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:17.159665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:03:17.159747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:03:17.159791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-04-04T17:03:17.159893Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:03:17.159963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:03:17.160029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:03:17.160069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-04-04T17:03:17.160243Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:17.160303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:03:17.160346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:03:17.160439Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-04-04T17:03:17.160538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:03:17.160605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:03:17.160671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:03:17.160710Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:03:17.160785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:03:17.160820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:03:17.160844Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:03:17.160890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:03:17.160928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:03:17.160989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:03:17.161410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-04-04T17:03:17.161518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=63; 2025-04-04T17:03:17.161628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-04T17:03:17.161700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-04T17:03:17.161877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:03:17.161935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:03:17.161969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:03:17.162181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:03:17.162223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:03:17.162281Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:03:17.162460Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:03:17.162502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:03:17.162529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:03:17.162716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:03:17.162753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:03:17.162780Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:03:17.162917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:03:17.162962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normaliza ... abletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:03:25.135771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:03:25.135836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T17:03:25.135881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-04T17:03:25.135955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-04-04T17:03:25.136023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T17:03:25.136136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.136179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 
2025-04-04T17:03:25.136229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:03:25.136475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:25.136655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.136702Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:03:25.136842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-04-04T17:03:25.137011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-04-04T17:03:25.137151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:476:2482];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-04-04T17:03:25.137313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.137438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.137557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.138522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:25.138738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.138862Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.138914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:478:2483] finished for tablet 9437184 2025-04-04T17:03:25.139465Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:476:2482];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":1.03},{"events":["f_ack"],"t":1.031},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.033}],"full":{"a":1743786204105354,"name":"_full_task","f":1743786204105354,"d_finished":0,"c":0,"l":1743786205138983,"d":1033629},"events":[{"name":"bootstrap","f":1743786204105659,"d_finished":2967,"c":1,"l":1743786204108626,"d":2967},{"a":1743786205138481,"name":"ack","f":1743786205136442,"d_finished":1150,"c":1,"l":1743786205137592,"d":1652},{"a":1743786205138458,"name":"processing","f":1743786204108716,"d_finished":401159,"c":9,"l":1743786205137595,"d":401684},{"name":"ProduceResults","f":1743786204107457,"d_finished":3186,"c":12,"l":1743786205138893,"d":3186},{"a":1743786205138896,"name":"Finish","f":1743786205138896,"d_finished":0,"c":0,"l":1743786205138983,"d":87},{"name":"task_result","f":1743786204108733,"d_finished":395841,"c":8,"l":1743786205136281,"d":395841}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.139578Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:476:2482];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:03:25.140056Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:476:2482];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":1.03},{"events":["f_ack"],"t":1.031},{"events":["l_ProduceResults","f_Finish"],"t":1.033},{"events":["l_ack","l_processing","l_Finish"],"t":1.034}],"full":{"a":1743786204105354,"name":"_full_task","f":1743786204105354,"d_finished":0,"c":0,"l":1743786205139629,"d":1034275},"events":[{"name":"bootstrap","f":1743786204105659,"d_finished":2967,"c":1,"l":1743786204108626,"d":2967},{"a":1743786205138481,"name":"ack","f":1743786205136442,"d_finished":1150,"c":1,"l":1743786205137592,"d":2298},{"a":1743786205138458,"name":"processing","f":1743786204108716,"d_finished":401159,"c":9,"l":1743786205137595,"d":402330},{"name":"ProduceResults","f":1743786204107457,"d_finished":3186,"c":12,"l":1743786205138893,"d":3186},{"a":1743786205138896,"name":"Finish","f":1743786205138896,"d_finished":0,"c":0,"l":1743786205139629,"d":733},{"name":"task_result","f":1743786204108733,"d_finished":395841,"c":8,"l":1743786205136281,"d":395841}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-04-04T17:03:25.141420Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:03:24.104705Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-04-04T17:03:25.141470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:03:25.141743Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:478:2483];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2025-04-04T17:03:24.384823Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:03:24.779628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:03:24.840635Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:03:24.840958Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:03:24.877410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:24.877659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:24.877911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:24.878051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:24.878197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:24.878418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:24.878524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:24.878621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:24.878770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:24.878905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:24.879042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:24.879147Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:24.929911Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:03:24.934613Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:03:24.934718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:03:24.934988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:24.935267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:03:24.935366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:03:24.935422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:03:24.935542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:03:24.935629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:03:24.935681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:03:24.935718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:03:24.935916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:24.935982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:03:24.936020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:03:24.936049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:03:24.936157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:03:24.936227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:03:24.936278Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:03:24.936316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:03:24.936401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:03:24.936457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:03:24.936500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:03:24.936556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:03:24.936591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:03:24.936618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:03:24.937089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=79; 2025-04-04T17:03:24.937197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-04T17:03:24.937279Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-04T17:03:24.937364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-04T17:03:24.937560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:03:24.937622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:03:24.937663Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:03:24.937843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:03:24.937890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:03:24.937937Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:03:24.938100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:03:24.938151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:03:24.938196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:03:24.940895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:03:24.940975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:03:24.941014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:03:24.941178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:03:24.941221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:03:24.941268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
e 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.106871Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:03:26.106918Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:03:26.106991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:03:26.107025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:12;event=apply; 2025-04-04T17:03:26.107066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-04-04T17:03:26.107112Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=2;memory=8402114;count=1; 2025-04-04T17:03:26.107580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-04-04T17:03:26.122917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-04-04T17:03:26.123121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.123169Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:03:26.123207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:03:26.123468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 
2025-04-04T17:03:26.123507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T17:03:26.123547Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-04-04T17:03:26.123585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2025-04-04T17:03:26.123633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T17:03:26.123760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.123906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.124145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:26.124257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.124355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.124401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:284:2302] finished for tablet 9437184 2025-04-04T17:03:26.124978Z node 1 
:TX_COLUMNSHARD_SCAN INFO: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:283:2301];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.019},{"events":["f_processing","f_task_result"],"t":0.02},{"events":["l_task_result"],"t":0.059},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.06}],"full":{"a":1743786206064106,"name":"_full_task","f":1743786206064106,"d_finished":0,"c":0,"l":1743786206124501,"d":60395},"events":[{"name":"bootstrap","f":1743786206064364,"d_finished":19682,"c":1,"l":1743786206084046,"d":19682},{"a":1743786206124120,"name":"ack","f":1743786206124120,"d_finished":0,"c":0,"l":1743786206124501,"d":381},{"a":1743786206124099,"name":"processing","f":1743786206084135,"d_finished":18992,"c":9,"l":1743786206123966,"d":19394},{"name":"ProduceResults","f":1743786206065768,"d_finished":2218,"c":11,"l":1743786206124378,"d":2218},{"a":1743786206124381,"name":"Finish","f":1743786206124381,"d_finished":0,"c":0,"l":1743786206124501,"d":120},{"name":"task_result","f":1743786206084156,"d_finished":18813,"c":9,"l":1743786206123964,"d":18813}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.125076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:283:2301];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:03:26.125595Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:283:2301];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.019},{"events":["f_processing","f_task_result"],"t":0.02},{"events":["l_task_result"],"t":0.059},{"events":["f_ack","l_ProduceResults","f_Finish"],"t":0.06},{"events":["l_ack","l_processing","l_Finish"],"t":0.061}],"full":{"a":1743786206064106,"name":"_full_task","f":1743786206064106,"d_finished":0,"c":0,"l":1743786206125133,"d":61027},"events":[{"name":"bootstrap","f":1743786206064364,"d_finished":19682,"c":1,"l":1743786206084046,"d":19682},{"a":1743786206124120,"name":"ack","f":1743786206124120,"d_finished":0,"c":0,"l":1743786206125133,"d":1013},{"a":1743786206124099,"name":"processing","f":1743786206084135,"d_finished":18992,"c":9,"l":1743786206123966,"d":20026},{"name":"ProduceResults","f":1743786206065768,"d_finished":2218,"c":11,"l":1743786206124378,"d":2218},{"a":1743786206124381,"name":"Finish","f":1743786206124381,"d_finished":0,"c":0,"l":1743786206125133,"d":752},{"name":"task_result","f":1743786206084156,"d_finished":18813,"c":9,"l":1743786206123964,"d":18813}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-04-04T17:03:26.125700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:03:26.063532Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T17:03:26.125752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:03:26.126124Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:284:2302];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::DatetimeKey [GOOD] Test command err: 2025-04-04T17:02:18.790502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504435443644807:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:18.790569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/0012d1/r3tmp/tmpp1KLKL/pdisk_1.dat 2025-04-04T17:02:19.532173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:19.532306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:19.540033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:02:19.551002Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14561, node 1 2025-04-04T17:02:20.016801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:20.016823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:20.016831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:20.016978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:20.911068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21159 2025-04-04T17:02:21.696301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:21.802808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
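The scan_finish records above embed their profile as JSON (stats={...}) with f/l start/finish timestamps in microseconds, d_finished accumulated time, and c call counts per named phase (bootstrap, ack, processing, ProduceResults, Finish, task_result). A sketch, assuming the stats blob is always terminated by ";iterator=" as in the records above, that turns the blob into per-phase durations; the sample is trimmed from the real record for brevity.

    import json, re

    # Grab the `stats={...}` JSON out of a scan_finish record and report
    # how long each named phase ran, in microseconds. Keys ("events",
    # "name", "f", "l", "d_finished") come straight from the record above.
    def phase_durations(log_line: str) -> dict:
        blob = re.search(r'stats=(\{.*\});iterator=', log_line).group(1)
        stats = json.loads(blob)
        return {ev['name']: ev.get('d_finished', ev['l'] - ev['f'])
                for ev in stats['events']}

    line = ('... stats={"full":{"a":1743786206064106,"name":"_full_task",'
            '"f":1743786206064106,"d_finished":0,"c":0,"l":1743786206124501,'
            '"d":60395},"events":[{"name":"bootstrap","f":1743786206064364,'
            '"d_finished":19682,"c":1,"l":1743786206084046,"d":19682}]};iterator=...')
    print(phase_durations(line))  # {'bootstrap': 19682}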
2025-04-04T17:02:22.337187Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489504453020865755:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:22.337251Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:02:22.390265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:22.390355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:22.395815Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:02:22.396955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21159 2025-04-04T17:02:22.925966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:02:23.794206Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504435443644807:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:23.794317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:21159 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743786143400 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-04T17:02:24.223764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21159 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743786145110 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-04T17:02:26.399501Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-04T17:02:26.405047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:02:27.337981Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489504453020865755:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:27.338069Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:02:30.167028Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504487805891738:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:30.174065Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012d1/r3tmp/tmpwfxNI8/pdisk_1.dat 2025-04-04T17:02:30.619086Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2260, node 4 2025-04-04T17:02:30.875033Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:30.875169Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:30.889560Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:02:30.922791Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:30.922819Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:30.922828Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:30.923012Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28250 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:31.353665Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28250 2025-04-04T17:02:31.900718Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:02:31.939235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:02:32.446615Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489504497949292367:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:32.446702Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initiali ... initialize from file: (empty maybe) 2025-04-04T17:03:00.250927Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:00.251180Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:01.401303Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
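The TClient::Ls replies above are protobuf text format, cut off by the report at "(TRUNCATED)". A hypothetical helper, assuming only the scalar "field: value" shape visible in those dumps, that lifts the top-level scalars out of such a reply; it makes no attempt to parse nested messages or the elided tail.

    import re

    # Pull `Field: value` pairs from a protobuf text dump; quoted string
    # values are unquoted. Nested blocks like `Self { ... }` are skipped
    # because they carry no colon after the message name.
    def ls_scalars(dump: str) -> dict:
        return {k: v.strip('"')
                for k, v in re.findall(r'(\w+): ("[^"]*"|\S+)', dump)}

    resp = ('Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { '
            'Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 ')
    print(ls_scalars(resp)['StatusCode'])  # -> SUCCESS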
2025-04-04T17:03:03.947757Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504606607006009:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:03.947857Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:08.803339Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:09.192926Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504653851647607:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:09.193083Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:09.193781Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504653851647622:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:09.224346Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:09.318017Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504653851647624:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:03:09.392087Z node 10 :TX_PROXY ERROR: Actor# [10:7489504653851647701:2842] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:09.728752Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w38c4dnrjhphchdghjpvm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmE4Njg0MGYtMzU5MTBhMzQtMThhN2M2N2EtMWJlYjEwOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:10.176225Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0w38y27j47nsdbv10b9rxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmE4Njg0MGYtMzU5MTBhMzQtMThhN2M2N2EtMWJlYjEwOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:10.525307Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0w39bqfq8g8pnvfzjkfbkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmE4Njg0MGYtMzU5MTBhMzQtMThhN2M2N2EtMWJlYjEwOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:10.976780Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0w39pb3495qpdzsx1147rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NmE4Njg0MGYtMzU5MTBhMzQtMThhN2M2N2EtMWJlYjEwOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:14.413479Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504675962403067:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:14.413589Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012d1/r3tmp/tmpgktHi6/pdisk_1.dat 2025-04-04T17:03:14.946727Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:15.013228Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:15.013386Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:15.021968Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24834, node 13 2025-04-04T17:03:15.400585Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:15.400617Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:15.400631Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:15.400860Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:16.208126Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:19.415453Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504675962403067:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:19.415552Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:21.468646Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.738333Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504706027175406:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:21.738471Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:21.739627Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504706027175420:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:21.745817Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:21.832173Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504706027175422:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:21.912059Z node 13 :TX_PROXY ERROR: Actor# [13:7489504706027175499:2822] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:22.115180Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w3mm56j8awmdrvyt1ze8y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YWY0YTM1ZGQtZDIwMGY1MGUtNDdhZWIyOGQtYjZjZDg2YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:22.325446Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0w3n0rb9k9mn4zftrk09m8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YWY0YTM1ZGQtZDIwMGY1MGUtNDdhZWIyOGQtYjZjZDg2YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:22.651374Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0w3n6w89n5ej6k4nmejj63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YWY0YTM1ZGQtZDIwMGY1MGUtNDdhZWIyOGQtYjZjZDg2YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:22.971109Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0w3nh4b72ah0ttr646vhwz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YWY0YTM1ZGQtZDIwMGY1MGUtNDdhZWIyOGQtYjZjZDg2YTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |82.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore [GOOD] >> TColumnShardTestReadWrite::RebootWriteRead >> YdbLogStore::LogStoreNegative [GOOD] >> YdbLogStore::Dirs >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTable >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] Test command err: Trying to start YDB, gRPC: 2964, MsgBus: 3167 2025-04-04T17:00:45.372979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504035080737120:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:45.373057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/00156c/r3tmp/tmpCMIfwG/pdisk_1.dat 2025-04-04T17:00:45.889159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:45.889278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:45.893732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:45.898144Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2964, node 1 2025-04-04T17:00:46.070803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:46.070825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:46.070832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:46.070954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3167 TClient is connected to server localhost:3167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:46.964630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:46.979131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:00:46.988876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:47.209232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:47.432860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:47.512356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:00:49.076959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504052260608081:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.077109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.537293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.580911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.618894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.697902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.758386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.842102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.934461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504052260608601:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.934537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.934725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504052260608606:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.939398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:00:49.958380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504052260608608:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:00:50.037664Z node 1 :TX_PROXY ERROR: Actor# [1:7489504056555575958:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:50.371456Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504035080737120:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:50.371566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:51.702371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.762941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.796008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.847954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.880003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.994896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.028426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.068434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.114842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.157196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.206617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.280417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
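The issues: { ... } payloads above wrap across physical lines in this capture, splitting records right before the error text. A sketch, assuming the report is read back as plain text from an assumed file name, that glues continuation lines onto the preceding timestamped record and then counts the distinct error messages (the repeated "Resource pool default not found" issues, the "doublechecking" retries, and so on).

    import re
    from collections import Counter

    TS = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z ')

    # A line that does not open with a timestamp is a continuation of the
    # previous record; stitch it back on before scanning for issues.
    def stitch(lines):
        records = []
        for line in lines:
            if TS.match(line) or not records:
                records.append(line.rstrip('\n'))
            else:
                records[-1] += ' ' + line.strip()
        return records

    text_lines = open('report.txt')  # assumed path to the raw capture
    errors = Counter(m.group(1).strip()
                     for rec in stitch(text_lines)
                     for m in re.finditer(r': Error: ([^}]+)', rec))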
2025-04-04T17:00:52.339773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.919457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:00:52.975306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:00:53.045408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:00:53.123215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04- ... : 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:12.667605Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:03:12.762419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:03:12.819553Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:03:12.927753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:03:13.011850Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:03:13.275015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:03:13.387600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:03:13.508140Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:03:13.743261Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:03:13.933233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:03:14.024830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:03:14.168280Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:03:14.268285Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:03:15.662846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:03:15.752910Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:03:15.752941Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:15.759218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:03:15.855564Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:03:15.948994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.056731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.137801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.266864Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.376247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.484680Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.586208Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.733744Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.827552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.917948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.033901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.130350Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.216894Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.331929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.433054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.531822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.660375Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.751158Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.873189Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.949676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-04T17:03:18.049704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-04T17:03:18.508856Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:1, at schemeshard: 72057594046644480 2025-04-04T17:03:18.616807Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-04T17:03:18.742570Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-04T17:03:18.847040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-04T17:03:18.943774Z node 5 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.036300Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.109523Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.181380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.263210Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.468559Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.677408Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.830993Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.962853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.201590Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.451713Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |82.6%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> YdbYqlClient::DiscoveryLocationOverride [GOOD] >> YdbYqlClient::DeleteTableWithDeletedIndex >> TPQCompatTest::CommitOffsets [GOOD] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::TimestampKey >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::Uint8 >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5884, MsgBus: 10518 2025-04-04T17:00:45.403575Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504036818718450:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:45.403720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00157b/r3tmp/tmpPHcZqw/pdisk_1.dat 2025-04-04T17:00:45.982335Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:45.984104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:45.984525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:45.988647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5884, node 1 2025-04-04T17:00:46.088721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:46.088743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:46.088756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:46.088872Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10518 TClient is connected to server localhost:10518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:46.902458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:46.975361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:00:46.988044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:47.190531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
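Every record in these captures follows the shape "timestamp node N :COMPONENT LEVEL: message". A small illustrative tally, using that shape only, of which subsystem dominates the noise (FLAT_TX_SCHEMESHARD WARN, KQP_WORKLOAD_SERVICE WARN, NET_CLASSIFIER ERROR, ...); the sample strings are lifted from the records above.

    import re
    from collections import Counter

    # Count (component, level) pairs across a capture.
    REC = re.compile(r'node \d+ :(\w+) (WARN|ERROR|INFO|DEBUG):')

    def severity_histogram(text: str) -> Counter:
        return Counter(REC.findall(text))

    sample = ("2025-04-04T17:00:45.984104Z node 1 :HIVE WARN: "
              "HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: "
              "Unknown -> Disconnected "
              "2025-04-04T17:00:46.088872Z node 1 :NET_CLASSIFIER ERROR: "
              "got bad distributable configuration")
    print(severity_histogram(sample))
    # Counter({('HIVE', 'WARN'): 1, ('NET_CLASSIFIER', 'ERROR'): 1})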
2025-04-04T17:00:47.370175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:47.491693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:49.857865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504053998589407:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.858210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:49.921357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:00:49.982981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:00:50.025536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:00:50.073151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:00:50.151448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:00:50.245023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:00:50.314833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504058293557223:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:50.314945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:50.315384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504058293557228:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:50.321252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:00:50.335850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504058293557230:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:00:50.403900Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504036818718450:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:50.403960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:50.407754Z node 1 :TX_PROXY ERROR: Actor# [1:7489504058293557285:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:51.710364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.747152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.793573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.823404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.854117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:00:51.989324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.022902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.061753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.099354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.138414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.171357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.203911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
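The TPoolCreatorActor "Scheduled retry ... doublechecking" warning and the TX_PROXY "path exist, request accepts it" error above are two halves of the usual create-if-missing race on the default resource pool: the create transaction completes, the doublecheck finds the path already present, and the request is accepted anyway. A minimal sketch of that tolerate-exists pattern, assuming a hypothetical create_pool callable (ensure_default_pool and AlreadyExists are illustrative names, not YDB identifiers):

    class AlreadyExists(Exception):
        """Raised by the hypothetical create_pool() when the path is already taken."""

    def ensure_default_pool(create_pool,
                            path="/Root/.metadata/workload_manager/pools/default"):
        # Try to create the pool; if a concurrent actor won the race, the
        # existing path satisfies the request ("path exist, request accepts it").
        try:
            create_pool(path)
        except AlreadyExists:
            pass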
2025-04-04T17:00:52.232981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.800256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:00:52.838156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.919316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:00:52.958288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025- ... : 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:15.993421Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.076876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.190812Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.330506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.442155Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.860854Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:03:16.990012Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.063336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.157813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.289690Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.309591Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 
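Most of this output is the same FLAT_TX_SCHEMESHARD "undo unsafe" warning repeated once per CREATE TABLE suboperation, which makes genuine failures easy to miss. A small triage sketch that collapses the repetition into per-type counts; it assumes log lines follow the exact format shown here, and "ya_log.txt" is only an example path:

    # Condense the repeated FLAT_TX_SCHEMESHARD "undo unsafe" warnings
    # into counts per suboperation type.
    import re
    from collections import Counter

    PATTERN = re.compile(
        r"FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, "
        r"but propose itself is undo unsafe, suboperation type: (\w+), "
        r"opId: (\d+):\d+"
    )

    def summarize(path):
        counts = Counter()
        with open(path, encoding="utf-8") as log:
            for line in log:
                for op_type, _op_id in PATTERN.findall(line):
                    counts[op_type] += 1
        return counts

    if __name__ == "__main__":
        for op_type, n in summarize("ya_log.txt").most_common():
            print(f"{op_type}: {n}")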
2025-04-04T17:03:17.309621Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:17.364510Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.424967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:03:17.533638Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.109062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:03:19.197935Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.285213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.395855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.490655Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.555329Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.643586Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.781628Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-04T17:03:19.956337Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.117828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.316137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.479496Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.628449Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.722197Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-04T17:03:20.869821Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.004087Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.164843Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.379695Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.550723Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.664202Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.833368Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-04T17:03:21.972449Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-04T17:03:22.095481Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-04T17:03:22.192143Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-04T17:03:22.700254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:1, at schemeshard: 72057594046644480 2025-04-04T17:03:22.789252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-04T17:03:22.875244Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-04T17:03:22.958431Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-04T17:03:23.090878Z node 5 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-04T17:03:23.201044Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-04T17:03:23.309728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-04T17:03:23.412020Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-04T17:03:23.496649Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-04-04T17:03:23.812910Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2025-04-04T17:03:23.922033Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2025-04-04T17:03:24.067613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2025-04-04T17:03:24.230906Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2025-04-04T17:03:24.760740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480 2025-04-04T17:03:24.976840Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> TGRpcYdbTest::MakeListRemoveDirectory >> TResourceBrokerInstant::TestErrors >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge >> PgCatalog::PgTables [GOOD] >> TResourceBrokerInstant::TestMerge [GOOD] >> TColumnShardTestReadWrite::WriteStandalone >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2025-04-04T17:03:34.280234Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task >> KqpPg::TableDeleteWhere-useSink [GOOD] >> TFlatTest::LargeProxyReplyRW [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> TFlatTest::SelectRangeReverseItemsLimit >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDateTimeTests::IntervalKey >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> EvWrite::WriteWithSplit [GOOD] |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-04-04T17:03:16.766804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504686450345822:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:16.767208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8d/r3tmp/tmpPuOemS/pdisk_1.dat 2025-04-04T17:03:17.451284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:17.456117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:17.456227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:17.460150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20530 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:17.786317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:03:17.841446Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:03:17.847152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:21.491389Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037888, for tableId 3 don't have enough keys: RowCount: 164 DataSize: 16802876 IndexSize: 4137 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T17:03:21.759340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504686450345822:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:21.759407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:26.573803Z node 1 :TX_PROXY ERROR: Actor# [1:7489504725105054950:4153] txid# 281474976711010 MergeResult Result too large TDataReq marker# P18 2025-04-04T17:03:26.573922Z node 1 :TX_PROXY ERROR: Actor# [1:7489504725105054950:4153] txid# 281474976711010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-04-04T17:03:27.400759Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489504732390843720:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:27.400806Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8d/r3tmp/tmpeidDjf/pdisk_1.dat 2025-04-04T17:03:27.610202Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:27.639855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:27.639959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:27.641719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13272 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:27.888998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:03:27.910159Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:03:27.916220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
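Both LargeProxyReplyRW runs fail identically: the merged proxy reply of 71692241 bytes (about 68 MiB) exceeds the 50331648-byte cap, which is exactly 48 MiB, so the proxy answers ExecResultUnavailable instead of returning data (the second run's matching failure follows below). A sketch of the implied guard; the constant is read straight off the log, but the names here are hypothetical, not the actual YDB identifiers:

    # Illustrative size guard matching the "Query result size limit exceeded"
    # message above; 48 MiB == 50331648 bytes, as printed in the log.
    RESULT_SIZE_LIMIT_BYTES = 48 * 1024 * 1024  # 50331648

    def check_result_size(result_bytes):
        if result_bytes > RESULT_SIZE_LIMIT_BYTES:
            raise RuntimeError(
                f"Query result size limit exceeded. "
                f"({result_bytes} > {RESULT_SIZE_LIMIT_BYTES})"
            )

    check_result_size(71692241)  # reproduces the (71692241 > 50331648) failure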
2025-04-04T17:03:31.267943Z node 2 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037888, for tableId 3 don't have enough keys: RowCount: 164 DataSize: 16802396 IndexSize: 4137 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T17:03:32.408139Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489504732390843720:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:32.408198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:36.429467Z node 2 :TX_PROXY ERROR: Actor# [2:7489504766750585687:4157] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-04-04T17:03:36.429548Z node 2 :TX_PROXY ERROR: Actor# [2:7489504766750585687:4157] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable >> TFlatTest::AutoSplitBySize_BTreeIndex [GOOD] >> TFlatTest::AutoSplitBySize_FlatIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-04-04T17:03:29.932262Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:03:30.075285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:03:30.098884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:03:30.099204Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:03:30.108235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:30.108529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:30.108777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:30.108885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:30.109016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:30.109135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:30.109280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:30.109382Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:30.109501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:30.109632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:30.109740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:30.109896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:30.151882Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:03:30.152503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:03:30.152567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:03:30.152774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:30.153025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:03:30.153095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:03:30.153195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:03:30.153298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:03:30.153374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:03:30.153440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:03:30.153473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:03:30.153662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:30.153733Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:03:30.153775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:03:30.153805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:03:30.153887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:03:30.153949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:03:30.153998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:03:30.154027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:03:30.154112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:03:30.154204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:03:30.154268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:03:30.154318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:03:30.154355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:03:30.154395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:03:30.154827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-04T17:03:30.154938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-04T17:03:30.155063Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-04T17:03:30.155162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-04-04T17:03:30.155349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:03:30.155416Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:03:30.155454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:03:30.155681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:03:30.155763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:03:30.155804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:03:30.155973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:03:30.156035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:03:30.156078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:03:30.156278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:03:30.156460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:03:30.156494Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:03:30.156617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:03:30.156651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:03:30.156703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
lumn_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.230208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:03:39.230391Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T17:03:39.230505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T17:03:39.230727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-04T17:03:39.230917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.231073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.231260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.231557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:39.231735Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.231921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.231982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1051:2922] finished for tablet 9437184 2025-04-04T17:03:39.232562Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1050:2921];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1743786219213975,"name":"_full_task","f":1743786219213975,"d_finished":0,"c":0,"l":1743786219232050,"d":18075},"events":[{"name":"bootstrap","f":1743786219214221,"d_finished":3668,"c":1,"l":1743786219217889,"d":3668},{"a":1743786219231522,"name":"ack","f":1743786219229837,"d_finished":1461,"c":1,"l":1743786219231298,"d":1989},{"a":1743786219231504,"name":"processing","f":1743786219218018,"d_finished":7035,"c":10,"l":1743786219231301,"d":7581},{"name":"ProduceResults","f":1743786219216375,"d_finished":4008,"c":13,"l":1743786219231961,"d":4008},{"a":1743786219231966,"name":"Finish","f":1743786219231966,"d_finished":0,"c":0,"l":1743786219232050,"d":84},{"name":"task_result","f":1743786219218041,"d_finished":5402,"c":9,"l":1743786219229580,"d":5402}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.233123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:03:39.233700Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1050:2921];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1743786219213975,"name":"_full_task","f":1743786219213975,"d_finished":0,"c":0,"l":1743786219233195,"d":19220},"events":[{"name":"bootstrap","f":1743786219214221,"d_finished":3668,"c":1,"l":1743786219217889,"d":3668},{"a":1743786219231522,"name":"ack","f":1743786219229837,"d_finished":1461,"c":1,"l":1743786219231298,"d":3134},{"a":1743786219231504,"name":"processing","f":1743786219218018,"d_finished":7035,"c":10,"l":1743786219231301,"d":8726},{"name":"ProduceResults","f":1743786219216375,"d_finished":4008,"c":13,"l":1743786219231961,"d":4008},{"a":1743786219231966,"name":"Finish","f":1743786219231966,"d_finished":0,"c":0,"l":1743786219233195,"d":1229},{"name":"task_result","f":1743786219218041,"d_finished":5402,"c":9,"l":1743786219229580,"d":5402}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:03:39.233805Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:03:39.213347Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T17:03:39.233853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:03:39.234302Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> 
TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-04-04T17:02:49.459784Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:02:49.592327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:02:49.619891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:02:49.620236Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:02:49.630860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:02:49.631264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:02:49.631548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:02:49.631697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:02:49.631833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:02:49.631953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:02:49.632110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:02:49.632224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:02:49.632394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:02:49.632619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:02:49.632741Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:02:49.632845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:02:49.664074Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:02:49.664816Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:02:49.664908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:02:49.665114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:49.665308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:02:49.665413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:02:49.665456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:02:49.665546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:02:49.665633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:02:49.665679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:02:49.665708Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:02:49.665993Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:49.666077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:02:49.666119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:02:49.666147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:02:49.666232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:02:49.666322Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:02:49.666403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:02:49.666445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:02:49.666542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:02:49.666582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:02:49.666616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:02:49.666682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:02:49.666717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:02:49.666745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:02:49.667179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-04T17:02:49.667327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=80; 2025-04-04T17:02:49.667556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-04-04T17:02:49.667666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T17:02:49.667883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:02:49.667958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:02:49.667993Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:02:49.668174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:02:49.668231Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:02:49.668271Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:02:49.668459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:02:49.668509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:02:49.668563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:02:49.668761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:02:49.668807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:02:49.668839Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:02:49.668960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:02:49.669016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:02:49.669065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:856:2873];bytes=3691800;rows=450;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-04T17:03:39.275299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.275416Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.275497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:03:39.275537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:03:39.277079Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:39.277215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.277255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:03:39.277355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=149; 2025-04-04T17:03:39.277414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=1222396;num_rows=149;batch_columns=key,field; 2025-04-04T17:03:39.277538Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:856:2873];bytes=1222396;rows=149;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-04-04T17:03:39.277640Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.277765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.277910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.278512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:39.278627Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.278721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.278763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:865:2882] finished for tablet 9437184 2025-04-04T17:03:39.279381Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:856:2873];stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.009},{"events":["l_bootstrap"],"t":0.027},{"events":["f_processing","f_task_result"],"t":0.034},{"events":["l_task_result"],"t":1.405},{"events":["f_ack"],"t":32.363},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":32.381}],"full":{"a":1743786186897556,"name":"_full_task","f":1743786186897556,"d_finished":0,"c":0,"l":1743786219278829,"d":32381273},"events":[{"name":"bootstrap","f":1743786186899164,"d_finished":26030,"c":1,"l":1743786186925194,"d":26030},{"a":1743786219278485,"name":"ack","f":1743786219260887,"d_finished":9959,"c":9,"l":1743786219277945,"d":10303},{"a":1743786219278467,"name":"processing","f":1743786186931687,"d_finished":550946,"c":53,"l":1743786219277948,"d":551308},{"name":"ProduceResults","f":1743786186906871,"d_finished":18832,"c":64,"l":1743786219278742,"d":18832},{"a":1743786219278746,"name":"Finish","f":1743786219278746,"d_finished":0,"c":0,"l":1743786219278829,"d":83},{"name":"task_result","f":1743786186931715,"d_finished":539558,"c":44,"l":1743786188303443,"d":539558}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.279477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:856:2873];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:03:39.279996Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:856:2873];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":0.009},{"events":["l_bootstrap"],"t":0.027},{"events":["f_processing","f_task_result"],"t":0.034},{"events":["l_task_result"],"t":1.405},{"events":["f_ack"],"t":32.363},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":32.381}],"full":{"a":1743786186897556,"name":"_full_task","f":1743786186897556,"d_finished":0,"c":0,"l":1743786219279526,"d":32381970},"events":[{"name":"bootstrap","f":1743786186899164,"d_finished":26030,"c":1,"l":1743786186925194,"d":26030},{"a":1743786219278485,"name":"ack","f":1743786219260887,"d_finished":9959,"c":9,"l":1743786219277945,"d":11000},{"a":1743786219278467,"name":"processing","f":1743786186931687,"d_finished":550946,"c":53,"l":1743786219277948,"d":552005},{"name":"ProduceResults","f":1743786186906871,"d_finished":18832,"c":64,"l":1743786219278742,"d":18832},{"a":1743786219278746,"name":"Finish","f":1743786219278746,"d_finished":0,"c":0,"l":1743786219279526,"d":780},{"name":"task_result","f":1743786186931715,"d_finished":539558,"c":44,"l":1743786188303443,"d":539558}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:03:39.280086Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:03:06.889935Z;index_granules=0;index_portions=5;index_batches=2052;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=17133336;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=17133336;selected_rows=0; 2025-04-04T17:03:39.280135Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:03:39.280409Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:865:2882];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::ReadTable >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |82.6%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> YdbYqlClient::TestReadTableMultiShard >> YdbYqlClient::RenameTables [GOOD] >> YdbTableBulkUpsert::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 304 176 28 48 32 24 16 24 56 >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2025-04-04T17:02:52.335532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504582590251184:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:52.383234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ae/r3tmp/tmpZLvZ9q/pdisk_1.dat 2025-04-04T17:02:53.166283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:53.166394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:53.169787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:02:53.258931Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31815, node 1 2025-04-04T17:02:53.434944Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:02:53.435107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:02:53.435128Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:02:53.588307Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:53.588332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:53.588341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:53.591964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14905 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:53.977774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 2025-04-04T17:02:57.338366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504582590251184:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:57.338446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-04-04T17:03:00.940757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504616949990581:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:00.940894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:00.941214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504616949990593:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:00.945655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:03:01.008350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504616949990595:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:03:01.079495Z node 1 :TX_PROXY ERROR: Actor# [1:7489504621244957963:2709] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ae/r3tmp/tmpTLyLw6/pdisk_1.dat 2025-04-04T17:03:10.771031Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:03:10.881768Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12338, node 4 2025-04-04T17:03:10.962701Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:10.965214Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:10.987423Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:11.014931Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:11.014954Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:11.014961Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:11.015099Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:11.376194Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:14.449368Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504677157993124:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.449551Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.468898Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:14.723470Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504677157993290:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.723679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.724050Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504677157993295:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:14.729337Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:14.802438Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489504677157993297:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-0 ... from SchemeCache 2025-04-04T17:03:40.955096Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504787615183382:3587] txid# 281474976710672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:03:40.955192Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504787615183382:3587] HANDLE EvNavigateKeySetResult, txid# 281474976710672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:03:40.955230Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504787615183382:3587] txid# 281474976710672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-04-04T17:03:40.955426Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504787615183382:3587] txid# 281474976710672 HANDLE EvClientConnected 2025-04-04T17:03:40.955652Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:03:40.955808Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:03:40.958472Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-04-04T17:03:40.959417Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504787615183382:3587] txid# 281474976710672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710672} 2025-04-04T17:03:40.959454Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504787615183382:3587] txid# 281474976710672 SEND to# [13:7489504787615183381:2400] Source {TEvProposeTransactionStatus txid# 281474976710672 Status# 53} 2025-04-04T17:03:40.963238Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:40.963412Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:40.963430Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:40.963499Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:40.972663Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786221016, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:03:40.975993Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710672, done: 0, blocked: 1 2025-04-04T17:03:40.979881Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:40.979933Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:40.980041Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:40.980042Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:40.982556Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710672:0 2025-04-04T17:03:40.997343Z node 13 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01jr0w47e55b1aamb5sw1d8haf, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:41750, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 
2025-04-04T17:03:41.001410Z node 13 :TX_PROXY DEBUG: actor# [13:7489504757550410197:2133] Handle TEvProposeTransaction 2025-04-04T17:03:41.001451Z node 13 :TX_PROXY DEBUG: actor# [13:7489504757550410197:2133] TxId# 281474976710673 ProcessProposeTransaction 2025-04-04T17:03:41.001491Z node 13 :TX_PROXY DEBUG: actor# [13:7489504757550410197:2133] Cookie# 0 userReqId# "" txid# 281474976710673 SEND to# [13:7489504791910150748:3653] 2025-04-04T17:03:41.003784Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:41750" 2025-04-04T17:03:41.003832Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:03:41.003916Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:03:41.005803Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:03:41.005908Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] HANDLE EvNavigateKeySetResult, txid# 281474976710673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:03:41.005948Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:03:41.006140Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 HANDLE EvClientConnected 2025-04-04T17:03:41.006411Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:03:41.006549Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:03:41.009698Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-04-04T17:03:41.010632Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710673} 2025-04-04T17:03:41.010699Z node 13 :TX_PROXY DEBUG: Actor# [13:7489504791910150748:3653] txid# 281474976710673 SEND to# [13:7489504787615183451:2403] Source {TEvProposeTransactionStatus txid# 281474976710673 Status# 53} 2025-04-04T17:03:41.011942Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:41.011962Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:41.012063Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:41.012076Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:41.015201Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037890 not 
found 2025-04-04T17:03:41.020292Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:03:41.027075Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786221072, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:03:41.035480Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710673, done: 0, blocked: 1 2025-04-04T17:03:41.041377Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:41.041385Z node 13 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:41.041488Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:41.041505Z node 13 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:41.045165Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710673:0 2025-04-04T17:03:41.053918Z node 13 :GRPC_SERVER DEBUG: [0x51a0000d2c80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.053943Z node 13 :GRPC_SERVER DEBUG: [0x51a00004f880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054154Z node 13 :GRPC_SERVER DEBUG: [0x51a00004fe80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054163Z node 13 :GRPC_SERVER DEBUG: [0x51a000105c80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054353Z node 13 :GRPC_SERVER DEBUG: [0x51a000155a80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054356Z node 13 :GRPC_SERVER DEBUG: [0x51a000111c80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054526Z node 13 :GRPC_SERVER DEBUG: [0x51a000141680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054563Z node 13 :GRPC_SERVER DEBUG: [0x51a00011ca80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054720Z node 13 :GRPC_SERVER DEBUG: [0x51a000001880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054751Z node 13 :GRPC_SERVER DEBUG: [0x51a000051c80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054915Z node 13 :GRPC_SERVER DEBUG: [0x51a00011e880] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.054937Z node 13 :GRPC_SERVER DEBUG: [0x51a0000fcc80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055100Z node 13 :GRPC_SERVER DEBUG: [0x51b00027de80] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055105Z node 13 :GRPC_SERVER DEBUG: [0x51a000142280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055263Z node 13 :GRPC_SERVER DEBUG: [0x51a00005be80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055298Z node 13 :GRPC_SERVER DEBUG: [0x51a0000cea80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055446Z node 13 :GRPC_SERVER DEBUG: [0x51a000156080] 
received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055475Z node 13 :GRPC_SERVER DEBUG: [0x51a000002a80] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055618Z node 13 :GRPC_SERVER DEBUG: [0x51a0000fae80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055631Z node 13 :GRPC_SERVER DEBUG: [0x51a000181880] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055775Z node 13 :GRPC_SERVER DEBUG: [0x51a000112880] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.055777Z node 13 :GRPC_SERVER DEBUG: [0x51a0000fd880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:41.059005Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037889 not found 2025-04-04T17:03:41.060988Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TColumnShardTestReadWrite::WriteStandalone [GOOD] >> TFlatTest::SelectRangeNullArgs3 >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> TPQTest::DirectReadBadSessionOrPipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2025-04-04T17:03:36.169340Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:03:36.394289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:03:36.443600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:03:36.443900Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:03:36.453334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:36.453577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:36.453871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:36.454006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:36.454124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:36.454235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:36.454554Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:36.454716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:36.454867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:36.454992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:36.455101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:36.455197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:36.488695Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:03:36.489639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:03:36.489724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:03:36.489962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:36.490193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:03:36.490290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:03:36.490336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:03:36.490457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:03:36.490564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:03:36.490607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:03:36.490676Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:03:36.490886Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:36.490958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:03:36.490996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:03:36.491026Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:03:36.491124Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:03:36.491189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:03:36.491238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:03:36.491269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:03:36.491344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:03:36.491392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:03:36.491444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:03:36.491492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:03:36.491531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:03:36.491559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:03:36.492009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-04-04T17:03:36.492135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=57; 2025-04-04T17:03:36.492216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-04T17:03:36.492304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T17:03:36.492491Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:03:36.492565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:03:36.492599Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:03:36.492786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:03:36.492830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:03:36.492861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:03:36.493017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:03:36.493062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:03:36.493104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:03:36.493325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:03:36.493373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:03:36.493407Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:03:36.493523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:03:36.493580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:03:36.493645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
:[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-04T17:03:44.068450Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning >> TGRpcYdbTest::GetOperationBadRequest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23596, MsgBus: 23432 2025-04-04T16:56:52.762714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503036665367017:2097];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:52.763222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c62/r3tmp/tmpUPArCz/pdisk_1.dat 2025-04-04T16:56:53.190655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:56:53.190762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:53.194009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:53.194722Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23596, node 1 2025-04-04T16:56:53.282864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:53.282885Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:53.282893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:53.282999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23432 TClient is connected to server localhost:23432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:54.160407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:54.195306Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T16:56:57.363290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-04T16:56:57.634993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-04T16:56:57.755263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T16:56:57.770341Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503036665367017:2097];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:57.770407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; abcd 2025-04-04T16:56:57.935613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-04T16:56:58.128532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 abcd 2025-04-04T16:56:58.309365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 {"abcd ","abcd "} 2025-04-04T16:56:58.432059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229'Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-04-04T16:56:58.522896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465'Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-04-04T16:56:58.635929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 abcd 2025-04-04T16:56:58.783451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-04T16:56:58.909134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 abcd 2025-04-04T16:56:59.071384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 {abcd,abcd} 2025-04-04T16:56:59.216219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
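The four "value too long" failures above are stock PostgreSQL assignment-cast semantics surfacing through the pg compatibility layer rather than anything YDB-specific: assigning into a character(n) or character varying(n) column rejects values whose excess characters are not all spaces. A minimal reproduction against vanilla PostgreSQL, with hypothetical scratch tables that are not part of the test schema:

    CREATE TABLE t_bpchar  (v character(2));
    CREATE TABLE t_varchar (v character varying(2));
    INSERT INTO t_bpchar  VALUES ('abcd');   -- ERROR: value too long for type character(2)
    INSERT INTO t_varchar VALUES ('abcd');   -- ERROR: value too long for type character varying(2)
    INSERT INTO t_bpchar  VALUES ('ab   ');  -- accepted: excess trailing spaces are trimmed on assignment

The bare "abcd" / "{abcd,abcd}" echoes between the error records are the companion cases that fit the declared length and round-trip successfully.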
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-04-04T16:56:59.305917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-04-04T16:56:59.380495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 1111 2025-04-04T16:56:59.501392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-04T16:56:59.645478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336'Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-04-04T16:56:59.749372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517'Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-04-04T16:56:59.823823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
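The bit(2) and bit(6) rejections above follow the same Coerce pattern with one extra wrinkle: fixed-width bit(n) demands an exact length match on assignment (there is no space-padding analogue), while the bit varying(2) failures a few records further down fire only for over-length strings. A sketch against vanilla PostgreSQL, table names again hypothetical:

    CREATE TABLE t_bit  (v bit(2));
    CREATE TABLE t_vbit (v bit varying(2));
    INSERT INTO t_bit  VALUES (B'1111');  -- ERROR: bit string length 4 does not match type bit(2)
    INSERT INTO t_bit  VALUES (B'1');     -- ERROR as well: length 1 does not match type bit(2)
    INSERT INTO t_vbit VALUES (B'1111');  -- ERROR: bit string too long for type bit varying(2)
    INSERT INTO t_vbit VALUES (B'11');    -- accepted: varying(2) enforces only the upper bound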
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336'Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-04-04T16:56:59.903049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517'Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-04-04T16:57:00.001029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 1111 2025-04-04T16:57:00.099433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-04T16:57:00.232487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 1111 2025-04-04T16:57:00.396507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 {1111,1111} 2025-04-04T16:57:00.509220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at s ... schemeshard: 72057594046644480 2025-04-04T17:03:27.733314Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:27.757393Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710808:0, at schemeshard: 72057594046644480 600 2025-04-04T17:03:27.857386Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:27.890594Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710810:0, at schemeshard: 72057594046644480 2025-04-04T17:03:27.991188Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:28.015865Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710812:0, at schemeshard: 72057594046644480 628 2025-04-04T17:03:28.108255Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:28.133937Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710814:0, at schemeshard: 72057594046644480 2025-04-04T17:03:28.236241Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:28.275408Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710816:0, at schemeshard: 72057594046644480 601 2025-04-04T17:03:28.367254Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:28.396958Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710818:0, at schemeshard: 72057594046644480 2025-04-04T17:03:28.489258Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:28.516111Z node 11 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710820:0, at schemeshard: 72057594046644480 603 2025-04-04T17:03:28.690751Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710821:0, at schemeshard: 72057594046644480 2025-04-04T17:03:28.858228Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:28.893462Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710823:0, at schemeshard: 72057594046644480 602 2025-04-04T17:03:29.143974Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710824:0, at schemeshard: 72057594046644480 2025-04-04T17:03:29.354832Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710825:0, at schemeshard: 72057594046644480 604 2025-04-04T17:03:29.556228Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710826:0, at schemeshard: 72057594046644480 2025-04-04T17:03:29.713664Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:29.747284Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710828:0, at schemeshard: 72057594046644480 718 2025-04-04T17:03:29.868586Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:29.938446Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710830:0, at schemeshard: 72057594046644480 2025-04-04T17:03:30.107829Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:30.141886Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710832:0, at schemeshard: 72057594046644480 869 2025-04-04T17:03:30.344345Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:30.374157Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710834:0, at schemeshard: 72057594046644480 2025-04-04T17:03:30.584439Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710835:0, at schemeshard: 72057594046644480 650 2025-04-04T17:03:30.831140Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710836:0, at schemeshard: 72057594046644480 2025-04-04T17:03:30.960023Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:30.995067Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710838:0, at schemeshard: 72057594046644480 829 
2025-04-04T17:03:31.128515Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:31.156033Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710840:0, at schemeshard: 72057594046644480 2025-04-04T17:03:31.320680Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710841:0, at schemeshard: 72057594046644480 774 2025-04-04T17:03:31.447057Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:31.545693Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710843:0, at schemeshard: 72057594046644480 2025-04-04T17:03:31.699280Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:31.739606Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710845:0, at schemeshard: 72057594046644480 2950 2025-04-04T17:03:31.898644Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:31.922916Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480 2025-04-04T17:03:32.065095Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:32.066736Z node 11 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976710849 at tablet 72075186224037954 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710849] at 72075186224037954 while waiting for scan finish) | 2025-04-04T17:03:32.069818Z node 11 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710849 at tablet 72075186224037954 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710849] at 72075186224037954 while waiting for scan finish) | 2025-04-04T17:03:32.088985Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710850:0, at schemeshard: 72057594046644480 114 2025-04-04T17:03:32.233060Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710851:0, at schemeshard: 72057594046644480 2025-04-04T17:03:32.419981Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:32.473354Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710853:0, at schemeshard: 72057594046644480 3802 2025-04-04T17:03:32.669046Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:32.721528Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710855:0, at schemeshard: 72057594046644480 2025-04-04T17:03:32.865116Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710856:0, at schemeshard: 72057594046644480 4072 2025-04-04T17:03:33.008319Z node 11 
:READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:33.054424Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710858:0, at schemeshard: 72057594046644480 2025-04-04T17:03:33.175017Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:33.211442Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710860:0, at schemeshard: 72057594046644480 142 2025-04-04T17:03:33.436203Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:33.473924Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710862:0, at schemeshard: 72057594046644480 2025-04-04T17:03:33.736367Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710863:0, at schemeshard: 72057594046644480 3615 2025-04-04T17:03:33.958235Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710864:0, at schemeshard: 72057594046644480 2025-04-04T17:03:34.151476Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:34.200981Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710866:0, at schemeshard: 72057594046644480 3614 2025-04-04T17:03:34.503253Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710867:0, at schemeshard: 72057594046644480 2025-04-04T17:03:34.657394Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:03:34.703168Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710869:0, at schemeshard: 72057594046644480 22 2025-04-04T17:03:34.924229Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710870:0, at schemeshard: 72057594046644480 2025-04-04T17:03:35.144546Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710871:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 16007, MsgBus: 12139 2025-04-04T16:56:54.288645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503042662874299:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:54.301115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c0a/r3tmp/tmpAGU9oq/pdisk_1.dat 2025-04-04T16:56:54.778961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-04T16:56:54.779112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:56:54.815295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:56:54.857844Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16007, node 1 2025-04-04T16:56:55.006855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:56:55.006901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:56:55.006914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:56:55.007028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12139 TClient is connected to server localhost:12139 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T16:56:55.837771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:56:55.872307Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 1042 2025-04-04T16:56:58.035956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
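The bare "1042" printed between records here (like the 600, 601, 602, 603, 604, 628, 650, 718, 829, 869, 2950, 114, 3802 values scattered through the KqpPg::TableDeleteWhere output above) reads as the PostgreSQL type OID under exercise: 1042 is bpchar in the standard pg_type catalog, and the others map to the geometric, network, and document types (600=point, 628=line, 869=inet, 2950=uuid, 3802=jsonb, and so on). Assuming a pg-compatible endpoint that exposes pg_type, this is checkable directly:

    SELECT oid, typname FROM pg_type WHERE oid = 1042;  -- 1042 | bpchar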
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229'Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-04-04T16:56:58.216263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503059842744182:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:58.216442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:58.216862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503059842744194:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:56:58.220669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T16:56:58.239349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503059842744196:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T16:56:58.316842Z node 1 :TX_PROXY ERROR: Actor# [1:7489503059842744248:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:56:58.685563Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-04T16:56:58.693683Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-04-04T16:56:58.694053Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489503059842744304:2339] TxId: 281474976710663. Ctx: { TraceId: 01jr0vqy3606x37rf84029jtps, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQzNWQ1NmYtNGFkODE1NzctNWRiOGYxY2MtZmRlODMzZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-04-04T16:56:58.704313Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjQzNWQ1NmYtNGFkODE1NzctNWRiOGYxY2MtZmRlODMzZTM=, ActorId: [1:7489503059842744179:2339], ActorState: ExecuteState, TraceId: 01jr0vqy3606x37rf84029jtps, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-04T16:56:58.749768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465'Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-04-04T16:56:59.097678Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-04-04T16:56:59.099013Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-04-04T16:56:59.099201Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489503064137711738:2373] TxId: 281474976710668. Ctx: { TraceId: 01jr0vqypqc072z26fd2devazw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdiYzhkYzYtNDBlMDEwMmItMjVjY2JmMzUtYWRiMGUzZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-04-04T16:56:59.099397Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzdiYzhkYzYtNDBlMDEwMmItMjVjY2JmMzUtYWRiMGUzZTk=, ActorId: [1:7489503059842744396:2373], ActorState: ExecuteState, TraceId: 01jr0vqypqc072z26fd2devazw, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 1042 2025-04-04T16:56:59.130979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_2169371982377735806_17823623939509273229'Typemod mismatch, got type pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce_pgbpchar_2169371982377735806_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-04-04T16:56:59.288128Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503042662874299:2120];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:56:59.288191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; abcd 2025-04-04T16:56:59.612124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_2169371982377735806_5352544928909966465'Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce__pgbpchar_2169371982377735806_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) {abcd,abcd} 1042 2025-04-04T16:57:00.071683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
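Unlike the coercion failures earlier in this output, these "Typemod mismatch" rejections are raised by the bulk-upsert path before any coercion is attempted: the logged statement binds a bare 'abcd'::bpchar, which carries no length modifier (hence the empty "type mod ," in the message), while the target column was declared with typemod 2 or 4, and the check demands that the two agree instead of applying PostgreSQL's assignment cast. The typemod distinction is visible in plain PostgreSQL, where an explicit length cast truncates silently rather than erroring:

    SELECT 'abcd'::bpchar;        -- no typemod: all four characters kept
    SELECT 'abcd'::character(2);  -- explicit cast: silently truncated to 'ab'
    SELECT 'abcd'::character(4);  -- fits the declared length, returned as-is

(The reading of the bulk-upsert check is inferred from the error text alone; only the SELECT behavior above is verified PostgreSQL semantics.)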
: Error ... ed -> Connecting 2025-04-04T17:03:08.188993Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:08.198598Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21080, node 10 2025-04-04T17:03:08.495013Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:08.495057Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:08.495071Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:08.495298Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17702 TClient is connected to server localhost:17702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:10.553868Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:10.563968Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:03:12.466642Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504646248212655:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:12.466771Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:15.789789Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504680607951522:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:15.789973Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:15.791163Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504680607951557:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:15.798404Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:03:15.829887Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504680607951559:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:03:15.889407Z node 10 :TX_PROXY ERROR: Actor# [10:7489504680607951612:2351] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21652, MsgBus: 8819 2025-04-04T17:03:17.709412Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489504690287619679:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:17.709499Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c0a/r3tmp/tmpy53uHE/pdisk_1.dat 2025-04-04T17:03:17.969789Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:18.004216Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:18.004373Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:18.008688Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21652, node 11 2025-04-04T17:03:18.089089Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:18.089165Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:18.089182Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:18.089411Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8819 TClient is connected to server localhost:8819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:19.060446Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:22.709672Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489504690287619679:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:22.709796Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:24.044694Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489504720352391403:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:24.045000Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:24.082531Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489504720352391438:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:24.119730Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:03:24.192845Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7489504720352391440:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:03:24.275314Z node 11 :TX_PROXY ERROR: Actor# [11:7489504720352391491:2347] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:24.465153Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:03:24.645277Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:03:31.515128Z node 11 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 11, TabletId: 72075186224037888 not found 2025-04-04T17:03:31.624779Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:03:32.821083Z node 11 :KQP_COMPUTE ERROR: SelfId: [11:7489504754712130457:2458], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=11&id=ZTI2Y2YxYjgtODdlNzhhYzMtMWQ3NmQ0YWEtODRhNzliZjM=. CustomerSuppliedId : . TraceId : 01jr0w3yn48kj4nnn35km3zagw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-04-04T17:03:32.846320Z node 11 :KQP_COMPUTE ERROR: SelfId: [11:7489504754712130458:2459], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jr0w3yn48kj4nnn35km3zagw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=11&id=ZTI2Y2YxYjgtODdlNzhhYzMtMWQ3NmQ0YWEtODRhNzliZjM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [11:7489504754712130454:2454], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:03:32.853249Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=ZTI2Y2YxYjgtODdlNzhhYzMtMWQ3NmQ0YWEtODRhNzliZjM=, ActorId: [11:7489504754712130444:2454], ActorState: ExecuteState, TraceId: 01jr0w3yn48kj4nnn35km3zagw, Create QueryResponse for error on request, msg: 2025-04-04T17:03:32.910419Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:03:32.910450Z node 11 :IMPORT WARN: Table profiles were not loaded >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::OperationTimeout >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD] >> TGRpcNewClient::SimpleYqlQuery >> TTableProfileTests::UseDefaultProfile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-04-04T17:03:38.029498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504778273041574:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:38.032642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8a/r3tmp/tmpy3dhzT/pdisk_1.dat 2025-04-04T17:03:38.610163Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:38.619780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:38.619886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:38.643477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25911 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:39.358131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:39.373543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:39.423508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:42.856071Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489504797120096745:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:42.856110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8a/r3tmp/tmpgVtxXD/pdisk_1.dat 2025-04-04T17:03:42.971009Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:43.000508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:43.000633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:43.002279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16735 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:43.369242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:03:43.392539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TGRpcYdbTest::DropTableBadRequest >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> TGRpcYdbTest::ExecuteQueryBadRequest >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword >> YdbYqlClient::TestReadTableMultiShard [GOOD] >> YdbYqlClient::TestReadTableMultiShardUseSnapshot >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::TestAccountReadQuota >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> TGRpcYdbTest::GetOperationBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid >> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |82.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> TGRpcYdbTest::OperationTimeout [GOOD] >> TGRpcYdbTest::OperationCancelAfter >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> YdbOlapStore::LogNonExistingRequest [GOOD] >> YdbOlapStore::LogNonExistingUserId >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::ZeroRows >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> TFlatTest::SelectRangeNullArgs4 [GOOD] >> 
test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::CreateYqlSession |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |82.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> TGRpcNewClient::TestAuth >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> TFlatTest::AutoSplitBySize_FlatIndex [GOOD] >> TFlatTest::AutoMergeBySize ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] Test command err: 2025-04-04T17:03:22.647389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504712833033289:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:22.647441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00129e/r3tmp/tmp54bqwA/pdisk_1.dat 2025-04-04T17:03:23.703049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:03:23.879447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:23.879955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:23.880039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:23.911377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28382, node 1 2025-04-04T17:03:24.421366Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:24.421401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:24.421407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:24.421557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20282 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:25.128347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:27.658398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504712833033289:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:27.658479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:31.672515Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504751667523148:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:31.673183Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00129e/r3tmp/tmpodYcPq/pdisk_1.dat 2025-04-04T17:03:32.152179Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:32.216650Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:32.216753Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:32.225174Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19282, node 4 2025-04-04T17:03:32.583111Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:32.583141Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:32.583147Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:32.583285Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20084 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:33.133497Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:36.670362Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489504751667523148:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:36.670470Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:37.371407Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:37.742686Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-04-04T17:03:37.793032Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-04T17:03:39.877307Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504783810167058:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:39.883205Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00129e/r3tmp/tmpreEaRg/pdisk_1.dat 2025-04-04T17:03:40.336566Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:40.371988Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:40.372111Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:40.384897Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17383, node 7 2025-04-04T17:03:40.618770Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:40.618806Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:40.618818Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:40.625268Z node 7 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:7504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:41.247458Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:44.853959Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:44.878626Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489504783810167058:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:44.878702Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:46.969122Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504812838740218:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:46.969689Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00129e/r3tmp/tmpF0afkz/pdisk_1.dat 2025-04-04T17:03:47.127913Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:47.162009Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:47.162135Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:47.166031Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2843, node 10 2025-04-04T17:03:47.233077Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:47.233105Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:47.233113Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:47.233277Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62729 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:47.565901Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:51.592598Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:51.971769Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504812838740218:2154];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:51.971823Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD] >> TPQCompatTest::ReadWriteSessions >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2025-04-04T17:03:45.254078Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504810078926700:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:45.256931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e89/r3tmp/tmpS1xR3N/pdisk_1.dat 2025-04-04T17:03:45.926986Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:45.932533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:45.932643Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:45.946541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14045 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:46.372366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:46.390694Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:46.404207Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:03:46.418529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:49.718808Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489504826966024074:2198];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e89/r3tmp/tmpPadZo5/pdisk_1.dat 2025-04-04T17:03:49.967754Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:03:50.033007Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:50.033097Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:50.051198Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:50.051494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7003 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:50.459875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:50.470531Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:50.502910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession >> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardOneRow >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Timeout >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> TYqlDateTimeTests::SimpleOperations [GOOD] >> TYqlDecimalTests::DecimalKey >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2025-04-04T17:03:31.035065Z node 
1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504750887553067:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:31.039345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001285/r3tmp/tmpXQY2yW/pdisk_1.dat 2025-04-04T17:03:31.635800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:31.635978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:31.640874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:31.641029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21270, node 1 2025-04-04T17:03:31.999224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:31.999260Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:31.999268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:31.999436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:32.694330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:32.846519Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jr0w3zfcdaj3j0sfv5y131pq, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35698, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.975764s 2025-04-04T17:03:32.928418Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr0w3zgz22svpjkq4nf7hcrj, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:35708, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:35.947027Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jr0w42ga0qdvaamfsgfzaytn, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:53360, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:35.947628Z node 1 :TX_PROXY DEBUG: actor# [1:7489504750887553317:2140] Handle TEvProposeTransaction 2025-04-04T17:03:35.947657Z node 1 :TX_PROXY DEBUG: actor# [1:7489504750887553317:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:03:35.947703Z node 1 :TX_PROXY DEBUG: actor# [1:7489504750887553317:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489504768067423323:2633] 2025-04-04T17:03:36.041546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504750887553067:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:36.041642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:36.066629Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:53360" 2025-04-04T17:03:36.066706Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:03:36.067047Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:03:36.067115Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:03:36.067299Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:03:36.067467Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:03:36.067524Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 SEND to# 
72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:03:36.067688Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:03:36.071678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:36.085603Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-04T17:03:36.085689Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504768067423323:2633] txid# 281474976710658 SEND to# [1:7489504768067423322:2337] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-04T17:03:36.088089Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:36.088181Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:36.088191Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:36.088282Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:36.197451Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390760:2721], Recipient [1:7489504772362390861:2346]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.198584Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390756:2717], Recipient [1:7489504772362390871:2350]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.199270Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390758:2719], Recipient [1:7489504772362390860:2345]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.199874Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390752:2713], Recipient [1:7489504772362390870:2349]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.200406Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390765:2726], Recipient [1:7489504772362390898:2355]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.201015Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390761:2722], Recipient [1:7489504772362390873:2352]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.201493Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390755:2716], Recipient [1:7489504772362390868:2347]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.201979Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390757:2718], Recipient [1:7489504772362390858:2343]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.202412Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390751:2712], Recipient [1:7489504772362390906:2356]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.206749Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390762:2723], Recipient [1:7489504772362390859:2344]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.207355Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390753:2714], Recipient [1:7489504772362390872:2351]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.207791Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 
268828672, Sender [1:7489504772362390750:2711], Recipient [1:7489504772362390897:2354]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.208237Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390754:2715], Recipient [1:7489504772362390856:2341]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.208812Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390759:2720], Recipient [1:7489504772362390874:2353]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.208901Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390764:2725], Recipient [1:7489504772362390857:2342]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.209455Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504772362390763:2724], Recipient [1:7489504772362390869:2348]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:36.212311Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7489504772362390752:2713], Recipient [1:7489504772362390870:2349]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:03:36.212864Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7489504772362390756:2717], Recipient [1:7489504772362390871:2350]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:03:36.213317Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037901 actor [1:7489504772362390871:2350] 2025-04-04T17:03:36.213899Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:03:36.214023Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7489504772362390870:2349] 2025-04-04T17:03:36.214280Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:03:36.229877Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7489504772362390753:2714], Recipient [1:7489504772362390872:2351]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:03:36.230370Z node 1 :T ... 
rStreamClearance 2025-04-04T17:03:57.564073Z node 10 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715681] at 72075186224037897 2025-04-04T17:03:57.564087Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-04T17:03:57.564104Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2025-04-04T17:03:57.564118Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2025-04-04T17:03:57.564130Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-04T17:03:57.564312Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2025-04-04T17:03:57.564326Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:03:57.564339Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-04T17:03:57.564351Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-04T17:03:57.564361Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-04T17:03:57.564393Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-04T17:03:57.565785Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7489504862486251533:2155], Recipient [10:7489504858191283198:2347]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:03:57.565820Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T17:03:57.565881Z node 10 :READ_TABLE_API DEBUG: [10:7489504862486251513:2403] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2025-04-04T17:03:57.565915Z node 10 :READ_TABLE_API DEBUG: [10:7489504862486251513:2403] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2025-04-04T17:03:57.566074Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2025-04-04T17:03:57.566313Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2025-04-04T17:03:57.568607Z node 10 :READ_TABLE_API DEBUG: [10:7489504862486251513:2403] got stream part, size: 75, RU required: 128 rate limiter absent 2025-04-04T17:03:57.568977Z node 10 :READ_TABLE_API DEBUG: [10:7489504862486251513:2403] Starting inactivity timer for 600.000000s with tag 3 2025-04-04T17:03:57.570535Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2025-04-04T17:03:57.570577Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2025-04-04T17:03:57.570581Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7489504862486251514:2403], Recipient [10:7489504858191283198:2347]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2025-04-04T17:03:57.570602Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-04T17:03:57.570616Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 
281474976715681 2025-04-04T17:03:57.570676Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2025-04-04T17:03:57.570776Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7489504862486251514:2403], Recipient [10:7489504858191283198:2347]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2025-04-04T17:03:57.570794Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-04T17:03:57.570885Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504862486251514:2403], Recipient [10:7489504858191283198:2347]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786237599 TxId: 281474976715680 2025-04-04T17:03:57.571058Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-04-04T17:03:57.571072Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2025-04-04T17:03:57.571154Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7489504858191283198:2347], Recipient [10:7489504858191283198:2347]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:03:57.571171Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:03:57.571199Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-04-04T17:03:57.571224Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:03:57.571249Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2025-04-04T17:03:57.571267Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-04-04T17:03:57.571287Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2025-04-04T17:03:57.571320Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-04T17:03:57.571339Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2025-04-04T17:03:57.571353Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2025-04-04T17:03:57.571365Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-04T17:03:57.571409Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is DelayCompleteNoMoreRestarts 2025-04-04T17:03:57.571424Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2025-04-04T17:03:57.571435Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2025-04-04T17:03:57.571448Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2025-04-04T17:03:57.571502Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-04-04T17:03:57.571515Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2025-04-04T17:03:57.571525Z 
node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2025-04-04T17:03:57.571540Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:03:57.571551Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-04T17:03:57.571564Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-04T17:03:57.571575Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-04T17:03:57.575294Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-04T17:03:57.575344Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-04-04T17:03:57.575368Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 8 ms, propose latency: 12 ms, status: COMPLETE 2025-04-04T17:03:57.575430Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-04-04T17:03:57.577294Z node 10 :READ_TABLE_API NOTICE: [10:7489504862486251513:2403] Finish grpc stream, status: 400000 2025-04-04T17:03:57.580873Z node 10 :GRPC_SERVER DEBUG: [0x51a0001c0280] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.581120Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d1480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.581290Z node 10 :GRPC_SERVER DEBUG: [0x51a00007bc80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.581460Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d4a80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.581645Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c9680] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.581818Z node 10 :GRPC_SERVER DEBUG: [0x51a000152480] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.581970Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ebe80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.582114Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d3880] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.582282Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d4480] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.582439Z node 10 :GRPC_SERVER DEBUG: [0x51a0000cc680] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.582600Z node 10 :GRPC_SERVER DEBUG: [0x51a00008e280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.582778Z node 10 :GRPC_SERVER DEBUG: [0x51a000168680] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.582936Z node 10 :GRPC_SERVER DEBUG: [0x51b000177680] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.583099Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d0e80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.583262Z node 10 :GRPC_SERVER DEBUG: [0x51a00008be80] received request Name# 
RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.583413Z node 10 :GRPC_SERVER DEBUG: [0x51a000133880] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.583577Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a8c80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.583731Z node 10 :GRPC_SERVER DEBUG: [0x51a000165c80] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.583880Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d5680] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.584020Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d2080] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.584187Z node 10 :GRPC_SERVER DEBUG: [0x51a00002ee80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:03:57.584380Z node 10 :GRPC_SERVER DEBUG: [0x51a000166880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:03:46.728659Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:03:46.754979Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:03:46.755503Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:03:46.755565Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:03:46.755602Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:03:46.755637Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:03:46.755685Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:03:46.755736Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] 2025-04-04T17:03:46.836458Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2025-04-04T17:03:46.836617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:03:46.856981Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:03:46.871663Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-04T17:03:46.873224Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:03:46.883707Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-04T17:03:46.888330Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:03:46.899051Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:03:46.904702Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:03:46.913215Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-04-04T17:03:46.913321Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2025-04-04T17:03:46.913386Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:03:46.927618Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:03:46.933921Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-04T17:03:46.934089Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:03:46.934153Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-04-04T17:03:46.934179Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-04-04T17:03:46.942503Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:03:46.942577Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:03:46.942872Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:03:46.959934Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:03:46.960627Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:191:2202], now have 1 active actors on pipe 2025-04-04T17:03:46.978895Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:194:2204], now have 1 active actors on pipe 2025-04-04T17:03:46.979007Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-04T17:03:46.979056Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-04T17:03:46.994388Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-04-04T17:03:46.994947Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-04-04T17:03:46.995448Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-04-04T17:03:46.995943Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-04-04T17:03:46.996086Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-04-04T17:03:46.996160Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-04-04T17:03:46.996331Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-04-04T17:03:46.996576Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-04T17:03:46.996682Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-04-04T17:03:46.997127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-04-04T17:03:46.997252Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-04T17:03:46.997300Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-04T17:03:46.997398Z node 1 :PERSQUEUE INFO: new Cookie default|3a77b762-36e2d5e3-8a1804c1-4141db4b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-04T17:03:46.997525Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-04T17:03:46.997607Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:03:46.997941Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2208], now have 1 active actors on pipe 2025-04-04T17:03:46.998042Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-04T17:03:46.998076Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-04T17:03:46.998610Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-04-04T17:03:46.999117Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-04-04T17:03:46.999594Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid2' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-04-04T1 ... 005_00000_0000000001_00014(+) to d0000000000_00000000000000000005_00000_0000000001_00014(+) 2025-04-04T17:03:59.000351Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:03:59.002900Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:03:59.008363Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [9:440:2417], now have 1 active actors on pipe 2025-04-04T17:03:59.009081Z node 9 :PERSQUEUE DEBUG: FormAnswer for 2 blobs 2025-04-04T17:03:59.014021Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.015440Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.016655Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.017819Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.019183Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.020379Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.021546Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.022803Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.024082Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.025204Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.027308Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 
cbcount 1 2025-04-04T17:03:59.028711Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.029928Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.031175Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.031725Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 1 size 172682 from pos 0 cbcount 1 2025-04-04T17:03:59.033525Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.034815Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.036048Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.037283Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.038765Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.040018Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.041517Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.042783Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.043967Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.045094Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.046574Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.047924Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.049327Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.050973Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:03:59.051568Z node 9 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 1 size 172682 from pos 0 cbcount 1 2025-04-04T17:03:59.055333Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2025-04-04T17:03:59.108828Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [9:443:2419], now have 1 active actors on pipe 2025-04-04T17:03:59.109004Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:03:59.109059Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-04T17:03:59.109127Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 1 size 102400 offset: 14 2025-04-04T17:03:59.109241Z node 9 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 
'topic' partition 0 error: new GetOwnership request needed for owner 2025-04-04T17:03:59.109412Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:03:59.109466Z node 9 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000002_00000_0000000001_00014(+) to d0000000000_00000000000000000002_00000_0000000001_00014(+) 2025-04-04T17:03:59.109504Z node 9 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000003_00000_0000000001_00014(+) to d0000000000_00000000000000000003_00000_0000000001_00014(+) 2025-04-04T17:03:59.109575Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 45, Error new GetOwnership request needed for owner 2025-04-04T17:03:59.109618Z node 9 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:03:59.112935Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:03:59.117620Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [9:455:2430], now have 1 active actors on pipe 2025-04-04T17:03:59.117779Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:03:59.117837Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-04T17:03:59.117967Z node 9 :PERSQUEUE INFO: new Cookie default|6d57bd23-58bcda18-4b212f9f-7b1dd309_14 generated for partition 0 topic 'topic' owner default 2025-04-04T17:03:59.118089Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:03:59.118181Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:03:59.118581Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [9:457:2432], now have 1 active actors on pipe 2025-04-04T17:03:59.118684Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:03:59.118721Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-04T17:03:59.118768Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 0 size 102400 offset: 14 2025-04-04T17:03:59.118879Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 102409. Cookie: 15 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:03:59.407020Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 15 2025-04-04T17:03:59.407216Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 15 partNo 0 2025-04-04T17:03:59.408187Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 15 partNo 0 FormedBlobsCount 0 NewHead: Offset 14 PartNo 0 PackedSize 102472 count 1 nextOffset 15 batches 1 2025-04-04T17:03:59.409109Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 14,1 HeadOffset 14 endOffset 14 curOffset 15 d0000000000_00000000000000000014_00000_0000000001_00000| size 102462 WTime 2103 2025-04-04T17:03:59.409500Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:03:59.409603Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 14 partNo 0 count 1 size 102462 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:03:59.428698Z node 9 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 14 count 1 size 102462 actorID [9:133:2159] 2025-04-04T17:03:59.428852Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102409 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:03:59.428925Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:03:59.429009Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 15, partNo: 0, Offset: 14 is stored on disk 2025-04-04T17:03:59.429387Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:03:59.429528Z node 9 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 14 partno 0 count 1 parts 0 size 102462 2025-04-04T17:03:59.430060Z node 9 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [9:468:2441], now have 1 active actors on pipe >> TGRpcYdbTest::CreateYqlSession [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> YdbTableBulkUpsert::ZeroRows [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey >> TVersions::Wreck0Reverse [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |82.7%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> YdbTableBulkUpsert::Timeout [GOOD] >> YdbTableBulkUpsert::RetryOperationSync ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.29206 00000.000 II| FAKE_ENV: Born at 2025-04-04T16:59:48.959591Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting storage for BS group 3 00000.016 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.016 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.019 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.019 NN| TABLET_SAUSAGECACHE: Switch replacement policy from S3FIFO to ThreeLeveledLRU 00000.019 NN| TABLET_SAUSAGECACHE: Switch replacement policy done from S3FIFO to ThreeLeveledLRU 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.042 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.043 DD| 
TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.047 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.048 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 
00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.049 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.050 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.051 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.059 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.059 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.060 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.060 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.060 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.061 DD| 
TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.061 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxW ... aded page collection [1:2:103:1:12288:2546:0] status OK pages [ 5 ] 00000.317 DD| TABLET_EXECUTOR: Leader{1:2:105} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.317 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{196, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.317 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{196, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.317 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] pages [ 100 5 ] 00000.317 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.318 D3| TABLET_EXECUTOR: Leader{1:2:105} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [4 4] 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.318 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [10:30:2062] class Online from cache [ ] already requested [ ] to request [ 4 ] 00000.318 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 4 ] 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.318 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{197, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.318 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] pages [ 100 4 ] 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } 
-{ }, 0 gb} 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.319 D3| TABLET_EXECUTOR: Leader{1:2:105} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [3 4] 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.319 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [10:30:2062] class Online from cache [ ] already requested [ ] to request [ 3 ] 00000.319 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 3 ] 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.319 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{198, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.319 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] pages [ 100 3 ] 00000.320 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.320 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.320 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.320 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.320 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.320 D3| TABLET_EXECUTOR: Leader{1:2:105} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [2 4] 00000.320 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.320 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [10:30:2062] class Online from cache [ ] already requested [ ] to request [ 2 ] 00000.320 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 2 ] 00000.320 DD| TABLET_EXECUTOR: Leader{1:2:105} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{199, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.321 TT| 
TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] pages [ 100 2 ] 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.321 D3| TABLET_EXECUTOR: Leader{1:2:105} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [1 4] 00000.321 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.321 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [10:30:2062] class Online from cache [ ] already requested [ ] to request [ 1 ] 00000.322 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 1 ] 00000.322 DD| TABLET_EXECUTOR: Leader{1:2:105} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.322 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.322 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{200, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.322 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] pages [ 1 100 ] 00000.322 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.322 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.322 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.322 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.323 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.323 D3| TABLET_EXECUTOR: Leader{1:2:105} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [0 4] 00000.323 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.323 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [10:30:2062] class Online from cache [ ] already 
requested [ ] to request [ 0 ] 00000.323 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 0 ] 00000.323 DD| TABLET_EXECUTOR: Leader{1:2:105} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.323 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.323 DD| TABLET_EXECUTOR: Leader{1:2:105} Tx{201, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.323 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] pages [ 0 100 ] Counters: Active:0/0, Passive:2772, MemLimit:0 00000.323 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.325 II| TABLET_EXECUTOR: Leader{1:2:105} suiciding, Waste{2:0, 10249619b +(100, 48685b), 104 trc, -48685b acc} 00000.331 DD| TABLET_SAUSAGECACHE: Unregister [10:30:2062] 00000.331 NN| TABLET_SAUSAGECACHE: Poison cache serviced 100 reqs hit {0 0b} miss {100 10244300b} 00000.331 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.331 II| FAKE_ENV: DS.0 gone, left {10109b, 104}, put {10129b, 105} 00000.332 II| FAKE_ENV: DS.1 gone, left {10298442b, 106}, put {10298442b, 106} 00000.342 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.342 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.342 II| FAKE_ENV: All BS storage groups are stopped 00000.342 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.344 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 1841}, stopped >> YdbYqlClient::TestReadTableMultiShardOneRow [GOOD] >> YdbYqlClient::TestReadTableBatchLimits >> TGRpcYdbTest::KeepAlive [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2025-04-04T17:03:30.561812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504743226397690:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:30.561855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001288/r3tmp/tmpG0SWF7/pdisk_1.dat 2025-04-04T17:03:31.514843Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:31.517357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:31.517430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:31.541743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29542, node 1 2025-04-04T17:03:31.942805Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:31.942825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:31.942831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:31.942950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:32.655062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:38.332909Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504781344991155:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:38.333012Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001288/r3tmp/tmpSR4RUe/pdisk_1.dat 2025-04-04T17:03:38.901245Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:38.992324Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:38.992408Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:39.008444Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22405, node 4 2025-04-04T17:03:39.346932Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:39.346952Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:39.346958Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:39.347085Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62220 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:39.623404Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:44.808954Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504804240289920:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:44.809055Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001288/r3tmp/tmpFBpbpr/pdisk_1.dat 2025-04-04T17:03:45.022323Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7790, node 7 2025-04-04T17:03:45.153044Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:45.154040Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:45.281756Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:45.281788Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:45.281798Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:45.281968Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:03:45.282715Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29846 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:45.740046Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001288/r3tmp/tmpLMh5nV/pdisk_1.dat 2025-04-04T17:03:51.047482Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:03:51.193735Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:51.239304Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:51.239402Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:51.248629Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10449, node 10 2025-04-04T17:03:51.463076Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:51.463103Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:51.463111Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:51.463274Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:03:51.980081Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:57.728425Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504861665855039:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:57.728552Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001288/r3tmp/tmpv7eK5v/pdisk_1.dat 2025-04-04T17:03:58.283437Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:58.382871Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:58.382967Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:58.418446Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6639, node 13 2025-04-04T17:03:58.744917Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:58.744942Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:58.744952Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:58.745094Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:59.183690Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-04-04T17:03:34.482856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504763733898156:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:34.483186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127d/r3tmp/tmpdDUM91/pdisk_1.dat 2025-04-04T17:03:35.397623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:35.426101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:35.426221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:35.448182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28710, node 1 2025-04-04T17:03:35.768718Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:35.768740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:35.768745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:35.768861Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:36.412274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:36.594778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:03:41.918807Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504792744986834:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:41.919773Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127d/r3tmp/tmpblbkxk/pdisk_1.dat 2025-04-04T17:03:42.145886Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15299, node 4 2025-04-04T17:03:42.307197Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:42.318386Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:42.403796Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:42.450964Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:42.450991Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:42.450999Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:42.451169Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:42.747903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:42.877992Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jr0w498x56fzp705e11cscgb, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60644, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:42.880998Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:42.891440Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:42.891567Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:42.891627Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:42.891669Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:43.026705Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:43.026816Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:43.026831Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:43.026871Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:43.106425Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr0w49fxa12gj0tgc682r1n2, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60644, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:45.671258Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jr0w4c0722bp2kz63j4nswkz, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60644, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:45.679115Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504809924857157:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:45.679268Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:45.679719Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504809924857171:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:45.684508Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:45.691928Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:45.692064Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:45.692079Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:45.692134Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:45.724296Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:45.724341Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:45.724415Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:45.724421Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:45.734061Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489504809924857173:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:03:45.803291Z node 4 :TX_PROXY ERROR: Actor# [4:7489504809924857249:2797] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:46.374869Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w4c0722bp2kz63j4nswkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZmI5YjA1OC1mMjZkMmNjMy05ZTg1NjFiNS1kNDk4ZGM4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:46.398188Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0w4cpxaggn8wag8bbazcrd, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60644, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:46.398681Z node 4 :READ_TABLE_API NOTICE: [4:7489504814219824608:2353] Finish grpc stream, status: 400010 2025-04-04T17:03:46.403342Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0w4cq26vrxx838n25yjat3, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:60644, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:46.417469Z node 4 :READ_TABLE_API DEBUG: [4:7489504814219824609:2354] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-04-04T17:03:46.417545Z node 4 :READ_TABLE_API DEBUG: [4:7489504814219824609:2354] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-04-04T17:03:46.422852Z node 4 :READ_TABLE_API DEBUG: [4:7489504814219824609:2354] got stream part, size: 290, RU required: 128 rate limiter absent 2025-04-04T17:03:46.423244Z node 4 :READ_TABLE_API DEBUG: [4:7489504814219824609:2354] Starting inactivity timer for 600.000000s with tag 3 2025-04-04T17:03:4 ... 
current inflight# 0 2025-04-04T17:03:46.553437Z node 4 :GRPC_SERVER DEBUG: [0x51a000037e80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.553606Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d2c80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.553650Z node 4 :GRPC_SERVER DEBUG: [0x51a000038a80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.553797Z node 4 :GRPC_SERVER DEBUG: [0x51a000039680] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.553855Z node 4 :GRPC_SERVER DEBUG: [0x51a0000f9680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.553967Z node 4 :GRPC_SERVER DEBUG: [0x51a000049280] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.554000Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a0280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.554118Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c9680] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.554173Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c8a80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.554347Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c7e80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.554506Z node 4 :GRPC_SERVER DEBUG: [0x51b0001a8680] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.554699Z node 4 :GRPC_SERVER DEBUG: [0x51a00003c080] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.554860Z node 4 :GRPC_SERVER DEBUG: [0x51a00003a880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.555036Z node 4 :GRPC_SERVER DEBUG: [0x51a00003a280] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.555205Z node 4 :GRPC_SERVER DEBUG: [0x51a000039c80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.555351Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c6680] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.555506Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c1e80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.555658Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d1a80] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.555802Z node 4 :GRPC_SERVER DEBUG: [0x51a00003ae80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:03:46.555960Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c7280] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-04T17:03:48.257631Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504823337651747:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:48.257700Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/00127d/r3tmp/tmpFS8Hg4/pdisk_1.dat 2025-04-04T17:03:48.452829Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6429, node 7 2025-04-04T17:03:48.569103Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:48.569201Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:48.601445Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:48.602484Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:48.602517Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:48.602527Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:48.602707Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3787 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:48.946947Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation timeout. 2025-04-04T17:03:53.610382Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504843320076768:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:53.610595Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127d/r3tmp/tmpNWbFZX/pdisk_1.dat 2025-04-04T17:03:53.889263Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:53.929803Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:53.929907Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:53.933872Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26380, node 10 2025-04-04T17:03:54.075270Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:54.075294Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:54.075303Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:54.075456Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:54.383140Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation cancelled. 2025-04-04T17:03:59.745676Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504871833050067:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:59.745819Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127d/r3tmp/tmp9HU2NN/pdisk_1.dat 2025-04-04T17:03:59.932724Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:59.976334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:59.976429Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:59.988961Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8323, node 13 2025-04-04T17:04:00.295033Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:00.295069Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:00.295079Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:00.295257Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62890 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:00.708312Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-04-04T17:01:19.240391Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:01:19.375721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:01:19.399323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:01:19.399645Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:01:19.409106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:19.409395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:19.409694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:01:19.409899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:01:19.410032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:01:19.410148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:01:19.410296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:19.410416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:19.410556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:01:19.410698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:01:19.410856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:01:19.410972Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:01:19.451891Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:01:19.452172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:01:19.452244Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:01:19.452442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:01:19.452646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:01:19.452731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:01:19.452779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:01:19.452908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:01:19.452987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:01:19.453035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:01:19.453065Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:01:19.453230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:01:19.453311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:01:19.453354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:01:19.453385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:01:19.453531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:01:19.453597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:01:19.453676Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:01:19.453709Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:01:19.453786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:01:19.453837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:01:19.453900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:01:19.453947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:01:19.453985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:01:19.454014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:01:19.454485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-04T17:01:19.454615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=68; 2025-04-04T17:01:19.454730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=58; 2025-04-04T17:01:19.454823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-04-04T17:01:19.455008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:01:19.455077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:01:19.455118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:01:19.455336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:01:19.455405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:01:19.455443Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:01:19.455605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:01:19.455653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:01:19.455689Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:01:19.455865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:01:19.455905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:01:19.458085Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:01:19.460473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:01:19.460610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:01:19.460747Z node 1 :TX_COLUMNS ... 
RD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T17:03:58.341544Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2052; 2025-04-04T17:03:58.341600Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=2052; 2025-04-04T17:03:58.341651Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T17:03:58.341778Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.341823Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-04T17:03:58.341871Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:03:58.342576Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:58.342781Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.342834Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:03:58.342967Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);columns=4;rows=1; 2025-04-04T17:03:58.343050Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-04-04T17:03:58.343181Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[54:432:2450];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-04-04T17:03:58.343327Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.343471Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.343581Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.343918Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:03:58.344076Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.344206Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.344250Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: 
Scan [54:433:2451] finished for tablet 9437184 2025-04-04T17:03:58.344839Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[54:432:2450];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.019}],"full":{"a":1743786238324612,"name":"_full_task","f":1743786238324612,"d_finished":0,"c":0,"l":1743786238344312,"d":19700},"events":[{"name":"bootstrap","f":1743786238324861,"d_finished":3192,"c":1,"l":1743786238328053,"d":3192},{"a":1743786238343890,"name":"ack","f":1743786238342545,"d_finished":1064,"c":1,"l":1743786238343609,"d":1486},{"a":1743786238343873,"name":"processing","f":1743786238329977,"d_finished":9316,"c":10,"l":1743786238343612,"d":9755},{"name":"ProduceResults","f":1743786238326620,"d_finished":3413,"c":13,"l":1743786238344231,"d":3413},{"a":1743786238344235,"name":"Finish","f":1743786238344235,"d_finished":0,"c":0,"l":1743786238344312,"d":77},{"name":"task_result","f":1743786238330000,"d_finished":8060,"c":9,"l":1743786238341937,"d":8060}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.344918Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[54:432:2450];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:03:58.345500Z node 54 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[54:432:2450];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1743786238324612,"name":"_full_task","f":1743786238324612,"d_finished":0,"c":0,"l":1743786238344965,"d":20353},"events":[{"name":"bootstrap","f":1743786238324861,"d_finished":3192,"c":1,"l":1743786238328053,"d":3192},{"a":1743786238343890,"name":"ack","f":1743786238342545,"d_finished":1064,"c":1,"l":1743786238343609,"d":2139},{"a":1743786238343873,"name":"processing","f":1743786238329977,"d_finished":9316,"c":10,"l":1743786238343612,"d":10408},{"name":"ProduceResults","f":1743786238326620,"d_finished":3413,"c":13,"l":1743786238344231,"d":3413},{"a":1743786238344235,"name":"Finish","f":1743786238344235,"d_finished":0,"c":0,"l":1743786238344965,"d":730},{"name":"task_result","f":1743786238330000,"d_finished":8060,"c":9,"l":1743786238341937,"d":8060}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-04-04T17:03:58.345604Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:03:58.323988Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-04T17:03:58.345650Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:03:58.346032Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;
|82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
|82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
|82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
|82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt
|82.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt
|82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt
>> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD]
>> TGRpcNewClient::CreateAlterUpsertDrop [GOOD]
>> TGRpcNewClient::InMemoryTables
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts
>> YdbOlapStore::LogGrepExisting [GOOD]
>> YdbOlapStore::LogExistingUserId
>> TTableProfileTests::OverwriteCompactionPolicy [GOOD]
>> TTableProfileTests::OverwriteExecutionPolicy
>> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD]
>> TGRpcYdbTest::ExecutePreparedQuery
|82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
|82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
|82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
>> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD]
>> TGRpcYdbTest::ExecuteDmlQuery [GOOD]
>> TGRpcYdbTest::CreateYqlSessionExecuteQuery
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0]
>> TYqlDecimalTests::DecimalKey [GOOD]
>> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD]
>> TGRpcYdbTest::ExplainQuery
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2025-04-04T17:03:12.835077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504667106220998:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:12.835114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ad/r3tmp/tmp0VMvTd/pdisk_1.dat 2025-04-04T17:03:13.796417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:13.850467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:03:13.856647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:13.856800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:13.878436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5777, node 1 2025-04-04T17:03:14.266975Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:14.267020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:14.267032Z node 1 :NET_CLASSIFIER WARN: failed
to initialize from file: (empty maybe) 2025-04-04T17:03:14.267131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:14.877021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:17.835124Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504667106220998:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:17.835183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:18.070545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 CLIENT_DEADLINE_EXCEEDED 2025-04-04T17:03:19.327847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504697170994941:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:19.327927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504697170994949:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:19.327988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:19.333386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:19.365413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504697170994955:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:19.431836Z node 1 :TX_PROXY ERROR: Actor# [1:7489504697170995036:4197] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:20.013334Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w3j8x12t14n8f4v47ev1v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU0NmY4ZmYtY2NiMDA0NTUtMzA0MzE2NWQtNGQ1OGEzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ad/r3tmp/tmpsq78n0/pdisk_1.dat 2025-04-04T17:03:23.977089Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:03:24.442133Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:24.678559Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:24.678640Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:24.700817Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6874, node 4 2025-04-04T17:03:25.106961Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:25.106987Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:25.106994Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:25.107122Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:25.608335Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:30.368189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:33.202447Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504757753179993:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:33.202553Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012ad/r3tmp/tmp2Akg8O/pdisk_1.dat 2025-04-04T17:03:33.803523Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:33.862634Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:33.862745Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:33.888109Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10378, node 7 2025-04-04T17:03:34.322777Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:34.322811Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:34.322819Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:34.322976Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:34.687867Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:38.182367Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489504757753179993:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:38.182443Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migra ... 
5186224037888;tx_state=complete;fline=columnshard_impl.cpp:618;event=start_changes;type=CS::INDEXATION;task_id=d20bd96a-117611f0-892131cd-19f7fa51; 2025-04-04T17:04:07.637712Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7489504904366979071:2341];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=3380;external_task_id=d20bd96a-117611f0-892131cd-19f7fa51;type=CS::INDEXATION;priority=0;; 2025-04-04T17:04:07.639774Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7489504904366979071:2341];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=3380;external_task_id=d20bd96a-117611f0-892131cd-19f7fa51;type=CS::INDEXATION;priority=0;; 2025-04-04T17:04:07.639835Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7489504904366979071:2341];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=d20bd96a-117611f0-892131cd-19f7fa51;mem=3380;cpu=0; 2025-04-04T17:04:07.640175Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7489504904366979071:2341];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=d20bd96a-117611f0-892131cd-19f7fa51;task_id=1;mem=3380;cpu=0; 2025-04-04T17:04:07.640292Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=d20bd96a-117611f0-892131cd-19f7fa51;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=d20bd96a-117611f0-892131cd-19f7fa51; 2025-04-04T17:04:07.640386Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=d20bd96a-117611f0-892131cd-19f7fa51;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-04T17:04:07.657957Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7489504904366979071:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-04T17:04:07.660341Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504904366979262:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:07.660528Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:07.660627Z node 13 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-04-04T17:04:07.661012Z node 13 :TX_COLUMNSHARD DEBUG: TxWriteIndex[5] (CS::INDEXATION) apply at tablet 72075186224037888 2025-04-04T17:04:07.662142Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-04-04T17:04:07.662293Z node 13 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 2025-04-04T17:04:07.662990Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504904366979276:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:07.669480Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:04:07.674237Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-04T17:04:07.674327Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;fline=with_appended.cpp:65;portions=1,;task_id=d20bd96a-117611f0-892131cd-19f7fa51; 2025-04-04T17:04:07.674620Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::d20bd96a-117611f0-892131cd-19f7fa51; 2025-04-04T17:04:07.674708Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;tablet_id=72075186224037888;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:04:07.674800Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;tablet_id=72075186224037888;fline=columnshard_impl.cpp:779;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-04-04T17:04:07.674897Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;tablet_id=72075186224037888;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T17:04:07.675346Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;tablet_id=72075186224037888;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:04:07.675405Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:04:07.675454Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:04:07.675592Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=d20bd96a-117611f0-892131cd-19f7fa51;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:04:07.675986Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Delete Blob DS:2181038080:[72075186224037888:1:1:3:0:3864:0] 2025-04-04T17:04:07.676044Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-04-04T17:04:07.676134Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:779;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-04-04T17:04:07.676243Z node 13 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=d20bd96a-117611f0-892131cd-19f7fa51;mem=3380;cpu=0; 2025-04-04T17:04:07.799167Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: 
[13:7489504904366979278:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:04:07.880226Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7489504904366979071:2341];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-04T17:04:07.883074Z node 13 :TX_PROXY ERROR: Actor# [13:7489504904366979438:2930] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:08.382425Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7489504904366979071:2341];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-04T17:04:08.412448Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0w51f5ddq8m5g66z3nan27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2M4OGQxYzktMjg5MGQ3YzAtYTlmYjk3MDctOGZhNTBkYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:08.474938Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7489504904366979071:2341];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-04-04T17:04:08.477444Z node 13 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710662 scanId: 1 version: {1743786248001:max} readable: {1743786248435:max} at tablet 72075186224037888 2025-04-04T17:04:08.477662Z node 13 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710662 scanId: 1 at tablet 72075186224037888 2025-04-04T17:04:08.478433Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7489504904366979071:2341];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743786248001:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-04T17:04:08.628114Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7489504904366979071:2341];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743786248001:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-04-04T17:04:08.628540Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[13:7489504904366979071:2341];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743786248001:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"5,3,4,1,7,2,6"},"fetch":"7,1,2,3,4,5,6"}]}; 2025-04-04T17:04:08.636983Z node 13 :TX_COLUMNSHARD INFO: self_id=[13:7489504904366979109:2345];tablet_id=72075186224037888;parent=[13:7489504904366979071:2341];fline=manager.cpp:82;event=ask_data;request=request_id=3;3={portions_count=1};; 2025-04-04T17:04:08.639085Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-04-04T17:04:08.641348Z node 13 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-04T17:04:08.646146Z node 13 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-04-04T17:04:08.702073Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786248001, txId: 18446744073709551615] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2025-04-04T17:03:23.951688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504715824557694:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:23.951873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00128f/r3tmp/tmpazrjVy/pdisk_1.dat 2025-04-04T17:03:24.770688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:24.824608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:24.824714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:24.845292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8186, node 1 2025-04-04T17:03:25.120994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:25.121037Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:25.121044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:25.121160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:25.473949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:28.954537Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504715824557694:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:28.954622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:28.986145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:29.234058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504741594362564:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:29.234207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:29.234591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504741594362576:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:29.239104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:29.279374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504741594362578:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:29.349365Z node 1 :TX_PROXY ERROR: Actor# [1:7489504741594362649:2807] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:29.804277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w3vyed7sxd3hryh2mav79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYyM2M1NzUtYTZmOTk2NDktM2I4NjMyMDItNzRlYjhmNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:30.005297Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0w3whccykyfpdc62h84zmt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYyM2M1NzUtYTZmOTk2NDktM2I4NjMyMDItNzRlYjhmNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:30.213099Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0w3wqmahv2rd5qbasapwqn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYyM2M1NzUtYTZmOTk2NDktM2I4NjMyMDItNzRlYjhmNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:30.451518Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0w3wxd0epz0kymka0kxajm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYyM2M1NzUtYTZmOTk2NDktM2I4NjMyMDItNzRlYjhmNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:30.611137Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0w3x4tbp91pd379pq7vjsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYyM2M1NzUtYTZmOTk2NDktM2I4NjMyMDItNzRlYjhmNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:03:32.319514Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504755182541146:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:32.319566Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00128f/r3tmp/tmpi8DOOZ/pdisk_1.dat 2025-04-04T17:03:32.551265Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17331, node 4 2025-04-04T17:03:32.657951Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:32.658015Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:32.747967Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:32.760239Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:32.760266Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:32.760272Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:32.760416Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:33.016308Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:35.544944Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:35.654817Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504768067444222:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:35.654877Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504768067444217:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:35.655121Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:35.658820Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:35.693033Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489504768067444231:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:35.771632Z node 4 :TX_PROXY ERROR: Actor# [4:7489504768067444302:2786] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:35.876298Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w42755706sf12s6shv4ym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MjFkNzBkM2MtNmZmNzZlYzktOD ... 4-04T17:03:54.089974Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0w4kf38fjqnj5j8kwt1zw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:54.758396Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0w4m82d0bmy83260m9rf90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:54.798094Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0w4m82d0bmy83260m9rf90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:54.971383Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jr0w4my83w91ddj3g56vefwp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:55.203450Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0w4n329nmgar8aayef9tn1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:55.351000Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jr0w4naa92n8meeq1bpa0jc0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:55.492372Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jr0w4ney3dcw9c2rebw5c375, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:55.788525Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0w4nkk3qvdxnj1tabkpvax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:56.285522Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710673. 
Ctx: { TraceId: 01jr0w4nww49s4nh0cqj4jd3sa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:56.290621Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0w4nww49s4nh0cqj4jd3sa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDk5NzM0NDktNGI5ZmRlY2ItMzRmYWIzODEtYjA1MjRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:03:59.027995Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504871061177845:2291];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:59.028185Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00128f/r3tmp/tmpCbjfmC/pdisk_1.dat 2025-04-04T17:03:59.735843Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:59.819308Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:59.819448Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:59.835597Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62528, node 13 2025-04-04T17:04:00.198758Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:00.198794Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:00.198806Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:00.199244Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:00.880111Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:04.023738Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504871061177845:2291];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:04.023859Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:07.211767Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:04:07.425286Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504905420917306:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:07.425382Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504905420917314:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:07.425451Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:07.430776Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:04:07.474498Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504905420917320:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:04:07.539872Z node 13 :TX_PROXY ERROR: Actor# [13:7489504905420917391:2829] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:07.712733Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w517xa1v6qzmbx6fet38t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:07.899619Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0w51hf9a2hzwdqwt8126vv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:08.072518Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0w51q46kvezfg4be82x0an, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:08.254600Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0w51wp1qeb5k2vzsv2htdd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:08.432883Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0w522686e7hxzsvm8s12av, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:08.591744Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0w527w9kd9dd7d02rejdce, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:08.870283Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0w52ctayk3g394124s56gf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:09.256741Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jr0w52nb1ecg2v8302v1hqr0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:09.719076Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0w531vb5ps7kcx0zcbq4sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:10.154274Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01jr0w53g06ehv9575720g7bv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDYzYTQ4YzMtNjZkZTA1NC1jMTYzZTQyZC1mMDk1MTU3NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] Test command err: Starting YDB, grpc: 19731, msgbus: 25974 2025-04-04T16:58:58.599864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503578024317022:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:58.605995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00100b/r3tmp/tmpXDWFtl/pdisk_1.dat 2025-04-04T16:58:59.283241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:59.283384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:59.289452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:59.364698Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19731, node 1 2025-04-04T16:58:59.611786Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T16:58:59.612516Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T16:58:59.621808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:59.621835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:59.621842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:59.621983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25974 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T16:58:59.903171Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] Handle TEvNavigate describe path dc-1 2025-04-04T16:58:59.903243Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503582319285081:2458] HANDLE EvNavigateScheme dc-1 2025-04-04T16:58:59.904071Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503582319285081:2458] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T16:58:59.951206Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503582319285081:2458] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T16:58:59.963959Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503582319285081:2458] Handle TEvDescribeSchemeResult Forward to# [1:7489503582319285080:2457] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T16:59:00.006018Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] Handle TEvProposeTransaction 2025-04-04T16:59:00.006054Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T16:59:00.006160Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489503586614252386:2466] 2025-04-04T16:59:00.130746Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T16:59:00.130844Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:59:00.130863Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T16:59:00.130960Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:59:00.131272Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:00.131422Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T16:59:00.131493Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T16:59:00.131660Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T16:59:00.132482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T16:59:00.135416Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T16:59:00.135517Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252386:2466] txid# 281474976710657 SEND to# [1:7489503586614252385:2465] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T16:59:00.159242Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] Handle TEvProposeTransaction 2025-04-04T16:59:00.159282Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T16:59:00.159313Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489503586614252428:2504] 2025-04-04T16:59:00.161621Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T16:59:00.161688Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T16:59:00.161705Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T16:59:00.161835Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T16:59:00.162106Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T16:59:00.162224Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T16:59:00.162287Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T16:59:00.162416Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T16:59:00.163104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T16:59:00.165891Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T16:59:00.165938Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252428:2504] txid# 281474976710658 SEND to# [1:7489503586614252427:2503] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T16:59:00.210173Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] Handle TEvProposeTransaction 2025-04-04T16:59:00.210227Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T16:59:00.210311Z node 1 :TX_PROXY DEBUG: actor# [1:7489503578024317247:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489503586614252446:2514] 2025-04-04T16:59:00.212703Z node 1 :TX_PROXY DEBUG: Actor# [1:7489503586614252446:2514] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" Requ ... .TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:02:56.130178Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889676:2449] Handle TEvDescribeSchemeResult Forward to# [59:7489504597948889675:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:02:56.137913Z node 59 :TX_PROXY DEBUG: actor# [59:7489504589358954377:2102] Handle TEvProposeTransaction 2025-04-04T17:02:56.137947Z node 59 :TX_PROXY DEBUG: actor# [59:7489504589358954377:2102] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:02:56.138057Z node 59 :TX_PROXY DEBUG: actor# [59:7489504589358954377:2102] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [59:7489504597948889682:2454] 2025-04-04T17:02:56.140938Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:02:56.141017Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:02:56.141040Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:02:56.141094Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:02:56.141440Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:02:56.141566Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:02:56.141630Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:02:56.141790Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:02:56.143620Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:02:56.150812Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:02:56.150876Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889682:2454] txid# 281474976710657 SEND to# [59:7489504597948889681:2453] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
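The same AlterSubDomain/ModifyACL sequence now repeats on node 59, interleaved with unrelated tests. When following a single transaction through a log like this, a throwaway filter is often easier than scrolling; a minimal standalone sketch (not part of the YDB tree; the tool name and usage are invented here):

    // Hedged sketch: print only the records that mention a given txid.
    #include <fstream>
    #include <iostream>
    #include <string>

    int main(int argc, char** argv) {
        if (argc < 3) {
            std::cerr << "usage: tracetx <log-file> <txid>\n";
            return 1;
        }
        std::ifstream in(argv[1]);
        const std::string needle = std::string("txid# ") + argv[2]; // TX_PROXY's spelling
        for (std::string line; std::getline(in, line);) {
            if (line.find(needle) != std::string::npos) {
                std::cout << line << '\n';
            }
        }
        return 0;
    }

Running it with txid 281474976710658, for example, would isolate the ModifyACL round-trip from the surrounding noise.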
2025-04-04T17:02:56.184936Z node 59 :TX_PROXY DEBUG: actor# [59:7489504589358954377:2102] Handle TEvProposeTransaction 2025-04-04T17:02:56.184971Z node 59 :TX_PROXY DEBUG: actor# [59:7489504589358954377:2102] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:02:56.185010Z node 59 :TX_PROXY DEBUG: actor# [59:7489504589358954377:2102] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7489504597948889720:2488] 2025-04-04T17:02:56.187623Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:02:56.187694Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:02:56.187716Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:02:56.187771Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:02:56.188081Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:02:56.188179Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:02:56.188237Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:02:56.188388Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:02:56.188927Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:56.192560Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:02:56.192620Z node 59 :TX_PROXY DEBUG: Actor# [59:7489504597948889720:2488] txid# 281474976710658 SEND to# [59:7489504597948889719:2487] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:02:59.548051Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489504589358954468:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:59.548127Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:03:05.216462Z node 59 :KQP_PROXY ERROR: TraceId: "01jr0w2vr6b2cm21jrrdzen4p7", Request 
deadline has expired for 3.966184s seconds
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
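The assertion itself is the interesting part: CreateLocalUser is a test helper that opens an SDK session and requires success, and here the session creation lost a race with the client-side deadline (the KQP proxy above reports the request expired after roughly four seconds, a common symptom under the address-sanitizer build). A rough reconstruction of the failing pattern, assuming the public YDB C++ SDK; the real helper lives in ydb/core/tx/tx_proxy/schemereq_ut.cpp and its exact body may differ:

    // Hedged sketch of the pattern behind the failed assertion above.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <library/cpp/testing/unittest/registar.h>

    void CreateLocalUserSketch(NYdb::TDriver& driver) {
        NYdb::NTable::TTableClient client(driver);
        // CreateSession() goes over gRPC; under ASAN the server may not answer
        // before the client deadline, which surfaces as "(4): Deadline Exceeded".
        auto sessionResult = client.CreateSession().GetValueSync();
        // This is the check that fired at schemereq_ut.cpp:256:
        UNIT_ASSERT_C(sessionResult.IsSuccess(), sessionResult.GetIssues().ToString());
        // ...the real helper then executes its CREATE USER query on the session...
    }

The backtrace that follows is then just NUnitTest unwinding out of that helper into the AlterLoginProtect_RootDB test case.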
: Error: Grpc error response on endpoint localhost:27271
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x182793D0)
NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+2039 (0x179A2F47)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1709 (0x179B8F4D)
std::__y1::__bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple, __is_valid_bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple>::value>::type std::__y1::__bind const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>::operator()[abi:fe190000](NUnitTest::TTestContext&)+588 (0x179ECE9C)
std::__y1::__function::__func, void ()>::operator()()+280 (0x179DD578)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x182AF0F6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1827FDF9)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1094 (0x179DC566)
NUnitTest::TTestFactory::Execute()+2438 (0x182816C6)
NUnitTest::RunMain(int, char**)+5213 (0x182A9A2D)
??+0 (0x7F5EA3E72D90)
__libc_start_main+128 (0x7F5EA3E72E40)
_start+41 (0x158FA029)
>> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD]
>> THiveTest::PipeAlivenessOfDeadTablet
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD]
>> YdbYqlClient::TestReadTableBatchLimits [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD]
>> YdbTableBulkUpsert::RetryOperationSync [GOOD]
>> YdbTableBulkUpsert::RetryOperation
|82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD]
Test command err:
2025-04-04T17:03:40.785850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504786944263890:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:03:40.785907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127c/r3tmp/tmpVNN361/pdisk_1.dat
2025-04-04T17:03:41.384383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:03:41.384501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:03:41.411871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:03:41.426884Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17763, node 1
2025-04-04T17:03:41.929609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:41.929632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:41.929639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:41.929777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:42.431214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:42.802190Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:35668) has now valid token of root@builtin 2025-04-04T17:03:42.955322Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:03:42.955355Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:03:42.955364Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:03:42.955404Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:03:47.221262Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504819089462159:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:47.221317Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127c/r3tmp/tmp7asguY/pdisk_1.dat 2025-04-04T17:03:47.380233Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:47.417810Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:47.417903Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:47.421343Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23637, node 4 2025-04-04T17:03:47.594751Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:47.594780Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-04T17:03:47.594787Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:47.594942Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:47.879420Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:47.903238Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:03:48.128844Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:36302) has now valid token of root@builtin 2025-04-04T17:03:48.231313Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:03:48.231349Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:03:48.231357Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:03:48.231389Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:03:52.424390Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504841336119250:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:52.431236Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127c/r3tmp/tmpwwHPLy/pdisk_1.dat 2025-04-04T17:03:52.726429Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:52.752713Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:52.752809Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:52.756516Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8813, node 7 2025-04-04T17:03:52.874135Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:52.874160Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:52.874168Z node 7 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-04-04T17:03:52.874352Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:53.164141Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743793432688552 Nodes { NodeId: 1024 Host: "localhost" Port: 9336 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743793432688552 } Nodes { NodeId: 7 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 8 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 9 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-04T17:03:57.686276Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504862626406127:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:57.762919Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127c/r3tmp/tmpnTNO1q/pdisk_1.dat 2025-04-04T17:03:57.989547Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:58.030500Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:58.030610Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:58.037947Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16334, node 10 2025-04-04T17:03:58.264092Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:58.264121Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-04T17:03:58.264132Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:58.274575Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:58.591282Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: UNAUTHORIZED Reason: "Cannot authorize node. Access denied" } 2025-04-04T17:04:03.255556Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504887078379751:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:03.256397Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127c/r3tmp/tmp2zM4Lj/pdisk_1.dat 2025-04-04T17:04:03.468498Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:03.509346Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:03.509455Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:03.512743Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7788, node 13 2025-04-04T17:04:03.607146Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:03.607175Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:03.607185Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:03.607355Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19137 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:03.904078Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743793443459464 Nodes { NodeId: 1024 Host: "localhost" Port: 61516 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743793443459464 } Nodes { NodeId: 13 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 14 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 15 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-04-04T17:04:09.483567Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7489504912402675732:2166];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:09.483839Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00127c/r3tmp/tmpVoYTkK/pdisk_1.dat 2025-04-04T17:04:09.685207Z node 16 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63880, node 16 2025-04-04T17:04:09.789667Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:09.789855Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:09.878435Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:04:09.909112Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:09.909144Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:09.909155Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:09.909321Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:19838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:10.296916Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node 2025-04-04T17:04:10.676632Z node 16 :TICKET_PARSER ERROR: Ticket 24A89EFEA888946072FA11E1715B49839DC808E44B2DFE99EB5DF0C53C4953FE: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. Client certificate failed verification" } >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD] Test command err: 2025-04-04T17:03:43.347653Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504799546181672:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:43.347730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001271/r3tmp/tmpfCJdcl/pdisk_1.dat 2025-04-04T17:03:44.152389Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18789, node 1 2025-04-04T17:03:44.248623Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:44.248644Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:44.248665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:44.248807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:03:44.308987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:44.309097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:44.324982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29300 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:44.561723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:44.668428Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jr0w4b0s1strc65g0akj355d, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33866, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.985892s 2025-04-04T17:03:44.720828Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr0w4b203pze7hsxmncww2q6, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33870, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:47.145098Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jr0w4de86pmjdsj2eqmdv6fk, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33878, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:03:47.145838Z node 1 :TX_PROXY DEBUG: actor# [1:7489504799546181900:2140] Handle TEvProposeTransaction 2025-04-04T17:03:47.145858Z node 1 :TX_PROXY DEBUG: actor# [1:7489504799546181900:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:03:47.145896Z node 1 :TX_PROXY DEBUG: actor# [1:7489504799546181900:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489504816726051883:2629] 2025-04-04T17:03:47.254579Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:33878" 2025-04-04T17:03:47.254660Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:03:47.255218Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:03:47.255320Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 
281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:03:47.255525Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:03:47.255688Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:03:47.255753Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:03:47.255903Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:03:47.257676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:47.261837Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-04T17:03:47.261912Z node 1 :TX_PROXY DEBUG: Actor# [1:7489504816726051883:2629] txid# 281474976710658 SEND to# [1:7489504816726051882:2336] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-04T17:03:47.265130Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:47.265231Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:47.265241Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:03:47.265274Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:03:47.393614Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051952:2693], Recipient [1:7489504816726052093:2349]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.394148Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051954:2695], Recipient [1:7489504816726052084:2345]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.394681Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051958:2699], Recipient [1:7489504816726052099:2350]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.396360Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051961:2702], Recipient [1:7489504816726052105:2351]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.396867Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051950:2691], Recipient [1:7489504816726052092:2348]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.397492Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051963:2704], Recipient [1:7489504816726052072:2341]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.398952Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051962:2703], Recipient [1:7489504816726052075:2342]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.399398Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender 
[1:7489504816726051951:2692], Recipient [1:7489504816726052078:2343]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.399876Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051957:2698], Recipient [1:7489504816726052079:2344]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.400437Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051956:2697], Recipient [1:7489504816726052121:2353]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.400841Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051960:2701], Recipient [1:7489504816726052143:2354]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.401292Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051959:2700], Recipient [1:7489504816726052148:2355]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.402198Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051948:2689], Recipient [1:7489504816726052070:2340]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.405202Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7489504816726051948:2689], Recipient [1:7489504816726052070:2340]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:03:47.405669Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7489504816726051960:2701], Recipient [1:7489504816726052143:2354]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:03:47.406107Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7489504816726052070:2340] 2025-04-04T17:03:47.415263Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:7489504816726052143:2354] 2025-04-04T17:03:47.415599Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:03:47.430025Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:03:47.446687Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051955:2696], Recipient [1:7489504816726052106:2352]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.447458Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7489504816726051955:2696], Recipient [1:7489504816726052106:2352]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:03:47.447834Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037900 actor [1:7489504816726052106:2352] 2025-04-04T17:03:47.448029Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:03:47.482657Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7489504816726051949:2690], Recipient [1:7489504816726052091:2347]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:03:47.483415Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7489504816726051949:2690], Recipient [1:7489504816726052091:2347]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:03:47.483846Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7489504816726052091:2347] 2025-04-04T17:03:47.484239Z node 1 :TX_DATASHARD DEBUG: TxIni ... 
h start ---- [[45u];[90u];["A"]] ---- batch end ---- ---- batch start ---- [[46u];[92u];["A"]] ---- batch end ---- ---- batch start ---- [[47u];[94u];["A"]] ---- batch end ---- ---- batch start ---- [[48u];[96u];["A"]] ---- batch end ---- ---- batch start ---- [[49u];[98u];["A"]] ---- batch end ---- ---- batch start ---- [[50u];[100u];["A"]] ---- batch end ---- ---- batch start ---- [[51u];[102u];["A"]] ---- batch end ---- ---- batch start ---- [[52u];[104u];["A"]] ---- batch end ---- ---- batch start ---- [[53u];[106u];["A"]] ---- batch end ---- ---- batch start ---- [[54u];[108u];["A"]] ---- batch end ---- ---- batch start ---- [[55u];[110u];["A"]] ---- batch end ---- ---- batch start ---- [[56u];[112u];["A"]] ---- batch end ---- ---- batch start ---- [[57u];[114u];["A"]] ---- batch end ---- ---- batch start ---- [[58u];[116u];["A"]] ---- batch end ---- ---- batch start ---- [[59u];[118u];["A"]] ---- batch end ---- ---- batch start ---- [[60u];[120u];["A"]] ---- batch end ---- ---- batch start ---- [[61u];[122u];["A"]] ---- batch end ---- ---- batch start ---- [[62u];[124u];["A"]] ---- batch end ---- ---- batch start ---- [[63u];[126u];["A"]] ---- batch end ---- ---- batch start ---- [[64u];[128u];["A"]] ---- batch end ---- ---- batch start ---- [[65u];[130u];["A"]] ---- batch end ---- ---- batch start ---- [[66u];[132u];["A"]] ---- batch end ---- ---- batch start ---- [[67u];[134u];["A"]] ---- batch end ---- ---- batch start ---- [[68u];[136u];["A"]] ---- batch end ---- ---- batch start ---- [[69u];[138u];["A"]] ---- batch end ---- ---- batch start ---- [[70u];[140u];["A"]] ---- batch end ---- ---- batch start ---- [[71u];[142u];["A"]] ---- batch end ---- ---- batch start ---- [[72u];[144u];["A"]] ---- batch end ---- ---- batch start ---- [[73u];[146u];["A"]] ---- batch end ---- ---- batch start ---- [[74u];[148u];["A"]] ---- batch end ---- ---- batch start ---- [[75u];[150u];["A"]] ---- batch end ---- ---- batch start ---- [[76u];[152u];["A"]] ---- batch end ---- ---- batch start ---- [[77u];[154u];["A"]] ---- batch end ---- ---- batch start ---- [[78u];[156u];["A"]] ---- batch end ---- ---- batch start ---- [[79u];[158u];["A"]] ---- batch end ---- ---- batch start ---- [[80u];[160u];["A"]] ---- batch end ---- ---- batch start ---- [[81u];[162u];["A"]] ---- batch end ---- ---- batch start ---- [[82u];[164u];["A"]] ---- batch end ---- ---- batch start ---- [[83u];[166u];["A"]] ---- batch end ---- ---- batch start ---- [[84u];[168u];["A"]] ---- batch end ---- ---- batch start ---- [[85u];[170u];["A"]] ---- batch end ---- ---- batch start ---- [[86u];[172u];["A"]] ---- batch end ---- ---- batch start ---- [[87u];[174u];["A"]] ---- batch end ---- ---- batch start ---- [[88u];[176u];["A"]] ---- batch end ---- ---- batch start ---- [[89u];[178u];["A"]] ---- batch end ---- ---- batch start ---- [[90u];[180u];["A"]] ---- batch end ---- ---- batch start ---- [[91u];[182u];["A"]] ---- batch end ---- ---- batch start ---- [[92u];[184u];["A"]] ---- batch end ---- ---- batch start ---- [[93u];[186u];["A"]] ---- batch end ---- ---- batch start ---- [[94u];[188u];["A"]] ---- batch end ---- ---- batch start ---- [[95u];[190u];["A"]] ---- batch end ---- ---- batch start ---- [[96u];[192u];["A"]] ---- batch end ---- ---- batch start ---- [[97u];[194u];["A"]] ---- batch end ---- ---- batch start ---- [[98u];[196u];["A"]] ---- batch end ---- ---- batch start ---- [[99u];[198u];["A"]] ---- batch end ---- 2025-04-04T17:04:14.263438Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) 
active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:04:14.263446Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-04-04T17:04:14.263453Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-04-04T17:04:14.263463Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-04-04T17:04:14.263485Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-04-04T17:04:14.263499Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715679] at 72075186224037897 on unit FinishPropose 2025-04-04T17:04:14.263527Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715679 at tablet 72075186224037897 send to client, exec latency: 47 ms, propose latency: 47 ms, status: COMPLETE 2025-04-04T17:04:14.263584Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-04-04T17:04:14.264562Z node 10 :READ_TABLE_API NOTICE: [10:7489504935804781978:2412] Finish grpc stream, status: 400000 2025-04-04T17:04:14.264584Z node 10 :READ_TABLE_API DEBUG: [10:7489504935804781978:2412] Send zero quota to Shard 0, TxId 281474976715678 2025-04-04T17:04:14.264599Z node 10 :READ_TABLE_API DEBUG: [10:7489504935804781978:2412] Send zero quota to Shard 0, TxId 281474976715678 2025-04-04T17:04:14.264612Z node 10 :READ_TABLE_API DEBUG: [10:7489504935804781978:2412] Send zero quota to Shard 0, TxId 281474976715678 2025-04-04T17:04:14.264624Z node 10 :READ_TABLE_API DEBUG: [10:7489504935804781978:2412] Send zero quota to Shard 0, TxId 281474976715678 2025-04-04T17:04:14.264638Z node 10 :READ_TABLE_API DEBUG: [10:7489504935804781978:2412] Send zero quota to Shard 0, TxId 281474976715678 2025-04-04T17:04:14.264881Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910861:2344]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.265088Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910859:2343]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.265217Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910863:2346]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.265366Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910870:2348]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.265469Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910855:2341]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.265649Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910856:2342]: 
NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.265850Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910898:2349]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.265982Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910862:2345]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.266093Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910853:2340]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.266204Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7489504935804781979:2412], Recipient [10:7489504918624910869:2347]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743786254245 TxId: 281474976715678 2025-04-04T17:04:14.271549Z node 10 :GRPC_SERVER DEBUG: [0x51a000027680] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.271548Z node 10 :GRPC_SERVER DEBUG: [0x51a000050480] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.271782Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f1e80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.271792Z node 10 :GRPC_SERVER DEBUG: [0x51a000063080] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.271948Z node 10 :GRPC_SERVER DEBUG: [0x51a000065a80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.271962Z node 10 :GRPC_SERVER DEBUG: [0x51a000064e80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.272107Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a7a80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.272108Z node 10 :GRPC_SERVER DEBUG: [0x51a000048c80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.272281Z node 10 :GRPC_SERVER DEBUG: [0x51a000033680] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.272438Z node 10 :GRPC_SERVER DEBUG: [0x51a000195680] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.272593Z node 10 :GRPC_SERVER DEBUG: [0x51a000166280] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.272726Z node 10 :GRPC_SERVER DEBUG: [0x51b000383f80] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.272870Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e1680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.273019Z node 10 :GRPC_SERVER DEBUG: [0x51a000021680] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.273164Z node 10 :GRPC_SERVER 
DEBUG: [0x51a000004880] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.273301Z node 10 :GRPC_SERVER DEBUG: [0x51a000064880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.273442Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a6e80] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.273592Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e4c80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.273732Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a7480] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.273905Z node 10 :GRPC_SERVER DEBUG: [0x51a000026480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.274043Z node 10 :GRPC_SERVER DEBUG: [0x51a000045680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-04T17:04:14.278375Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e4680] received request Name# DrainNode ok# false data# peer# current inflight# 0 |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache >> TGRpcNewClient::InMemoryTables [GOOD] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |82.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> TGRpcYdbTest::ExplainQuery [GOOD] |82.8%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewClient::InMemoryTables [GOOD] Test command err: 2025-04-04T17:03:42.911190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504796397337822:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:42.911279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001274/r3tmp/tmpWgCDgh/pdisk_1.dat 2025-04-04T17:03:43.577170Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:43.587511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:43.588480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:43.600530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27770, node 1 2025-04-04T17:03:43.675019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:43.675042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:43.675064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:43.675174Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:03:44.077745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:48.502525Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504821240952619:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:48.502702Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001274/r3tmp/tmpBg7oez/pdisk_1.dat 2025-04-04T17:03:48.738135Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:48.761722Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:48.761810Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:48.769210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13270, node 4 2025-04-04T17:03:48.942043Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:48.942071Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:48.942078Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:48.942206Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:49.159113Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:49.176720Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:03:51.709177Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:52.023886Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504838420822983:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:52.024029Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:52.024444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504838420822995:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:52.029159Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:52.059056Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489504838420822997:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:03:52.151373Z node 4 :TX_PROXY ERROR: Actor# [4:7489504838420823074:2794] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:54.712365Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504847744552077:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:54.712426Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001274/r3tmp/tmpriY3kz/pdisk_1.dat 2025-04-04T17:03:54.901915Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:54.936254Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:54.936341Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:54.944313Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7254, node 7 2025-04-04T17:03:55.050824Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:55.050849Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:55.050859Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:55.050992Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:55.303905Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:01.451524Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504878984394111:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:01.451590Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001274/r3tmp/tmpwRVu40/pdisk_1.dat 2025-04-04T17:04:01.809501Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:01.872730Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:01.872822Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:01.884346Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8425, node 10 2025-04-04T17:04:02.168125Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:02.168156Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:02.168165Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:02.168325Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:02.591575Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:06.066959Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:04:06.282914Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:04:06.423231Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504900459231882:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:06.429127Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-04T17:04:06.429450Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504900459231877:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:06.429560Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:06.459311Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504878984394111:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:06.459414Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:06.526367Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504900459231891:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T17:04:06.611297Z node 10 :TX_PROXY ERROR: Actor# [10:7489504900459231970:2898] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:06.769175Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0w507s17z4bzq3s3k56zwz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NGNhOTlhZWQtZTI4ODc0NzItNTNmNWRjNi01ZTI0MTE1YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:06.899293Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001274/r3tmp/tmpk2hzuN/pdisk_1.dat 2025-04-04T17:04:09.586822Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504914558837111:2116];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:09.587609Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:04:09.980536Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:10.056626Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:10.056733Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:10.063956Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18285, node 13 2025-04-04T17:04:10.384531Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:10.384562Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:10.384576Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:10.384779Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:04:11.088143Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:14.583489Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504914558837111:2116];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:14.583576Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:15.849258Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:04:16.072566Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:04:16.171489Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExplainQuery [GOOD] Test command err: 2025-04-04T17:03:47.201198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504818613133233:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:47.201280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001267/r3tmp/tmp9eSoLc/pdisk_1.dat 2025-04-04T17:03:47.600080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:47.600210Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:47.604056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:47.632391Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4188, node 1 2025-04-04T17:03:47.664765Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:03:47.664832Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:03:47.735699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:47.735743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:47.735778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:47.735915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:48.106032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:52.632453Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504841213733913:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:52.632566Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001267/r3tmp/tmpwcYVmk/pdisk_1.dat 2025-04-04T17:03:52.931100Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:52.974451Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:52.974536Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:52.977871Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24763, node 4 2025-04-04T17:03:53.182696Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:53.182715Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:53.182720Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:53.182818Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:53.435998Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:55.879946Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504854098636828:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:55.880318Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504854098636820:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:55.880428Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:55.884383Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:03:55.917843Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489504854098636834:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:03:56.008513Z node 4 :TX_PROXY ERROR: Actor# [4:7489504858393604201:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:58.286515Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504865611968461:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:58.287063Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001267/r3tmp/tmpvZlVSh/pdisk_1.dat 2025-04-04T17:03:58.497324Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:58.536158Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:58.536246Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:58.542584Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29177, node 7 2025-04-04T17:03:58.790994Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:58.791029Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:58.791037Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:58.791187Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:59.083143Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1743786242.080584 324727 text_format.cc:383] Error parsing text-format Ydb.Type: 3:13: Unknown enumeration value of "TYPE_UNDEFINED" for field "type_id". 
2025-04-04T17:04:02.086426Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504882791838604:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:02.086511Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:02.088436Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504882791838616:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:02.092274Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:04:02.178444Z node 7 :KQP_WORKLOAD_SERVICE WARN: [Workl ... d: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: E0000 00:00:1743786242.357692 324727 text_format.cc:383] Error parsing text-format Ydb.Type: 5:21: Unknown enumeration value of "Int32" for field "type_id". 2025-04-04T17:04:02.417985Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MTZjMGVmODAtZGFmMGJmZmQtNzUzMWIxZjgtOWI2ODQ3MDE=, ActorId: [7:7489504882791838586:2332], ActorState: ExecuteState, TraceId: 01jr0w4w9r8pjgej18v4hhkzcf, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-04T17:04:02.420189Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w4w9r8pjgej18v4hhkzcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTZjMGVmODAtZGFmMGJmZmQtNzUzMWIxZjgtOWI2ODQ3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-04-04T17:04:04.295180Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504889883150955:2091];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:04.303299Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001267/r3tmp/tmpicWr4d/pdisk_1.dat 2025-04-04T17:04:04.522504Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10464, node 10 2025-04-04T17:04:04.662623Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:04.670636Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:04.693309Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:04:04.745110Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:04.745140Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:04.745151Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:04.745351Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:05.272178Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:09.286386Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504889883150955:2091];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:09.286471Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:10.075686Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504915652955834:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:10.075760Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504915652955845:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:10.076340Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:10.083296Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:04:10.135724Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504915652955848:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:04:10.212278Z node 10 :TX_PROXY ERROR: Actor# [10:7489504915652955921:2694] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:12.470567Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504926215229538:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:12.470636Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001267/r3tmp/tmp2LDJ0q/pdisk_1.dat 2025-04-04T17:04:12.773283Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:12.827149Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:12.827253Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:12.842878Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27599, node 13 2025-04-04T17:04:13.067261Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:13.067283Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:13.067292Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:13.067462Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:13.504383Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:13.612053Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:04:16.995630Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504943395099920:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:16.995743Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:16.996139Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504943395099932:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:17.001873Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:04:17.040910Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504943395099934:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:04:17.111494Z node 13 :TX_PROXY ERROR: Actor# [13:7489504947690067316:2811] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:17.462280Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0w5ak1d41sxb4fc38djpkz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NDllYmZkMWQtZDAwYzA4MTktYTk0NjhiOGYtNTUxNTMyMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:17.474147Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504926215229538:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:17.476774Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTableProfileTests::OverwriteExecutionPolicy [GOOD] >> TTableProfileTests::OverwritePartitioningPolicy >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |82.9%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T16:55:18.193595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T16:55:18.193694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:18.193797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:18.193839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:18.193877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:18.193916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:18.193979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:18.194053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:18.194804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:18.313629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:18.313694Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:18.336836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:18.337470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:18.337688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:18.356596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:18.356829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:18.357562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:18.358558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:18.364264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:18.365941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:18.366049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:18.366291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:18.366350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:18.366398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:18.366562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:18.376130Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:18.499831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools 
{ Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:18.500080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.500322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:18.500546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:18.500602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.503326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:18.503495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:18.503738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.503805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:18.503841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:18.503887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:18.506341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.506408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:18.506446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:18.513946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.514020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.514090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:18.514166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:18.518498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:18.520987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:18.521202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 
5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:18.522230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:18.522432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:18.522486Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:18.522815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:18.522873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:18.523070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:18.523166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:18.526101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:18.526212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:18.526462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:18.526507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:18.527009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:18.527057Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:18.527184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:18.527242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:18.527292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:18.527330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:18.527364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:18.527405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:18.527438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
shold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:04:19.282127Z node 167 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:04:19.282503Z node 167 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 426us result status StatusSuccess 2025-04-04T17:04:19.283511Z node 167 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 
MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:04:19.295086Z node 167 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][167:1045:2811] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T17:04:19.295204Z node 167 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][167:1046:2811] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T17:04:19.295292Z node 167 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][167:967:2811] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-04T17:04:19.295399Z node 167 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][167:967:2811] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-04T17:04:19.295541Z node 167 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][167:1045:2811] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743786259217098 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743786259217098 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:04:19.295738Z node 167 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][167:1046:2811] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 
Group: 1743786259217098 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:04:19.311632Z node 167 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][167:1045:2811] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-04-04T17:04:19.311802Z node 167 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][167:967:2811] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-04-04T17:04:19.312439Z node 167 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][167:1046:2811] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-04T17:04:19.312530Z node 167 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][167:967:2811] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogExistingRequest |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> TPQCompatTest::ReadWriteSessions [GOOD] >> TGRpcYdbTest::ExecuteQueryCache [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2025-04-04T17:03:50.815768Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504831295648744:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:50.815859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001253/r3tmp/tmpYQ1ZHe/pdisk_1.dat 2025-04-04T17:03:51.697563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:51.765685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:51.774435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:51.787680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20661, node 1 2025-04-04T17:03:52.153526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:52.153553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:52.153558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:52.153665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:03:52.645159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:57.171493Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504861445231429:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:57.171611Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001253/r3tmp/tmpt1QbFT/pdisk_1.dat 2025-04-04T17:03:57.451036Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:57.529345Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:57.530115Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12770, node 4 2025-04-04T17:03:57.596261Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:57.754852Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:57.754875Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:57.754882Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:57.755010Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:58.060583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:03.752113Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504887937953841:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:03.752209Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001253/r3tmp/tmpdNRMGe/pdisk_1.dat 2025-04-04T17:04:04.045527Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21617, node 7 2025-04-04T17:04:04.134218Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:04.134367Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:04.156999Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:04:04.181067Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:04.181099Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:04.181109Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:04.181256Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:04.509257Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:08.183049Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504909412791408:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:08.183184Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:08.183668Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504909412791420:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:08.189205Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:04:08.228121Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489504909412791422:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:04:08.313303Z node 7 :TX_PROXY ERROR: Actor# [7:7489504909412791497:2678] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:08.723111Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489504887937953841:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:08.723202Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:10.726613Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504916515561898:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:10.726687Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001253/r3tmp/tmpqD3OQX/pdisk_1.dat 2025-04-04T17:04:11.003610Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:11.044813Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:11.044930Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:11.052536Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17023, node 10 2025-04-04T17:04:11.114147Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:11.114177Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:11.114186Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:11.114363Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:11.528641Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:15.732547Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504916515561898:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:15.732632Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:15.886185Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504937990399438:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:15.886345Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:15.886865Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504937990399450:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:15.892136Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:04:15.965739Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504937990399452:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:04:16.035431Z node 10 :TX_PROXY ERROR: Actor# [10:7489504942285366813:2672] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:18.491725Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504952965182773:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:18.548112Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001253/r3tmp/tmpDnejmu/pdisk_1.dat 2025-04-04T17:04:18.922042Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:19.006206Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:19.009125Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:19.014831Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22269, node 13 2025-04-04T17:04:19.264500Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:19.264527Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:19.264537Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:19.264708Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:20.161263Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:23.493149Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504952965182773:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:23.493228Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:25.326512Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504983029954857:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:25.326590Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489504983029954849:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:25.326927Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:25.330267Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:04:25.385652Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489504983029954863:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:04:25.460003Z node 13 :TX_PROXY ERROR: Actor# [13:7489504983029954935:2695] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> YdbTableBulkUpsert::RetryOperation [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |82.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-04-04T17:03:43.711751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504799925362695:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:43.712082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001269/r3tmp/tmpQuqBNv/pdisk_1.dat 2025-04-04T17:03:44.232336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:44.256424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:44.257399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-04T17:03:44.270365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23120, node 1 2025-04-04T17:03:44.476063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:44.476086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:44.476095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:44.476223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:44.887965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:47.992725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:03:48.811724Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504799925362695:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:48.812028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; SUCCESS 3 rows in 0.020521s 2025-04-04T17:03:49.021806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504825695169290:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:49.021881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504825695169281:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:49.022074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:03:49.025350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:03:49.054220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489504825695169295:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:03:49.123816Z node 1 :TX_PROXY ERROR: Actor# [1:7489504825695169386:4147] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:03:49.735463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0w4f8waefhw49kajj0d6m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE4MDVmMTEtMzFkNGE1MC0yODFiODRkZi05YzZjMDE0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-04-04T17:03:52.145766Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504840358903952:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:52.145817Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001269/r3tmp/tmp5nK2sK/pdisk_1.dat 2025-04-04T17:03:52.384849Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1674, node 4 2025-04-04T17:03:52.493675Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:52.493767Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:52.508771Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:03:52.569795Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:52.569832Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:52.569840Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:52.569970Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:52.863397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:03:56.032166Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8'Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable'unknown table 2025-04-04T17:03:58.173505Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504865201593418:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:58.173591Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001269/r3tmp/tmppt7P1a/pdisk_1.dat 2025-04-04T17:03:58.453670Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:58.499965Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:58.500095Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:58.510229Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23403, node 7 2025-04-04T17:03:58.732957Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:58.732974Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:58.732979Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:58.733091Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:59.023061Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:02.201019Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... duration: 0 sec 2048 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 4096 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 8192 usec
: Error: Bulk upsert to table '/Root/ui32'longTx ydb://long-tx/read-only timed out, duration: 0 sec 16384 usec 2025-04-04T17:04:06.032661Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504901246589459:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:06.032751Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001269/r3tmp/tmp0fGdSS/pdisk_1.dat 2025-04-04T17:04:06.735009Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:06.871466Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:06.871579Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:06.882140Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15200, node 10 2025-04-04T17:04:07.283071Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:07.283099Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:07.283110Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:07.283297Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10379 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:07.879738Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:11.034863Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504901246589459:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:11.034940Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:12.292738Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS 2025-04-04T17:04:17.166169Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504948646300342:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:17.166235Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001269/r3tmp/tmp6wfSXK/pdisk_1.dat 2025-04-04T17:04:17.926553Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:18.015421Z 
node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:18.015543Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:18.042044Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14666, node 13 2025-04-04T17:04:18.419265Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:18.419296Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:18.419306Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:18.419477Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:19.138612Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:22.170402Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504948646300342:2074];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:04:22.170497Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:04:25.490789Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
Injecting ABORTED 10 times Result: ABORTED
Injecting ABORTED 6 times Result: ABORTED
Injecting ABORTED 5 times Result: SUCCESS
Injecting ABORTED 3 times Result: SUCCESS
Injecting ABORTED 0 times Result: SUCCESS
Injecting OVERLOADED 10 times Result: OVERLOADED
Injecting OVERLOADED 6 times Result: OVERLOADED
Injecting OVERLOADED 5 times Result: SUCCESS
Injecting OVERLOADED 3 times Result: SUCCESS
Injecting OVERLOADED 0 times Result: SUCCESS
Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED
Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED
Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS
Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS
Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS
Injecting UNAVAILABLE 10 times Result: UNAVAILABLE
Injecting UNAVAILABLE 6 times Result: UNAVAILABLE
Injecting UNAVAILABLE 5 times Result: SUCCESS
Injecting UNAVAILABLE 3 times Result: SUCCESS
Injecting UNAVAILABLE 0 times Result: SUCCESS
Injecting BAD_SESSION 10 times Result: BAD_SESSION
Injecting BAD_SESSION 6 times Result: BAD_SESSION
Injecting BAD_SESSION 5 times Result: SUCCESS
Injecting BAD_SESSION 3 times Result: SUCCESS
Injecting BAD_SESSION 0 times Result: SUCCESS
Injecting SESSION_BUSY 10 times Result: SESSION_BUSY
Injecting SESSION_BUSY 6 times Result: SESSION_BUSY
Injecting SESSION_BUSY 5 times Result: SUCCESS
Injecting SESSION_BUSY 3 times Result: SUCCESS
Injecting SESSION_BUSY 0 times Result: SUCCESS
Injecting NOT_FOUND 10 times Result: NOT_FOUND
Injecting NOT_FOUND 6 times Result: NOT_FOUND
Injecting NOT_FOUND 5 times Result: SUCCESS
Injecting NOT_FOUND 3 times Result: SUCCESS
Injecting NOT_FOUND 0 times Result: SUCCESS
Injecting UNDETERMINED 10 times Result: UNDETERMINED
Injecting UNDETERMINED 6 times Result: UNDETERMINED
Injecting UNDETERMINED 5 times Result: SUCCESS
Injecting UNDETERMINED 3 times Result: SUCCESS
Injecting UNDETERMINED 0 times Result: SUCCESS
Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE
Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE
Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS
Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS
Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS
>> YdbOlapStore::LogNonExistingUserId [GOOD]
>> YdbOlapStore::LogPagingBefore
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD]
>> TGRpcYdbTest::DeleteFromAfterCreate [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1]
|83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD]
Test command err:
2025-04-04T17:03:49.558180Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504828741048936:2276];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:03:49.558287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125c/r3tmp/tmplb6MAh/pdisk_1.dat
2025-04-04T17:03:49.978204Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:03:49.987129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:03:49.987239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:03:49.992573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26769, node 1
2025-04-04T17:03:50.012177Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:03:50.012293Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:03:50.151244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:03:50.151264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:03:50.151271Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:03:50.151379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19659
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:50.457922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:50.750449Z node 1 :TX_PROXY ERROR: Actor# [1:7489504833036016962:2615] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T17:03:54.891739Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504850058276832:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:54.892523Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125c/r3tmp/tmphD3U7p/pdisk_1.dat 2025-04-04T17:03:55.246977Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:55.303405Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:55.303499Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:55.310605Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20571, node 4 2025-04-04T17:03:55.555100Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:55.555127Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:55.555133Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:55.555283Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19323 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:56.068118Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:01.547195Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504876936507283:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:01.547272Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125c/r3tmp/tmpZ94b2A/pdisk_1.dat 2025-04-04T17:04:02.143448Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:02.327004Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:02.327099Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:02.330516Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14695, node 7 2025-04-04T17:04:02.667741Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:02.667764Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:02.667785Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:02.667944Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13797 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:03.261398Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:06.549711Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489504876936507283:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:06.549788Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:07.422400Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504902706312171:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:07.422544Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:08.096734Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:04:08.435051Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504907001279661:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:08.435149Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:08.435483Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489504907001279666:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:08.440465Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:04:08.470409Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489504907001279668:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:04:08.539504Z node 7 :TX_PROXY ERROR: Actor# [7:7489504907001279749:2829] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:08.891253Z node 7 :KQP_EXECUTER ERROR ... 968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:12.456274Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:12.471105Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13228, node 10 2025-04-04T17:04:12.823313Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:12.823347Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:12.823358Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:12.823526Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:13.368974Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:16.798496Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489504919772529651:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:16.798581Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:18.988799Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504949837301712:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:18.988967Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:18.989589Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489504949837301724:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:18.998383Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:04:19.070519Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489504949837301726:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:04:19.171026Z node 10 :TX_PROXY ERROR: Actor# [10:7489504954132269095:2711] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:19.625609Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7489504954132269124:2354], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:04:19.627651Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZjgyNzZmNWItODQyNTU2YzQtMmJmMDNiYmEtM2U2YzI0YmU=, ActorId: [10:7489504949837301685:2337], ActorState: ExecuteState, TraceId: 01jr0w5d2e4d06g6h1xcyh86wq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:04:22.348578Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489504970572796718:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:22.358542Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125c/r3tmp/tmp7OMR54/pdisk_1.dat 2025-04-04T17:04:22.849684Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:22.929857Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:22.929977Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:22.939456Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21338, node 13 2025-04-04T17:04:23.340136Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:23.340163Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:23.340173Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:23.340349Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:24.556033Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
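The dump above mixes failure classes with different handling: statuses the injection run demonstrated as transient (every injected status, including NOT_FOUND, eventually resolved to SUCCESS), session-scoped statuses such as BAD_SESSION and SESSION_BUSY, and terminal ones such as the SCHEME_ERROR raised for the deliberately missing table db.[Root/NotFound]. A hedged sketch of one way a client might classify them (the split is an assumption modeled on common retry-policy conventions for YDB-like status codes, not taken from this test's sources):

```cpp
#include <string_view>

enum class ERetryClass {
    Retry,             // safe to retry the call as-is
    RecreateSession,   // drop the session, acquire a new one, then retry
    RetryIfIdempotent, // previous attempt may have applied; retry only idempotent ops
    Fail,              // surface the error to the caller
};

// Assumed mapping, reconstructed from the statuses exercised by the
// injection runs above; illustrative, not the test's actual policy.
ERetryClass Classify(std::string_view status) {
    if (status == "ABORTED" || status == "OVERLOADED" ||
        status == "UNAVAILABLE" || status == "CLIENT_RESOURCE_EXHAUSTED" ||
        status == "NOT_FOUND") { // NOT_FOUND is retried in this test, e.g. while
                                 // metadata such as the default pool is still being created
        return ERetryClass::Retry;
    }
    if (status == "BAD_SESSION" || status == "SESSION_BUSY") {
        return ERetryClass::RecreateSession;
    }
    if (status == "UNDETERMINED" || status == "TRANSPORT_UNAVAILABLE") {
        return ERetryClass::RetryIfIdempotent;
    }
    return ERetryClass::Fail; // e.g. the SCHEME_ERROR for 'db.[Root/NotFound]' above
}
```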
2025-04-04T17:04:24.809267Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:04:25.005391Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:04:27.309959Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489504970572796718:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:27.310034Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:04:30.770520Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489505004932536398:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:30.770632Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:30.771185Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489505004932536410:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:04:30.780848Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:04:30.818415Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489505004932536412:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:04:30.918538Z node 13 :TX_PROXY ERROR: Actor# [13:7489505004932536485:2931] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:04:31.148810Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0w5r1dbxhd31rk600ar0ng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjQyZDQ2ZGItYWVmNzNiMTAtM2I0MTkyNWItODcyMDIzNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:31.160408Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0w5r1dbxhd31rk600ar0ng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjQyZDQ2ZGItYWVmNzNiMTAtM2I0MTkyNWItODcyMDIzNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:31.342803Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0w5rdx54ven6tw99nnr5qw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjQyZDQ2ZGItYWVmNzNiMTAtM2I0MTkyNWItODcyMDIzNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:04:31.355725Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0w5rdx54ven6tw99nnr5qw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjQyZDQ2ZGItYWVmNzNiMTAtM2I0MTkyNWItODcyMDIzNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |83.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |83.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2025-04-04T16:58:52.675854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503550289372023:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:52.714389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:58:53.272838Z node 1 :PQ_READ_PROXY DEBUG: Direct read 
cache: : Created 2025-04-04T16:58:53.293442Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e6b/r3tmp/tmpTRyIuc/pdisk_1.dat 2025-04-04T16:58:53.473924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:53.876101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:54.028682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:54.028791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:54.062751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:54.062831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:54.079936Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:54.089649Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:58:54.089830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:58:54.110871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27035, node 1 2025-04-04T16:58:54.694720Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e6b/r3tmp/yandexNKA0xI.tmp 2025-04-04T16:58:54.694746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e6b/r3tmp/yandexNKA0xI.tmp 2025-04-04T16:58:54.694885Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e6b/r3tmp/yandexNKA0xI.tmp 2025-04-04T16:58:54.695041Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T16:58:54.862605Z INFO: TTestServer started on Port 7655 GrpcPort 27035 TClient is connected to server localhost:7655 PQClient connected to localhost:27035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:58:55.624835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T16:58:55.778963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T16:58:57.641092Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503550289372023:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:57.641175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:59.695357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503580354144097:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:59.695464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503580354144082:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:59.695684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:59.704368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T16:58:59.709859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503580354144138:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:59.709941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:59.735092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503580354144104:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T16:58:59.985774Z node 1 :TX_PROXY ERROR: Actor# [1:7489503580354144184:2775] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:59:00.025741Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489503583086451315:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:59:00.026102Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489503580354144200:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T16:59:00.027787Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTFlMjdmMTUtY2I5MjBkYjktMzMzNDA4OTItYzMwM2RkZWY=, ActorId: [1:7489503580354144062:2340], ActorState: ExecuteState, TraceId: 01jr0vvmpx7frb384n2qyrenhr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:59:00.029380Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:59:00.029841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:59:00.026770Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGY5YWY5NzUtZTYxNDNkY2EtY2M4ZDNlNjUtNWIyZjNmMTQ=, ActorId: [2:7489503583086451289:2315], ActorState: ExecuteState, TraceId: 01jr0vvmsd7fe16pe0xtbk0dwa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T16:59:00.029386Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T16:59:00.137336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:59:00.339142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T16:59:00.683914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0vvnh01tr90sdt88ga3k6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdmNTc5MjgtYzk0MGNiYWItNjZmMThkYTYtOWFlYWJlNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489503584649111951:3118] === CheckClustersList. 
Ok 2025-04-04T16:59:06.878825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 CreateTopicNoLegacy: /Root/PQ/rt3.dc1--demo Create topic: /Root/PQ/rt3.dc1--demo AddTopic: /Root/PQ/rt3.dc1--demo ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/V ... 06165_v1 grpc read done: success# 0, data# { } 2025-04-04T17:04:21.924625Z node 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_4129184823433306165_v1 grpc read failed 2025-04-04T17:04:21.924654Z node 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_4129184823433306165_v1 grpc closed 2025-04-04T17:04:21.924698Z node 27 :PQ_READ_PROXY INFO: session cookie 5 consumer shared/user session shared/user_27_5_4129184823433306165_v1 is DEAD 2025-04-04T17:04:21.925977Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [27:7489504962721986909:2708] disconnected; active server actors: 1 2025-04-04T17:04:21.926023Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [27:7489504962721986909:2708] client user disconnected session shared/user_27_5_4129184823433306165_v1 2025-04-04T17:04:21.926591Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-04T17:04:21.926875Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 read init: from# ipv6:[::1]:56880, request# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-04-04T17:04:21.927393Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 auth for : user 2025-04-04T17:04:21.927431Z node 27 :PQ_METACACHE DEBUG: Handle describe topics 2025-04-04T17:04:21.927452Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2025-04-04T17:04:21.927531Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 requests infly, db = "Root/LbCommunal" 2025-04-04T17:04:21.926881Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_27_5_4129184823433306165_v1 2025-04-04T17:04:21.926959Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [27:7489504962721986918:2715] destroyed 2025-04-04T17:04:21.927029Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_5_4129184823433306165_v1 2025-04-04T17:04:21.941424Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7489504962721986931:2719], now have 1 active actors on pipe 2025-04-04T17:04:21.942233Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2025-04-04T17:04:21.942309Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2025-04-04T17:04:21.942397Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Created session shared/user_27_6_6177822561050192770_v1 on pipe: [27:7489504962721986931:2719] 2025-04-04T17:04:21.942513Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: 
shared/user_27_6_6177822561050192770_v1:1 with generation 1 2025-04-04T17:04:21.942741Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_6177822561050192770_v1 2025-04-04T17:04:21.943024Z node 28 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:04:21.952672Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:04:21.952767Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-04-04T17:04:21.934938Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: Root/LbCommunal DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:04:21.935152Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2025-04-04T17:04:21.935213Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 Handle describe topics response 2025-04-04T17:04:21.935517Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 auth is DEAD 2025-04-04T17:04:21.935693Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 auth ok: topics# 1, initDone# 0 2025-04-04T17:04:21.937123Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 register session: topic# rt3.dc2--account--topic2 2025-04-04T17:04:21.937983Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7489504962721986928:2716] connected; active server actors: 1 2025-04-04T17:04:21.938165Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7489504962721986928:2716] session shared/user_27_6_6177822561050192770_v1 2025-04-04T17:04:21.938265Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 2025-04-04T17:04:21.938394Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2025-04-04T17:04:21.938491Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_6177822561050192770_v1" (Sender=[27:7489504962721986925:2716], Pipe=[27:7489504962721986928:2716], Partitions=[], ActiveFamilyCount=0) 2025-04-04T17:04:21.938545Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 
2025-04-04T17:04:21.938648Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-04T17:04:21.938777Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_6177822561050192770_v1" (Sender=[27:7489504962721986925:2716], Pipe=[27:7489504962721986928:2716], Partitions=[], ActiveFamilyCount=0) 2025-04-04T17:04:21.938913Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_6177822561050192770_v1" sender [27:7489504962721986925:2716] lock partition 0 for ReadingSession "shared/user_27_6_6177822561050192770_v1" (Sender=[27:7489504962721986925:2716], Pipe=[27:7489504962721986928:2716], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2025-04-04T17:04:21.939018Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-04T17:04:21.939079Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000384s 2025-04-04T17:04:21.940208Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_6177822561050192770_v1" ClientId: "user" PipeClient { RawX1: 7489504962721986928 RawX2: 4503715591490204 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2025-04-04T17:04:21.940379Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2025-04-04T17:04:21.941806Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1, pipe: [27:7489504962721986931:2719] 2025-04-04T17:04:21.953314Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1743786261556 CreateTimestampMS: 1743786261556 SizeLag: 0 WriteTimestampEstimateMS: 0 } Cookie: 18446744073709551615 } 2025-04-04T17:04:21.953388Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-04-04T17:04:21.953514Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 sending to client partition status ===Got response: status: SUCCESS init_response { session_id: "shared/user_27_6_6177822561050192770_v1" } ===Got response: status: SUCCESS start_partition_session_request { 
partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2025-04-04T17:04:22.127112Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 grpc read done: success# 0, data# { } 2025-04-04T17:04:22.127161Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 grpc read failed 2025-04-04T17:04:22.127201Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 grpc closed 2025-04-04T17:04:22.127253Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_6177822561050192770_v1 is DEAD 2025-04-04T17:04:22.129853Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_6177822561050192770_v1 2025-04-04T17:04:22.129934Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7489504962721986931:2719] destroyed 2025-04-04T17:04:22.130013Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_6177822561050192770_v1 2025-04-04T17:04:22.130819Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7489504962721986928:2716] disconnected; active server actors: 1 2025-04-04T17:04:22.130887Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7489504962721986928:2716] client user disconnected session shared/user_27_6_6177822561050192770_v1 2025-04-04T17:04:22.170212Z node 28 :PQ_METACACHE DEBUG: Check version rescan 2025-04-04T17:04:22.201097Z node 28 :PQ_METACACHE DEBUG: Metacache: reset 2025-04-04T17:04:22.854477Z node 27 :PQ_METACACHE DEBUG: Check version rescan 2025-04-04T17:04:22.864577Z node 27 :PQ_METACACHE DEBUG: Metacache: reset >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteStoragePolicy |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |83.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-04-04T17:03:22.794322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504712961887678:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:22.795690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012a6/r3tmp/tmpjO9ekC/pdisk_1.dat 2025-04-04T17:03:23.223220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:23.247474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:23.247578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:23.254191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19914, node 1 2025-04-04T17:03:23.358936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:23.358958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:23.358966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:23.359089Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15348 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:23.843168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:24.139842Z node 1 :TICKET_PARSER DEBUG: Ticket 2A947D954E3138866FFDCEF28004E214E74F135043C8DC7B89ADFC9DCEBD68D5 (ipv6:[::1]:52562) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-04T17:03:24.151173Z node 1 :TICKET_PARSER ERROR: Ticket 2A947D954E3138866FFDCEF28004E214E74F135043C8DC7B89ADFC9DCEBD68D5: Cannot create token from certificate. Client certificate failed verification 2025-04-04T17:03:24.381022Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:52580) has now valid token of root@builtin 2025-04-04T17:03:24.743727Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:03:24.743770Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:03:24.743792Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:03:24.743831Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:03:29.142733Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504741939872419:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:29.142939Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012a6/r3tmp/tmpY7Z6fh/pdisk_1.dat 2025-04-04T17:03:29.524959Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:29.570331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:29.570425Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:29.576309Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62490, node 4 2025-04-04T17:03:29.727166Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:29.727197Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:29.727205Z node 4 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:29.727345Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:30.196064Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:30.349243Z node 4 :TICKET_PARSER DEBUG: Ticket 2A947D954E3138866FFDCEF28004E214E74F135043C8DC7B89ADFC9DCEBD68D5 (ipv6:[::1]:34096) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-04T17:03:30.349823Z node 4 :TICKET_PARSER ERROR: Ticket 2A947D954E3138866FFDCEF28004E214E74F135043C8DC7B89ADFC9DCEBD68D5: Cannot create token from certificate. 
Client certificate failed verification 2025-04-04T17:03:30.547300Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:34118) has now valid token of root@builtin 2025-04-04T17:03:30.671463Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:03:30.671490Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:03:30.671499Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:03:30.671534Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:03:34.902329Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504761234265953:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:34.902385Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012a6/r3tmp/tmptB9Rk0/pdisk_1.dat 2025-04-04T17:03:35.217088Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:35.246352Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:35.246440Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:35.252147Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1851, node 7 2025-04-04T17:03:35.423096Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:35.423122Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:35.423132Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:35.423280Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:35.696851Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... E0404 17:03:35.806422140 320725 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. 
E0404 17:03:35.814696317 320719 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0404 17:03:35.851043571 321049 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0404 17:03:35.860716685 321049 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0404 17:03:35.887728086 320905 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0404 17:03:35.897844458 320719 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0404 17:03:35.911787348 320719 ssl_transport_security.cc:1431] Handshake faile ... NT. E0404 17:04:15.388919461 336902 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9055'; Got args: {grpc.client_channel_factory=0x5020000ba330, grpc.default_authority=localhost:9055, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x506000af05e0, grpc.internal.event_engine=0x502000b60750, grpc.internal.subchannel_pool=0x5040000d4450, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040004358d0, grpc.server_uri=dns:///localhost:9055} E0404 17:04:15.406427405 336902 ssl_transport_security.cc:791] Invalid private key. E0404 17:04:15.406635637 336902 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0404 17:04:15.406870089 336902 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9055'; Got args: {grpc.client_channel_factory=0x5020000ba330, grpc.default_authority=localhost:9055, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x506000408d40, grpc.internal.event_engine=0x5020000492f0, grpc.internal.subchannel_pool=0x5040000d4450, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040004358d0, grpc.server_uri=dns:///localhost:9055} E0404 17:04:15.408812842 336902 ssl_transport_security.cc:791] Invalid private key. E0404 17:04:15.408981227 336902 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0404 17:04:15.409195990 336902 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9055'; Got args: {grpc.client_channel_factory=0x5020000ba330, grpc.default_authority=localhost:9055, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x506000408d40, grpc.internal.event_engine=0x502000009190, grpc.internal.subchannel_pool=0x5040000d4450, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040004358d0, grpc.server_uri=dns:///localhost:9055} E0404 17:04:15.432229398 336902 ssl_transport_security.cc:791] Invalid private key. E0404 17:04:15.432507780 336902 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0404 17:04:15.432768754 336902 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9055'; Got args: {grpc.client_channel_factory=0x5020000ba330, grpc.default_authority=localhost:9055, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060005aeae0, grpc.internal.event_engine=0x50200059ccb0, grpc.internal.subchannel_pool=0x5040000d4450, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040004358d0, grpc.server_uri=dns:///localhost:9055} E0404 17:04:15.442574988 336902 ssl_transport_security.cc:791] Invalid private key. E0404 17:04:15.442869899 336902 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
E0404 17:04:15.443180344 336902 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:9055'; Got args: {grpc.client_channel_factory=0x5020000ba330, grpc.default_authority=localhost:9055, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060005aeae0, grpc.internal.event_engine=0x502000ca1170, grpc.internal.subchannel_pool=0x5040000d4450, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040004358d0, grpc.server_uri=dns:///localhost:9055} 2025-04-04T17:04:24.599714Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7489504976136650137:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:24.604412Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012a6/r3tmp/tmpG7CQPL/pdisk_1.dat 2025-04-04T17:04:24.931395Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:24.985752Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:24.985877Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:25.004362Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28313, node 25 2025-04-04T17:04:25.292619Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:25.292650Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:25.292662Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:25.292841Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:25.744899Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:26.064260Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:36790) has now valid token of root@builtin 2025-04-04T17:04:26.217598Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:04:26.217640Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:04:26.217655Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:04:26.217709Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:04:33.561659Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7489505014487982907:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:33.571341Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012a6/r3tmp/tmpQ7wbLW/pdisk_1.dat 2025-04-04T17:04:33.920569Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:33.966796Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:33.966963Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:33.970722Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24044, node 28 2025-04-04T17:04:34.135322Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:34.135353Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:34.135365Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:34.135543Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:34.675072Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:34.972022Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:47148) has now valid token of root@builtin
2025-04-04T17:04:35.124936Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1
2025-04-04T17:04:35.124983Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-04-04T17:04:35.124999Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error
2025-04-04T17:04:35.125055Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator
|83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1]
|83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun
|83.0%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1]
|83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test
|83.1%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0]
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool
|83.1%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage
|83.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD]
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo]
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
|83.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::PipeAlivenessOfDeadTablet [GOOD]
Test command err:
2025-04-04T16:57:57.947521Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap
2025-04-04T16:57:57.951321Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations {
NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:57.951549Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:57.952492Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:44:2071] ControllerId# 72057594037932033 2025-04-04T16:57:57.952544Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:57.952661Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:57.952935Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:57.954278Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:57.954338Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:57.956541Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:50:2075] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.956711Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:51:2076] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.956853Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:52:2077] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.957035Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:53:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.957204Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:54:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.957335Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:55:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.957492Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:43:2070] Create Queue# [2:56:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.957518Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:57.957611Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:44:2071] 2025-04-04T16:57:57.957659Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:44:2071] 2025-04-04T16:57:57.957815Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:57.957897Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:57.958463Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:57.958530Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:57:57.961145Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:57.961370Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:57:57.962060Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor 
SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:57:57.963364Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:57:57.963453Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:57:57.964340Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:66:2075] ControllerId# 72057594037932033 2025-04-04T16:57:57.964381Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:57:57.964466Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:57:57.964688Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:57:57.976350Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:57:57.976406Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:57:57.978092Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:74:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.978382Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:75:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.978528Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:76:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.978693Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:77:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.978837Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:78:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.978964Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:79:2085] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.979119Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:65:2074] Create Queue# [1:80:2086] targetNodeId# 1 Marker# DSP01 2025-04-04T16:57:57.979152Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:57:57.979228Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:66:2075] 2025-04-04T16:57:57.979261Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:66:2075] 2025-04-04T16:57:57.979304Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-04-04T16:57:57.979361Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:57:57.980284Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:66:2075] 2025-04-04T16:57:57.980348Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:57.980399Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:57.980570Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:57:57.980749Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:57.980917Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:44:2071] 2025-04-04T16:57:57.980966Z node 2 :BS_NODE DEBUG: 
{NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:57.981005Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:57:57.981128Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:57:57.997282Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:66:2075] 2025-04-04T16:57:57.997379Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:57.997445Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T16:57:58.002986Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T16:57:58.004650Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-04T16:57:58.004866Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:58.004896Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T16:57:58.004983Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T16:57:58.005121Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-04T16:57:58.005506Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:57:58.005700Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:57:58.005950Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:58.006213Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:57:58.006438Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:57:58.006599Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:48:2064] 2025-04-04T16:57:58.006624Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:48:2064] 2025-04-04T16:57:58.006706Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-04T16:57:58.006761Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-04T16:57:58.006819Z 
node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-04T16:57:58.006848Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-04T16:57:58.006890Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:57:58.006953Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:70:2064] 2025-04-04T16:57:58.006981Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:70:2064] 2025-04-04T16:57:58.007056Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-0 ... T17:04:16.526170Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T17:04:16.526201Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-04T17:04:16.526269Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:408:2366] 2025-04-04T17:04:16.526310Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [14:408:2366] 2025-04-04T17:04:16.537436Z node 14 :BS_PROXY_PUT INFO: [94999ccdc54a9387] bootstrap ActorId# [14:412:2368] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:199:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T17:04:16.537630Z node 14 :BS_PROXY_PUT DEBUG: [94999ccdc54a9387] Id# [72057594037927937:2:9:0:0:199:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T17:04:16.537706Z node 14 :BS_PROXY_PUT DEBUG: [94999ccdc54a9387] restore Id# [72057594037927937:2:9:0:0:199:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T17:04:16.537792Z node 14 :BS_PROXY_PUT DEBUG: [94999ccdc54a9387] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG33 2025-04-04T17:04:16.537853Z node 14 :BS_PROXY_PUT DEBUG: [94999ccdc54a9387] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG32 2025-04-04T17:04:16.538067Z node 14 :BS_PROXY DEBUG: Send to queueActorId# [14:34:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:199:1] FDS# 199 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T17:04:16.542984Z node 14 :BS_PROXY_PUT DEBUG: [94999ccdc54a9387] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:199:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 81566 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-04T17:04:16.543175Z node 14 :BS_PROXY_PUT DEBUG: [94999ccdc54a9387] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-04T17:04:16.543270Z node 14 :BS_PROXY_PUT INFO: [94999ccdc54a9387] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T17:04:16.543464Z node 14 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.89 sample PartId# [72057594037927937:2:9:0:0:199:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 14 } TEvVPutResult{ TimestampMs# 5.851 VDiskId# [0:1:0:0:0] NodeId# 14 Status# OK } ] } 2025-04-04T17:04:16.543681Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T17:04:16.543850Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 2025-04-04T17:04:16.558580Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [14:142:2158] 2025-04-04T17:04:16.558707Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [14:142:2158] 2025-04-04T17:04:16.558842Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936131 entry.State: StInit ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2025-04-04T17:04:16.558988Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T17:04:16.559098Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-04-04T17:04:16.559154Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-04-04T17:04:16.559201Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-04-04T17:04:16.559239Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-04-04T17:04:16.559282Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-04-04T17:04:16.559316Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-04-04T17:04:16.559399Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936131 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T17:04:16.559440Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936131 followers: 0 2025-04-04T17:04:16.559504Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result error, check reconnect [14:142:2158] 2025-04-04T17:04:16.559540Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] schedule retry [14:142:2158] 2025-04-04T17:04:16.582543Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:408:2366] 2025-04-04T17:04:16.582621Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:408:2366] 2025-04-04T17:04:16.582745Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-04T17:04:16.582875Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T17:04:16.582983Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T17:04:16.583033Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-04T17:04:16.583064Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T17:04:16.583103Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:04:16.583152Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:04:16.583182Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:04:16.583264Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T17:04:16.583302Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-04T17:04:16.583382Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:408:2366] 2025-04-04T17:04:16.583416Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [14:408:2366] 2025-04-04T17:04:16.594562Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:408:2366] 2025-04-04T17:04:16.594646Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:408:2366] 2025-04-04T17:04:16.594770Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-04T17:04:16.594899Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T17:04:16.595008Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T17:04:16.595056Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-04T17:04:16.595088Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T17:04:16.595126Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:04:16.595174Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:04:16.595202Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:04:16.595283Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T17:04:16.595326Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-04T17:04:16.595411Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:408:2366] 2025-04-04T17:04:16.595464Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect 
failed, check aliveness [14:408:2366]
2025-04-04T17:04:16.649355Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [14:416:2369]
2025-04-04T17:04:16.649428Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [14:416:2369]
2025-04-04T17:04:16.649501Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [14:416:2369]
2025-04-04T17:04:16.649587Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0}
2025-04-04T17:04:16.649653Z node 14 :TABLET_RESOLVER DEBUG: SelectForward node 14 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [14:270:2261]
2025-04-04T17:04:16.649748Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [14:416:2369]
2025-04-04T17:04:16.649824Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [14:416:2369]
2025-04-04T17:04:16.650004Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [14:416:2369]
2025-04-04T17:04:16.650209Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [14:416:2369]
2025-04-04T17:04:16.650300Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [14:416:2369]
2025-04-04T17:04:16.650354Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [14:416:2369]
2025-04-04T17:04:16.650435Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [14:408:2366] EventType# 268697616
2025-04-04T17:04:16.650589Z node 14 :HIVE WARN: HIVE#72057594037927937 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888)
2025-04-04T17:04:16.670490Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received poison pill [14:416:2369]
2025-04-04T17:04:16.670626Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [14:416:2369]
2025-04-04T17:04:16.670735Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Got PeerClosed from# [14:416:2369]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
>> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD]
|83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
|83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
|83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD]
>> TFlatTest::AutoMergeBySize [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD]
>> TTableProfileTests::OverwriteStoragePolicy [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0]
|83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD]
Test command err:
2025-04-04T17:03:25.111608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504722285793329:2139];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:03:25.111680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001292/r3tmp/tmp2t6uMQ/pdisk_1.dat
2025-04-04T17:03:26.033648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:03:26.033754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:03:26.042180Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:03:26.043301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 21507, node 1
2025-04-04T17:03:26.467843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:03:26.467865Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:03:26.467872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:03:26.467989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6615
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:27.159589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:27.365426Z node 1 :TICKET_PARSER DEBUG: Ticket 1CE87A1EC5179E4E8537ECAA89A559D7D5AFFAFC5787DBCA2BB3E93B1940A97A (ipv6:[::1]:37128) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:03:27.526360Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:37144) has now valid token of root@builtin 2025-04-04T17:03:27.662784Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:03:27.662819Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:03:27.662827Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:03:27.662865Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:03:33.529239Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504759795370362:2293];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001292/r3tmp/tmpdKSSKI/pdisk_1.dat 2025-04-04T17:03:33.634747Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:03:33.834381Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:33.898899Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:33.898983Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:33.911572Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11663, node 4 2025-04-04T17:03:34.341476Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:34.341505Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:34.341512Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:34.341642Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9586 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:35.124106Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:35.295305Z node 4 :TICKET_PARSER DEBUG: Ticket 1CE87A1EC5179E4E8537ECAA89A559D7D5AFFAFC5787DBCA2BB3E93B1940A97A (ipv6:[::1]:38996) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:03:35.555438Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:39010) has now valid token of root@builtin 2025-04-04T17:03:35.711893Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:03:35.711921Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:03:35.711929Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:03:35.711970Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:03:41.758584Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504794133630579:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:41.758695Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001292/r3tmp/tmpTO7UIU/pdisk_1.dat 2025-04-04T17:03:42.168986Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:42.197589Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:42.197673Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:42.215569Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7486, node 7 2025-04-04T17:03:42.492602Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:42.492625Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:42.492633Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:42.492776Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9011 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:43.016719Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:43.378180Z node 7 :TICKET_PARSER DEBUG: Ticket 316C763C794F7735E0B1D500AB0346DDF1C81FA2AEB1406377D3ADEE92095B72 (ipv6:[::1]:46560) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:03:43.593956Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:46590) has now valid token of root@builtin 2025-04-04T17:03:43.731131Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:03:43.731173Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:03:43.731189Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:03:43.731227Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:03:47.916222Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489504818768202231:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:47.916324Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001292/r3tmp/tmpCldFKa/pdisk_1.dat 2025-04-04T17:03:48.101074Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:48.143055Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:48.143159Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected - ... lient_certificate:certificate verify failed. E0404 17:04:20.450416763 339543 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:20.473619200 339203 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
2025-04-04T17:04:27.721635Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7489504988237303381:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:27.722172Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001292/r3tmp/tmpQ409Vl/pdisk_1.dat 2025-04-04T17:04:28.577399Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:28.648788Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:28.648903Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:28.660631Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64932, node 25 2025-04-04T17:04:29.051389Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:29.051422Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:29.051434Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:29.051605Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:29.634804Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:32.723064Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7489504988237303381:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:32.723133Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 17:04:39.877620739 342822 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:39.937132330 342823 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0404 17:04:40.062304601 342823 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.144258098 347268 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.267659996 342823 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.345707290 343077 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.410580265 343077 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.442316256 342823 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.483494116 347668 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.506759518 347658 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.540257196 347658 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:40.566972798 343076 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 2025-04-04T17:04:44.136392Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7489505064690669576:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:44.153781Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001292/r3tmp/tmpJnpoeB/pdisk_1.dat 2025-04-04T17:04:44.831979Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:44.987369Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:44.987488Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:44.999175Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3259, node 28 2025-04-04T17:04:45.426267Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:45.426299Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:45.426310Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:45.426502Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9167 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:46.573494Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:49.118369Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7489505064690669576:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:49.118468Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0404 17:04:56.827135780 348917 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:56.875861709 349142 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:56.956005451 348917 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.019451791 348916 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.127804667 349142 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.160688378 349143 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.230928705 348917 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.258352410 348917 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.336575371 349142 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.364109686 348917 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0404 17:04:57.409365275 349142 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0404 17:04:57.453770569 348917 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::OverwriteStoragePolicy [GOOD] Test command err: 2025-04-04T17:03:49.141902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504825175310312:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:49.141963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125d/r3tmp/tmpWsSdrU/pdisk_1.dat 2025-04-04T17:03:49.907945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:49.921778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:49.921906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:49.940850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6189, node 1 2025-04-04T17:03:50.432017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:50.432038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:50.432048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:50.432163Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:03:51.089969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:27153 2025-04-04T17:03:51.574757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:51.651733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:52.183917Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489504841638397200:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:52.242418Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:03:52.286770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:52.286838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:52.305455Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:03:52.319488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27153 2025-04-04T17:03:53.120499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:27153 TClient::Ls request: /Root/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743786233448 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-04T17:03:53.957365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:03:54.124868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489504825175310312:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:54.124928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:27153 TClient::Ls request: /Root/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743786234148 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-04T17:03:54.533478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:27153 TClient::Ls request: /Root/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1743786234652 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-04T17:03:55.049484Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-04T17:03:55.050041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:03:59.451827Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504871668790010:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:59.451888Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125d/r3tmp/tmpjDaaip/pdisk_1.dat 2025-04-04T17:03:59.983179Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:00.055671Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:00.055768Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:00.064480Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25483, node 4 2025-04-04T17:04:00.399036Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:00.399062Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:00.399069Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:00.399218Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:00.938721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClien ... 
ct path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125d/r3tmp/tmpcDHtj3/pdisk_1.dat 2025-04-04T17:04:43.009794Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:04:43.120708Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:43.120878Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:43.131761Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26995, node 13 2025-04-04T17:04:43.420113Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:04:43.420156Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:04:43.420171Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:04:43.420469Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:04:44.045009Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8660 2025-04-04T17:04:44.751529Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:04:44.797942Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:04:45.304917Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7489505066683604265:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:45.305010Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:04:45.317202Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:04:45.317332Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:04:45.327236Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-04T17:04:45.332075Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8660 2025-04-04T17:04:46.089830Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:04:47.707229Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489505056137850427:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:47.707317Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8660 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1743786286730 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-04T17:04:48.732232Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:04:50.310389Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7489505066683604265:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:04:50.310500Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8660 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715661 CreateStep: 1743786289580 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-04T17:04:50.902919Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8660 TClient::Ls request: /Root/ydb_ut_tenant/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1743786291950 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-04T17:04:52.759574Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8660 TClient::Ls request: /Root/ydb_ut_tenant/table-4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-4" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1743786293750 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-4" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-04-04T17:04:54.660951Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8660 TClient::Ls request: /Root/ydb_ut_tenant/table-5 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-5" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1743786295570 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-5" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-04-04T17:04:56.854496Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-04T17:04:56.864960Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoMergeBySize [GOOD] Test command err: 2025-04-04T17:03:25.387851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504722069288482:2240];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:25.387908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e8b/r3tmp/tmpBmyzBL/pdisk_1.dat 2025-04-04T17:03:26.092387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:26.099464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:26.099662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:26.110562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13740 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:03:26.749449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:03:26.812286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786206953 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) A-0 B-0 2025-04-04T17:03:27.763009Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.12, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.016s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-04T17:03:27.775535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-04-04T17:03:27.808080Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.14, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.040s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-04T17:03:27.874535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:03:27.874678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-04-04T17:03:27.874748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:03:27.874797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 1, DataSize 6291502 2025-04-04T17:03:27.882607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:03:28.035571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-04-04T17:03:28.135077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:03:28.135222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-04-04T17:03:28.135288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:03:28.135318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 1, DataSize 6291502 2025-04-04T17:03:28.136033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-04-04T17:03:28.362724Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.18, eph 2} end=0, 2 blobs 1r (max 1), put Spent{time=0.067s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-04T17:03:28.440325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-04-04T17:03:28.500705Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.20, eph 2} end=0, 3 blobs 2r (max 2), put Spent{time=0.093s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583142 0 0)b }, ecr=1.000 2025-04-04T17:03:28.540590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started 
TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:03:28.540728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-04-04T17:03:28.540799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:03:28.540829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 2, DataSize 12583004 2025-04-04T17:03:28.549558Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 1, Rows# 2, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-04T17:03:28.555621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 599 seconds 2025-04-04T17:03:28.555913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:03:28.557196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 16 ms, with status# 1, next wakeup in# 599.983776s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-04-04T17:03:28.606013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583020 rowCount 2 cpuUsage 0 2025-04-04T17:03:28.707858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:03:28.707970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583020 row count 2 2025-04-04T17:03:28.708011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:03:28.708034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 2, DataSize 12583020 2025-04-04T17:03:28.717822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-04-04T17:03:28.983283Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 3} end=0, 2 blobs 1r (max 1), put Spent{time=0.048s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-04-04T17:03:29.211143Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=0, 4 blobs 3r (max 3), put Spent{time=0.096s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874688 0 0)b }, ecr=1.000 2025-04-04T17:03:29.311993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 
72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 2025-04-04T17:03:29.426076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:03:29.426331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874522 row count 3 2025-04-04T17:03:29.426385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:03:29.426415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 3, DataSize 18874522 2025-04-04T17:03:29.455011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:03:29.472020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 p ... 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:52.787141Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 3: RowCount 5, DataSize 29380944 2025-04-04T17:04:52.798558Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:04:52.800729Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037904 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 212934596 rowCount 32 cpuUsage 45.5542 2025-04-04T17:04:52.904814Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:04:52.904929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:17 data size 212934596 row count 32 2025-04-04T17:04:52.904983Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037904 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:52.905014Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037904 followerId=0, pathId 3: RowCount 32, DataSize 212934596 2025-04-04T17:04:52.913923Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:04:53.320933Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037899 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 29360348 rowCount 4 cpuUsage 0.0895 2025-04-04T17:04:53.426709Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:04:53.426830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:12 data size 29360348 row count 4 2025-04-04T17:04:53.426883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037899 maps to shardIdx: 72057594046644480:12 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId 
map=T1, is column=0, is olap=0 2025-04-04T17:04:53.426912Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037899 followerId=0, pathId 3: RowCount 4, DataSize 29360348 2025-04-04T17:04:53.427103Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:04:53.496143Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037903 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 29360348 rowCount 4 cpuUsage 0.2379 2025-04-04T17:04:53.502766Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037904 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 212934596 rowCount 32 cpuUsage 45.5542 2025-04-04T17:04:53.600493Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-04T17:04:53.600618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:16 data size 29360348 row count 4 2025-04-04T17:04:53.600667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037903 maps to shardIdx: 72057594046644480:16 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:53.600698Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037903 followerId=0, pathId 3: RowCount 4, DataSize 29360348 2025-04-04T17:04:53.600817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:17 data size 212934596 row count 32 2025-04-04T17:04:53.600845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037904 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:53.600866Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037904 followerId=0, pathId 3: RowCount 32, DataSize 212934596 2025-04-04T17:04:53.606357Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:04:54.707271Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037904 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 212934612 rowCount 32 cpuUsage 45.5542 2025-04-04T17:04:54.822496Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:04:54.822627Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:17 data size 212934612 row count 32 2025-04-04T17:04:54.822699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037904 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:54.822732Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037904 followerId=0, pathId 3: RowCount 32, DataSize 212934612 2025-04-04T17:04:54.826548Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started 
TEvPersistStats at tablet 72057594046644480, queue size# 0 TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786236143 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 9 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColu... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786236143 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 9 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColu... (TRUNCATED) 2025-04-04T17:04:55.450679Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037904 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 220274683 rowCount 33 cpuUsage 45.5542 2025-04-04T17:04:55.554502Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:04:55.554637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:17 data size 220274683 row count 33 2025-04-04T17:04:55.554714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037904 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:55.554747Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037904 followerId=0, pathId 3: RowCount 33, DataSize 220274683 2025-04-04T17:04:55.558766Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:04:55.623362Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2025-04-04T17:04:55.623434Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2025-04-04T17:04:55.623484Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 2025-04-04T17:04:55.650928Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037901 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 
'Ready' dataSize 29360348 rowCount 4 cpuUsage 0.1202 2025-04-04T17:04:55.755020Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:04:55.755142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:14 data size 29360348 row count 4 2025-04-04T17:04:55.755195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037901 maps to shardIdx: 72057594046644480:14 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:55.755224Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037901 followerId=0, pathId 3: RowCount 4, DataSize 29360348 2025-04-04T17:04:55.760541Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:04:55.822834Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037897 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 29360348 rowCount 4 cpuUsage 0.0647 2025-04-04T17:04:55.922553Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:04:55.922686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:10 data size 29360348 row count 4 2025-04-04T17:04:55.922741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037897 maps to shardIdx: 72057594046644480:10 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-04-04T17:04:55.922770Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037897 followerId=0, pathId 3: RowCount 4, DataSize 29360348 2025-04-04T17:04:55.926871Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |83.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> YdbOlapStore::LogExistingUserId [GOOD] >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogCountByResource >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] Test command err: 2025-04-04T17:01:12.229271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:12.229848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:12.229923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001643/r3tmp/tmpZr94lt/pdisk_1.dat 2025-04-04T17:01:12.728432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:595:2519], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.728519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.728569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:12.728740Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:592:2517], Recipient [1:408:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-04T17:01:12.728781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:01:12.871633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-04T17:01:12.871944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.872211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:01:12.872473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:01:12.872544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.872655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.873516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:12.873706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:01:12.873761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.873796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:12.873990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.874028Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.874095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.874150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:01:12.874186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:01:12.874266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:01:12.874389Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.874904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.874947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:12.875213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.875254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.875319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.875390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:01:12.875442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:01:12.875530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.875929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.875974Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T17:01:12.876085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.876122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:01:12.876161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.876208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:01:12.876261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:01:12.876291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:01:12.876331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:01:12.896936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:01:12.897735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:01:12.897799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-04T17:01:12.897988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:01:12.899496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:600:2524], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2525] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:01:12.899561Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:01:12.899613Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-04T17:01:12.899754Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-04T17:01:12.900215Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:604:2527], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.900265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:12.900324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:12.900478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:592:2517], Recipient [1:408:2403]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-04T17:01:12.900507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:01:12.900591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:12.900634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-04T17:01:12.900697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:12.946941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:408:2403]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-04T17:01:12.947056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T17:01:12.947095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:12.947155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:01:12.947222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-04T17:01:13.004781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:13.004956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:13.016707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:01:13.097717Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-04-04T17:01:13.098456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:635:2543], 
Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:13.098505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:01:13.098537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:01:13.098686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:408:2403]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-04T17:01:13.098719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:01:13.098795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:01:13.098940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:01:13.098998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationP ... 4046644480 2025-04-04T17:04:06.920311Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:3 ProgressState 2025-04-04T17:04:06.920362Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:04:06.920386Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-04-04T17:04:06.920412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-04-04T17:04:06.920445Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-04-04T17:04:06.920476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-04-04T17:04:06.920506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 5/7, is published: true 2025-04-04T17:04:06.920808Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:408:2403], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:04:06.920851Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:04:06.920913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:6, at schemeshard: 72057594046644480 2025-04-04T17:04:06.920955Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:6 ProgressState 2025-04-04T17:04:06.921028Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:04:06.921058Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-04-04T17:04:06.921086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-04-04T17:04:06.921120Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-04-04T17:04:06.921146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-04-04T17:04:06.921176Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 6/7, is published: true 2025-04-04T17:04:06.921513Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 2146435072, Sender [3:408:2403], Recipient [3:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:04:06.921549Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:04:06.921619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:4, at schemeshard: 72057594046644480 2025-04-04T17:04:06.921661Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:4 ProgressState 2025-04-04T17:04:06.921724Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:04:06.921752Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-04-04T17:04:06.921779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-04-04T17:04:06.921820Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-04-04T17:04:06.921848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-04-04T17:04:06.921878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 7/7, is published: true 2025-04-04T17:04:06.921976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1239:2965] message: TxId: 281474976715668 2025-04-04T17:04:06.922038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-04-04T17:04:06.922109Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-04-04T17:04:06.922154Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-04-04T17:04:06.922355Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 17] was 2 2025-04-04T17:04:06.922527Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-04-04T17:04:06.922633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-04-04T17:04:06.922780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 18] was 2 2025-04-04T17:04:06.922818Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:2 2025-04-04T17:04:06.922854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:2 2025-04-04T17:04:06.922902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 19] was 3 2025-04-04T17:04:06.922932Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:3 2025-04-04T17:04:06.922957Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:3 2025-04-04T17:04:06.923069Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 20] was 3 2025-04-04T17:04:06.923107Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-04-04T17:04:06.923168Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 281474976715668:4 2025-04-04T17:04:06.923193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:4 2025-04-04T17:04:06.923250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 21] was 3 2025-04-04T17:04:06.923278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-04-04T17:04:06.923308Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:5 2025-04-04T17:04:06.923331Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:5 2025-04-04T17:04:06.923382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 22] was 3 2025-04-04T17:04:06.923409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-04T17:04:06.923437Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:6 2025-04-04T17:04:06.923460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:6 2025-04-04T17:04:06.923516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 23] was 3 2025-04-04T17:04:06.923541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-04-04T17:04:06.924278Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:04:06.924515Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:04:06.924643Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:04:06.924804Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:04:06.924929Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1239:2965] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-04-04T17:04:06.925478Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1246:2971], Recipient [3:408:2403]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:04:06.925526Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:04:06.925558Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-04T17:04:07.198551Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [3:1542:3206], serverId# [3:1543:3207], sessionId# [0:0:0] 2025-04-04T17:04:07.198859Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr0w50rn2vn8s9j635chpmsn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDkzZjY2ODEtZTY4YmRkMGQtZjVlNzM3YWMtZTljNTFmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } 2025-04-04T17:04:07.479861Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [3:1571:3223], serverId# [3:1572:3224], sessionId# [0:0:0] 2025-04-04T17:04:07.480125Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0w511xb2mc75j01ys0vw1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjk0NzQ1NDYtNDk5MDdjYTItNjRkM2UwNTItZjc5NTViYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 11 } items { uint32_value: 101 } }, { items { uint32_value: 21 } items { uint32_value: 201 } }, { items { uint32_value: 31 } items { uint32_value: 301 } }, { items { uint32_value: 41 } items { uint32_value: 401 } }, { items { uint32_value: 51 } items { uint32_value: 501 } } 2025-04-04T17:04:07.669216Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1600:3240], serverId# [3:1601:3241], sessionId# [0:0:0] 2025-04-04T17:04:07.669408Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0w51a5b3wyxjvz19xgg560, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWM3ZWRiZTEtMTdmODFmYTUtZTdkYTFmZmItZjA3MGZjNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 12 } items { uint32_value: 102 } }, { items { uint32_value: 22 } items { uint32_value: 202 } }, { items { uint32_value: 32 } items { uint32_value: 302 } }, { items { uint32_value: 42 } items { uint32_value: 402 } }, { items { uint32_value: 52 } items { uint32_value: 502 } } 2025-04-04T17:04:07.845272Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [3:1629:3257], serverId# [3:1630:3258], sessionId# [0:0:0] 2025-04-04T17:04:07.845494Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0w51g0ctpzqmx5qd80s3j7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTdmMDA5NWItNzJjZWYxYjUtZWZiYzA3YjAtZDBkM2MzMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 13 } items { uint32_value: 103 } }, { items { uint32_value: 23 } items { uint32_value: 203 } }, { items { uint32_value: 33 } items { uint32_value: 303 } }, { items { uint32_value: 43 } items { uint32_value: 403 } }, { items { uint32_value: 53 } items { uint32_value: 503 } } |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> YdbOlapStore::LogTsRangeDescending [GOOD] >> YdbQueryService::TestAttachTwice >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingBetween >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2025-04-04T17:01:27.583418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504215533886897:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:01:27.583485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012e0/r3tmp/tmp0Pev8D/pdisk_1.dat 2025-04-04T17:01:28.256043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:28.281652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:01:28.281760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:01:28.300580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11841, node 1 2025-04-04T17:01:28.828340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:28.828367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:28.828376Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:28.828517Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:01:29.651790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:3834 2025-04-04T17:01:30.101305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:01:30.398969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:30.399331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:30.399679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:01:30.399860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:01:30.399986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:01:30.400152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:01:30.400262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:30.400438Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:30.400599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:01:30.400706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:01:30.400866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:01:30.400980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489504228418789706:2326];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:01:30.459367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:30.459446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:30.459733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:01:30.459874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:01:30.460006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:01:30.460166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:01:30.460288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:30.463551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:30.463793Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:01:30.463898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:01:30.463992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:01:30.464100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504228418789705:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:01:30.517096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:01:30.517209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:01:30.517463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:01:30.517607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:01:30.517728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:01:30.517832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:01:30.517987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:01:30.518117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:01:30.518273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504228418789708:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Syn ... 
asks { TaskId: 39 CpuTimeUs: 1086 FinishTimeMs: 1743786312498 ComputeCpuTimeUs: 54 BuildCpuTimeUs: 1032 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786311648 } MaxMemoryUsage: 1048576 } 2025-04-04T17:05:12.501864Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621737:3598], TxId: 281474976710670, task: 54. Ctx: { TraceId : 01jr0w6z7abbbhmte4hxdndy5d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T17:05:12.501963Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621737:3598], TxId: 281474976710670, task: 54. Ctx: { TraceId : 01jr0w6z7abbbhmte4hxdndy5d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [28:7489505178188621672:3538] TaskId: 54 Stats: CpuTimeUs: 4036 Tasks { TaskId: 54 CpuTimeUs: 3110 FinishTimeMs: 1743786312501 ComputeCpuTimeUs: 57 BuildCpuTimeUs: 3053 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786311653 } MaxMemoryUsage: 1048576 2025-04-04T17:05:12.501988Z node 28 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:199 :TEvFetcherFinished: [28:7489505178188621750:3610] 2025-04-04T17:05:12.502023Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7489505178188621672:3538] TxId: 281474976710670. Ctx: { TraceId: 01jr0w6z7abbbhmte4hxdndy5d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7489505178188621724:3587], CA [28:7489505178188621719:3582], CA [28:7489505178188621714:3577], CA [28:7489505178188621689:3556], CA [28:7489505178188621728:3590], CA [28:7489505178188621733:3595], CA [28:7489505178188621687:3554], CA [28:7489505178188621747:3608], CA [28:7489505178188621682:3549], CA [28:7489505178188621742:3603], CA [28:7489505178188621737:3598], CA [28:7489505178188621731:3593], CA [28:7489505178188621691:3557], CA [28:7489505178188621685:3552], CA [28:7489505178188621745:3606], CA [28:7489505178188621680:3547], CA [28:7489505178188621740:3601], CA [28:7489505178188621734:3596], CA [28:7489505178188621743:3604], CA [28:7489505178188621738:3599], CA [28:7489505178188621703:3568], CA [28:7489505178188621701:3566], CA [28:7489505178188621695:3561], CA [28:7489505178188621707:3571], CA [28:7489505178188621710:3574], CA [28:7489505178188621699:3564], CA [28:7489505178188621705:3569], CA [28:7489505178188621708:3572], CA [28:7489505178188621722:3585], CA [28:7489505178188621717:3580], CA [28:7489505178188621725:3588], CA [28:7489505178188621720:3583], CA [28:7489505178188621715:3578], CA [28:7489505178188621729:3591], CA [28:7489505178188621718:3581], CA [28:7489505178188621713:3576], CA [28:7489505178188621688:3555], CA [28:7489505178188621677:3545], CA [28:7489505178188621683:3550], CA [28:7489505178188621732:3594], CA [28:7489505178188621692:3558], CA [28:7489505178188621686:3553], CA [28:7489505178188621746:3607], CA [28:7489505178188621681:3548], CA [28:7489505178188621741:3602], CA [28:7489505178188621730:3592], CA [28:7489505178188621736:3597], CA [28:7489505178188621684:3551], CA [28:7489505178188621744:3605], CA [28:7489505178188621679:3546], CA [28:7489505178188621739:3600], CA [28:7489505178188621693:3559], CA [28:7489505178188621696:3562], CA [28:7489505178188621702:3567], CA [28:7489505178188621711:3575], CA [28:7489505178188621706:3570], CA [28:7489505178188621700:3565], CA [28:7489505178188621694:3560], CA [28:7489505178188621749:3609], CA [28:7489505178188621709:3573], CA [28:7489505178188621698:3563], CA [28:7489505178188621723:3586], CA [28:7489505178188621726:3589], CA [28:7489505178188621721:3584], CA [28:7489505178188621716:3579], 2025-04-04T17:05:12.502066Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621738:3599], TxId: 281474976710670, task: 55. Ctx: { TraceId : 01jr0w6z7abbbhmte4hxdndy5d. SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T17:05:12.502152Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621738:3599], TxId: 281474976710670, task: 55. Ctx: { TraceId : 01jr0w6z7abbbhmte4hxdndy5d. SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [28:7489505178188621672:3538] TaskId: 55 Stats: CpuTimeUs: 2265 Tasks { TaskId: 55 CpuTimeUs: 1037 FinishTimeMs: 1743786312502 ComputeCpuTimeUs: 66 BuildCpuTimeUs: 971 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786311656 } MaxMemoryUsage: 1048576 2025-04-04T17:05:12.502159Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7489505178188621672:3538] TxId: 281474976710670. 
Ctx: { TraceId: 01jr0w6z7abbbhmte4hxdndy5d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7489505178188621721:3584], task: 40, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 2126 Tasks { TaskId: 40 CpuTimeUs: 1178 FinishTimeMs: 1743786312499 ComputeCpuTimeUs: 61 BuildCpuTimeUs: 1117 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786311650 } MaxMemoryUsage: 1048576 } 2025-04-04T17:05:12.502175Z node 28 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:199 :TEvFetcherFinished: [28:7489505178188621750:3610] 2025-04-04T17:05:12.502279Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621739:3600], TxId: 281474976710670, task: 56. Ctx: { TraceId : 01jr0w6z7abbbhmte4hxdndy5d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T17:05:12.502363Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621739:3600], TxId: 281474976710670, task: 56. Ctx: { TraceId : 01jr0w6z7abbbhmte4hxdndy5d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [28:7489505178188621672:3538] TaskId: 56 Stats: CpuTimeUs: 2186 Tasks { TaskId: 56 CpuTimeUs: 1030 FinishTimeMs: 1743786312502 ComputeCpuTimeUs: 60 BuildCpuTimeUs: 970 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786311658 } MaxMemoryUsage: 1048576 2025-04-04T17:05:12.502396Z node 28 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:199 :TEvFetcherFinished: [28:7489505178188621750:3610] 2025-04-04T17:05:12.502449Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7489505178188621672:3538] TxId: 281474976710670. Ctx: { TraceId: 01jr0w6z7abbbhmte4hxdndy5d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [28:7489505178188621724:3587], CA [28:7489505178188621719:3582], CA [28:7489505178188621714:3577], CA [28:7489505178188621689:3556], CA [28:7489505178188621728:3590], CA [28:7489505178188621733:3595], CA [28:7489505178188621687:3554], CA [28:7489505178188621747:3608], CA [28:7489505178188621682:3549], CA [28:7489505178188621742:3603], CA [28:7489505178188621737:3598], CA [28:7489505178188621731:3593], CA [28:7489505178188621691:3557], CA [28:7489505178188621685:3552], CA [28:7489505178188621745:3606], CA [28:7489505178188621680:3547], CA [28:7489505178188621740:3601], CA [28:7489505178188621734:3596], CA [28:7489505178188621743:3604], CA [28:7489505178188621738:3599], CA [28:7489505178188621703:3568], CA [28:7489505178188621701:3566], CA [28:7489505178188621695:3561], CA [28:7489505178188621707:3571], CA [28:7489505178188621710:3574], CA [28:7489505178188621699:3564], CA [28:7489505178188621705:3569], CA [28:7489505178188621708:3572], CA [28:7489505178188621722:3585], CA [28:7489505178188621717:3580], CA [28:7489505178188621725:3588], CA [28:7489505178188621720:3583], CA [28:7489505178188621715:3578], CA [28:7489505178188621729:3591], CA [28:7489505178188621718:3581], CA [28:7489505178188621713:3576], CA [28:7489505178188621688:3555], CA [28:7489505178188621677:3545], CA [28:7489505178188621683:3550], CA [28:7489505178188621732:3594], CA [28:7489505178188621692:3558], CA [28:7489505178188621686:3553], CA [28:7489505178188621746:3607], CA [28:7489505178188621681:3548], CA [28:7489505178188621741:3602], CA [28:7489505178188621730:3592], CA [28:7489505178188621736:3597], CA [28:7489505178188621684:3551], CA [28:7489505178188621744:3605], CA [28:7489505178188621679:3546], CA [28:7489505178188621739:3600], CA [28:7489505178188621693:3559], CA [28:7489505178188621696:3562], CA [28:7489505178188621702:3567], CA [28:7489505178188621711:3575], CA [28:7489505178188621706:3570], CA [28:7489505178188621700:3565], CA [28:7489505178188621694:3560], CA [28:7489505178188621749:3609], CA [28:7489505178188621709:3573], CA [28:7489505178188621698:3563], CA [28:7489505178188621723:3586], CA [28:7489505178188621726:3589], CA [28:7489505178188621721:3584], CA [28:7489505178188621716:3579], 2025-04-04T17:05:12.502461Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621740:3601], TxId: 281474976710670, task: 57. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. TraceId : 01jr0w6z7abbbhmte4hxdndy5d. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T17:05:12.502551Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505178188621740:3601], TxId: 281474976710670, task: 57. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=. TraceId : 01jr0w6z7abbbhmte4hxdndy5d. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
Send stats to executor actor [28:7489505178188621672:3538] TaskId: 57 Stats: CpuTimeUs: 1888 Tasks { TaskId: 57 CpuTimeUs: 980 FinishTimeMs: 1743786312502 ComputeCpuTimeUs: 59 BuildCpuTimeUs: 921 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786311659 } MaxMemoryUsage: 1048576
2025-04-04T17:05:12.502572Z node 28 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:199 :TEvFetcherFinished: [28:7489505178188621750:3610]
2025-04-04T17:05:12.502593Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7489505178188621672:3538] TxId: 281474976710670. Ctx: { TraceId: 01jr0w6z7abbbhmte4hxdndy5d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NDRlOTdhOTgtZmY2NDZhNmItZDJlMzQxZGUtMTg2OWVlNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7489505178188621722:3585], task: 41, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 2063 Tasks { TaskId: 41 CpuTimeUs: 995 FinishTimeMs: 1743786312499 ComputeCpuTimeUs: 65 BuildCpuTimeUs: 930 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786311651 } MaxMemoryUsage: 1048576 }
|83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
>> THiveTest::TestCreateTabletAndReassignGroups [GOOD]
>> THiveTest::TestCreateTabletAndReassignGroups3
|83.3%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo]
>> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD]
>> THiveTest::TestCreateTabletAndMixedReassignGroups3
|83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots]
2025-04-04 17:05:21,218 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-04 17:05:21,430 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
  pid    rss    ref    pdirt
  159426 48.5M  48.2M  23.5M  test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/0013f0/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.arg
  159771 483M   477M   435M   └─ ydb-core-tx-schemeshard-ut_index --trace-path-append /home/runner/.ya/build/build_root/i5tq/0013f0/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stu
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140]
2025-04-04T16:55:23.124040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:55:23.124161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:55:23.124267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-04T16:55:23.124304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T16:55:23.124343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T16:55:23.124379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T16:55:23.124454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:55:23.124537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T16:55:23.124899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T16:55:23.209709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T16:55:23.209783Z node 1 :IMPORT WARN: Table profiles were not loaded
Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062]
2025-04-04T16:55:23.225540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T16:55:23.225942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T16:55:23.226119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T16:55:23.237388Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:23.237628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:23.238423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.239337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:23.244828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.246004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.246086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.246273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:23.246321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.246354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:23.246453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:23.253120Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:23.418200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:23.418496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.418786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:23.419099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:23.419175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.421974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.422171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:23.422463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T16:55:23.422544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:23.422594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:23.422675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:23.426523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.426672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:23.426717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:23.429169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.429234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.429286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.429360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.441147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:23.443356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:23.443550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:23.444602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:23.444779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:23.444837Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.445139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:23.445198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:23.445400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:23.445493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:23.448136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:23.448204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:23.448732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:23.448793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:23.449249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:23.449292Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:23.449395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.449448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.449497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:23.449525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.449566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:23.449605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:23.449649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 1 2025-04-04T17:05:21.398630Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 5 2025-04-04T17:05:21.398744Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/3, is published: true 2025-04-04T17:05:21.401743Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:2 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697601 2025-04-04T17:05:21.401919Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:2 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697601 2025-04-04T17:05:21.402005Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 2, tablet: 72057594037968897 2025-04-04T17:05:21.402053Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:3, partId: 2 2025-04-04T17:05:21.402090Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:4, partId: 2 2025-04-04T17:05:21.402348Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:1 msg type: 268697601 2025-04-04T17:05:21.402417Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-04-04T17:05:21.402815Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId collision in routes has found, TxId# 1002, partId# 0, prevPartId# 2, tablet# 72057594037968897, guessDefaultRootHive# yes, prevTx# WorkingDir: "/MyRoot/Table/UserDefinedIndex" OperationType: ESchemeOpCreateTable CreateTable { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" } Columns { Name: "key" Type: "Uint32" } KeyColumnNames: "indexed" KeyColumnNames: "key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } } Internal: false FailOnExist: false AllowCreateInTempDir: false, newTx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" } Columns { Name: "indexed" Type: "Uint32" } KeyColumnNames: "key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } } Internal: false FailOnExist: false AllowCreateInTempDir: false 2025-04-04T17:05:21.402868Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 0, tablet: 72057594037968897 2025-04-04T17:05:21.402903Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2025-04-04T17:05:21.402934Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 1002, shardIdx: 72057594046678944:2, partId: 0 Leader for TabletID 72057594037968897 is [168:215:2214] sender: [168:317:2058] recipient: [168:15:2062] 2025-04-04T17:05:21.404205Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 
2025-04-04T17:05:21.404375Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-04T17:05:21.404588Z node 168 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 5 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-04T17:05:21.404802Z node 168 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 3, type DataShard, boot OK, tablet id 72075186233409546 2025-04-04T17:05:21.405153Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409546 Origin: 72057594037968897 2025-04-04T17:05:21.405201Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:3, partId: 2 2025-04-04T17:05:21.405321Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:2, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409546 Origin: 72057594037968897 2025-04-04T17:05:21.405371Z node 168 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-04T17:05:21.405440Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:2 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409546 Origin: 72057594037968897 2025-04-04T17:05:21.406124Z node 168 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 5 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-04T17:05:21.406337Z node 168 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type DataShard, boot OK, tablet id 72075186233409547 2025-04-04T17:05:21.406793Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-04T17:05:21.406841Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:4, partId: 2 2025-04-04T17:05:21.406949Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:2, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-04T17:05:21.406991Z node 168 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-04T17:05:21.407053Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:2 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-04T17:05:21.407138Z node 168 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:2 2 -> 3 2025-04-04T17:05:21.407629Z node 
168 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-04T17:05:21.407823Z node 168 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 1, type DataShard, boot OK, tablet id 72075186233409548 2025-04-04T17:05:21.407953Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409548 Origin: 72057594037968897 2025-04-04T17:05:21.407993Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:1, partId: 0 2025-04-04T17:05:21.408090Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409548 Origin: 72057594037968897 2025-04-04T17:05:21.408128Z node 168 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-04T17:05:21.408192Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 1 TabletID: 72075186233409548 Origin: 72057594037968897 2025-04-04T17:05:21.408528Z node 168 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-04T17:05:21.408704Z node 168 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409549 2025-04-04T17:05:21.409281Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409549 Origin: 72057594037968897 2025-04-04T17:05:21.409323Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 1002, shardIdx: 72057594046678944:2, partId: 0 2025-04-04T17:05:21.409416Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409549 Origin: 72057594037968897 2025-04-04T17:05:21.409452Z node 168 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-04T17:05:21.409515Z node 168 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1002:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409549 Origin: 72057594037968897 2025-04-04T17:05:21.409588Z node 168 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 2 -> 3 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in 
wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/0013f0/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/0013f0/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk9/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots]
2025-04-04 17:05:14,214 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-04 17:05:14,400 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
  pid    rss    ref    pdirt
  158067 48.3M  48.3M  23.4M  test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/001550/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.arg
  158314 492M   488M   442M   └─ ydb-core-tx-schemeshard-ut_index --trace-path-append /home/runner/.ya/build/build_root/i5tq/001550/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stu
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140]
2025-04-04T16:55:16.790381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T16:55:16.790503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T16:55:16.790649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0,
StatsMaxExecuteTime# 0.010000s 2025-04-04T16:55:16.790696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T16:55:16.790746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T16:55:16.790785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T16:55:16.790870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T16:55:16.790944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T16:55:16.791280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T16:55:16.876941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T16:55:16.876996Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T16:55:16.891549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T16:55:16.892423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T16:55:16.892643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T16:55:16.900564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T16:55:16.900743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T16:55:16.901271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:16.901905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:16.905257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:16.906592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:16.906692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:16.906893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T16:55:16.906949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:16.906996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T16:55:16.907127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T16:55:16.918917Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T16:55:17.054732Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T16:55:17.054965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.055198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T16:55:17.055460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T16:55:17.055527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.058044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.058212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T16:55:17.058434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.058501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T16:55:17.058542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T16:55:17.058590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T16:55:17.061385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.061453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T16:55:17.061487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T16:55:17.063497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.063551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.063599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.063671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.067499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T16:55:17.071826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T16:55:17.072037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T16:55:17.073041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T16:55:17.073211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T16:55:17.073260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.073569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T16:55:17.073626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T16:55:17.073789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T16:55:17.073877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T16:55:17.078574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T16:55:17.078664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T16:55:17.078869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T16:55:17.078919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T16:55:17.086959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T16:55:17.087055Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T16:55:17.087184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.087271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.087320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T16:55:17.087350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.087383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T16:55:17.087421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T16:55:17.087460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
onPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:05:12.918181Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true 
BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:05:12.918757Z node 159 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 606us result status StatusSuccess 2025-04-04T17:05:12.919622Z node 159 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:05:12.945558Z node 159 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:3][72075186233409546][159:1247:3012] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T17:05:12.945697Z node 159 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:3][159:1211:3012] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T17:05:12.945866Z node 159 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:3][72075186233409546][159:1247:3012] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743786312819601 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743786312819601 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743786312819601 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:05:12.955515Z node 159 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:3][72075186233409546][159:1247:3012] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-04T17:05:12.955697Z node 159 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:3][159:1211:3012] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } =========== RUN: Reboot tablet 72075186233409551 (#22) run 158 =========== Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/001550/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/001550/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> THiveTest::TestCreateExternalTablet [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> YdbQueryService::TestAttachTwice [GOOD] |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> YdbLogStore::LogTable [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> 
test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] |83.3%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> YdbMonitoring::SelfCheck >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |83.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus >> KqpQueryService::ReturnAndCloseSameTime [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQueryService::ReplaceIntoWithDefaultValue |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |83.5%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |83.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbQueryService::TestAttachTwice [GOOD] Test command err: 2025-04-04T16:59:58.238901Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503834313695116:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:58.238954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001382/r3tmp/tmp7JwmyW/pdisk_1.dat 2025-04-04T16:59:59.171690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:59.171797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:59.177152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:59.218941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:59.264501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 8484, node 1 2025-04-04T16:59:59.374179Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:59:59.374207Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T16:59:59.631089Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:59.631118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:59.631125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:59.631257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5546 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:00.086632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:5546 2025-04-04T17:00:00.710831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:00:00.711305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:00.711836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:00:00.711877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-04T17:00:00.711962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:00:00.712026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:00:00.712078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T17:00:00.712128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 4 2025-04-04T17:00:00.712444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-04T17:00:00.714122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-04T17:00:00.714518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:00:00.714543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:00.714714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:00:00.714780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-04T17:00:00.719266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-04T17:00:00.719453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-04-04T17:00:00.719682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:00:00.719698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:00:00.719846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:00:00.719981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:00:00.720000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489503838608663110:2433], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-04T17:00:00.720017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489503838608663110:2433], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-04T17:00:00.720056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:00.720088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-04T17:00:00.720828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } 
BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:00:00.721329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281 ... 
4 \'50))\n (let $6 \'(\'(\'\"UsedKeyColumns\" \'(\'\"timestamp\")) \'(\'\"ExpectedMaxRanges\" \'1) \'(\'\"PointPrefixLen\" \'0)))\n (let $7 (KqpWideReadOlapTableRanges $1 %kqp%tx_result_binding_0_0 $4 \'(\'(\'\"ItemsLimit\" $5) \'(\'\"Reverse\")) $6 (lambda \'($10) $10)))\n (let $8 (Bool \'false))\n (let $9 \'(\'(\'8 $8) \'(\'6 $8) \'(\'5 $8) \'(\'9 $8)))\n (return (FromFlow (WideTopSort $7 $5 $9)))\n))))\n)\n" ComputeActors { CpuTimeUs: 1624 Tasks { TaskId: 58 CpuTimeUs: 773 FinishTimeMs: 1743786318602 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 55 BuildCpuTimeUs: 718 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mup5cka3jq" NodeId: 22 CreateTimeMs: 1743786318431 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786318557 } Stages { StageId: 1 StageGuid: "14d3282-34583bfa-d951d12-a2ff77a7" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'50)) (lambda \'($2 $3 $4 $5 $6 $7 $8 $9 $10 $11) (AsStruct \'(\'\"ingested_at\" $2) \'(\'\"json_payload\" $3) \'(\'\"level\" $4) \'(\'\"message\" $5) \'(\'\"request_id\" $6) \'(\'\"resource_id\" $7) \'(\'\"resource_type\" $8) \'(\'\"saved_at\" $9) \'(\'\"timestamp\" $10) \'(\'\"uid\" $11)))))))\n)\n" ComputeActors { CpuTimeUs: 13789 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 646 FinishTimeMs: 1743786318668 ComputeCpuTimeUs: 178 BuildCpuTimeUs: 468 HostName: "ghrun-mup5cka3jq" NodeId: 22 CreateTimeMs: 1743786318438 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786318557 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"TopSort-TableRangeScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"TopSort\",\"TopSortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TableRangeScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"ingested_at\",\"json_payload\",\"level\",\"message\",\"request_id\",\"resource_id\",\"resource_type\",\"saved_at\",\"timestamp\",\"uid\"],\"ReadLimit\":\"50\",\"ReadRanges\":[\"timestamp [4000000, 4093000]\"],\"ReadRangesExpectedSize\":1,\"ReadRangesKeys\":[\"timestamp\"],\"Reverse\":true,\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Projection\":{\"Columns\":[{\"Id\":8},{\"Id\":2},{\"Id\":7},{\"Id\":1},{\"Id\":10},{\"Id\":3},{\"Id\":6},{\"Id\":9},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"5998738e-f3784f92-5f5df679-6b1fd8bd\",\"Stats\":{\"BaseTimeMs\":1743786318557,\"ComputeNodes\":[{\"CpuTimeUs\":1624,\"Tasks\":[{\"ComputeTimeUs\":55,\"FinishTimeMs\":1743786318602,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":22,\"TaskId\":58}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid 
(Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"14d3282-34583bfa-d951d12-a2ff77a7\",\"Stats\":{\"BaseTimeMs\":1743786318557,\"ComputeNodes\":[{\"CpuTimeUs\":13789,\"Tasks\":[{\"ComputeTimeUs\":178,\"FinishTimeMs\":1743786318668,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":22,\"TaskId\":65}]}],\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1896 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\014\010\320\013\020\335k\030\231\314\013 A" } } 2025-04-04T17:05:18.677195Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7489505210474858578:3548] TxId: 281474976710670. Ctx: { TraceId: 01jr0w75cv9g603qgtsamsfrp0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:05:18.677288Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7489505210474858578:3548] TxId: 281474976710670. Ctx: { TraceId: 01jr0w75cv9g603qgtsamsfrp0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.189977s ReadRows: 0 ReadBytes: 0 ru: 126 rate limiter was not found force flag: 1 2025-04-04T17:05:18.677433Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: [22:7489505206179891224:3548], ActorState: ExecuteState, TraceId: 01jr0w75cv9g603qgtsamsfrp0, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:05:18.677994Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: [22:7489505206179891224:3548], ActorState: ExecuteState, TraceId: 01jr0w75cv9g603qgtsamsfrp0, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 366.918 QueriesCount: 1 2025-04-04T17:05:18.678086Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: [22:7489505206179891224:3548], ActorState: ExecuteState, TraceId: 01jr0w75cv9g603qgtsamsfrp0, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:05:18.678268Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: [22:7489505206179891224:3548], ActorState: ExecuteState, TraceId: 01jr0w75cv9g603qgtsamsfrp0, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:05:18.678334Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: [22:7489505206179891224:3548], ActorState: ExecuteState, TraceId: 01jr0w75cv9g603qgtsamsfrp0, EndCleanup, isFinal: 1 2025-04-04T17:05:18.678426Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: [22:7489505206179891224:3548], ActorState: ExecuteState, TraceId: 01jr0w75cv9g603qgtsamsfrp0, Sent query response back to proxy, proxyRequestId: 5, proxyId: [22:7489505107395638682:2085] 2025-04-04T17:05:18.678484Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: 
[22:7489505206179891224:3548], ActorState: unknown state, TraceId: 01jr0w75cv9g603qgtsamsfrp0, Cleanup temp tables: 0 2025-04-04T17:05:18.682839Z node 22 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=65;success=1;message={ <main>
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T17:05:18.683084Z node 22 :KQP_COMPUTE DEBUG: SelfId: [22:7489505210474858670:3622], TxId: 281474976710670, task: 65. Ctx: { TraceId : 01jr0w75cv9g603qgtsamsfrp0. SessionId : ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Send stats to executor actor [22:7489505210474858578:3548] TaskId: 65 Stats: CpuTimeUs: 13789 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 646 FinishTimeMs: 1743786318668 ComputeCpuTimeUs: 178 BuildCpuTimeUs: 468 HostName: "ghrun-mup5cka3jq" NodeId: 22 CreateTimeMs: 1743786318438 } MaxMemoryUsage: 1048576 2025-04-04T17:05:18.685209Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786318001, txId: 18446744073709551615] shutting down 2025-04-04T17:05:18.685396Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=MTNmNDE1YTYtOTRiMjJjYTYtMzljZGU3ODctZDJjYTc1MTY=, ActorId: [22:7489505206179891224:3548], ActorState: unknown state, TraceId: 01jr0w75cv9g603qgtsamsfrp0, Session actor destroyed RESULT: [] --------------------- STATS: total CPU: 2827 duration: 1386 usec cpu: 1386 usec duration: 352534 usec cpu: 357092 usec { name: "/Root/OlapStore/log1" } 2025-04-04T17:05:22.452966Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7489505228215299808:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:05:22.453086Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001382/r3tmp/tmpjLSjBW/pdisk_1.dat 2025-04-04T17:05:23.456349Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:05:23.467924Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:23.497410Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:05:23.497580Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:05:23.506675Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32529, node 25 2025-04-04T17:05:23.759461Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:05:23.759497Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:05:23.759516Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:05:23.759782Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9223 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:05:24.658366Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:05:27.453449Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7489505228215299808:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:05:27.453581Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2025-04-04T17:02:27.349366Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:02:27.476865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:02:27.503794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:02:27.504169Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:02:27.525054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:02:27.525391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:02:27.525662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:02:27.525755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:02:27.525859Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:02:27.525936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:02:27.526026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:02:27.526117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:02:27.526213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:02:27.526385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:02:27.526501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:02:27.526635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:02:27.566801Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:02:27.567794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:02:27.567945Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:02:27.568192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:27.568452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:02:27.568533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:02:27.568581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:02:27.568694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:02:27.568755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-04T17:02:27.568799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:02:27.568830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:02:27.569068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:27.569142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:02:27.569185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:02:27.569216Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:02:27.569305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:02:27.569369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:02:27.569431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:02:27.569488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:02:27.569569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:02:27.569616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:02:27.569677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:02:27.569729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:02:27.569766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:02:27.569802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:02:27.570208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T17:02:27.570329Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-04T17:02:27.570413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T17:02:27.570892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=407; 2025-04-04T17:02:27.571127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:02:27.571207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:02:27.571247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:02:27.571491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:02:27.571565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:02:27.571621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:02:27.571814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:02:27.571861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:02:27.571905Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:02:27.572108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:02:27.572149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:02:27.572185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:02:27.572302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:02:27.572342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:02:27.572410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
column_id:8;chunk_idx:30..69;blob_range:[NO_BLOB:0:2664..2696] per chunk, except chunk_idx:34 (8352) and chunk_idx:69 (8336);;
column_id:9;chunk_idx:0..69;blob_range:[NO_BLOB:0:2664..2696] per chunk, except chunk_idx:34 (8352) and chunk_idx:69 (8336);;
column_id:7;chunk_idx:0..49;blob_range:[NO_BLOB:0:2744..2760] per chunk, except chunk_idx:24 and chunk_idx:49 (10208 each);;
column_id:5;chunk_idx:0..31;blob_range:[NO_BLOB:0:2664..2696] per chunk, except chunk_idx:15 (9400) and chunk_idx:31 (9392);;
(per-chunk blob_range enumeration condensed to the ranges above)
switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););
(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););
(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););;
2025-04-04T17:05:46.392241Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:6063:8055];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=3;
2025-04-04T17:05:46.393818Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6063:8055];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
|83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots
|83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots
|83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std]
>> YdbMonitoring::SelfCheck [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo]
>> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0]
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0]
>> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo]
>> Viewer::JsonStorageListingV1NodeIdFilter [GOOD]
|83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD]
|83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire
|83.6%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std]
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64
2025-04-04 17:05:43,216 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-04 17:05:43,562 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
   pid     rss     ref   pdirt
167685   48.6M   47.3M   23.7M  test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/000fbc/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk32/testing_out_stuff/test_tool.args
168029    1.9G    1.9G    1.7G  └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/i5tq/000fbc/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu
Test command err:
2025-04-04T16:55:44.920199Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T16:55:45.023842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T16:55:45.042782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T16:55:45.043042Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T16:55:45.051022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T16:55:45.051212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T16:55:45.051454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T16:55:45.051534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T16:55:45.051649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T16:55:45.051720Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T16:55:45.051779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T16:55:45.051856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T16:55:45.051957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T16:55:45.052054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.052124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T16:55:45.052226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T16:55:45.075951Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T16:55:45.076140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T16:55:45.076198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T16:55:45.076338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:45.076486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T16:55:45.076550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T16:55:45.076597Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T16:55:45.076688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T16:55:45.076740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T16:55:45.076773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T16:55:45.076805Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T16:55:45.076953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T16:55:45.077001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T16:55:45.077026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T16:55:45.077047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T16:55:45.077123Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T16:55:45.077185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T16:55:45.077229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T16:55:45.077255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T16:55:45.077302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T16:55:45.077325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T16:55:45.077347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T16:55:45.077387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T16:55:45.077425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T16:55:45.077445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T16:55:45.077756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-04-04T16:55:45.077833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-04T16:55:45.077897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-04T16:55:45.077967Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-04T16:55:45.078090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T16:55:45.078142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T16:55:45.078185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T16:55:45.078343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T16:55:45.078386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.078423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T16:55:45.078545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T16:55:45.078585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T16:55:45.078628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T16:55:45.078752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T16:55:45.078780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T16:55:45.078797Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T16:55:45.078889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T16:55:45.078929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T16:55:45.078977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
_exists=1; 2025-04-04T17:05:43.301628Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:120:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-04T17:05:43.301710Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:263:2800:0];range=bytes=0-2799;object_exists=1; 2025-04-04T17:05:43.301822Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:127:2800:0];range=bytes=0-2799;object_exists=1; 2025-04-04T17:05:43.301925Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:95:2872:0];range=bytes=0-2871;object_exists=1; 2025-04-04T17:05:43.302012Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:35:2736:0];range=bytes=0-2735;object_exists=1; 2025-04-04T17:05:43.302116Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:144:9144:0];range=bytes=0-9143;object_exists=1; 2025-04-04T17:05:43.302223Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:148:8728:0];range=bytes=0-8727;object_exists=1; 2025-04-04T17:05:43.302425Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:38:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-04T17:05:43.302526Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:166:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-04T17:05:43.302615Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:214:2736:0];range=bytes=0-2735;object_exists=1; 2025-04-04T17:05:43.302720Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:143:2776:0];range=bytes=0-2775;object_exists=1; 2025-04-04T17:05:43.302823Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:78:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-04T17:05:43.302909Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:213:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-04T17:05:43.302993Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:271:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-04T17:05:43.303100Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:223:2888:0];range=bytes=0-2887;object_exists=1; 2025-04-04T17:05:43.303215Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:129:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-04T17:05:43.303308Z node 1 
:S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:226:2840:0];range=bytes=0-2839;object_exists=1; 2025-04-04T17:05:43.303394Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:1:2888:0];range=bytes=0-2887;object_exists=1; 2025-04-04T17:05:43.303496Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:81:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-04T17:05:43.303605Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:93:2800:0];range=bytes=0-2799;object_exists=1; 2025-04-04T17:05:43.303697Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:109:255:49:2792:0];range=bytes=0-2791;object_exists=1; 2025-04-04T17:05:43.303780Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:116:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-04T17:05:43.303861Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:98:2864:0];range=bytes=0-2863;object_exists=1; 2025-04-04T17:05:43.303964Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:167:2880:0];range=bytes=0-2879;object_exists=1; 2025-04-04T17:05:43.304057Z node 1 :S3_WRAPPER DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=fake_storage.cpp:67;method=GetObject;id=[9437184:2:111:255:217:2784:0];range=bytes=0-2783;object_exists=1; 2025-04-04T17:05:43.310521Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=b05a3fe-117711f0-a9d36950-77ee1e5c;fline=actor.cpp:48;task=agents_waiting=2;additional_info=();; 2025-04-04T17:05:43.312502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:05:43.312661Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:05:43.312704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:05:43.312743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:05:43.324941Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 
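The TX_COLUMNSHARD_SCAN records around this point cycle through the same three steps — TEvTaskProcessedResult, a produce-result pass that extracts nothing (result=0;count=0;finished=0), and "limit exhausted" — right up to the moment the wrapper kills the binary, which is the signature of a scan that stopped making visible progress. One quick way to confirm that from a log this size is to tally (fline, event) pairs. The sketch below is a hypothetical stdlib-only helper, not part of ya or YDB tooling; it assumes the semicolon-separated key=value record layout shown above, and the log path is whatever file this output was saved to.

# tally_scan_events.py — hedged sketch: count repeated TX_COLUMNSHARD_SCAN
# events in a saved test log to spot a scan loop that never produces rows.
# Assumes the "key=value;" record layout visible above (an assumption about
# this log format, not a documented interface).
import re
import sys
from collections import Counter

# fline looks like "actor.cpp:85"; event, when present, follows it.
EVENT_RE = re.compile(r"fline=([\w./]+:\d+);(?:event=([^;]+);)?")

def tally(path: str) -> Counter:
    counts: Counter = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            if ":TX_COLUMNSHARD_SCAN" not in line:
                continue
            for fline, event in EVENT_RE.findall(line):
                counts[(fline, event)] += 1
    return counts

if __name__ == "__main__":
    for (fline, event), n in tally(sys.argv[1]).most_common(10):
        print(f"{n:8d}  {fline:24s}  {event}")

On a scan that is advancing, the produce-result records report growing counts; in the iterations visible here every pass reports count:0, so the top of the tally alone would point at the stuck loop.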
2025-04-04T17:05:43.325142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:05:43.325191Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:05:43.325239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:05:43.344065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:05:43.344275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:05:43.344329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:05:43.344380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:05:43.346234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:05:43.346457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:05:43.346511Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:05:43.346560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10298:12284];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000fbc/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk32/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000fbc/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk32/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbMonitoring::SelfCheck [GOOD] Test command err: 2025-04-04T17:02:02.947167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504365799762122:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:02.947352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012d5/r3tmp/tmpecjdGX/pdisk_1.dat 2025-04-04T17:02:03.466382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:03.468682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:03.473877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:02:03.519342Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16989, node 1 2025-04-04T17:02:03.664438Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:02:03.664466Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:02:03.831413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:03.831442Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:03.831451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:03.831580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19563 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:04.191371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:04.373135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:51830" , at schemeshard: 72057594046644480 2025-04-04T17:02:04.373598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:04.374173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:02:04.374230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-04T17:02:04.374330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:02:04.374420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:02:04.374464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T17:02:04.374503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 4 2025-04-04T17:02:04.374819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-04T17:02:04.376329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-04-04T17:02:04.376530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:02:04.376549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:04.376674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:02:04.376706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-04T17:02:04.379188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-04T17:02:04.379359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-04T17:02:04.379693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:02:04.379711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:02:04.379881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:02:04.379975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:02:04.379989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489504370094730012:2511], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-04-04T17:02:04.380011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489504370094730012:2511], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-04-04T17:02:04.380072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:02:04.380106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-04-04T17:02:04.380944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } 
BindedChannels { StoragePoolName: "hdd2" } (identical BindedChannels entries repeated for the remaining channels) AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:02:04.381457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChann ...
7:0, at schemeshard: 72057594046644480 2025-04-04T17:05:31.459524Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:05:31.459567Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:05:31.459634Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:05:31.459664Z node 64 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 281474976710667:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:05:31.459728Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-04-04T17:05:31.459940Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710667:0 progress is 1/1 2025-04-04T17:05:31.459955Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-04T17:05:31.459977Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710667:0 progress is 1/1 2025-04-04T17:05:31.459990Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-04T17:05:31.460013Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710667, ready parts: 1/1, is published: true 2025-04-04T17:05:31.460076Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [64:7489505266199598350:2371] message: TxId: 281474976710667 2025-04-04T17:05:31.460105Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710667 ready parts: 1/1 2025-04-04T17:05:31.460132Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710667:0 2025-04-04T17:05:31.460145Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710667:0 2025-04-04T17:05:31.460312Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-04-04T17:05:31.462765Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:05:31.462793Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:05:31.462811Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:05:31.462831Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:05:31.488275Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:05:31.489156Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-04-04T17:05:31.489479Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T17:05:31.490150Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted 
for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T17:05:31.490389Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-04T17:05:31.491161Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:05:31.491365Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:05:31.492164Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:05:31.492375Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:05:31.492398Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:05:31.493634Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[64:7489505261904630411:2326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:05:31.498361Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037889 not found 2025-04-04T17:05:31.498422Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037890 not found 2025-04-04T17:05:31.498709Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037891 not found 2025-04-04T17:05:31.498735Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037888 not found 2025-04-04T17:05:31.499096Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:05:31.500067Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:05:31.500101Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:05:31.500159Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T17:05:31.500170Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T17:05:31.500193Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-04T17:05:31.500205Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-04T17:05:31.511818Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[64:7489505261904630408:2325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:05:31.530151Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[64:7489505261904630452:2328];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:05:31.551789Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:05:31.551850Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:05:31.551956Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:05:31.560492Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[64:7489505261904630412:2327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:05:37.698806Z node 67 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[67:7489505290575925344:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:05:37.699449Z node 67 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012d5/r3tmp/tmpRsCB04/pdisk_1.dat 2025-04-04T17:05:38.214189Z node 67 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:05:38.271676Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:05:38.271838Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:05:38.283861Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20048, node 67 2025-04-04T17:05:38.651156Z node 67 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:05:38.651186Z node 67 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:05:38.651196Z node 67 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:05:38.651388Z node 67 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:05:39.369015Z node 67 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-67" reason: "YELLOW-9a33-e9e2-68" reason: "YELLOW-9a33-e9e2-69" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-67" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 67 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-68" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 68 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-69" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 69 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 67 host: "::1" port: 12001 } |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.7%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut 
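
The self_check_result block above is a small issue tree rather than a flat list: the level-1 DATABASE issue cites the level-2 COMPUTE issue through its reason field, and the COMPUTE issue in turn cites three level-4 LOAD_AVERAGE leaves, one per node (67, 68, 69). A minimal sketch that makes the hierarchy explicit; it assumes only the textual issue_log format printed here, and the parser is hypothetical, not a YDB client API:

import re
import sys

def parse_issues(text: str) -> dict[str, dict]:
    """Collect id, message, type and child 'reason' links per issue_log block."""
    issues = {}
    for chunk in text.split("issue_log {")[1:]:
        issues[re.search(r'id: "([^"]+)"', chunk).group(1)] = {
            "message": re.search(r'message: "([^"]+)"', chunk).group(1),
            "type": re.search(r'type: "([^"]+)"', chunk).group(1),
            "reasons": re.findall(r'reason: "([^"]+)"', chunk),
        }
    return issues

def print_tree(issues: dict, issue_id: str, depth: int = 0) -> None:
    node = issues[issue_id]
    print("  " * depth + f"{node['type']}: {node['message']}")
    for child in node["reasons"]:
        print_tree(issues, child, depth + 1)

if __name__ == "__main__":
    issues = parse_issues(sys.stdin.read())
    referenced = {r for node in issues.values() for r in node["reasons"]}
    for root in (i for i in issues if i not in referenced):  # unreferenced ids are roots
        print_tree(issues, root)

Fed the block above, this prints DATABASE -> COMPUTE -> three LOAD_AVERAGE lines, showing that the single root cause behind every YELLOW entry is load average above 100% on the three test nodes.
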
|83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.7%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] Test command err: 2025-04-04T16:58:13.996530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:13.997021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:13.997104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 17459, node 1 TClient is connected to server localhost:20590 2025-04-04T16:58:23.022332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:336:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:58:23.022508Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:58:23.022734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 30149, node 2 TClient is connected to server localhost:1953 2025-04-04T16:59:38.346713Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1914:2301], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.348265Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.349071Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.351676Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2772:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.352918Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:2775:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.353870Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2763:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.354057Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.355153Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2760:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.355480Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.355538Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.356191Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.356364Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2766:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.356491Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.356902Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.356977Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.357729Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.357776Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.358369Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.359646Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:2778:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.360255Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3134:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.360618Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2769:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T16:59:38.361387Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.361595Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.362330Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.362390Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:38.362457Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T16:59:38.363170Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T16:59:38.888073Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:39.127457Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T16:59:39.154106Z node 3 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T16:59:40.180696Z node 3 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 6329, node 3 TClient is connected to server localhost:30953 2025-04-04T16:59:40.933054Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:59:40.933148Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:59:40.933215Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:59:40.934016Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:01:53.038546Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3111:2432], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:53.041140Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.044709Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:01:53.046549Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1972:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:53.048015Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1975:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:53.049256Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.050115Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:01:53.050183Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.051310Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:1062:2174], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:53.051517Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:01:53.051701Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [19:1978:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_mana ... ata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.053828Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.054817Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.054916Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:01:53.054986Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:01:53.060603Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3107:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:53.064748Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.065720Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:01:53.065965Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:3114:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:53.067141Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:3117:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:01:53.067263Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.067311Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:01:53.068223Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:01:53.068369Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T17:01:54.344676Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:01:54.830708Z node 12 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T17:01:54.955594Z node 12 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T17:01:57.310268Z node 12 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 7069, node 12 TClient is connected to server localhost:64780 2025-04-04T17:01:58.842638Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:01:58.842772Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:01:58.842854Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:01:58.843819Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:05:12.783601Z node 21 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [21:3152:2435], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.786310Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.786931Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.788583Z node 26 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [26:2206:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.792008Z node 25 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [25:2203:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.792154Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.793354Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.794070Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.794398Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [27:2209:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.795169Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.796553Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [22:3148:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.796928Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.797484Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.799089Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.799724Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.800062Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:2212:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.800987Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:2215:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.801799Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.802773Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.802827Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.803643Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.804918Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [23:3105:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.807000Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:12.808645Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.809277Z node 24 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [24:1193:2177], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:05:12.811275Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:05:12.812187Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T17:05:13.459819Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:05:13.752857Z node 21 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T17:05:13.798025Z node 21 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T17:05:14.831842Z node 21 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 3226, node 21 TClient is connected to server localhost:13262 2025-04-04T17:05:15.711368Z node 21 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:05:15.711503Z node 21 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:05:15.711608Z node 21 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:05:15.712457Z node 21 :NET_CLASSIFIER ERROR: got bad distributable configuration >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] >> KqpQueryService::ReadManyShardsRange >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] >> YdbOlapStore::LogCountByResource [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-04-04T17:03:22.689868Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:03:22.938164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:03:22.995405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:03:22.995719Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:03:23.017656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:23.017939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:23.018224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:23.018407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:23.018546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:23.018688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:23.018823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:23.018952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:23.019073Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:23.019206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:23.019302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:23.019468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:23.085468Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:03:23.086009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:03:23.086085Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:03:23.086291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:23.086547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:03:23.086623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:03:23.086686Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:03:23.086793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:03:23.086865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:03:23.086905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:03:23.086955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:03:23.087149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:23.087213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:03:23.087251Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:03:23.087298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:03:23.087399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:03:23.087479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:03:23.087544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:03:23.087595Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:03:23.087674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:03:23.087725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:03:23.087761Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:03:23.087814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:03:23.087851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:03:23.087885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:03:23.088284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T17:03:23.088408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=63; 2025-04-04T17:03:23.088509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-04T17:03:23.088586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-04T17:03:23.088764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:03:23.088857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:03:23.088902Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:03:23.089099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:03:23.089145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:03:23.089174Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:03:23.089336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:03:23.089389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:03:23.089436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:03:23.089611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:03:23.089653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:03:23.089686Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:03:23.089820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:03:23.089870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:03:23.089938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
1.056740Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-04-04T17:06:01.056802Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-04-04T17:06:01.056878Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-04-04T17:06:01.056951Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-04-04T17:06:01.057015Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-04-04T17:06:01.057074Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-04-04T17:06:01.057134Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-04-04T17:06:01.057193Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-04-04T17:06:01.057254Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-04-04T17:06:01.057318Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-04-04T17:06:01.057392Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-04-04T17:06:01.057463Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-04-04T17:06:01.057529Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-04-04T17:06:01.057595Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-04-04T17:06:01.057661Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-04-04T17:06:01.057720Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-04-04T17:06:01.057790Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-04-04T17:06:01.057902Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-04-04T17:06:01.057975Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-04-04T17:06:01.058045Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-04-04T17:06:01.058119Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-04-04T17:06:01.058195Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-04-04T17:06:01.058312Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-04-04T17:06:01.058389Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-04-04T17:06:01.058473Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-04-04T17:06:01.058554Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-04-04T17:06:01.058623Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-04-04T17:06:01.058702Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-04-04T17:06:01.058764Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-04-04T17:06:01.058836Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-04-04T17:06:01.058899Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-04-04T17:06:01.058962Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-04-04T17:06:01.059041Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-04-04T17:06:01.059119Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-04-04T17:06:01.059186Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-04-04T17:06:01.059251Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-04-04T17:06:01.059315Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-04-04T17:06:01.059402Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-04-04T17:06:01.059463Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-04-04T17:06:01.059528Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-04-04T17:06:01.059602Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-04-04T17:06:01.059679Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-04-04T17:06:01.059748Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-04-04T17:06:01.059812Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-04-04T17:06:01.059876Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-04-04T17:06:01.059946Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-04-04T17:06:01.060009Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-04-04T17:06:01.060079Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-04-04T17:06:01.060150Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-04-04T17:06:01.060224Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-04-04T17:06:01.060295Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-04-04T17:06:01.060356Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-04-04T17:06:01.060417Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-04-04T17:06:01.060483Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-04-04T17:06:01.060545Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-04-04T17:06:01.060606Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-04-04T17:06:01.060679Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-04-04T17:06:01.060749Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-04-04T17:06:01.060812Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-04-04T17:06:01.060874Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-04-04T17:06:01.060935Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-04-04T17:06:01.061000Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-04-04T17:06:01.061061Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-04-04T17:06:01.061119Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-04-04T17:06:01.061197Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-04-04T17:06:01.061266Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-04-04T17:06:01.061332Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-04-04T17:06:01.061409Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-04-04T17:06:01.061472Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-04-04T17:06:01.061536Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-04-04T17:06:01.061590Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-04-04T17:06:01.061649Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-04-04T17:06:01.061718Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-04-04T17:06:01.061792Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-04-04T17:06:01.061857Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-04-04T17:06:01.061915Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-04-04T17:06:01.061976Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-04-04T17:06:01.062037Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-04-04T17:06:01.062098Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-04-04T17:06:01.062159Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-04-04T17:06:01.062231Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-04-04T17:06:01.062357Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-04-04T17:06:01.062418Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-04-04T17:06:01.062494Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-04-04T17:06:01.592836Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T17:06:01.593724Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-04-04T17:06:01.678198Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-04-04T17:06:01.685168Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 
2025-04-04T16:58:00.017633Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap 2025-04-04T16:58:00.021402Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:00.021695Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T16:58:00.022215Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T16:58:00.023335Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-04-04T16:58:00.023412Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T16:58:00.024359Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:26:2073] ControllerId# 72057594037932033 2025-04-04T16:58:00.024419Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T16:58:00.024563Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T16:58:00.024960Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T16:58:00.036972Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-04-04T16:58:00.037036Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-04-04T16:58:00.039281Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:00.039476Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:00.039639Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:00.039841Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:00.040051Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:00.040208Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:00.040359Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:25:2072] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-04-04T16:58:00.040391Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-04-04T16:58:00.040487Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:26:2073] 2025-04-04T16:58:00.040549Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:26:2073] 2025-04-04T16:58:00.040624Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 
2025-04-04T16:58:00.040684Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T16:58:00.041439Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T16:58:00.041799Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:58:00.041854Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:58:00.041903Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:58:00.042134Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:00.058283Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:58:00.058351Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:58:00.058395Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T16:58:00.062468Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T16:58:00.063376Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-04T16:58:00.063556Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:00.063764Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-04-04T16:58:00.063839Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-04-04T16:58:00.063884Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:58:00.070498Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:30:2063] 2025-04-04T16:58:00.070555Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:30:2063] 2025-04-04T16:58:00.070675Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-04T16:58:00.070811Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-04-04T16:58:00.071522Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:51:2091] 2025-04-04T16:58:00.071557Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:51:2091] 2025-04-04T16:58:00.071611Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:58:00.071962Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-04-04T16:58:00.072187Z node 1 
:TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:00.072292Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:30:2063] 2025-04-04T16:58:00.072356Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-04T16:58:00.072469Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:51:2091] 2025-04-04T16:58:00.072501Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-04-04T16:58:00.072574Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-04-04T16:58:00.072679Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T16:58:00.072737Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-04-04T16:58:00.072896Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:26:2073] 2025-04-04T16:58:00.072948Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:26:2073] 2025-04-04T16:58:00.077250Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:30:2063] 2025-04-04T16:58:00.077526Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:26:2073] 2025-04-04T16:58:00.077933Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:00.079048Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-04-04T16:58:00.079101Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-04-04T16:58:00.079143Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-04-04T16:58:00.079182Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T16:58:00.079411Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-04-04T16:58:00.079477Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-04-04T16:58:00.079516Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:58:00.079675Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-04-04T16:58:00.079752Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:58:00.079876Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-04-04T16:58:00.079932Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-04T16:58:00.080007Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo 
tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T16:58:00.080043Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-04-04T16:58:00.080120Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:30:2063] 2025-04-04T16:58:00.080156Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:30:2063] 2025-04-04T16:58:00.080213Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2025-04-04T16:58:00.080633Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-04T16:58:00.080666Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T16:58:00.080802Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-04-04T16:58:00.080921Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {Ev ... 72057594037927937 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594037927937 Cookie: 0 CurrentLeader: [147:269:2260] CurrentLeaderTablet: [147:276:2264] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-04-04T17:05:33.821384Z node 147 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594037927937 followers: 0 2025-04-04T17:05:33.821488Z node 147 :TABLET_RESOLVER DEBUG: SelectForward node 147 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [147:269:2260] 2025-04-04T17:05:33.821630Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [147:266:2259] 2025-04-04T17:05:33.821719Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [147:266:2259] 2025-04-04T17:05:33.821882Z node 147 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [147:266:2259] 2025-04-04T17:05:33.822118Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [147:266:2259] 2025-04-04T17:05:33.822221Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [147:266:2259] 2025-04-04T17:05:33.822331Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [147:266:2259] 2025-04-04T17:05:33.822432Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [147:266:2259] 2025-04-04T17:05:33.822501Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [147:266:2259] 2025-04-04T17:05:33.822763Z node 147 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [147:265:2258] EventType# 268697601 2025-04-04T17:05:33.823028Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-04-04T17:05:33.823139Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:05:33.824068Z node 147 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2025-04-04T17:05:33.824210Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:05:33.824395Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [147:310:2287] 2025-04-04T17:05:33.824439Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [147:310:2287] 2025-04-04T17:05:33.824515Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [147:310:2287] 2025-04-04T17:05:33.824589Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-04-04T17:05:33.824660Z node 147 :TABLET_RESOLVER DEBUG: SelectForward node 147 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [147:93:2121] 2025-04-04T17:05:33.824768Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [147:310:2287] 2025-04-04T17:05:33.824817Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [147:310:2287] 2025-04-04T17:05:33.824916Z node 147 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [147:310:2287] 2025-04-04T17:05:33.825060Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [147:310:2287] 2025-04-04T17:05:33.825103Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [147:310:2287] 2025-04-04T17:05:33.825138Z node 147 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [147:310:2287] 2025-04-04T17:05:33.825193Z node 147 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [147:276:2264] EventType# 268637702 2025-04-04T17:05:33.825430Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-04-04T17:05:33.825752Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:05:33.826029Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:05:33.826152Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:05:33.826616Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-04-04T17:05:33.826738Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:05:33.835410Z node 147 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004818592}(72075186224037888)::Execute - TryToBoot was not successfull 2025-04-04T17:05:33.835680Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> 
done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-04-04T17:05:33.835818Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:05:33.847281Z node 147 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] bootstrap ActorId# [147:313:2290] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-04-04T17:05:33.847449Z node 147 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-04-04T17:05:33.847506Z node 147 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-04-04T17:05:33.847565Z node 147 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-04-04T17:05:33.847612Z node 147 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-04-04T17:05:33.847756Z node 147 :BS_PROXY DEBUG: Send to queueActorId# [147:34:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-04-04T17:05:33.849179Z node 147 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-04-04T17:05:33.849309Z node 147 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-04-04T17:05:33.849376Z node 147 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-04-04T17:05:33.849522Z node 147 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.662 sample PartId# [72057594037927937:2:4:0:0:698:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 147 } TEvVPutResult{ TimestampMs# 2.115 VDiskId# [0:1:0:0:0] NodeId# 147 Status# OK } ] } 2025-04-04T17:05:33.849697Z node 147 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-04-04T17:05:33.849841Z node 147 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-04-04T17:05:33.850206Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T17:05:33.850347Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T17:05:33.850418Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 
2025-04-04T17:05:33.850472Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T17:05:33.850533Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:05:33.850614Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:05:33.850688Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:05:33.851133Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [147:317:2293] 2025-04-04T17:05:33.851216Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [147:317:2293] 2025-04-04T17:05:33.851381Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-04-04T17:05:33.851588Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-04-04T17:05:33.851758Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-04-04T17:05:33.851874Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:05:33.851952Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-04-04T17:05:33.852001Z node 147 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-04-04T17:05:33.852056Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:05:33.852098Z node 147 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-04-04T17:05:33.852251Z node 147 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-04-04T17:05:33.852328Z node 147 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-04-04T17:05:33.852505Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [147:317:2293] 2025-04-04T17:05:33.852568Z node 147 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [147:317:2293] |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQueryService::ReadManyShardsRange [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogPagingAfter |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyShardsRange [GOOD] Test command err: Trying to start YDB, gRPC: 15066, MsgBus: 7147 2025-04-04T16:58:40.002599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503500018117946:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:40.004027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018d2/r3tmp/tmp1yb5lq/pdisk_1.dat 2025-04-04T16:58:40.808917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:58:40.824660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:58:40.824783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:58:40.832017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15066, node 1 2025-04-04T16:58:40.978879Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T16:58:40.978901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T16:58:40.978908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T16:58:40.979040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7147 TClient is connected to server localhost:7147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T16:58:42.246540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:42.310604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:42.510164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:42.715147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:42.806708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T16:58:44.665866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503517197988928:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:44.665990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:44.924846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T16:58:44.992251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T16:58:45.004017Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503500018117946:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:58:45.004091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T16:58:45.079024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T16:58:45.155743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T16:58:45.211965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T16:58:45.268118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T16:58:45.382424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503521492956746:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:45.382546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:45.386414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503521492956751:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T16:58:45.393907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T16:58:45.412991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503521492956753:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T16:58:45.511088Z node 1 :TX_PROXY ERROR: Actor# [1:7489503521492956809:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T16:58:55.801251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T16:58:55.801293Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 26697, MsgBus: 27340 2025-04-04T17:05:48.547275Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489505337312777304:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:05:48.547531Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018d2/r3tmp/tmp9I89Wr/pdisk_1.dat 2025-04-04T17:05:49.695151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:05:49.714899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:05:49.735159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:05:49.798454Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:05:49.820018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:05:50.146633Z node 2 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.204371s 2025-04-04T17:05:50.146757Z node 2 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.204534s TServer::EnableGrpc on GrpcPort 26697, node 2 2025-04-04T17:05:50.700566Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:05:50.700604Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:05:50.700614Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:05:50.700793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27340 TClient is connected to server localhost:27340 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:05:52.295163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:05:53.542420Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489505337312777304:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:05:53.542506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:05:54.267758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489505363082581756:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:05:54.267888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489505363082581748:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:05:54.268043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:05:54.367426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:05:54.413925Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:05:54.414341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489505363082581762:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:05:54.511670Z node 2 :TX_PROXY ERROR: Actor# [2:7489505363082581813:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:05:58.956324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25047, MsgBus: 29069 2025-04-04T17:06:02.441371Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489505399475426650:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:06:02.553806Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018d2/r3tmp/tmp8yqz0q/pdisk_1.dat 2025-04-04T17:06:02.659931Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:06:02.660005Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:06:02.660719Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:06:02.674299Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25047, node 3 2025-04-04T17:06:02.727430Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:06:02.727448Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:06:02.727454Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:06:02.727544Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29069 TClient is connected to server localhost:29069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:06:03.436932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:06:03.450860Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:06:06.708160Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489505416655296397:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:06:06.708278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:06:06.843180Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:06:06.968984Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489505416655296610:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:06:06.969099Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:06:06.969488Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489505416655296615:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:06:06.979657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:06:06.994122Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489505416655296617:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:06:07.065417Z node 3 :TX_PROXY ERROR: Actor# [3:7489505420950263964:2467] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:06:07.440954Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489505399475426650:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:06:07.441029Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2025-04-04T16:59:57.106871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503830099905095:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:57.106932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:59:57.270771Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503829830822732:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:57.270905Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T16:59:57.376497Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489503832272439495:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T16:59:57.376687Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00130a/r3tmp/tmpdTfbVS/pdisk_1.dat 2025-04-04T16:59:58.778595Z 
node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:58.776972Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:58.790184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:59.751314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T16:59:59.776188Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:59.781030Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:59.795540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:59.796052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T16:59:59.796320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:59.823799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:59.823876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:59.824028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T16:59:59.824058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T16:59:59.848364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T16:59:59.863801Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T16:59:59.863834Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T16:59:59.888242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:00.015873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7655, node 1 2025-04-04T17:00:00.735092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:00.735121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:00.735128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:00.735269Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13775 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:01.828805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:00:02.126234Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503830099905095:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:02.126382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:02.274361Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503829830822732:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:02.274452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:02.377940Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489503832272439495:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:02.378020Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Killing node 1 Killing node 2 2025-04-04T17:00:14.753419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:00:14.753457Z node 1 :IMPORT WARN: Table profiles were not loaded Killing node 3 2025-04-04T17:00:24.878376Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489503947209421119:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:24.878459Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00130a/r3tmp/tmpBStD9S/pdisk_1.dat 2025-04-04T17:00:25.184428Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:25.219335Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:25.219425Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:25.240742Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 24891, node 5 2025-04-04T17:00:25.474740Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:00:25.474762Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:00:25.474768Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:00:25.474919Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:25.749512Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:9405 2025-04-04T17:00:26.206900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Root" DiffACL: "\n\033\010\000\022\027\010\001\020\200\200\002\032\ralice@builtin \003" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:00:26.207087Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:26.207303Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 1] name: Root type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:00:26.207318Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:00:26.207514Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-04T17:00:26.207539Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:00:26.207621Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-04T17:00:26.207635Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation I ... -04-04T17:06:00.716854Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7489505391485019555:3827], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01jr0w8dnp6pc1a1xc0zwzg8jq. 
SessionId : ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T17:06:00.717046Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7489505391485019555:3827], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01jr0w8dnp6pc1a1xc0zwzg8jq. SessionId : ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [50:7489505391485019551:3542] TaskId: 1 Stats: CpuTimeUs: 442 Tasks { TaskId: 1 CpuTimeUs: 225 FinishTimeMs: 1743786360716 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 76 BuildCpuTimeUs: 149 HostName: "ghrun-mup5cka3jq" NodeId: 50 CreateTimeMs: 1743786360716 } MaxMemoryUsage: 1048576 2025-04-04T17:06:00.717084Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7489505391485019555:3827], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01jr0w8dnp6pc1a1xc0zwzg8jq. SessionId : ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T17:06:00.717140Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-04T17:06:00.717363Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7489505391485019551:3542] TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [50:7489505382895084104:3542], seqNo: 1, nRows: 1 2025-04-04T17:06:00.717564Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7489505391485019551:3542] TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7489505391485019555:3827], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 442 Tasks { TaskId: 1 CpuTimeUs: 225 FinishTimeMs: 1743786360716 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 76 BuildCpuTimeUs: 149 HostName: "ghrun-mup5cka3jq" NodeId: 50 CreateTimeMs: 1743786360716 } MaxMemoryUsage: 1048576 } 2025-04-04T17:06:00.717699Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7489505391485019551:3542] TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [50:7489505391485019555:3827], 2025-04-04T17:06:00.717774Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: ExecuteState, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Forwarded TEvStreamData to [50:7489505382895084102:3541] 2025-04-04T17:06:00.718324Z node 50 :KQP_EXECUTER DEBUG: TxId: 281474976710674, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 8388572, to: [50:7489505391485019556:3827] 2025-04-04T17:06:00.718411Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7489505391485019555:3827], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01jr0w8dnp6pc1a1xc0zwzg8jq. SessionId : ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T17:06:00.718454Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. Tasks execution finished 2025-04-04T17:06:00.718480Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7489505391485019555:3827], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01jr0w8dnp6pc1a1xc0zwzg8jq. SessionId : ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T17:06:00.718594Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. pass away 2025-04-04T17:06:00.718747Z node 50 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T17:06:00.719149Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7489505391485019551:3542] TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7489505391485019555:3827], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1272 Tasks { TaskId: 1 CpuTimeUs: 229 FinishTimeMs: 1743786360718 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 80 BuildCpuTimeUs: 149 HostName: "ghrun-mup5cka3jq" NodeId: 50 CreateTimeMs: 1743786360716 } MaxMemoryUsage: 1048576 } 2025-04-04T17:06:00.719237Z node 50 :KQP_EXECUTER INFO: TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [50:7489505391485019555:3827] 2025-04-04T17:06:00.720692Z node 50 :KQP_EXECUTER INFO: ActorId: [50:7489505391485019551:3542] TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 4986 DurationUs: 4319 ExecuterCpuTimeUs: 3714 StartTimeMs: 1743786360714 FinishTimeMs: 1743786360719 Stages { StageGuid: "65790732-fde18376-a24392d1-4520d9a7" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (DataType \'Uint64)))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 1272 Tasks { TaskId: 1 CpuTimeUs: 229 FinishTimeMs: 1743786360718 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 80 BuildCpuTimeUs: 149 HostName: "ghrun-mup5cka3jq" NodeId: 50 CreateTimeMs: 1743786360716 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786360716 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"65790732-fde18376-a24392d1-4520d9a7\",\"Stats\":{\"BaseTimeMs\":1743786360716,\"ComputeNodes\":[{\"CpuTimeUs\":1272,\"Tasks\":[{\"ComputeTimeUs\":80,\"FinishTimeMs\":1743786360718,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":50,\"OutputBytes\":3,\"OutputRows\":1,\"ResultBytes\":3,\"ResultRows\":1,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 668 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\370\t\020\370\t\030\370\t \001" } } 2025-04-04T17:06:00.720767Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7489505391485019551:3542] TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 
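
The KQP_EXECUTER/KQP_COMPUTE trace around this point is the server side of a streamed scan: compute task 1 produces one row (OutputRows: 1), the executor forwards it with TEvStreamData, acks the channel, and the run ends with RESULT: [[3u]] plus per-stage stats. A hedged sketch of issuing that kind of streamed aggregate from the Python SDK; the endpoint, database and table path are assumptions for illustration:

    import ydb

    with ydb.Driver(endpoint="grpc://localhost:2136", database="/Root") as driver:
        driver.wait(timeout=5)
        # scan_query streams response parts; each part carries a result_set.
        for part in driver.table_client.scan_query(
            "SELECT COUNT(*) AS cnt FROM `/Root/OlapStore/log1`"
        ):
            for row in part.result_set.rows:
                print(row.cnt)  # analogous to RESULT: [[3u]] in this dump

Because results arrive as parts, the log shows seqNo-numbered stream/ack exchanges rather than a single reply message.
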
2025-04-04T17:06:00.720830Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7489505391485019551:3542] TxId: 281474976710674. Ctx: { TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001272s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:06:00.720930Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: ExecuteState, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:06:00.721722Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: ExecuteState, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1200.249 QueriesCount: 1 2025-04-04T17:06:00.721811Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: ExecuteState, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:06:00.721946Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: ExecuteState, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:06:00.721993Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: ExecuteState, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, EndCleanup, isFinal: 1 2025-04-04T17:06:00.722074Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: ExecuteState, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Sent query response back to proxy, proxyRequestId: 5, proxyId: [50:7489505305585668118:2087] 2025-04-04T17:06:00.722123Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: unknown state, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Cleanup temp tables: 0 2025-04-04T17:06:00.723172Z node 50 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786359000, txId: 18446744073709551615] shutting down 2025-04-04T17:06:00.723321Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=ZmRkMDZkZjgtODdkZWJiODAtNzc0N2RlNzAtN2NkM2U2ZDQ=, ActorId: [50:7489505382895084104:3542], ActorState: unknown state, TraceId: 01jr0w8dnp6pc1a1xc0zwzg8jq, Session actor destroyed RESULT: [[3u]] --------------------- STATS: total CPU: 1283 duration: 1181035 usec cpu: 851872 usec { name: "/Root/OlapStore/log1" reads { rows: 2 bytes: 16 } } duration: 4319 usec cpu: 4986 usec >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> 
test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] |83.9%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] |83.9%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |83.9%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_ttl.py::TestTTLDefaultEnv::test_case >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_ttl.py::TestTTLAlterSettings::test_case >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |84.0%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |84.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |84.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |84.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] |84.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> 
test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] >> test_auditlog.py::test_dml_begin_commit_logged |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_auditlog.py::test_single_dml_query_logged[delete] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_retry.py::TestRetry::test_fail_first[kikimr0] >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] >> test_dispatch.py::TestMapping::test_mapping >> test_cp_ic.py::TestCpIc::test_discovery |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> RetryPolicy::RetryWithBatching [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> YdbOlapStore::LogPagingAfter [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-04T17:00:13.237188Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.248362Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.248396Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:00:13.250861Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-04T17:00:13.250936Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.250971Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.255567Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006064s 2025-04-04T17:00:13.256192Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T17:00:13.256229Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.256255Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.256316Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009264s 2025-04-04T17:00:13.257083Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T17:00:13.257120Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.257149Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:00:13.257215Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006002s 2025-04-04T17:00:13.287804Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1743786013287770 2025-04-04T17:00:14.274811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489503903208426529:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:14.274858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:14.807248Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:00:14.838135Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489503904632723369:2290];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:14.838409Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:00:14.838532Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0010a9/r3tmp/tmpTvMyWL/pdisk_1.dat 2025-04-04T17:00:15.388456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:00:15.750563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:00:15.763449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:15.763568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:15.767179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:00:15.767291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:00:15.782459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:15.826966Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:00:15.827838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:00:16.132746Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.107407s 2025-04-04T17:00:16.132828Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.107507s TServer::EnableGrpc on GrpcPort 21872, node 1 2025-04-04T17:00:16.379822Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0010a9/r3tmp/yandexPTqXtx.tmp 2025-04-04T17:00:16.379858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0010a9/r3tmp/yandexPTqXtx.tmp 2025-04-04T17:00:16.380046Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0010a9/r3tmp/yandexPTqXtx.tmp 2025-04-04T17:00:16.380312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:00:16.458493Z INFO: TTestServer started on Port 4273 GrpcPort 21872 TClient is connected to server localhost:4273 PQClient connected to localhost:21872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:00:17.106453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
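
The opening records of this dump show the write session's retry policy at work: after each INTERNAL_ERROR the SDK schedules a reconnect with a growing randomized delay (0s, ~6 ms, ~9 ms, ...). A library-free sketch of that exponential-backoff-with-jitter shape; the attempt count and delay bounds are illustrative, not the SDK's actual constants:

    import random
    import time

    def call_with_backoff(op, attempts=6, base_delay=0.005, max_delay=30.0):
        for attempt in range(attempts):
            try:
                return op()
            except RuntimeError:
                if attempt == attempts - 1:
                    raise
                # Double the delay each attempt, cap it, and add jitter so
                # concurrent sessions do not reconnect in lockstep.
                delay = min(max_delay, base_delay * 2 ** attempt)
                time.sleep(delay * random.uniform(0.5, 1.5))
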
2025-04-04T17:00:19.278423Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489503903208426529:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:19.278494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:19.662606Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489503904632723369:2290];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:00:19.662726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:00:19.895790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503924683264094:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.895982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503924683264085:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.896090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.895790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503926107559957:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.896852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489503926107559943:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.896986Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.916206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:00:19.949900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489503924683264133:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.950023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:00:19.953504Z node 2 :TX_PROXY ERROR: Actor# [2:7489503926107559960:2125] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:00:19.976119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489503924683264100:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:00:19.976123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489503926107559959:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:00:20.035327Z node 1 :TX_PROXY ERROR: Actor# [1:7489503928978231478:2702] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:20.082175Z node 2 :TX_PROXY ERROR: Actor# [2:7489503930402527283:2131] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:00:21.142542Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489503928978231497:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error ... ion 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2025-04-04T17:06:41.277631Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2025-04-04T17:06:41.278550Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1743786401277 2025-04-04T17:06:41.278800Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:06:41.278897Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-04-04T17:06:41.283108Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7489505557479923625:2608] 2025-04-04T17:06:41.283231Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:06:41.283238Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-04-04T17:06:41.283290Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283353Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T17:06:41.283387Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283446Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-04T17:06:41.283476Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283510Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-04T17:06:41.283537Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283564Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-04T17:06:41.283582Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-04T17:06:41.283611Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-04T17:06:41.283631Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283659Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-04T17:06:41.283677Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283705Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-04T17:06:41.283724Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283752Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-04T17:06:41.283770Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:06:41.283798Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-04T17:06:41.283816Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-04T17:06:41.283844Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-04T17:06:41.284056Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:06:41.284104Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-04T17:06:41.284276Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T17:06:41.284449Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:06:41.284923Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-04T17:06:41.284963Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-04-04T17:06:41.285158Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-04T17:06:41.285182Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:06:41.285264Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743786401270 queuesize 0 startOffset 0 2025-04-04T17:06:41.285231Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 6 queued_in_partition_duration_ms: 6 } 2025-04-04T17:06:41.304892Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 1 2025-04-04T17:06:41.304952Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 2 2025-04-04T17:06:41.304983Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 3 2025-04-04T17:06:41.305010Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 4 2025-04-04T17:06:41.305051Z :DEBUG: 
[/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 5 2025-04-04T17:06:41.305087Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 6 2025-04-04T17:06:41.305107Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 7 2025-04-04T17:06:41.305134Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 8 2025-04-04T17:06:41.305181Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 9 2025-04-04T17:06:41.305211Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: acknoledged message 10 2025-04-04T17:06:41.305521Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: close. Timeout = 0 ms 2025-04-04T17:06:41.305565Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session will now close 2025-04-04T17:06:41.305632Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: aborting 2025-04-04T17:06:41.306075Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:06:41.306123Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0] Write session: destroy 2025-04-04T17:06:41.307073Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0 grpc read done: success: 0 data: 2025-04-04T17:06:41.307104Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0 grpc read failed 2025-04-04T17:06:41.307611Z node 17 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 7 sessionId: test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0 2025-04-04T17:06:41.307632Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|e5893772-7b2fd67b-22a1bca9-e4ae38d9_0 is DEAD 2025-04-04T17:06:41.308136Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:06:41.308528Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7489505566069858499:2634] destroyed 2025-04-04T17:06:41.308590Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
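
The PERSQUEUE/PQ_WRITE_PROXY records above trace one producer session end to end: ten messages with seqNo 1..10 are batched into a single blob, assigned offsets 0..9, acknowledged in order, and the session shuts down gracefully. A hedged client-side counterpart using the topic API of a reasonably recent YDB Python SDK; the endpoint, topic path and producer id are illustrative assumptions:

    import ydb

    with ydb.Driver(endpoint="grpc://localhost:2136", database="/Root") as driver:
        driver.wait(timeout=5)
        with driver.topic_client.writer(
            "test-topic", producer_id="test-message-group-id"
        ) as writer:
            for seq_no in range(1, 11):
                writer.write(ydb.TopicWriterMessage(f"message-{seq_no}"))
            writer.flush()  # returns once all ten writes are acknowledged

The producer id plays the role of the sourceId in the partition log: it is what lets the broker deduplicate by seqNo and hand back in-order acknowledgements.
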
|84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |84.4%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-04-04T17:03:22.860804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504709715255836:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:03:22.860883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00129a/r3tmp/tmpNGzyuN/pdisk_1.dat 2025-04-04T17:03:23.453659Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:03:23.463352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:03:23.463454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:03:23.478129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25275, node 1 2025-04-04T17:03:23.683536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:03:23.683561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:03:23.683573Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:03:23.683736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:03:24.209521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23791 2025-04-04T17:03:24.886858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:03:25.120242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:25.120520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:25.120808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:25.120936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:25.121037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:25.121184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:25.121292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:25.121399Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:25.121545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:25.121662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:25.121783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:25.121894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489504722600158810:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:25.179135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:25.179208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:25.179462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:25.179612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:25.179709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:25.179834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:25.179959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:25.180052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:25.180172Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:25.180279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:25.180401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:25.180494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489504722600158811:2326];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:25.216567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:25.216638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:25.216849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:25.216968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:25.217075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:25.217170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:25.217254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:25.217337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:25.217447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489504722600158812:2327];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=S ... NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Compute actor has finished execution: [28:7489505574483541574:3603] 2025-04-04T17:06:43.389770Z node 28 :KQP_EXECUTER INFO: ActorId: [28:7489505574483541473:3526] TxId: 281474976710670. Ctx: { TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 208212 DurationUs: 188541 Tables { TablePath: "/Root/OlapStore/log1" } ExecuterCpuTimeUs: 103243 StartTimeMs: 1743786403197 FinishTimeMs: 1743786403386 Stages { StageGuid: "2982bf8c-ed1437cb-11a475d9-92f88b3f" Program: "(\n(declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32)) (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32))))))\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/OlapStore/log1\" \'\"72057594046644480:3\" \'\"\" \'1))\n (let $2 (OptionalType (DataType \'Utf8)))\n (let $3 (TupleType (OptionalType (DataType \'Timestamp)) $2 $2 $2 (DataType \'Int32)))\n (let $4 \'(\'\"level\" \'\"message\" \'\"resource_id\" \'\"resource_type\" \'\"timestamp\" \'\"uid\"))\n (let $5 \'(\'\"timestamp\" \'\"resource_type\" \'\"resource_id\" \'\"uid\"))\n (let $6 \'(\'(\'\"UsedKeyColumns\" $5) \'(\'\"ExpectedMaxRanges\" \'4) \'(\'\"PointPrefixLen\" \'0)))\n (let $7 (KqpWideReadOlapTableRanges $1 %kqp%tx_result_binding_0_0 $4 \'() $6 (lambda \'($10) (block \'(\n (let $11 \'(\'eq \'\"resource_type\" (String \'\"app\")))\n (let $12 \'(\'eq \'\"resource_id\" (String \'\"resource_1\")))\n (return (KqpOlapFilter $10 (KqpOlapAnd $11 $12)))\n )))))\n (let $8 (Bool \'true))\n (let $9 \'(\'(\'4 $8) \'(\'3 $8) \'(\'2 $8) \'(\'5 $8)))\n (return (FromFlow (WideTop $7 (Uint64 \'50) $9)))\n))))\n)\n" ComputeActors { CpuTimeUs: 1375 Tasks { TaskId: 36 CpuTimeUs: 634 FinishTimeMs: 1743786403358 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 33 BuildCpuTimeUs: 601 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786403261 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786403329 } Stages { StageId: 2 StageGuid: "7363c894-ed2943f2-c78d8d47-89bf6833" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (ToFlow $1) (lambda \'($2 $3 $4 $5 $6 $7) (AsStruct \'(\'\"level\" $2) \'(\'\"message\" $3) \'(\'\"resource_id\" $4) \'(\'\"resource_type\" $5) \'(\'\"timestamp\" $6) \'(\'\"uid\" $7)))))))\n)\n" ComputeActors { CpuTimeUs: 1587 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 209 FinishTimeMs: 1743786403385 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 171 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786403289 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786403329 } Stages { StageId: 1 StageGuid: "8936245-79987097-8e98bbf7-19907414" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (Bool \'true))\n (let $3 \'(\'(\'4 $2) \'(\'3 $2) \'(\'2 $2) \'(\'5 $2)))\n (let $4 (WideTop (ToFlow $1) (Uint64 \'50) $3))\n (let $5 (Bool \'false))\n (let $6 \'(\'(\'4 $5) \'(\'3 $5) \'(\'2 $5) \'(\'5 $5)))\n (return (FromFlow (WideSort $4 $6)))\n))))\n)\n" BaseTimeMs: 1743786403329 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":7,\"Plans\":[{\"Node 
Type\":\"ResultSet_1\",\"PlanNodeId\":6,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Stage\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":4,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Top\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Top\",\"TopBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top-Filter-TableRangeScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"Top\",\"TopBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"resource_type == \\\"app\\\" AND resource_id == \\\"resource_1\\\"\",\"Pushdown\":\"True\"},{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TableRangeScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"level\",\"message\",\"resource_id\",\"resource_type\",\"timestamp\",\"uid\"],\"ReadRanges\":[\"timestamp (3000000, +∞)\",\"resource_type (nginx, +∞)\",\"resource_id (resource_), +∞)\",\"uid (10, +∞)\"],\"ReadRangesExpectedSize\":4,\"ReadRangesKeys\":[\"timestamp\",\"resource_type\",\"resource_id\",\"uid\"],\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Assign\":{\"Column\":{\"Id\":11},\"Constant\":{\"Bytes\":\"app\"}}},{\"Assign\":{\"Column\":{\"Id\":12},\"Function\":{\"Arguments\":[{\"Id\":6},{\"Id\":11}],\"FunctionType\":2,\"KernelIdx\":0,\"YqlOperationId\":11}}},{\"Assign\":{\"Column\":{\"Id\":13},\"Constant\":{\"Bytes\":\"resource_1\"}}},{\"Assign\":{\"Column\":{\"Id\":14},\"Function\":{\"Arguments\":[{\"Id\":3},{\"Id\":13}],\"FunctionType\":2,\"KernelIdx\":1,\"YqlOperationId\":11}}},{\"Assign\":{\"Column\":{\"Id\":15},\"Function\":{\"Arguments\":[{\"Id\":12},{\"Id\":14}],\"FunctionType\":2,\"KernelIdx\":2,\"YqlOperationId\":0}}},{\"Filter\":{\"Predicate\":{\"Id\":15}}},{\"Projection\":{\"Columns\":[{\"Id\":7},{\"Id\":1},{\"Id\":3},{\"Id\":6},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"2982bf8c-ed1437cb-11a475d9-92f88b3f\",\"Stats\":{\"BaseTimeMs\":1743786403329,\"ComputeNodes\":[{\"CpuTimeUs\":1375,\"Tasks\":[{\"ComputeTimeUs\":33,\"FinishTimeMs\":1743786403358,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":28,\"TaskId\":36}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"8936245-79987097-8e98bbf7-19907414\",\"Stats\":{\"BaseTimeMs\":1743786403329,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid (Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"7363c894-ed2943f2-c78d8d47-89bf6833\",\"Stats\":{\"BaseTimeMs\":1743786403329,\"ComputeNodes\":[{\"CpuTimeUs\":1587,\"Tasks\":[{\"ComputeTimeUs\":38,\"FinishTimeMs\":1743786403385,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":28,\"TaskId\":66}]}],\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3101 Extra { type_url: 
"type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\014\010\272\t\020\302T\030\211\264\006 B" } } 2025-04-04T17:06:43.389828Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7489505574483541473:3526] TxId: 281474976710670. Ctx: { TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:06:43.389889Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7489505574483541473:3526] TxId: 281474976710670. Ctx: { TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.104969s ReadRows: 0 ReadBytes: 0 ru: 69 rate limiter was not found force flag: 1 2025-04-04T17:06:43.389974Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: ExecuteState, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:06:43.390278Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976710670, task: 66. pass away 2025-04-04T17:06:43.390418Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=66;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T17:06:43.390577Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7489505574483541574:3603], TxId: 281474976710670, task: 66. Ctx: { TraceId : 01jr0w9r62a0yx1y2vpmesyj6p. SessionId : ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [28:7489505574483541473:3526] TaskId: 66 Stats: CpuTimeUs: 1587 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 209 FinishTimeMs: 1743786403385 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 171 HostName: "ghrun-mup5cka3jq" NodeId: 28 CreateTimeMs: 1743786403289 } MaxMemoryUsage: 1048576 2025-04-04T17:06:43.391187Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: ExecuteState, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 200.535 QueriesCount: 1 2025-04-04T17:06:43.391262Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: ExecuteState, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:06:43.391375Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: ExecuteState, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:06:43.391422Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: ExecuteState, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, EndCleanup, isFinal: 1 2025-04-04T17:06:43.391493Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: ExecuteState, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7489505510059027750:2280] 2025-04-04T17:06:43.391537Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: unknown state, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Cleanup temp tables: 0 2025-04-04T17:06:43.395240Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786403000, txId: 18446744073709551615] shutting down 2025-04-04T17:06:43.395411Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NGY5OTkzOTktZTViYWJlZGItNDZkZGYyNjAtZDViYWUwYTM=, ActorId: [28:7489505565893606828:3526], ActorState: unknown state, TraceId: 01jr0w9r62a0yx1y2vpmesyj6p, Session actor destroyed RESULT: [] --------------------- STATS: total CPU: 2732 duration: 1461 usec cpu: 1461 usec duration: 188541 usec cpu: 208212 usec { name: "/Root/OlapStore/log1" } |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001554/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt 2025-04-04T17:06:52.595950Z: {"tx_id":"01jr0wa2hk2jcp3q79m3ytwam5","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:06:52.595917Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-04T17:06:52.595401Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-04-04T17:06:52.727686Z: {"tx_id":"01jr0wa2hk2jcp3q79m3ytwam5","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:06:52.727644Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-04T17:06:52.607643Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:06:52.742234Z: {"tx_id":"01jr0wa2hk2jcp3q79m3ytwam5","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:06:52.742195Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-04-04T17:06:52.736648Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001523/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001547/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2025-04-04T17:06:53.176222Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE 
DYNCONFIG","remote_address":"127.0.0.1"} |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/00155c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 2025-04-04T17:06:52.694139Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001552/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 
2025-04-04T17:06:53.240582Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:06:53.240540Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-04T17:06:53.027675Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001507/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit.txt 2025-04-04T17:06:57.339973Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:06:57.339921Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-04T17:06:57.220507Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/0014f4/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit.txt 2025-04-04T17:06:59.861832Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:06:59.861789Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into 
`/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-04T17:06:59.562094Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/0014f0/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit.txt 2025-04-04T17:07:00.230083Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] >> test_auditlog.py::test_dynconfig [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/0014b6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit.txt 2025-04-04T17:07:04.491805Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with 
'%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001485/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2025-04-04T17:07:05.137741Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:05.137696Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-04T17:07:05.123943Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:05.262328Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:05.262293Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-04T17:07:05.247686Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:05.390373Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:05.390339Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-04T17:07:05.377125Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:05.530805Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:05.530764Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-04T17:07:05.507655Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:05.661475Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:05.661438Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-04T17:07:05.646813Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:05.781930Z: {"database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:05.781898Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 
5)","start_time":"2025-04-04T17:07:05.771394Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001440/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit.txt 2025-04-04T17:07:06.763005Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/00142d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt 2025-04-04T17:07:08.831370Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** 
(C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method >> test_retry.py::TestRetry::test_low_rate[kikimr0] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001420/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit.txt >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/001430/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit.txt 2025-04-04T17:07:09.724710Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:09.724663Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-04-04T17:07:09.499747Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:10.014219Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:10.014183Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-04-04T17:07:09.840715Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:10.237524Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:10.237490Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-04-04T17:07:10.129086Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:10.478561Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:10.478500Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` 
set value = 0 where id = 1","start_time":"2025-04-04T17:07:10.350720Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:10.692818Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:10.692780Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-04T17:07:10.587352Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-04-04T17:07:10.893833Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:10.893799Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-04T17:07:10.803343Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |85.1%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/i5tq/00140f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt 2025-04-04T17:07:14.237174Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |85.2%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
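Every audit record dumped above follows the same one-record-per-line layout: an ISO-8601 timestamp, a colon, and a JSON object. A minimal parsing sketch, assuming only the audit.txt layout visible in these dumps; the helper below is illustrative and is not the helper test_auditlog.py actually uses:

import json
import re

# One record per line: "<ISO-8601 timestamp ending in Z>: {json payload}",
# exactly as dumped into audit.txt in the test output above.
RECORD_RE = re.compile(r"^(?P<ts>\S+Z): (?P<payload>\{.*\})$")

def parse_audit_line(line: str):
    """Return (timestamp, record dict) or None for non-record lines."""
    m = RECORD_RE.match(line.strip())
    if m is None:
        return None
    return m.group("ts"), json.loads(m.group("payload"))

# Example with a record shaped like the ones above (fields shortened):
sample = ('2025-04-04T17:07:10.893833Z: {"status":"SUCCESS",'
          '"operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}')
ts, record = parse_audit_line(sample)
assert record["operation"] == "ExecuteDataQueryRequest"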
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.1%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/00140f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt
2025-04-04T17:07:14.237174Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"}
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.2%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks
>> TStateStorageTest::ShouldSaveGetOldSmallState
>> TStateStorageTest::ShouldDeleteNoCheckpoints
>> TCheckpointStorageTest::ShouldRegisterCoordinator
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged
>> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo
>> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD]
>> TStorageServiceTest::ShouldNotRegisterPrevGeneration
>> TCheckpointStorageTest::ShouldCreateCheckpoint
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo]
>> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo]
>> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD]
>> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD]
>> TStateStorageTest::ShouldIssueErrorOnNonExistentState
>> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD]
>> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD]
>> TCheckpointStorageTest::ShouldGetCoordinators
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0]
>> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD]
>> TStorageServiceTest::ShouldCreateCheckpoint
>> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD]
>> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD]
>> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD]
>> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD]
>> TStateStorageTest::ShouldDeleteNoCheckpoints2
>> TStateStorageTest::ShouldSaveGetOldBigState
>> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD]
>> TCheckpointStorageTest::ShouldCreateGetCheckpoints
>> TCheckpointStorageTest::ShouldGetCoordinators [GOOD]
>> TCheckpointStorageTest::ShouldMarkCheckpointsGc
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0]
>> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD]
>> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered
>> TStateStorageTest::ShouldSaveGetOldBigState [GOOD]
>> TStateStorageTest::ShouldSaveGetIncrementSmallState
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo]
>> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo]
>> TStateStorageTest::ShouldLoadLastSnapshot
>> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation
>> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD]
>> TStateStorageTest::ShouldDeleteCheckpoints
>> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD]
>> TGcTest::ShouldRemovePreviousCheckpoints
>> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_dispatch.py::TestMapping::test_mapping [GOOD]
>> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD]
>> TStateStorageTest::ShouldSaveGetIncrementBigState
>> TStorageServiceTest::ShouldCreateCheckpoint [GOOD]
>> TStorageServiceTest::ShouldGetCheckpoints
>> TStateStorageTest::ShouldLoadLastSnapshot [GOOD]
>> TStateStorageTest::ShouldNotGetNonExistendSnaphotState
>> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD]
>> TStorageServiceTest::ShouldNotCreateCheckpointTwice
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo]
>> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD]
>> TStateStorageTest::ShouldDeleteCheckpoints [GOOD]
>> TStateStorageTest::ShouldDeleteGraph
>> TStateStorageTest::ShouldNotGetNonExistendState
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD]
>> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD]
>> TStateStorageTest::ShouldLoadIncrementSnapshot
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/0013d6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt
2025-04-04T17:07:21.954108Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:21.954075Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-04T17:07:21.727354Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
>> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD]
>> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints
>> TStateStorageTest::ShouldNotGetNonExistendState [GOOD]
>> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD]
>> TCheckpointStorageTest::ShouldGetCheckpointsEmpty
>> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending
>> TStateStorageTest::ShouldDeleteGraph [GOOD]
>> TStateStorageTest::ShouldGetMultipleStates
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/0013f3/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt
2025-04-04T17:07:18.340164Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:18.340111Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-04-04T17:07:18.206803Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD]
>> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD]
>> TCheckpointStorageTest::ShouldDeleteGraph
>> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation
>> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD]
>> TStorageServiceTest::ShouldGetCheckpoints [GOOD]
>> TStorageServiceTest::ShouldAbortCheckpoint
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo]
>> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD]
>> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged
>> test_dispatch.py::TestMapping::test_idle
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/0013b9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit.txt
2025-04-04T17:07:27.997469Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"}
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo]
|85.2%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log}
|85.2%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/0013a8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit.txt
2025-04-04T17:07:29.155343Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"}
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> TStateStorageTest::ShouldGetMultipleStates [GOOD]
>> TCheckpointStorageTest::ShouldDeleteGraph [GOOD]
>> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints
>> TGcTest::ShouldRemovePreviousCheckpoints [GOOD]
>> TGcTest::ShouldIgnoreIncrementCheckpoint
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD]
>> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD]
>> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId
>> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD]
>> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD]
>> TStorageServiceTest::ShouldAbortCheckpoint [GOOD]
>> TStorageServiceTest::ShouldGetState
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/0013bc/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt
2025-04-04T17:07:29.811096Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-04-04T17:07:29.811044Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-04T17:07:29.585052Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"}
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/00139d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit.txt
2025-04-04T17:07:32.108968Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:32.108925Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-04-04T17:07:31.871075Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"}
>> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD]
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD]
Test command err:
2025-04-04T17:07:30.371410Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7489505768785970306:2048] with connection to localhost:6455:local
2025-04-04T17:07:30.371721Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:31.749300Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:31.749330Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:31.753946Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:31.949151Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130
2025-04-04T17:07:31.949184Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:34.506633Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7489505786346363960:2048] with connection to localhost:6455:local
2025-04-04T17:07:34.506734Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:35.424450Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130
2025-04-04T17:07:35.424489Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:37.425142Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7489505801383741013:2048] with connection to localhost:6455:local
2025-04-04T17:07:37.425231Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:37.784967Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:37.784996Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:37.787697Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:39.564499Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:39.564540Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:39.567467Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:40.177929Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Conflict with existing key., code: 2012
2025-04-04T17:07:40.177963Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:42.284890Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7489505819688288330:2048] with connection to localhost:6455:local
2025-04-04T17:07:42.284979Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:42.727591Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:42.727621Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:42.727920Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-04T17:07:43.044374Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-04-04T17:07:43.044403Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-04T17:07:45.363240Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7489505832024915300:2048] with connection to localhost:6455:local
2025-04-04T17:07:45.363321Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:45.817298Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:45.817327Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:45.822225Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:47.832657Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:47.832694Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:47.834413Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:48.500891Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-04-04T17:07:48.500922Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:48.501981Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-04T17:07:48.863906Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-04-04T17:07:48.863946Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
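The 400130 warnings above all come from one fencing rule: each graph's row in the coordinators_sync table carries a current generation, every request presents an expected/new generation, and stale coordinators are turned away. A toy model of that rule in Python, with the comparisons inferred from these messages only (the real check lives inside YDB's checkpoint storage and may differ in detail):

# Toy model of the generation fencing visible in the warnings above
# ("pk: graph_graphich, current generation: N, expected/new generation: M,
#  operation: RegisterCheck|Check, code: 400130").
class GenerationMismatch(Exception):
    """Stand-in for the 'code: 400130' failures in the log."""

class CoordinatorsSync:
    def __init__(self):
        self._current = {}  # pk -> current generation

    def register_check(self, pk: str, new_gen: int) -> None:
        current = self._current.get(pk, 0)
        if new_gen < current:  # assumption: stale coordinators are fenced off
            raise GenerationMismatch(
                f"pk: {pk}, current generation: {current}, "
                f"expected/new generation: {new_gen}, operation: RegisterCheck")
        self._current[pk] = new_gen

    def check(self, pk: str, expected_gen: int) -> None:
        current = self._current.get(pk, 0)
        if expected_gen != current:  # assumption: ops need the exact generation
            raise GenerationMismatch(
                f"pk: {pk}, current generation: {current}, "
                f"expected/new generation: {expected_gen}, operation: Check")

sync = CoordinatorsSync()
sync.register_check("graph_graphich", 17)   # first registration succeeds
sync.register_check("graph_graphich", 18)   # a newer generation takes over
try:
    sync.check("graph_graphich", 17)        # the old generation is rejected,
except GenerationMismatch:                  # mirroring the warnings above
    pass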
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD]
Test command err:
AAA /home/runner/.ya/build/build_root/i5tq/001375/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit.txt
2025-04-04T17:07:34.368144Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-04-04T17:07:34.368093Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-04-04T17:07:34.259283Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD]
Test command err:
2025-04-04T17:07:30.344099Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7489505766989569716:2048] with connection to localhost:11365:local
2025-04-04T17:07:30.347887Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:31.174396Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:31.174424Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:31.175559Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:32.782376Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:32.782405Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:32.783917Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:33.125594Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-04-04T17:07:33.125620Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:33.126978Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-04-04T17:07:33.369745Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-04-04T17:07:33.369776Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-04-04T17:07:35.172240Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7489505792677862932:2048] with connection to localhost:11365:local
2025-04-04T17:07:35.172312Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:35.414857Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:35.414894Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:35.423129Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-04-04T17:07:35.763565Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-04-04T17:07:35.763597Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2025-04-04T17:07:37.536132Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7489505800563232687:2048] with connection to localhost:11365:local
2025-04-04T17:07:37.536220Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:38.067798Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:38.067823Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:38.070518Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest
2025-04-04T17:07:38.506445Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080
2025-04-04T17:07:38.506483Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse
2025-04-04T17:07:40.104292Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7489505813379277899:2048] with connection to localhost:11365:local
2025-04-04T17:07:40.104388Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:40.578127Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:40.578152Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:40.578697Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:42.367076Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:42.367104Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:42.372204Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-04-04T17:07:42.790697Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080
2025-04-04T17:07:42.790730Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
2025-04-04T17:07:44.586686Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7489505831852596296:2048] with connection to localhost:11365:local
2025-04-04T17:07:44.586779Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:44.883760Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:44.883789Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:44.884023Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:47.334555Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:47.334587Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:47.338737Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-04T17:07:48.473409Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2025-04-04T17:07:48.473442Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-04T17:07:48.474124Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:49.086713Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered
2025-04-04T17:07:49.086741Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:49.087606Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest
2025-04-04T17:07:49.310290Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130
2025-04-04T17:07:49.310326Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse
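Besides generation fencing, the 400080 failures in the same dump trace a checkpoint status lifecycle: a created checkpoint starts as Pending, must be moved to PendingCommit before it can be marked Completed, and operations on a checkpoint that was never created fail to select it. A toy sketch of that state machine, inferred from the messages above rather than taken from the YDB sources:

# Toy model of the transitions traced above:
# Create -> 'Pending', TEvSetCheckpointPendingCommitStatus -> 'PendingCommit',
# TEvCompleteCheckpoint -> 'Completed' only from 'PendingCommit'.
class CheckpointError(Exception):
    """Stand-in for the 'code: 400080' failures in the log."""

class CheckpointStore:
    def __init__(self):
        self._status = {}  # checkpoint id, e.g. "17:1" -> status

    def create(self, cp: str) -> None:
        self._status[cp] = "Pending"

    def set_pending_commit(self, cp: str) -> None:
        if cp not in self._status:
            raise CheckpointError(f"Failed to select checkpoint '{cp}'")
        self._status[cp] = "PendingCommit"

    def complete(self, cp: str) -> None:
        status = self._status.get(cp)
        if status is None:
            raise CheckpointError(f"Failed to select checkpoint '{cp}'")
        if status != "PendingCommit":
            raise CheckpointError(
                f"Selected checkpoint '{cp}' with status {status}, "
                f"while expected PendingCommit")
        self._status[cp] = "Completed"

store = CheckpointStore()
store.create("17:1")
try:
    store.complete("17:1")      # rejected: still 'Pending'
except CheckpointError:
    pass
store.set_pending_commit("17:1")
store.complete("17:1")          # now allowed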
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD]
>> TStorageServiceTest::ShouldGetState [GOOD]
|85.3%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD]
|85.3%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std]
>> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD]
>> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD]
>> TStateStorageTest::ShouldCountStates
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0]
>> TStateStorageTest::ShouldCountStates [GOOD]
>> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter
2025-04-04 17:07:56,907 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-04 17:07:57,391 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
208165 48.0M 47.0M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/00107f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args
209918 3.4G 3.3G 3.3G └─ ydb-core-viewer-ut --trace-path-append /home/runner/.ya/build/build_root/i5tq/00107f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/ytest.report.trace
Test command err:
2025-04-04T16:59:05.427861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3108:2432], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.430351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.431422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.434947Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2400:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.436633Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3111:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.437481Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.438551Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2409:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.438823Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2412:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.439051Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2415:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.439618Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.439815Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.440180Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.441782Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.441884Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.442002Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.443116Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.443189Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.443275Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.443950Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2403:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.445278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3105:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.446318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.446444Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.448428Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.448595Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T16:59:05.454142Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2406:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T16:59:05.455480Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T16:59:05.456163Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-04T16:59:06.067483Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T16:59:06.321814Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt
2025-04-04T16:59:06.361086Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000
2025-04-04T16:59:07.011379Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000
TServer::EnableGrpc on GrpcPort 7559, node 1
TClient is connected to server localhost:65201
2025-04-04T16:59:07.335530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T16:59:07.335589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T16:59:07.335624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T16:59:07.336307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:00:54.914111Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:3102:2432], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:00:54.916882Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:00:54.917124Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:00:54.922609Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1714:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:00:54.923692Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3108:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:00:54.925048Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3105:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:00:54.925422Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:00:54.926228Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:00:54.926497Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:3111:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:00:54.926864Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:00:54.927625Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:00:54.927725Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:00:54.927984Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:3114:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:00:54.928333Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1717:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:00:54.929263Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:00:54.929385Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:00:54.929919Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:00:54.929977Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:00:54.930123Z node 18 :METADATA_PR ... etect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:03:22.139240Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:03:22.139766Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:03:22.139856Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:03:22.141123Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:03:22.143646Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [20:1478:2179], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:03:22.145195Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:03:22.145262Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-04T17:03:22.943301Z node 19 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:03:23.793318Z node 19 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt
2025-04-04T17:03:23.884642Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000
2025-04-04T17:03:25.946190Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000
TServer::EnableGrpc on GrpcPort 8640, node 19
TClient is connected to server localhost:16808
2025-04-04T17:03:27.208755Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:03:27.208889Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:03:27.208988Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:03:27.210310Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:06:19.312120Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:3157:2435], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.313100Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.314416Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.316233Z node 33 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [33:1968:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.316447Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:1971:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.317137Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:1974:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.318844Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.318930Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.319305Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:1977:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.319971Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.320038Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.320574Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.320629Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.320665Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.321222Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [31:3110:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.321350Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.322501Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:3153:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.322648Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.323506Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.323698Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.324331Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.324677Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [32:3113:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.325481Z node 30 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [30:3160:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:06:19.325998Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.326574Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:06:19.326738Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-04-04T17:06:19.327524Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
2025-04-04T17:06:19.937670Z node 28 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:06:20.181985Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt
2025-04-04T17:06:20.213391Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000
2025-04-04T17:06:21.244591Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000
TServer::EnableGrpc on GrpcPort 63972, node 28
TClient is connected to server localhost:5137
2025-04-04T17:06:21.932377Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:06:21.932486Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:06:21.932574Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:06:21.933350Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/00107f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/00107f/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/chunk5/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
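The traceback above shows the test wrapper polling the test process and giving up once its 600-second budget runs out. A generic helper of the same shape, for illustration only (this is not the yatest.common.process implementation):

import time

def wait_for(check, timeout: float, poll_interval: float = 0.5):
    """Poll check() until it returns truthy or `timeout` seconds elapse."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if check():
            return True
        time.sleep(poll_interval)
    raise TimeoutError(f"{timeout} second(s) wait timeout has expired")

# e.g. wait_for(lambda: process.poll() is not None, timeout=600)
# where `process` is a hypothetical subprocess handle.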
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD]
Test command err:
2025-04-04T17:07:32.892598Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7489505781811114230:2048] with connection to localhost:26283:local
2025-04-04T17:07:32.892684Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:33.318329Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:33.318365Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:33.342383Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:34.997763Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:34.997799Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:36.917718Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7489505796507000220:2048] with connection to localhost:26283:local
2025-04-04T17:07:36.917816Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:37.314441Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:37.314476Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:37.314808Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:39.353317Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:39.353347Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:39.354752Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-04-04T17:07:39.988851Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created
2025-04-04T17:07:39.988886Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-04-04T17:07:39.990087Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest
2025-04-04T17:07:40.503894Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created
2025-04-04T17:07:40.503923Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse
2025-04-04T17:07:40.510530Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-04T17:07:40.871363Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-04T17:07:42.894728Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7489505822613549398:2048] with connection to localhost:26283:local
2025-04-04T17:07:42.894820Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:43.259031Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:43.259059Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:43.259479Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:45.256560Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:45.256588Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:45.258478Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-04T17:07:45.981407Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit'
2025-04-04T17:07:45.981443Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-04T17:07:45.984174Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest
2025-04-04T17:07:46.652095Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created
2025-04-04T17:07:46.652125Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse
2025-04-04T17:07:46.658085Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest
2025-04-04T17:07:47.293305Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit'
2025-04-04T17:07:47.293335Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse
2025-04-04T17:07:47.295694Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest
2025-04-04T17:07:47.811785Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed'
2025-04-04T17:07:47.811819Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse
2025-04-04T17:07:47.814938Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest
2025-04-04T17:07:48.128792Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted
2025-04-04T17:07:48.128830Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse
2025-04-04T17:07:48.129960Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest
2025-04-04T17:07:48.723198Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted
2025-04-04T17:07:48.723226Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse
2025-04-04T17:07:48.726730Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest
2025-04-04T17:07:49.449653Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
2025-04-04T17:07:52.209835Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7489505861358607259:2048] with connection to localhost:26283:local
2025-04-04T17:07:52.209925Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest
2025-04-04T17:07:52.693668Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered
2025-04-04T17:07:52.693699Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-04-04T17:07:52.694206Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest
2025-04-04T17:07:55.079415Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created
2025-04-04T17:07:55.079451Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse
2025-04-04T17:07:55.080036Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317
2025-04-04T17:07:55.491266Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317
2025-04-04T17:07:55.493380Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317
2025-04-04T17:07:55.494991Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317}
2025-04-04T17:07:55.495026Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317
2025-04-04T17:07:56.401543Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results:
2025-04-04T17:07:56.401648Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1
2025-04-04T17:07:56.401681Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints
2025-04-04T17:07:56.509224Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0
2025-04-04T17:07:57.287298Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49
2025-04-04T17:07:57.287378Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements
2025-04-04T17:07:57.298830Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317}
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std]
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest
>> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD]
>> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD]
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD]
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std]
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD]
Test command err:
2025-04-04T17:07:41.217184Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:36:2083]
Count graph descriptions query:
--!syntax_v1
PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints");
SELECT * FROM checkpoints_graphs_description;
2025-04-04T17:07:41.751240Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph'
2025-04-04T17:07:42.458721Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3
Count graph descriptions query:
--!syntax_v1
PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints");
SELECT * FROM checkpoints_graphs_description;
2025-04-04T17:07:56.257374Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:36:2083]
Count graph descriptions query:
--!syntax_v1
PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint");
SELECT * FROM checkpoints_graphs_description;
2025-04-04T17:07:56.716565Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph'
2025-04-04T17:07:56.716624Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph'
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo]
|85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters
>> TFlatTest::SplitEmptyAndWrite
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD]
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo]
>> YdbS3Internal::TestS3Listing
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest
>> TFlatTest::SplitEmptyAndWrite [GOOD]
>> TFlatTest::SplitBoundaryRead
>> TPipeTrackerTest::TestSimpleAdd [GOOD]
>> TResourceBroker::TestAutoTaskId
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std]
>> BasicStatistics::TwoServerlessDbs
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD]
>> TResourceBroker::TestAutoTaskId [GOOD]
>> ClientStatsCollector::PrepareQuery
>> TPartitionTests::CorrectRange_Commit
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest
>> TResourceBroker::TestAutoTaskId [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD]
>> TPartitionTests::CorrectRange_Commit [GOOD]
>> YdbS3Internal::TestS3Listing [GOOD]
>> YdbS3Internal::TestAccessCheck
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD]
>> TPartitionTests::ConflictingTxIsAborted
>> TFlatTest::SplitBoundaryRead [GOOD]
>> TPartitionTests::ConflictingTxIsAborted [GOOD]
>> TPartitionTests::ConflictingTxProceedAfterRollback
>> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD]
>> TGRpcClientLowTest::SimpleRequest
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD]
>> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std]
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD]
Test command err:
2025-04-04T17:08:20.285137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489505989128436713:2057];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:20.285179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e88/r3tmp/tmpoIovRo/pdisk_1.dat
2025-04-04T17:08:20.966825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:08:20.966954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:08:21.005523Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:08:21.006317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:2912
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-04T17:08:21.523451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:21.550566Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:21.567287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:21.764598Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:08:21.775941Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:08:21.824283Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:08:21.851457Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.014s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786501702 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-04-04T17:08:22.013054Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:22.013412Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:22.013688Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:22.013899Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:22.015736Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 3} end=0, 4 blobs 2r (max 2), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1853 1533 0)b }, ecr=1.000 2025-04-04T17:08:22.021207Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.33, eph 3} end=0, 4 blobs 8r (max 8), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3196 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786501702 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-04T17:08:22.332506Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:08:22.332556Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T17:08:22.332568Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:08:22.337166Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:08:22.342445Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-04T17:08:24.656120Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506008846480648:2169];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e88/r3tmp/tmpXJdaw5/pdisk_1.dat 2025-04-04T17:08:24.763108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:08:24.887438Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:24.913600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:24.913675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:24.914679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15712 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:25.191369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:25.203553Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:25.219003Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:08:25.230070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:25.342154Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:08:25.357031Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:08:25.391888Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:08:25.400030Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 Sch ... et: 72057594046644480 2025-04-04T17:08:25.581487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037891 splitOp: 281474976710678:0 alterVersion: 1 at tablet: 72057594046644480 2025-04-04T17:08:25.581771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2025-04-04T17:08:25.581873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2025-04-04T17:08:25.581931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037890 2025-04-04T17:08:25.581940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037891 2025-04-04T17:08:25.601466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710678, at schemeshard: 72057594046644480 2025-04-04T17:08:25.601493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 0/1, is published: true 2025-04-04T17:08:25.601509Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710678, at schemeshard: 72057594046644480 2025-04-04T17:08:25.622218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037891 2025-04-04T17:08:25.622292Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037891 2025-04-04T17:08:25.623062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 
281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:08:25.625589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037890 2025-04-04T17:08:25.625627Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037890 2025-04-04T17:08:25.625653Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710678:0 3 -> 131 2025-04-04T17:08:25.625997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:08:25.626079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:08:25.626114Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:08:25.626136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId# 281474976710678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976710678:0 at tablet 72057594046644480 2025-04-04T17:08:25.626428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-04-04T17:08:25.626494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037888 2025-04-04T17:08:25.631433Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:25.631692Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:25.631843Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:25.631976Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:25.632135Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=0, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-04-04T17:08:25.640958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2025-04-04T17:08:25.641016Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976710678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2025-04-04T17:08:25.641266Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710678:0 131 -> 132 2025-04-04T17:08:25.641357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-04T17:08:25.641671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 
72057594046644480 2025-04-04T17:08:25.641816Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:08:25.641836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-04T17:08:25.642049Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:08:25.642065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7489506008846481027:2234], at schemeshard: 72057594046644480, txId: 281474976710678, path id: 3 2025-04-04T17:08:25.642102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:08:25.642132Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:08:25.642151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976710678 at tablet 72057594046644480 2025-04-04T17:08:25.644316Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2025-04-04T17:08:25.644378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2025-04-04T17:08:25.644388Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710678 2025-04-04T17:08:25.644405Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-04-04T17:08:25.644419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-04T17:08:25.644465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 0/1, is published: true 2025-04-04T17:08:25.644604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-04-04T17:08:25.644763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710678 2025-04-04T17:08:25.647473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2025-04-04T17:08:25.647508Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-04-04T17:08:25.647573Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710678:0 progress is 1/1 2025-04-04T17:08:25.647589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2025-04-04T17:08:25.647608Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976710678:0 progress is 1/1 2025-04-04T17:08:25.647617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2025-04-04T17:08:25.647628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 1/1, is published: true 2025-04-04T17:08:25.647663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7489506013141448755:2359] message: TxId: 281474976710678 2025-04-04T17:08:25.647689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2025-04-04T17:08:25.647709Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710678:0 2025-04-04T17:08:25.647718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710678:0 2025-04-04T17:08:25.647819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-04T17:08:25.648127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:08:25.648137Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976710678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786505342 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED)
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD]
>> GrpcConnectionStringParserTest::NoDatabaseFlag
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1]
>> TFlatTest::ShardFreezeUnfreezeAlreadySet
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1]
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD]
>> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD]
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std]
>> TPartitionTests::ConflictingCommitsInSeveralBatches
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD]
>> ClientStatsCollector::PrepareQuery [GOOD]
>> ClientStatsCollector::CounterCacheMiss
>> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest
>> YdbYqlClient::ConnectDbAclIsStrictlyChecked
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD]
>> TGRpcClientLowTest::GrpcRequestProxy
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1]
>> TGRpcNewCoordinationClient::SessionMethods
>> YdbS3Internal::TestAccessCheck [GOOD]
>> YdbScripting::BasicV0
>> TGRpcClientLowTest::SimpleRequest [GOOD]
>> TGRpcClientLowTest::SimpleRequestDummyService
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD]
>> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD]
>> TPartitionTests::ConflictingCommitProccesAfterRollback
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD]
|85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test
>> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD]
|85.4%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log}
|85.4%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log}
>> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD]
>> GrpcConnectionStringParserTest::IncorrectConnectionString
>> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD]
>> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD]
>> TFlatMetrics::TimeSeriesAvg16 [GOOD]
>> TFlatMetrics::TimeSeriesAVG [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std]
>> TGRpcConsoleTest::SimpleConfigTest [GOOD]
>> ClientStatsCollector::CounterCacheMiss [GOOD]
>> ClientStatsCollector::CounterRetryOperation
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest
>> TFlatMetrics::TimeSeriesAVG [GOOD]
>> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD]
|85.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest
>> TGRpcConsoleTest::SimpleConfigTest [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std]
>> TGRpcClientLowTest::GrpcRequestProxy [GOOD]
>> TGRpcClientLowTest::GrpcRequestProxyWithoutToken
>> TLocksTest::Range_IncorrectNullDot1
>> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD]
>> YdbYqlClient::CopyTables
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD]
Test command err:
2025-04-04T17:08:28.534492Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-04T17:08:28.534597Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
2025-04-04T17:08:28.598022Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:178:2193]
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
2025-04-04T17:08:28.603575Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:08:28.000000Z 2025-04-04T17:08:28.603665Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\375\231\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\340\375\231\216\3402" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:08:29.599543Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:08:29.599640Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:08:30.191538Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:08:30.191633Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:08:30.227395Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:08:30.229168Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:08:30.000000Z 2025-04-04T17:08:30.229237Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured 
TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured 
TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Wait batch completion Got batch complete: 2 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait batch completion Wait kv request Wait tx committed for tx 2 Wait immediate tx comple ... ed TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Wait batch completion Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait kv request Got batch complete: 1 Wait batch completion Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait batch completion Wait batch completion Got batch complete: 1 Wait kv request Wait tx committed for tx 3 Wait tx committed for tx 4 Create distr tx with id = 6 and act no: 7 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait batch completion Wait kv request Wait immediate tx complete 8 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 8 Wait immediate tx complete 9 Got propose 
result: Origin: 72057594037927937 Status: COMPLETE TxId: 9 2025-04-04T17:08:38.191888Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:08:38.191953Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:08:38.205607Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:08:38.207980Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:08:38.000000Z 2025-04-04T17:08:38.208036Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup 
to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait batch completion Wait kv request Wait tx committed for tx 1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait for no tx committed Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> TGRpcClientLowTest::SimpleRequestDummyService [GOOD] >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> 
TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-04-04T17:08:32.798404Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506041929432495:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:32.798451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e87/r3tmp/tmpMygaWY/pdisk_1.dat 2025-04-04T17:08:33.417811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:33.424033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:33.424152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:33.426231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10973 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:33.890303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:33.930610Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:33.940847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:34.225940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:34.264814Z node 1 :TX_PROXY ERROR: Actor# [1:7489506050519367760:2396] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-04-04T17:08:34.267138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:34.286712Z node 1 :TX_PROXY ERROR: Actor# [1:7489506050519367798:2428] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-04-04T17:08:37.089513Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506062132479244:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:37.089545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e87/r3tmp/tmpkTpweN/pdisk_1.dat 2025-04-04T17:08:37.399438Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:37.439354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:37.439435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:37.447431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25827 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:37.678457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:37.688162Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:37.697294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:37.785001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:37.821025Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710660: 2025-04-04T17:08:37.821218Z node 2 :TX_PROXY ERROR: Actor# [2:7489506062132479962:2391] txid# 281474976710660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-04T17:08:37.821336Z node 2 :TX_PROXY ERROR: Actor# [2:7489506062132479962:2391] txid# 281474976710660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-04T17:08:37.821358Z node 2 :TX_PROXY ERROR: Actor# [2:7489506062132479962:2391] txid# 281474976710660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-04T17:08:37.827792Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710661: 2025-04-04T17:08:37.828069Z node 2 :TX_PROXY ERROR: Actor# [2:7489506062132479970:2396] txid# 281474976710661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-04T17:08:37.828149Z node 2 :TX_PROXY ERROR: Actor# [2:7489506062132479970:2396] txid# 281474976710661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-04T17:08:37.828174Z node 2 :TX_PROXY ERROR: Actor# [2:7489506062132479970:2396] txid# 281474976710661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-04T17:08:37.871417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 waiting... 
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> LocalityOperation::LocksFromAnotherTenants+UseSink |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TableCreator::CreateTables |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> BackupRestore::RestoreViewQueryText >> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain >> BackupRestoreS3::RestoreTablePartitioningSettings >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr >> YdbYqlClient::CopyTables [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners >> TStorageServiceTest::ShouldRegister >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges >> KqpSystemView::FailNavigate >> KqpSysColV1::UpdateAndDelete >> KqpSysColV1::InnerJoinTables >> YdbScripting::BasicV1 [GOOD] >> YdbScripting::MultiResults >> KqpSysColV1::StreamSelectRange |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TableCreator::CreateTables [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> 
TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> TStorageServiceTest::ShouldRegister [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> test_dispatch.py::TestMapping::test_idle [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-04-04T17:08:46.285569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506103945043980:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:46.285636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f19/r3tmp/tmp2FhKeX/pdisk_1.dat 2025-04-04T17:08:46.756326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:46.786572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:46.787277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:46.791897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10519 TServer::EnableGrpc on GrpcPort 22070, node 1 2025-04-04T17:08:47.188591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:47.188608Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:47.188616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:47.188711Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:47.535505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:47.570305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T17:08:47.572386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 >> KqpSysColV1::StreamSelectRowById >> KqpSystemView::PartitionStatsSimple >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism |85.5%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> LocalityOperation::LocksFromAnotherTenants+UseSink [GOOD] >> LocalityOperation::LocksFromAnotherTenants-UseSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> BackupRestore::RestoreViewQueryText [GOOD] >> BackupRestore::RestoreViewReferenceTable >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> KqpSystemView::PartitionStatsRanges >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad >> KqpSystemView::ReadSuccess >> KqpSystemView::PartitionStatsRange2 >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests >> KqpSystemView::FailNavigate [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> KqpSysColV1::StreamSelectRange [GOOD] >> YdbScripting::MultiResults [GOOD] >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] >> KqpSysColV1::InnerJoinTables [GOOD] |85.6%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts |85.6%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 15293, MsgBus: 21435 2025-04-04T17:08:49.675448Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506116739469861:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:49.675502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011b7/r3tmp/tmpC9PRA1/pdisk_1.dat 2025-04-04T17:08:50.127354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:50.128074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:50.130591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:50.144667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15293, node 1 2025-04-04T17:08:50.344638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:50.344656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:50.344662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:50.344749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21435 TClient is connected to server localhost:21435 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:51.113432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.151153Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:51.183267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:51.201244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.347277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.555167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:08:51.647289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.020261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133919340740:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.020376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.556049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.587800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.614940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.653762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.694958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.751809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.840553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133919341258:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.840632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.840930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133919341263:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.844846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-04T17:08:53.870497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506133919341265:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-04T17:08:53.973254Z node 1 :TX_PROXY ERROR: Actor# [1:7489506133919341321:3455] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:54.679508Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506116739469861:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.679566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:55.137324Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7489506142509276186:3666], for# user0@builtin, access# DescribeSchema 2025-04-04T17:08:55.137355Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7489506142509276186:3666], for# user0@builtin, access# DescribeSchema 2025-04-04T17:08:55.149310Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489506142509276183:2495], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:08:55.150211Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWRmYzM3Yi1mMWUyMGQzMS1hNjYwOGUwLWJjZWY4NTAy, ActorId: [1:7489506142509276176:2491], ActorState: ExecuteState, TraceId: 01jr0wdt6gde5b7mxwjpbwvyx5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 61331, MsgBus: 17077 2025-04-04T17:08:49.666715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506117106356208:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:49.666744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011a6/r3tmp/tmpyZ5Kf2/pdisk_1.dat 2025-04-04T17:08:50.174140Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:50.208608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:50.208747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:50.212402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61331, node 1 2025-04-04T17:08:50.342525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:50.342562Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:50.342585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:50.342717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17077 TClient is connected to server localhost:17077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:08:51.141295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.159291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:51.167741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:08:51.333534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:08:51.498356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.578375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:53.126769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506134286227088:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.126896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.555711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.594415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.662557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.703340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.782988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.855088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.898180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506134286227610:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.898262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.898479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506134286227615:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.901784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:53.912560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506134286227617:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:53.975202Z node 1 :TX_PROXY ERROR: Actor# [1:7489506134286227670:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:54.667014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506117106356208:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.667094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:55.383500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786535400, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 6724, MsgBus: 9266 2025-04-04T17:08:49.666325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506116083963648:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:49.667113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011ad/r3tmp/tmpJ8paYx/pdisk_1.dat 2025-04-04T17:08:50.116090Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:50.157035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:50.157155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:50.159739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6724, node 1 2025-04-04T17:08:50.342580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:50.342605Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:50.342615Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:50.342731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9266 TClient is connected to server localhost:9266 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:51.112396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.171135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:08:51.333235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:08:51.533209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.633302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:53.025201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133263834611:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.025340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.551767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.587636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.626717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.661622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.702659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.745585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.820723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133263835125:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.820782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.821068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133263835130:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.824087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:53.837330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506133263835132:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:53.920796Z node 1 :TX_PROXY ERROR: Actor# [1:7489506133263835186:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:54.664601Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506116083963648:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.664675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:55.696124Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786535659, txId: 281474976710671] shutting down >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD] >> TStorageServiceTest::ShouldSaveState >> KqpSysColV1::StreamSelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbScripting::MultiResults [GOOD] Test command err: 2025-04-04T17:08:21.910289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489505996412298300:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:21.910377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001251/r3tmp/tmp3tVHyj/pdisk_1.dat 2025-04-04T17:08:22.631237Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:22.667184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:22.667278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:22.675885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26366, node 1 2025-04-04T17:08:22.938931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:22.938951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:22.938957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:22.939051Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29854 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:23.455599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:26.339886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:26.903875Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489505996412298300:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:26.903936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:27.103841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506022182105041:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:27.104019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:27.106677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506022182105055:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:27.112330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:08:27.164066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506022182105057:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:08:27.236152Z node 1 :TX_PROXY ERROR: Actor# [1:7489506022182105152:4182] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:27.966548Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0wcytw0npcj703272gfjcb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBmNjFlMWUtMTE0MTlkNy0zYjYwYzNjOC1hOTAxMDkxZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS 2025-04-04T17:08:29.569337Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506029187761699:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:29.569389Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001251/r3tmp/tmphSNplC/pdisk_1.dat 2025-04-04T17:08:30.042520Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:30.102228Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:30.102520Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:30.109297Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21614, node 4 2025-04-04T17:08:30.322938Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:30.322964Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:30.322969Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:30.323084Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:30.691971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:33.969640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:34.579295Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506029187761699:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:34.579435Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:34.669611Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506050662601196:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:34.669671Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506050662601204:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:34.669718Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:34.673470Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:08:34.714302Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506050662601210:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:08:34.796059Z node 4 :TX_PROXY ERROR: Actor# [4:7489506050662601323:4173] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:34.933915Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wd6799e5es15mjmr22fax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTc0Y2I2NDktM2MyNzQwYzMtNjUzNmQ3YzgtNmYwMDEyYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:35.008810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14252 2025-04-04T17:08:37.610490Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506065226968933:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:37.630918Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001251/r3tmp/tmpgNjh1a/pdisk_1.dat 2025-04-04T17 ... lient is connected to server localhost:3617 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:44.648788Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:47.568451Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506107837344478:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:47.568563Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:47.655945Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:47.747151Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506107837344642:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:47.747245Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:47.747467Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506107837344647:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:47.751489Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:08:47.771734Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489506107837344649:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:08:47.858191Z node 10 :TX_PROXY ERROR: Actor# [10:7489506107837344724:2800] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:47.936832Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wdg36dteac8njfar35grn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDhkODI5YjAtNWI0YmQ0NmMtYjczNjgwMzYtYmE4MTE4YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:48.047010Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wdk6pfgd8zpjqtrfrsp79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODk2ZjAzNjEtYjljYWU0ZWYtZmYxYTQyMTAtYWQ1ZjdhMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:48.051147Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786528085, txId: 281474976715662] shutting down 2025-04-04T17:08:49.887931Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506114213111515:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:49.887995Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001251/r3tmp/tmpvYsyj8/pdisk_1.dat 2025-04-04T17:08:50.081313Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:50.136629Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:50.136760Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:50.148199Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5597, node 13 2025-04-04T17:08:50.270808Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:50.270832Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:50.270839Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:50.270969Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9780 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:50.547679Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:53.647457Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489506131392981732:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.647545Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.949723Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:54.088296Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489506135687949201:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:54.088398Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:54.088655Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489506135687949206:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:54.096865Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:08:54.129340Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489506135687949208:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:08:54.202792Z node 13 :TX_PROXY ERROR: Actor# [13:7489506135687949281:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:54.300463Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wdntc0wzt1x3a7ctg8nbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NGMyOTE0ZjctMzJkOWNmYzYtNTFmMGMxZWItZmZjMDQ2ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:54.463591Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wdntc0wzt1x3a7ctg8nbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YjdiYmZjMzItMjZhN2M2YmUtZGNiYmZiMjItYWJmZWQ0Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:54.550889Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wdntc0wzt1x3a7ctg8nbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZTVlOGMwMWYtYTZiNTYwNmUtYzQ0NTM1ZDYtZjFhOWQxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:54.713462Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wdntc0wzt1x3a7ctg8nbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTBmZTg1MDEtOTQxOGNmM2UtNDdlYzE2Y2MtZDM2YThiMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:54.890395Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489506114213111515:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.890533Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2025-04-04T17:08:31.162729Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506037414293303:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:31.162777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00123f/r3tmp/tmp2LkYyl/pdisk_1.dat 2025-04-04T17:08:31.857255Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:31.885309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:31.885406Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:31.900861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10204, node 1 2025-04-04T17:08:32.279674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:32.279710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:32.279719Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:32.279840Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:32.772961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:37.534336Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506062193338344:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00123f/r3tmp/tmpJeGaW7/pdisk_1.dat 2025-04-04T17:08:37.610373Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:08:37.736378Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:37.765193Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:37.765270Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:37.768770Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22767, node 4 2025-04-04T17:08:37.909936Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:37.909956Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:37.909962Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:37.910094Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:38.156199Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:42.102549Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506084567889811:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:42.102622Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00123f/r3tmp/tmpgMPHKd/pdisk_1.dat 2025-04-04T17:08:42.328364Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:42.328461Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:42.332685Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:42.339132Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5546, node 7 2025-04-04T17:08:42.438999Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:42.439021Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:42.439040Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:42.439161Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19568 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:42.708572Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00123f/r3tmp/tmpmaF32O/pdisk_1.dat 2025-04-04T17:08:46.876972Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:08:46.991683Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:47.009009Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:47.009108Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:47.012434Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22117, node 10 2025-04-04T17:08:47.174871Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:47.174911Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:47.174924Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:47.175080Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:08:47.468038Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.884832Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506124727724463:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:51.884893Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00123f/r3tmp/tmpdMSFBZ/pdisk_1.dat 2025-04-04T17:08:52.053838Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:52.089426Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:52.089541Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:52.092807Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4379, node 13 2025-04-04T17:08:52.193023Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:52.193047Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:52.193055Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:52.193199Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23850 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:52.475584Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
>> KqpSysColV1::SelectRowById >> KqpSystemView::PartitionStatsSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 24776, MsgBus: 15269 2025-04-04T17:08:49.684993Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506116218878165:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:49.706772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011aa/r3tmp/tmpZRiHZ9/pdisk_1.dat 2025-04-04T17:08:50.246341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:50.246451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:50.248014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:50.279129Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24776, node 1 2025-04-04T17:08:50.369221Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:50.369255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:50.369285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:50.369417Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15269 TClient is connected to server localhost:15269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:51.112442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.167697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.326391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:51.489834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.558183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:53.381106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133398748967:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.381202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.623772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.652043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.711150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.746743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.792585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.864941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.936091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133398749484:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.936175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.936521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506133398749489:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.940809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:53.956228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506133398749491:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:54.039095Z node 1 :TX_PROXY ERROR: Actor# [1:7489506137693716842:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:54.683213Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506116218878165:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.683280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::UpdateAndDelete [GOOD] >> KqpSysColV1::StreamSelectRowAsterisk >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 1612, MsgBus: 9503 2025-04-04T17:08:51.354596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506124356383853:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:51.355443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001196/r3tmp/tmp3oqqvf/pdisk_1.dat 2025-04-04T17:08:51.842639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:51.852090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:51.852229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:51.854364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1612, node 1 2025-04-04T17:08:51.942819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:51.942856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:51.942865Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:51.943020Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9503 TClient is connected to server localhost:9503 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:52.520787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:52.536685Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:52.550366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:52.686757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:52.890068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:52.975836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:54.935266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506137241287389:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:54.935367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.240879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.273572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.321421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.370021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.417628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.469009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.529084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506141536255199:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.529198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.529420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506141536255205:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.532904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:55.541193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506141536255207:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:55.601321Z node 1 :TX_PROXY ERROR: Actor# [1:7489506141536255259:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:56.344062Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506124356383853:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:56.344126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:56.759730Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786536793, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 26188, MsgBus: 10321 2025-04-04T17:08:51.628574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506124910060232:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:51.629069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001190/r3tmp/tmpRRmAGa/pdisk_1.dat 2025-04-04T17:08:52.089135Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26188, node 1 2025-04-04T17:08:52.123376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:52.123504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:52.125059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:52.179042Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:52.179067Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:52.179077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:52.179214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10321 TClient is connected to server localhost:10321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:52.689153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:52.721827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:52.852286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:53.056169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:53.142425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:54.988599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506137794963770:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:54.988705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.260851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.335353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.373280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.440989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.513127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.584799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:55.672444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506142089931592:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.672554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.672825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506142089931597:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:55.678073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:55.690096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506142089931599:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:55.751200Z node 1 :TX_PROXY ERROR: Actor# [1:7489506142089931654:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:56.626298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506124910060232:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:56.635951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:57.031193Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786537018, txId: 281474976710671] shutting down >> KqpSystemView::PartitionStatsFollower >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 30009, MsgBus: 27131 2025-04-04T17:08:49.662934Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506117075654045:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:49.663009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0011bd/r3tmp/tmpMh84TS/pdisk_1.dat 2025-04-04T17:08:50.127830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:50.187825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:50.187935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30009, node 1 2025-04-04T17:08:50.192436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:50.342586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:50.342608Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:50.342621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:50.342764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27131 TClient is connected to server localhost:27131 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:51.124196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.168706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.328743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.503712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.584779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:53.021362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506134255524996:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.021457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.552926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.588686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.628162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.683673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.719139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.751145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.823622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506134255525511:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.823695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.823984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506134255525516:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:53.828107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:53.854626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506134255525518:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:53.955031Z node 1 :TX_PROXY ERROR: Actor# [1:7489506134255525576:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:54.663252Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506117075654045:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.663311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> KqpSysColV0::SelectRowById |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> KqpSystemView::PartitionStatsRanges [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> KqpSystemView::QueryStatsSimple >> KqpSysColV0::SelectRowAsterisk >> KqpSystemView::NodesRange1 >> KqpSystemView::Join >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries >> BackupRestore::RestoreViewReferenceTable [GOOD] >> BackupRestore::RestoreViewToDifferentDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2025-04-04T17:08:27.755015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506020695024421:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:27.755056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001247/r3tmp/tmp1yhORo/pdisk_1.dat 2025-04-04T17:08:28.467652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:28.481603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:28.483507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:28.489404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7243, node 1 2025-04-04T17:08:28.567604Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:28.567628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:28.567693Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:28.567801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:29.087785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:31.882962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506037874894647:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:31.883077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:31.883630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506037874894659:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:31.887254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:08:31.910376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506037874894661:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:08:32.011409Z node 1 :TX_PROXY ERROR: Actor# [1:7489506042169862028:2673] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:32.395873Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZmE1MDY4NDItM2I2NDJlOTUtZmIwZjkzMmEtYWUxOTMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-04-04T17:08:34.309988Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506049008529075:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:34.310070Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001247/r3tmp/tmpBi7PSJ/pdisk_1.dat 2025-04-04T17:08:34.660863Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:34.710308Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:34.710383Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:34.719232Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28482, node 4 2025-04-04T17:08:34.873563Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:34.873585Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:34.873591Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:34.873697Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:35.107243Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:37.857071Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506061893432039:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:37.857151Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506061893432031:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:37.857294Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:37.860375Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:08:37.884130Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506061893432045:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:08:37.951288Z node 4 :TX_PROXY ERROR: Actor# [4:7489506061893432120:2686] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:40.226673Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506076575353468:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:40.226733Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001247/r3tmp/tmp4JpaIw/pdisk_1.dat 2025-04-04T17:08:40.385822Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:40.413094Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:40.413536Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:40.416976Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2942, node 7 2025-04-04T17:08:40.576934Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:40.576955Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:40.576962Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:40.577085Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095 ... =, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:46.031353Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr0wdhae2qt1cnssgdzcjatb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWFlN2QxNGItNzI0YTJiZWItYTk3NzhkNjYtNzMwYTU4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:46.032663Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OWFlN2QxNGItNzI0YTJiZWItYTk3NzhkNjYtNzMwYTU4MzQ=, ActorId: [7:7489506093755223986:2363], ActorState: ExecuteState, TraceId: 01jr0wdhae2qt1cnssgdzcjatb, Create QueryResponse for error on request, msg: 2025-04-04T17:08:46.042413Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jr0wdhae2qt1cnssgdzcjatb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWFlN2QxNGItNzI0YTJiZWItYTk3NzhkNjYtNzMwYTU4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:46.288602Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0wdhbr4y5fsxh5md0rrngj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWFlN2QxNGItNzI0YTJiZWItYTk3NzhkNjYtNzMwYTU4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:46.380278Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr0wdhjzd6tmfhkmrvcfam6k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTU0M2VlZjEtMTYzYjVkYjUtYTVjNWVlMzktZjg0OWRmNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:46.400427Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr0wdhnycbcnq97gdpxyen1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWFlN2QxNGItNzI0YTJiZWItYTk3NzhkNjYtNzMwYTU4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:46.401747Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=OWFlN2QxNGItNzI0YTJiZWItYTk3NzhkNjYtNzMwYTU4MzQ=, ActorId: [7:7489506093755223986:2363], ActorState: ExecuteState, TraceId: 01jr0wdhnycbcnq97gdpxyen1f, Create QueryResponse for error on request, msg: 2025-04-04T17:08:46.402700Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0wdhnycbcnq97gdpxyen1f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OWFlN2QxNGItNzI0YTJiZWItYTk3NzhkNjYtNzMwYTU4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:47.781634Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506107164371222:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:47.781703Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001247/r3tmp/tmpSBuLxq/pdisk_1.dat 2025-04-04T17:08:47.943416Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:47.980237Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:47.980318Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:47.982542Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63650, node 10 2025-04-04T17:08:48.048226Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:48.048250Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:48.048257Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:48.048394Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3237 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:48.296258Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.255400Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506124344241432:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:51.255428Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506124344241440:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:51.255500Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:51.259607Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:08:51.286711Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489506124344241446:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:08:51.349621Z node 10 :TX_PROXY ERROR: Actor# [10:7489506124344241517:2668] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:53.334404Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506133325316445:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:53.337956Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001247/r3tmp/tmpb8fSjZ/pdisk_1.dat 2025-04-04T17:08:53.595294Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:53.631899Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:53.631999Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:53.635366Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19287, node 13 2025-04-04T17:08:53.783089Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:53.783112Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:53.783121Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:53.783270Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:54.139797Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:57.241949Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489506150505186697:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.241985Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489506150505186689:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.242090Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.247285Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:08:57.278818Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489506150505186703:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:08:57.359758Z node 13 :TX_PROXY ERROR: Actor# [13:7489506150505186783:2681] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 22141, MsgBus: 30961 2025-04-04T17:08:53.735784Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506131322221195:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:53.741632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b9b/r3tmp/tmp4YSMXb/pdisk_1.dat 2025-04-04T17:08:54.135136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22141, node 1 2025-04-04T17:08:54.229722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:54.230434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:54.248855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:54.293848Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:54.293881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:54.293888Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:54.294019Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30961 TClient is connected to server localhost:30961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:54.855641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:54.881091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:55.026133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:55.190995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:55.277015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.995044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506144207124840:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:56.995217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.246181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.276205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.302123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.326365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.364099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.407560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.452139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506148502092645:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.452199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.452258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506148502092650:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.463272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:57.476690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506148502092652:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:57.549640Z node 1 :TX_PROXY ERROR: Actor# [1:7489506148502092706:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:58.735678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506131322221195:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:58.735746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:58.905667Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786538894, txId: 281474976710671] shutting down >> KqpSystemView::FailResolve >> KqpSystemView::ReadSuccess [GOOD] >> KqpSysColV1::SelectRange |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 24283, MsgBus: 28163 2025-04-04T17:08:53.051319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506131841981381:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:53.051856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b9d/r3tmp/tmpVyOsns/pdisk_1.dat 2025-04-04T17:08:53.507539Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:53.526611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:53.526762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:53.528228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24283, node 1 2025-04-04T17:08:53.625738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:53.625770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:53.625780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:53.625908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28163 TClient is connected to server localhost:28163 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:54.221783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:54.242898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:54.264905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:54.416945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:08:54.592314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:08:54.668810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.695291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506144726884988:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:56.695418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:56.931764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:56.967027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:56.999428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.026952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.064830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.162018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.270588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506149021852804:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.270693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.270903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506149021852809:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.274869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:57.285495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506149021852811:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:57.382197Z node 1 :TX_PROXY ERROR: Actor# [1:7489506149021852867:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:58.054381Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506131841981381:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:58.054462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::PartitionStatsRange3 >> TStorageServiceTest::ShouldSaveState [GOOD] >> TStorageServiceTest::ShouldUseGc >> KqpSysColV0::UpdateAndDelete |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> KqpSysColV1::InnerJoinSelect >> KqpSystemView::Sessions >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 18749, MsgBus: 22742 2025-04-04T17:08:54.783264Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506137645101583:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.792475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b92/r3tmp/tmpuP4szm/pdisk_1.dat 2025-04-04T17:08:55.192561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:55.192710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:55.195923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:55.228804Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18749, node 1 2025-04-04T17:08:55.301115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:55.301150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:55.301158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:55.301252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:22742 TClient is connected to server localhost:22742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:56.009343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.037253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.213285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.374340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.440851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:58.154954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506154824972393:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:58.155065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:58.435990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:58.504091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:58.530130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:58.563950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:58.600718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:58.672419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:58.751088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506154824972912:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:58.751169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:58.752025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506154824972917:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:58.755600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:58.768337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506154824972919:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:58.851405Z node 1 :TX_PROXY ERROR: Actor# [1:7489506154824972976:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:59.707401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:08:59.775050Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506137645101583:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:59.775123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:59.841250Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0wdyqe3dcpg7byc1f1tj25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVkMTBjNzMtM2EzZDExN2EtM2EyNDMyZWMtZjVhZDFjMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:59.852173Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786539838, txId: 281474976710672] shutting down >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpSysColV1::SelectRowAsterisk >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange >> KqpSysColV1::StreamInnerJoinSelectAsterisk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 14939, MsgBus: 10789 2025-04-04T17:08:55.810068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506141067762347:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:55.810448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b8e/r3tmp/tmp5WZJgd/pdisk_1.dat 2025-04-04T17:08:56.224261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:56.238025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:56.238133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:56.240576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14939, node 1 2025-04-04T17:08:56.348222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:56.348248Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-04T17:08:56.348255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:56.348372Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10789 TClient is connected to server localhost:10789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:56.949462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.996318Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:57.014148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:57.225109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:57.416417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:57.500648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.164471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506158247633320:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:59.164608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:59.374222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:59.405373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:59.483447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:08:59.521000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:08:59.550223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:08:59.591890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:08:59.653859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506158247633832:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:59.653992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:59.654286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506158247633837:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:59.658778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:08:59.670765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506158247633839:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:08:59.735039Z node 1 :TX_PROXY ERROR: Actor# [1:7489506158247633892:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:00.810208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506141067762347:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.810306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:01.036851Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786540976, txId: 281474976710671] shutting down |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] >> KqpSystemView::PartitionStatsRange1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] Test command err: 2025-04-04T17:08:32.944446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506040848901062:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:32.944492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001233/r3tmp/tmpH6rMEH/pdisk_1.dat 2025-04-04T17:08:33.922721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:33.956037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:08:33.975392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:33.975495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:33.983370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32133, node 1 2025-04-04T17:08:34.394831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:34.394851Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:34.394856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:34.394942Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2462 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:34.922063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:39.649249Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506074540985209:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:39.649300Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001233/r3tmp/tmpOLZwEN/pdisk_1.dat 2025-04-04T17:08:39.833160Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:39.867185Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:39.867265Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:39.871179Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29989, node 4 2025-04-04T17:08:40.001806Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:40.001829Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:40.001833Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:40.001917Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:40.275611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:44.323350Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506093873430084:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:44.323415Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001233/r3tmp/tmpLLdblh/pdisk_1.dat 2025-04-04T17:08:44.615179Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1679, node 7 2025-04-04T17:08:44.753257Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:44.753356Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:44.769775Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:44.791301Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:44.791320Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:44.791326Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:44.791455Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:45.091846Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19532 2025-04-04T17:08:45.420075Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:45.442411Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:46.087037Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:46.087104Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:46.126825Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-04T17:08:46.143219Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:46.221491Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:46.306187Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:08:47.329392Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489506108918063295:2158];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:47.366872Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:47.366957Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:47.373115Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-04T17:08:47.379346Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:47.452182Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:08:48.192682Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:48.351503Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:48.700152Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489506111053301349:2370], DatabaseId: /Root, PoolId: default, Failed to ... hildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:53.151284Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:4995 2025-04-04T17:08:53.627123Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:53.761417Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:54.275846Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7489506136360264882:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:54.275901Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:08:54.336547Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:54.336646Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:54.351192Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-04-04T17:08:54.370996Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:54.523363Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:54.695303Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:55.211790Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489506141716151894:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:55.225468Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:08:55.351731Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:55.351892Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:55.363581Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-04-04T17:08:55.367265Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:57.397985Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.527469Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:08:57.557648Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489506126436956524:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:57.557719Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:57.638966Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506147911795091:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.639105Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506147911795096:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.639165Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:57.642913Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-04T17:08:57.668336Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489506147911795105:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-04T17:08:57.751629Z node 10 :TX_PROXY ERROR: Actor# [10:7489506147911795186:3405] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:08:57.837403Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0wdwn2fgd8ph5f3a8znee5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yzg0MzI5M2YtNzVhYTM4NDItZjUzMWMyYTEtODcwNGM3OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:57.942863Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0wdwwcft1ccc0384p9m1h3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yzg0MzI5M2YtNzVhYTM4NDItZjUzMWMyYTEtODcwNGM3OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:58.067744Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0wdwz9fpq5gk3cq564py18, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yzg0MzI5M2YtNzVhYTM4NDItZjUzMWMyYTEtODcwNGM3OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:59.278420Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7489506136360264882:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:59.278508Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:08:59.387403Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr0wdwz9fpq5gk3cq564py18, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yzg0MzI5M2YtNzVhYTM4NDItZjUzMWMyYTEtODcwNGM3OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:08:59.391642Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7489506156501729947:2360] TxId: 281474976715669. Ctx: { TraceId: 01jr0wdwz9fpq5gk3cq564py18, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yzg0MzI5M2YtNzVhYTM4NDItZjUzMWMyYTEtODcwNGM3OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. Tx canceled, actorId: [10:7489506156501729947:2360], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2025-04-04T17:08:59.391974Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=Yzg0MzI5M2YtNzVhYTM4NDItZjUzMWMyYTEtODcwNGM3OGU=, ActorId: [10:7489506147911794923:2360], ActorState: ExecuteState, TraceId: 01jr0wdwz9fpq5gk3cq564py18, Create QueryResponse for error on request, msg: 2025-04-04T17:08:59.392861Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0wdwz9fpq5gk3cq564py18, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=Yzg0MzI5M2YtNzVhYTM4NDItZjUzMWMyYTEtODcwNGM3OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:08:59.399883Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-04T17:08:59.400391Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:08:59.402442Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-04-04T17:08:59.402942Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:08:59.742749Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:08:59.743511Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7489506158896021417:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:08:59.796279Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7489506158896021417:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:08:59.946736Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7489506158896021417:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:09:00.203494Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489506141716151894:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.203578Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:00.260370Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7489506158896021417:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:09:00.743677Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:09:00.899610Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7489506158896021417:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> KqpSystemView::NodesRange2 >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> KqpSystemView::NodesSimple >> KqpSysColV1::SelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-04-04T17:03:45.279319Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:03:45.395664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:03:45.419022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:03:45.419326Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:03:45.428094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:45.428369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:45.428611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:45.428728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:45.428898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:45.429058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:45.429172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:45.429300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:45.429425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:45.429543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:45.429670Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:45.429792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:45.469984Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:03:45.473234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:03:45.473347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:03:45.473573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:45.473796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:03:45.473880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:03:45.473923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:03:45.474064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:03:45.474129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:03:45.474174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:03:45.474204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:03:45.474405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:45.474471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:03:45.474510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:03:45.474558Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:03:45.474652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:03:45.474732Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:03:45.474781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:03:45.474810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:03:45.474914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:03:45.474986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:03:45.475036Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:03:45.475092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:03:45.475128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:03:45.475163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:03:45.475814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-04-04T17:03:45.475919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-04T17:03:45.475989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-04T17:03:45.476087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-04T17:03:45.476259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:03:45.476314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:03:45.476347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:03:45.476536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:03:45.476580Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:03:45.476608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:03:45.476752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:03:45.476792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:03:45.476844Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:03:45.477032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:03:45.477069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:03:45.477105Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:03:45.477271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:03:45.477311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:03:45.477372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-04T17:09:02.282575Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11162:12789];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:09:02.284727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11162:12789];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD] Test command err: 2025-04-04T17:08:35.120532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506057394739323:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:35.120686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001217/r3tmp/tmps2hIth/pdisk_1.dat 2025-04-04T17:08:35.935254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:35.935336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:35.944005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:36.023869Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4700, node 1 2025-04-04T17:08:36.247004Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:08:36.247031Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:08:36.258671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:08:36.370763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:36.370786Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:36.370791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:36.370892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:36.732530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-04T17:08:39.313759Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:39.329878Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:40.934506Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506076016170057:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:40.934545Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001217/r3tmp/tmp9k3D1w/pdisk_1.dat 2025-04-04T17:08:41.175918Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:41.201204Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:41.201271Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:41.204257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27567, node 4 2025-04-04T17:08:41.322171Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:41.322190Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:41.322195Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:41.322434Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21507 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:41.551229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:41.645579Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:41.660056Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:45.570863Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506097766692567:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:45.570939Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001217/r3tmp/tmpAr9fru/pdisk_1.dat 2025-04-04T17:08:45.729470Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:45.766037Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:45.766109Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:45.769169Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19007, node 7 2025-04-04T17:08:45.883027Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:45.883051Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:45.883058Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:45.883210Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30288 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:46.154227Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-04T17:08:48.885516Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:48.896871Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:48.909670Z node 7 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {UNAUTHORIZED, 0} 2025-04-04T17:08:48.924343Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:48.934680Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-04-04T17:08:50.602541Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506117937595702:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:50.603113Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001217/r3tmp/tmpGZEMy9/pdisk_1.dat 2025-04-04T17:08:50.759048Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:50.794151Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:50.794222Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:50.800216Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19400, node 10 2025-04-04T17:08:50.920577Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:50.920607Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-04T17:08:50.920631Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:50.920834Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62124 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:51.105540Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-04-04T17:08:54.167066Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:54.203451Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:54.230772Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:54.263307Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-04-04T17:08:54.290748Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-04-04T17:08:56.093466Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506145421674657:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:56.098033Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001217/r3tmp/tmph48xxr/pdisk_1.dat 2025-04-04T17:08:56.264327Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:56.288184Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:56.288260Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:56.291986Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 12840, node 13 2025-04-04T17:08:56.484423Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:56.484452Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:56.484460Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:56.484627Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:56.761796Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:01.084416Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489506145421674657:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:01.084829Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] Test command err: 2025-04-04T17:08:37.735689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506063194996862:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:37.735987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001200/r3tmp/tmpD4UO98/pdisk_1.dat 2025-04-04T17:08:38.509942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:38.510068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:38.551443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-04T17:08:38.562335Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20609, node 1 2025-04-04T17:08:38.844703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:38.844731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:38.844738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:38.844884Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:39.313743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:39.423619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:43.529875Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506091135186199:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:43.535226Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001200/r3tmp/tmpVK7jJd/pdisk_1.dat 2025-04-04T17:08:43.758153Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9174, node 4 2025-04-04T17:08:43.886414Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:43.886549Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:43.938612Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:43.938639Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:43.938660Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:43.938791Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:08:43.941294Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:44.204121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:44.272369Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:48.376330Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506112344515951:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:48.376420Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001200/r3tmp/tmpNzaQA7/pdisk_1.dat 2025-04-04T17:08:48.525776Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:48.554496Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:48.554573Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:48.557354Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18923, node 7 2025-04-04T17:08:48.618574Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:48.618596Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:48.618602Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:48.618741Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:48.872497Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:48.939506Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:53.303875Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506133102087915:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:53.305809Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001200/r3tmp/tmpBCyCV1/pdisk_1.dat 2025-04-04T17:08:53.479287Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:53.521390Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:53.521481Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:53.527355Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16138, node 10 2025-04-04T17:08:53.609943Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:53.609970Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:53.609980Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:53.610138Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:53.885766Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:53.963140Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:58.504315Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506152005517401:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:58.504393Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001200/r3tmp/tmpompTPa/pdisk_1.dat 2025-04-04T17:08:58.735684Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:58.771097Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:58.771188Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:58.774670Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6827, node 13 2025-04-04T17:08:58.872563Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:58.872595Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:58.872604Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:58.872751Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:59.167490Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:59.246464Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 9957, MsgBus: 4819 2025-04-04T17:08:58.218511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506154881040510:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:58.220199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b8c/r3tmp/tmp6cStgF/pdisk_1.dat 2025-04-04T17:08:58.639400Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:58.657181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:58.657315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:58.660109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9957, node 1 2025-04-04T17:08:58.751073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:58.751096Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:58.751102Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:58.751217Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4819 TClient is connected to server localhost:4819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:59.280749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.302902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:08:59.436886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.628889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.720315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:01.473021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506167765943972:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:01.473120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:01.709835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.744192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.779211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.806675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.832769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.915309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:02.006100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506172060911788:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:02.006168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:02.006454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506172060911793:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:02.010127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:02.025902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506172060911795:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:02.086273Z node 1 :TX_PROXY ERROR: Actor# [1:7489506172060911849:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:03.143819Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786543177, txId: 281474976710671] shutting down 2025-04-04T17:09:03.210473Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506154881040510:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:03.211994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 4905, MsgBus: 4763 2025-04-04T17:08:57.994068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506150067134467:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:57.994188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b8d/r3tmp/tmppxparw/pdisk_1.dat 2025-04-04T17:08:58.442121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:58.442223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:58.442548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:58.448002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4905, node 1 2025-04-04T17:08:58.533795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:58.533824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:58.533831Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:58.533961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4763 TClient is connected to server localhost:4763 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:59.151670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.167654Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:08:59.182416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.321095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.508932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:59.594079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:01.480195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506167247005276:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:01.480318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:01.806193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.847962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.884912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.926039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.984762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:02.033390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:02.123918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506171541973089:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:02.123997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:02.124216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506171541973094:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:02.129470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:02.146194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506171541973096:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:02.238593Z node 1 :TX_PROXY ERROR: Actor# [1:7489506171541973152:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:02.981034Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506150067134467:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:02.981096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV0::SelectRowById [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] >> KqpSysColV0::InnerJoinSelect >> KqpSysColV0::SelectRowAsterisk [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 8332, MsgBus: 5921 2025-04-04T17:08:59.481275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506157629188554:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:59.483585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b7c/r3tmp/tmpnaDDHo/pdisk_1.dat 2025-04-04T17:08:59.831880Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8332, node 1 2025-04-04T17:08:59.879522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:59.879648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:59.881090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:59.967662Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:59.967680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:59.967696Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:59.967809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5921 TClient is connected to server localhost:5921 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:00.555615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:00.574624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:00.732271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:00.987142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:01.089718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:02.916040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506170514092079:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:02.916147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:03.185525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.216657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.271987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.316332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.360720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.426175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.545533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506174809059893:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:03.545629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:03.545979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506174809059898:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:03.553926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:03.575414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506174809059900:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:03.663478Z node 1 :TX_PROXY ERROR: Actor# [1:7489506174809059956:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:04.470967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506157629188554:2199];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:04.471026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpSystemView::FailResolve [GOOD]
>> KqpSysColV1::SelectRange [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD]
Test command err:
2025-04-04T17:08:34.983515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506052989876345:2211];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:34.984111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001225/r3tmp/tmpQf9AlI/pdisk_1.dat
2025-04-04T17:08:35.545356Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:08:35.562572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:08:35.562678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:08:35.584860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 21983, node 1
2025-04-04T17:08:36.110696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:08:36.110716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:08:36.110731Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:08:36.110835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20765
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:08:36.549966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:08:36.646847Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:59062
Call
2025-04-04T17:08:36.705983Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:59068
2025-04-04T17:08:39.587707Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:59084
Call
Call
2025-04-04T17:08:39.615214Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:59122
2025-04-04T17:08:39.622219Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:59130
2025-04-04T17:08:39.623439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:08:41.314471Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506081036101536:2074];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:41.319043Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001225/r3tmp/tmpcnstEJ/pdisk_1.dat
2025-04-04T17:08:41.482192Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:08:41.490798Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:08:41.490894Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:08:41.499625Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 16808, node 4
2025-04-04T17:08:41.614940Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:08:41.614961Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:08:41.614967Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:08:41.615083Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:32421
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:08:41.832738Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:08:44.740971Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-04T17:08:44.742062Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:08:44.742094Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-04T17:08:44.747273Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1
2025-04-04T17:08:44.875096Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786524921, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-04T17:08:44.937290Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0
2025-04-04T17:08:44.953676Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-2, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-04-04T17:08:44.954226Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:08:44.956676Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /Root/Table-2
2025-04-04T17:08:44.998296Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786525040, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-04T17:08:45.024310Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715659, done: 0, blocked: 1
2025-04-04T17:08:45.025967Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0
2025-04-04T17:08:45.040677Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-3, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-04-04T17:08:45.041326Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:08:45.041355Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-4, opId: 281474976715660:1, at schemeshard: 72057594046644480
2025-04-04T17:08:45.041648Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:08:45.044970Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, dst path: /Root/Table-3, dst path: /Root/Table-4
2025-04-04T17:08:45.109136Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786525152, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-04T17:08:45.130752Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 2
2025-04-04T17:08:45.132745Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0
2025-04-04T17:08:45.132897Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1
2025-04-04T17:08:45.155279Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-5, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-04-04T17:08:45.155857Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:08:45.155891Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-6, opId: 281474976715661:1, at schemeshard: 72057594046644480
2025-04-04T17:08:45.156148Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:08:45.156170Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-7, opId: 281474976715661:2, at schemeshard: 72057594046644480
2025-04-04T17:08:45.156455Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:08:45.156478Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-8, opId: 281474976715661:3, at schemeshard: 72057594046644480
2025-04-04T17:08:45.156703Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: ...
ileState: Unknown -> Disconnected
2025-04-04T17:08:48.407000Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:08:48.410231Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20349, node 7
2025-04-04T17:08:48.494928Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:08:48.494962Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:08:48.494971Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:08:48.495121Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17437
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:08:48.759541Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:17437
2025-04-04T17:08:51.534928Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:17437
TClient::Ls request: Root/Test
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786531627 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED)
2025-04-04T17:08:51.924810Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:17437
TClient::Ls request: Root/Test
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786531627 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED)
2025-04-04T17:08:54.230321Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506137538679125:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:54.230432Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001225/r3tmp/tmpcoqQUm/pdisk_1.dat
2025-04-04T17:08:54.517705Z node 10 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 16043, node 10
2025-04-04T17:08:54.571402Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:08:54.571516Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:08:54.575251Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:08:54.602729Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:08:54.602753Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:08:54.602760Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:08:54.602883Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5158
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:08:54.866234Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:08:54.878310Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:08:57.904027Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-04T17:08:58.039041Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-04-04T17:08:58.117312Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-04-04T17:08:59.951573Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506160371019555:2074];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:59.951648Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001225/r3tmp/tmpdsNiTS/pdisk_1.dat
2025-04-04T17:09:00.252494Z node 13 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:00.292425Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:00.292549Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:00.304120Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 8283, node 13
2025-04-04T17:09:00.458829Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:00.458847Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:00.458853Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:00.458988Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:61180
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:00.738905Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:03.749626Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.926491Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD]
Test command err:
Trying to start YDB, gRPC: 20750, MsgBus: 14326
2025-04-04T17:08:59.758487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506159723851416:2266];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:59.758782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b6e/r3tmp/tmpk2cFfK/pdisk_1.dat
2025-04-04T17:09:00.214683Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:00.235451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:00.235555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:00.236901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20750, node 1
2025-04-04T17:09:00.398728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:00.398748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:00.398755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:00.398858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14326
TClient is connected to server localhost:14326
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:01.107658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:01.127594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:09:01.136334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:01.289002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:01.466194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:01.553977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:03.554280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506176903722146:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:03.554420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:03.840256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.873584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:03.955639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.006069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.082721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.159360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.238330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506181198689969:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.238431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.238463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506181198689974:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.241128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:04.251075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506181198689976:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:04.314896Z node 1 :TX_PROXY ERROR: Actor# [1:7489506181198690029:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:04.754482Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506159723851416:2266];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:04.754538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpSystemView::PartitionStatsRange3 [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD]
Test command err:
Trying to start YDB, gRPC: 5577, MsgBus: 12911
2025-04-04T17:09:00.795573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506163074558510:2245];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:00.795676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b67/r3tmp/tmpmbadXu/pdisk_1.dat
2025-04-04T17:09:01.249317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:01.249416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:01.251585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:09:01.297007Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 5577, node 1
2025-04-04T17:09:01.361361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:01.361381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:01.361395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:01.361533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12911
TClient is connected to server localhost:12911
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:01.961701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:01.986605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:09:02.115087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-04T17:09:02.294803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:02.351109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:04.164138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506180254429273:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.164256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.459272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.521626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.560617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.596941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.633879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.675290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.752149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506180254429788:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.752261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.752735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506180254429793:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.756098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:04.767485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506180254429795:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:04.852918Z node 1 :TX_PROXY ERROR: Actor# [1:7489506180254429852:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:05.783851Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506163074558510:2245];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:05.783912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:05.884867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:09:05.998053Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7489506184549397471:3701], for# user0@builtin, access# SelectRow
2025-04-04T17:09:05.998215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }
2025-04-04T17:09:06.008468Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYzNTVhODAtOWNkOWI5MDEtYWUxMDMwOTktZDUxZTgwOTI=, ActorId: [1:7489506184549397434:2493], ActorState: ExecuteState, TraceId: 01jr0we4qkebx84nbmef0zp0g8, Create QueryResponse for error on request, msg:
2025-04-04T17:09:06.009416Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786545996, txId: 281474976710672] shutting down
2025-04-04T17:09:06.009749Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0we4qkebx84nbmef0zp0g8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYzNTVhODAtOWNkOWI5MDEtYWUxMDMwOTktZDUxZTgwOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root
|85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD]
Test command err:
Trying to start YDB, gRPC: 11948, MsgBus: 2077
2025-04-04T17:09:00.900828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506163703464376:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:00.901340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b65/r3tmp/tmpXxX5us/pdisk_1.dat
2025-04-04T17:09:01.346523Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:01.356012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:01.356084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:01.358175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11948, node 1
2025-04-04T17:09:01.419766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:01.419795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:01.419805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:01.419938Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2077
TClient is connected to server localhost:2077
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:01.980587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:02.001320Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:09:02.028383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:02.187285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:02.345815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:02.435688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:04.283479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506180883335331:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.283644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.604838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.637472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.701787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.730281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.758461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.841498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.891746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506180883335848:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.891836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.892045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506180883335853:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:04.895184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:04.905173Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-04T17:09:04.905607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506180883335855:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:04.990115Z node 1 :TX_PROXY ERROR: Actor# [1:7489506180883335911:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:05.902455Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506163703464376:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:05.902518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TStorageServiceTest::ShouldUseGc [GOOD]
>> KqpSysColV1::InnerJoinSelect [GOOD]
>> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD]
>> BackupRestoreS3::RestoreIndexTableSplitBoundaries
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 10222, MsgBus: 16245
2025-04-04T17:09:01.296564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506168283521847:2267];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:01.296613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b5f/r3tmp/tmpc8iHVh/pdisk_1.dat
2025-04-04T17:09:01.910328Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:01.916150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:01.916244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:01.919598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10222, node 1
2025-04-04T17:09:02.069192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:02.069214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:02.069223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:02.069346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16245
TClient is connected to server localhost:16245
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:02.768460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:02.790726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:02.940258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:03.122696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:09:03.194886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T17:09:04.988668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506181168425304:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:04.988768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.262672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.300935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.368433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.397240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.435974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.477523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.573788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506185463393116:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.573879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.574129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506185463393121:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.578161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:05.591857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506185463393123:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:05.663685Z node 1 :TX_PROXY ERROR: Actor# [1:7489506185463393178:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:06.298336Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506168283521847:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:06.298397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:06.864402Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786546826, txId: 281474976710671] shutting down >> KqpSysColV1::SelectRowAsterisk [GOOD] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> TSchemeShardSubDomainTest::RmDir >> TSchemeShardSubDomainTest::DeleteAndRestart >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools >> TSchemeShardSubDomainTest::Restart >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> KqpSysColV0::SelectRange [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::SimultaneousDefine >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::CreateDropSolomon >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> KqpSysColV0::UpdateAndDelete [GOOD] >> BackupRestore::RestoreViewToDifferentDatabase [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 21604, MsgBus: 25132 2025-04-04T17:09:01.480312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506164939147253:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:01.522794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b53/r3tmp/tmpTauPDZ/pdisk_1.dat 2025-04-04T17:09:01.998442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:01.998594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.002891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.018582Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21604, node 1 2025-04-04T17:09:02.106541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:02.106567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:02.106696Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:02.106824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25132 TClient is connected to server localhost:25132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:02.832059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:02.847024Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:02.856841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.047554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.285290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:03.372379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.307730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506182119018085:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.307834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.575955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.610034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.680927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.709451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.765068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.814629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.872611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506182119018600:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.872695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.872897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506182119018606:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.876158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:05.886575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506182119018608:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:05.968720Z node 1 :TX_PROXY ERROR: Actor# [1:7489506182119018661:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:06.468524Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506164939147253:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:06.468574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> KqpSystemView::PartitionStatsRange1 [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 1950, MsgBus: 29443 2025-04-04T17:09:02.912055Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506171140190578:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:02.912099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b2c/r3tmp/tmpMYUXLo/pdisk_1.dat 2025-04-04T17:09:03.441731Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:03.448702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:03.448821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:03.450695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1950, node 1 2025-04-04T17:09:03.614832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:03.614856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:03.614867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:03.614974Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29443 TClient is connected to server localhost:29443 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:04.379190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.428349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.580758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:04.759812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:04.878995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:06.698573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506188320061520:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.698671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.891969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.931557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.963516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.990861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.019488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.046158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.098521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506192615029328:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.098613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.098965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506192615029333:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.102290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:07.112558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506192615029335:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:07.179557Z node 1 :TX_PROXY ERROR: Actor# [1:7489506192615029388:3438] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:07.913015Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506171140190578:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:07.925917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 28112, MsgBus: 28847 2025-04-04T17:09:03.176238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506177027943327:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:03.176292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b04/r3tmp/tmpwOXQdw/pdisk_1.dat 2025-04-04T17:09:03.636925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:03.637052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:03.640171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28112, node 1 2025-04-04T17:09:03.695009Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:03.697858Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:03.796320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:03.796344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:03.796350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:03.796510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28847 TClient is connected to server localhost:28847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:04.485591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.518139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.680358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.910890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.993906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:06.768518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506189912846974:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.768647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.949621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.988875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.028076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.055573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.081428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.115741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.169626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506194207814785:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.169708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.170110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506194207814790:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.173123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:07.185544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506194207814792:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:07.260055Z node 1 :TX_PROXY ERROR: Actor# [1:7489506194207814848:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:08.178329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506177027943327:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:08.178378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::Restart [GOOD] >> TSchemeShardSubDomainTest::RmDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 21116, MsgBus: 21673 2025-04-04T17:09:01.314568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506166040466518:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:01.315018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b54/r3tmp/tmpSThMym/pdisk_1.dat 2025-04-04T17:09:02.009134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:02.009259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.011525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.030041Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21116, node 1 2025-04-04T17:09:02.149465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:02.149685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:02.149697Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:02.149805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21673 TClient is connected to server localhost:21673 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:02.989015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.007169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.199700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.379311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.466626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.207288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506183220337346:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.207393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.432141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.473538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.509109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.584311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.617759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.665718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.727998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506183220337859:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.728080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.728278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506183220337864:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.732648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:05.748910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506183220337866:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:05.806071Z node 1 :TX_PROXY ERROR: Actor# [1:7489506183220337919:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:06.291817Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506166040466518:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:06.291874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 5119, MsgBus: 20231 2025-04-04T17:09:02.828563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506172712349011:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:02.845296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b39/r3tmp/tmpINoiOX/pdisk_1.dat 2025-04-04T17:09:03.360326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:03.360455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:03.363767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:03.371153Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5119, node 1 2025-04-04T17:09:03.506448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:03.506475Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:03.506486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:03.506597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20231 TClient is connected to server localhost:20231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:09:04.289943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.304475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:04.313319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.476436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.674037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.808604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:06.510538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506189892219861:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.510683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.773627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.839666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.871502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.903040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.940265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.981425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.058671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506194187187677:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.058733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.059043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506194187187682:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.062758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:07.091106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506194187187684:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:07.167207Z node 1 :TX_PROXY ERROR: Actor# [1:7489506194187187739:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:07.822366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506172712349011:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:07.822438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 22555, MsgBus: 12026 2025-04-04T17:09:03.259067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506175367142666:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:03.259122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b11/r3tmp/tmpQSJlIZ/pdisk_1.dat 2025-04-04T17:09:03.794981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:03.795092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:03.800584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:03.838939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22555, node 1 2025-04-04T17:09:03.983259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:03.983279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:03.983285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:03.983380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12026 TClient is connected to server localhost:12026 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:04.759457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.772319Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:04.781493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.941918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.119854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.203712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:06.942348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506188252046102:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.942498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.225400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.263084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.296730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.331448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.378172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.418729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.477060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506192547013912:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.477135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.477491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506192547013917:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.481344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:07.491970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506192547013919:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:07.548910Z node 1 :TX_PROXY ERROR: Actor# [1:7489506192547013972:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:08.262398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506175367142666:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:08.262480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:09.170357Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786549204, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 30200, MsgBus: 17660 2025-04-04T17:09:03.782999Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506173538606997:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:03.783040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a6d/r3tmp/tmpzxoH04/pdisk_1.dat 2025-04-04T17:09:04.340513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:04.340644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:04.342380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:04.367336Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30200, node 1 2025-04-04T17:09:04.448579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:04.448602Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:04.448608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:04.448727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17660 TClient is connected to server localhost:17660 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:05.141014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.171261Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.186554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.333669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.496825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.590731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:07.311119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506190718477945:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.311237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.496360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.531221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.588734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.618834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.648104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.682780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.748277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506190718478454:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.748362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.748604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506190718478459:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:07.752147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:07.770411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506190718478461:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:07.870557Z node 1 :TX_PROXY ERROR: Actor# [1:7489506190718478520:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:08.805681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506173538606997:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:08.806309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:09.095855Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786549050, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.660250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.660297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.660370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.752797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.752858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.762805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.763034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.763156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-04T17:09:09.772473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.774763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.777048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.778972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.788539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.801512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.801617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.801795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.801850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.801921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.802071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.812142Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.954459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.954706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.954918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.955164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.955248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.957421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.957555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.957721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.957770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.957803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-04T17:09:09.957852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.960230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.960291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.960324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.962062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.962106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.962147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.962210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.971380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.973199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.973351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.974291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.974441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.974496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.974802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.974854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.975018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.975180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.983154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.983211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.983394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.983435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.983780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.983828Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.983921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.983967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.984020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.984051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.984075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.984113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.984147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.984180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.984242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.984276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.984306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.986113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.986223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.986278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:10.058160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.058198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:10.058342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:09:10.058404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.058425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:10.058472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-04T17:09:10.058791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.058822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:10.058901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.058932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.058972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.059013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.059059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:09:10.059091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.059115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:10.059137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:10.059181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:09:10.059205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:09:10.059240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:09:10.059264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:09:10.059746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.059826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.059857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.059884Z node 
1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:09:10.059918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:10.060380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.060427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.060443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.060463Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:09:10.060490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:10.060605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:09:10.065109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:10.066045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:09:10.066273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:10.066312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:09:10.066746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:10.066860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.066893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2328] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:10.067352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.067538Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 195us result status StatusSuccess 2025-04-04T17:09:10.068013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.069454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.069618Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 173us result status StatusSuccess 2025-04-04T17:09:10.069859Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.070217Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-04T17:09:10.070445Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 188us result status StatusSuccess 2025-04-04T17:09:10.070719Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.660207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.660261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.660342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.740837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.740912Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-04T17:09:09.764237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.764462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.764592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.775958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.776272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.777043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.779288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.789801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.800041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.800090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.800231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.807493Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.927964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.929475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.931585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.933592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.933682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.938581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.938753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.938947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.939000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.939043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.939096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.942131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.942186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.942223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.946058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.946104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.946141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.946228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.949808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.952038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.952242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.954826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.954965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.955024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.958663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.958733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.958921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.959012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.963129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.963173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.963314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.963351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.963643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.963703Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.963790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.963847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.963901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.963936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.963972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.964016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.964058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.964086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.964157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.964198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.964225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.976265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.976401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.976438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, tablet: 72075186233409548 2025-04-04T17:09:10.123762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:10.123941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2025-04-04T17:09:10.123997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-04T17:09:10.124103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2025-04-04T17:09:10.128767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.130891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:10.131013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2025-04-04T17:09:10.131053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-04T17:09:10.131086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2025-04-04T17:09:10.131650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2025-04-04T17:09:10.131733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2025-04-04T17:09:10.131767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-04T17:09:10.131796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-04T17:09:10.131824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-04T17:09:10.134518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.134660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.134740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.134763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.134790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:10.134823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.134912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.136345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:09:10.136434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-04T17:09:10.136762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.136844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.136887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:10.137076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:09:10.137109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:10.137233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:10.137281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:10.138837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.138887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:10.139095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.139130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:10.139373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.139414Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:10.139483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.139512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.139575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.139600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.139626Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:09:10.139654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.139691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:10.139718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:10.139860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:09:10.139897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-04-04T17:09:10.139921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-04-04T17:09:10.140398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.140462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.140484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.140518Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:09:10.140559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:10.140613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-04T17:09:10.140639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:308:2299] 2025-04-04T17:09:10.143868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:10.143932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.143952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:315:2306] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:10.144378Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.144517Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 152us result status StatusSuccess 2025-04-04T17:09:10.144773Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.659748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.659779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.659828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.659893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.748831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.748880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.759751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.759972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.760097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.782292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.782584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.783216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.786963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.796330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.797835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.797880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.797999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.806561Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.953276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.953459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.953848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.953901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.966992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.967136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.967318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.967378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.967412Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.967462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.969433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.969487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.969521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.971198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.971243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.971281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.971356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.974965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.976709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.977134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.978102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.978221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.978297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.978631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.978710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.978890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.979020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.981054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.981101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.981263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.981304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.981652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.981698Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.981787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.981836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.981893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.981935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.981995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.982053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.982090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.982119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.982198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.982261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.982294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.989539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.989670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.989703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T17:09:10.054401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-04T17:09:10.054603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.054666Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:10.054717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.054826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.055513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.055598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.055635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.055682Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:09:10.055714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:10.056915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.056978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.057002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.057031Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:09:10.057053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:09:10.057112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:09:10.058864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:09:10.058993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at 
step: 5000003 2025-04-04T17:09:10.059673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.059768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.059835Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:09:10.059946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:09:10.060093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:10.060172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:10.060739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:10.063278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:10.063776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.063826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:10.063957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:09:10.064042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.064100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:10.064173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:10.064455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.064494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:10.064589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.064675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.064717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.064747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.064781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:09:10.064828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.064867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2025-04-04T17:09:10.064894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:10.064947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:09:10.065000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:09:10.065035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:09:10.065057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:09:10.065727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.065802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.065833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.065880Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:09:10.065923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:10.066582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.066666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.066691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.066715Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:09:10.066739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:10.066829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:09:10.070659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:10.070918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T17:09:10.077033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:10.077422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.077530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2025-04-04T17:09:10.077665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2025-04-04T17:09:10.080338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.080556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.659784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.659834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.659905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-04-04T17:09:09.748272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.748329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.774743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.775014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.775183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.793702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.793946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.794496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.803113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.824389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.825851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.825929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.826114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.826178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.826229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.826398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.843217Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.997592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.997844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.998072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.998720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.998814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.007318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.007501Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:10.007717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.007777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:10.007815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:10.007893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:10.010994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.011054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:10.011092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:10.014372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.014444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.014494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.014571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.024777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.027084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:10.027277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:10.028330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.028571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.028628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.028951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:10.029088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.029270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-04T17:09:10.029359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:10.031382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.031431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.031590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.031634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:10.032004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.032050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:10.032144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.032200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.032250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.032299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.032353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:10.032441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.032476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:10.032512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:10.032584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.032643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:10.032678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:10.034564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.034681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.034721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:09:10.242725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-04T17:09:10.242857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.244523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:09:10.244679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:09:10.244979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.245111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.245159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:09:10.245453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:09:10.245507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:09:10.245663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.245725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-04T17:09:10.245775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:09:10.247321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.247364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.247513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:10.247606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.247649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:09:10.247699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-04T17:09:10.247917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.247958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:09:10.248047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:09:10.248078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:09:10.248112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:09:10.248155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:09:10.248190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:09:10.248226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:09:10.248260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:09:10.248300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:09:10.248499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-04T17:09:10.248561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-04-04T17:09:10.248605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:09:10.248633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:09:10.249290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:10.249373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:10.249406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:09:10.249440Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:10.249491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.250133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:10.250210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:10.250255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:09:10.250282Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:10.250314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-04T17:09:10.250399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-04-04T17:09:10.250444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:565:2474] 2025-04-04T17:09:10.252951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:09:10.253021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:09:10.253074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.253104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:566:2475] TestWaitNotification: OK eventTxId 100 2025-04-04T17:09:10.253572Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.253771Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 173us result status StatusSuccess 2025-04-04T17:09:10.254212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:09:10.256492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-04-04T17:09:10.256614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.256766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T17:09:10.258479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.258616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.659789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.659858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.659950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.744190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.744259Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:09:09.759773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.759957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.760082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.781137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.781424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.782043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.783593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.792058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.797610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.797676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.797807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.810548Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.948825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.949056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.949276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.949512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.949578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.951727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.951884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.952055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.952111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.952148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.952221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.953945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.954038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.955663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.955706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.955750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.955815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.959599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.963436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.963645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.964881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.965007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.965064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.965374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.965444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.965626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.965738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.968083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.968136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.968442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.968494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.968860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.968931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.969114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.969174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.969226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.969268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.969310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.969353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.969391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.969425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.969520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.969725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.969802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.971501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.971582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.971609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:461:2058] recipient: [1:101:2136] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:464:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:465:2058] recipient: [1:463:2415] Leader for TabletID 72057594046678944 is [1:466:2416] sender: [1:467:2058] recipient: [1:463:2415] 2025-04-04T17:09:10.174903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:10.175069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.175128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:10.175182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:10.175228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:10.175291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:10.175408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.175504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:10.175911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:10.197964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:10.199301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:10.199457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:10.199595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:10.199633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:10.200014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:10.200640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:10.200722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:10.200789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.200841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.201067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:09:10.202053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, 
read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.202190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:09:10.202441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.202632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.202832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:10.202906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:10.202948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:10.202969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:10.203044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.203117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.203327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-04T17:09:10.203618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.203782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.204160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.204246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.206441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.206583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.210377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.210627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.210826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.211019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.218455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.218734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.218807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.218888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.243646Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.243728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.243975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:10.244043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.244103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:10.244253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:466:2416] sender: [1:524:2058] recipient: [1:15:2062] 2025-04-04T17:09:10.288057Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.288377Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 335us result status StatusSuccess 2025-04-04T17:09:10.288885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.289570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.289766Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 201us result status StatusSuccess 2025-04-04T17:09:10.290829Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.660064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.660131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.660207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.662922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.752629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.752691Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.763750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.763944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.764050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.774949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.775300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.777077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.779217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.790785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.797439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.797487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.797618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.806401Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.934287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.934474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.934722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.934919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.934971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.943196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.943364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.943561Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.943623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.943670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.943728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.951017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.951088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.951134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.958062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.958124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.958181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.958264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.964700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.967932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.968149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.969152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.969296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.969348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.969626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.969680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.969845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.969954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.972966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.973181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.973538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.973682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.973742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.973791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.973845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.973889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.973943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.974000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.974035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.974107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.974155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.974190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.976273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.976378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.976413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
678944 is [1:551:2485] sender: [1:612:2058] recipient: [1:15:2062] 2025-04-04T17:09:10.335171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:10.335323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.335366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:610:2531] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:10.335865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.336097Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusPathDoesNotExist 2025-04-04T17:09:10.336286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:10.337507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:551:2485] sender: [1:616:2058] recipient: [1:101:2136] Leader for TabletID 72057594046678944 is [1:551:2485] sender: [1:619:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:551:2485] sender: [1:620:2058] recipient: [1:618:2536] Leader for TabletID 72057594046678944 is [1:621:2537] sender: [1:622:2058] recipient: [1:618:2536] 2025-04-04T17:09:10.385275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:10.385382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.385434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:10.385482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:10.385520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:10.385550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:10.385625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.385728Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:10.386051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:10.404583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:10.405928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:10.406101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:10.406447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:10.406481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:10.406569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:10.407261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.407360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.407431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.407825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.407911Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:09:10.408093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.408176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.408293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.408392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.408494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.408647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.408905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.409024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.409620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.409717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.409927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.410058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.410145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.410367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, 
read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.410460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.410702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.410964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.411129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.411194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.411247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.420238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.420313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.420694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:10.420751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.420797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:10.421870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:621:2537] sender: [1:680:2058] recipient: [1:15:2062] 2025-04-04T17:09:10.454308Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.454522Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 280us result status StatusPathDoesNotExist 2025-04-04T17:09:10.454704Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:10.455432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.455616Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 243us result status StatusSuccess 2025-04-04T17:09:10.456003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD]
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.659849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.659908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.659989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-04T17:09:09.746850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.746916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.760260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.760507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.760658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.772620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.774782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.777062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.779055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.791297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.797390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.797531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.797657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.810950Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.967057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.967337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.967556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.967857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.967921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.975364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.975604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.975776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.975859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.975892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.975942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.977735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.977782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.977814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.980200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.980279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.980330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.980406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.984044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.987498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.987709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.988808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.988947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.989007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.989363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.989435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.989651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-04T17:09:09.989781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.995888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.995935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.996116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.996156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.996484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.996532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.996625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.996676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.996726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.996759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.996802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.996848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.996910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.996939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.997034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.997075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.997103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.999164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.999272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.999311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
de 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:10.529456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.529562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.529758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.529935Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:09:10.530022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:10.530049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:10.530075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:10.530095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:10.530132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:09:10.530271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:489:2445] message: TxId: 103 2025-04-04T17:09:10.530312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:10.530361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:09:10.530385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:09:10.530474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T17:09:10.532092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.532139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:490:2446] TestWaitNotification: OK eventTxId 103 2025-04-04T17:09:10.532630Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.532847Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 203us result status StatusSuccess 2025-04-04T17:09:10.533327Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" 
ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.533801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.533938Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 139us result status StatusSuccess 2025-04-04T17:09:10.534165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.534566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.534732Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 145us result status StatusSuccess 2025-04-04T17:09:10.534935Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.535375Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.535553Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 180us result status StatusSuccess 2025-04-04T17:09:10.536691Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:10.244723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:10.244821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.244875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:10.244908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:10.244948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:10.244987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:10.245045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.245129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:10.245453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:10.319522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:10.319584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:10.331849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-04T17:09:10.332100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:10.332243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:10.343542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:10.343842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:10.344470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.345225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:10.354111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.355449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.355503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.355628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:10.355671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.355704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:10.355775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.361727Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:10.488650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:10.488883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.489119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:10.489358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:10.489422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.491622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.491796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:10.491971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:09:10.492018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:10.492073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:10.492118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:10.493989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.494039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:10.494071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:10.496130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.496171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.496211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.496280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.505347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.507083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:10.507332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:10.508282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.508407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.508457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.508743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:10.508793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.509011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.509105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-04T17:09:10.511092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.511155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.511366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.511414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:10.511821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.511867Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:10.511955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.512004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.512052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.512087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.512120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:10.512165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.512199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:10.512229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:10.512295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.512345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:10.512377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:10.514203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.514367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.514407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T17:09:10.745487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:10.745849Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-04T17:09:10.745979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:10.746117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-04-04T17:09:10.747508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:09:10.747679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-04T17:09:10.748767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:10.748816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:10.748930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:10.749360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:10.751796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T17:09:10.751844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-04T17:09:10.752000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-04T17:09:10.752160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-04-04T17:09:10.752262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:10.752342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:10.752380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:10.752442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.754981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:09:10.755011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 
2025-04-04T17:09:10.755070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:09:10.755082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:09:10.755123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:09:10.755147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-04T17:09:10.755231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-04-04T17:09:10.755281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:09:10.755304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:10.755349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:09:10.755383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:09:10.755778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:09:10.756866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:10.756949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-04-04T17:09:10.757244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:09:10.757284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-04T17:09:10.757402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:10.757429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-04T17:09:10.757481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:09:10.757500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:09:10.757941Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:09:10.758006Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:10.758043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.758089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:631:2533] 2025-04-04T17:09:10.758145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.758175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:631:2533] 2025-04-04T17:09:10.758277Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:09:10.758395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.758415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:631:2533] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:10.758849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.759013Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 189us result status StatusPathDoesNotExist 2025-04-04T17:09:10.759192Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:10.759679Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.759882Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 175us result status StatusPathDoesNotExist 2025-04-04T17:09:10.759994Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:10.760412Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.760562Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 153us result status StatusSuccess 2025-04-04T17:09:10.760964Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.660481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.660536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.660620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-04-04T17:09:09.746545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.746604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.759683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.759942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.760108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.780540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.780848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.781541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.782350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.787879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.799647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.799691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.799800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.823060Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.950068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.950358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.950595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.950885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.950962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.954101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.954150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.954194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.954263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.956537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.956589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.956621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.958412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.958472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.958504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.958575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.969934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.972162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.972378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.973429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.974005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.974066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.974257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-04-04T17:09:09.974358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.976829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.976878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.977081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.977118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.977458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.977502Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.977602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.977659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.977706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.977757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.977802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.977836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.977868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.977896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.977996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.978043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.978078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.979812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.979932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.979973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
04T17:09:10.641923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:09:10.641963Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:09:10.641989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:09:10.647502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:09:10.647596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:09:10.647614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:09:10.647636Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:10.647667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:10.647723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-04T17:09:10.649713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:10.649775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:10.649796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:10.649817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:10.649834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:10.650385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T17:09:10.654799Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-04-04T17:09:10.657933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-04-04T17:09:10.658167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:10.658847Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:09:10.660126Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 
2025-04-04T17:09:10.660256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.660443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:10.661016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:10.661196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409546 2025-04-04T17:09:10.661698Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:09:10.662845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:10.662994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-04-04T17:09:10.663723Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2025-04-04T17:09:10.664761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186233409549 2025-04-04T17:09:10.665122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:09:10.665258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:10.665493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T17:09:10.665709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:10.665752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:09:10.665803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:10.665989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:10.666034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:10.666149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:10.669418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 
2025-04-04T17:09:10.669488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-04T17:09:10.670067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:09:10.670107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:09:10.670343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:09:10.670385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:09:10.675202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:09:10.675246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:10.675343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:09:10.675388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:09:10.675546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:10.675636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:10.675677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:10.675743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.678968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:10.680270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:09:10.680588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T17:09:10.680623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T17:09:10.681069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T17:09:10.681159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.681194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:908:2807] TestWaitNotification: OK eventTxId 106 2025-04-04T17:09:10.681842Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.681993Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 156us result status StatusSuccess 2025-04-04T17:09:10.682349Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:10.373756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:10.373852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.373905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:10.373946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:10.373986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:10.374010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:10.374060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.374135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:10.374456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:10.451469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:10.451528Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:09:10.462431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:10.462675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:10.462815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:10.474469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:10.474751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:10.475345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.478864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:10.489890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.491268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.491343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.491531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:10.491574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.491620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:10.491767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.498712Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:10.625611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:10.625823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.626049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:10.626308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:10.626373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.636136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.636284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:10.636462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.636513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:10.636549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:10.636596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:10.638402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.638446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:10.638468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:10.640168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.640217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.640259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.640325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.643693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.645284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:10.645464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:10.646348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.646435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.646474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.646726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:10.646780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.646930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.647051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:10.648381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.648415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.648529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.648554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:10.648819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.648853Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:10.648921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.648954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.648999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.649026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.649052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:10.649077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.649102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:10.649193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:10.649261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.649303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:10.649330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:10.650514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.650577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.650599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nSubscriber for txId 102: satisfy waiter [1:578:2489] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:10.841641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2025-04-04T17:09:10.841784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2025-04-04T17:09:10.841836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-04-04T17:09:10.841877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-04-04T17:09:10.841924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-04-04T17:09:10.844455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.844597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.844636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.844670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:10.844712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.844847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.846324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:09:10.846428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-04T17:09:10.846890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.847003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.847058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:10.847339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:09:10.847387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:10.847536Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.847586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-04T17:09:10.847645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:10.850463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.850512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.850727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:10.850828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.850863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:10.850908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:10.851249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.851309Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:10.851400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.851432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.851463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:10.851490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.851520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:09:10.851557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:10.851590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:10.851618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:10.851789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-04T17:09:10.851830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-04-04T17:09:10.851858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:09:10.851972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:09:10.852612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, 
cookie: 101 2025-04-04T17:09:10.852710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.852742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.852771Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:10.852802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.853530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.853592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:10.853633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:10.853659Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:10.853680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-04T17:09:10.853737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-04-04T17:09:10.853797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:577:2488] 2025-04-04T17:09:10.856353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:10.859660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:10.859788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:10.859833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:578:2489] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:10.860379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:10.860578Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2025-04-04T17:09:10.861087Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.809373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.809470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.809518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.809552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.809591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.809617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.809670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.809746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.810057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.920600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-04T17:09:09.920646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.933760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.934002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.934148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.945371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.945669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.946386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.947235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.952156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.953795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.953839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.953949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.960162Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:10.132133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:10.132360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.132571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:10.132779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:10.132838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.135160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.135288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-04T17:09:10.135441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.135492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:10.135533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:10.135578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:10.141973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.142035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:10.142073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:10.150310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.150362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.150417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.150485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.153896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.155776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:10.155996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:10.156937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.157054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.157107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.157381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:10.157430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.157589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.157673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:10.159508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.159551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.159680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.159721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:10.160041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.160088Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:10.160243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.160294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.160338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.160393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.160429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:10.160460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.160494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:10.160520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:10.160582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.160620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:10.160647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:10.162452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.162551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.162585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
10.845547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.845655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.845711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-04-04T17:09:10.845800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-04T17:09:10.846102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-04T17:09:10.846203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409550 TxId: 104 Status: OK 2025-04-04T17:09:10.846282Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409550 TxId: 104 Status: OK 2025-04-04T17:09:10.846322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-04T17:09:10.846370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-04T17:09:10.846713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-04T17:09:10.846788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2025-04-04T17:09:10.846828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2025-04-04T17:09:10.846861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-04T17:09:10.846893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:09:10.850935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.851135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 104 2025-04-04T17:09:10.943601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-04-04T17:09:10.943749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-04T17:09:10.943819Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-04T17:09:10.943870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.943916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-04T17:09:10.943965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:09:10.944460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-04T17:09:10.944566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-04T17:09:10.944612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-04-04T17:09:10.944649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.944690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-04T17:09:10.944854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-04T17:09:10.945057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.945132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:09:10.949311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.949931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.950314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.950350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.950501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:09:10.950696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.950727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 
72057594046678944, txId: 104, path id: 1 2025-04-04T17:09:10.950771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-04T17:09:10.951201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.951244Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T17:09:10.951348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:10.951382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:10.951421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:10.951449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:10.951479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T17:09:10.951516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:10.951552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:09:10.951581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:09:10.951711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-04-04T17:09:10.951753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-04T17:09:10.951800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:09:10.951829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:09:10.952587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:10.952677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:10.952712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:09:10.952752Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:09:10.952788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:09:10.953332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:10.953401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:10.953424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at 
schemeshard: 72057594046678944, txId: 104 2025-04-04T17:09:10.953451Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:09:10.953476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:09:10.953542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-04T17:09:10.963924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:09:10.964483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 >> KqpSystemView::Sessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:10.427126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:10.427218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.427268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:10.427316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:10.427354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:10.427376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:10.427422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.427485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:10.427746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:10.503671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:10.503722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:10.514088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:10.514373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:10.514515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:10.524848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-04-04T17:09:10.525136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:10.525796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.526520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:10.530908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.532175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.532258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.532431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:10.532473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:10.532504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:10.532603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.538338Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:10.673479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:10.673682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.673888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:10.674172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:10.674234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.676317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.676463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:10.676641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.676689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:10.676723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:10.676783Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:10.678629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.678704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:10.678740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:10.680385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.680429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.680467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.680565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.689809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:10.693224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:10.693404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:10.694466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:10.694597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:10.694671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.694968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:10.695027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:10.695209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:10.695305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:10.697292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:10.697342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-04T17:09:10.697512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:10.697552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:10.697952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.698013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:10.698102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.698149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.698190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.698227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.698282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:10.698326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.698357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:10.698400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:10.698475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.698511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:10.698544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:10.700347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.700447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.700480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
blishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:11.136271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.136942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.137028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.137054Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:09:11.137118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:11.137149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:11.137178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:11.137197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:11.137251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:09:11.137299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:628:2562] message: TxId: 102 2025-04-04T17:09:11.137326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:11.137351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:11.137385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:11.137458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:11.138577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:11.138605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:629:2563] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2025-04-04T17:09:11.140847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:11.141023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.141124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T17:09:11.142925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-04-04T17:09:11.143062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, 
database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-04-04T17:09:11.143602Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.143791Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 168us result status StatusSuccess 2025-04-04T17:09:11.144120Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:11.144663Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.144836Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 174us result status StatusSuccess 2025-04-04T17:09:11.145156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } 
KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:11.145772Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.145912Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 161us result status StatusSuccess 2025-04-04T17:09:11.146195Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:11.146744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.146960Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 179us result status StatusSuccess 2025-04-04T17:09:11.147224Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: 
"USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.660230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.660284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.660366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.662663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.749604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.749673Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-04-04T17:09:09.762291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.762529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.762681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.772781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.774767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.777034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.779080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.794021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.797451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.797498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.797623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.804360Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.965081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.965305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.965557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.965787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.965843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.973425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.973472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.973501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.973550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.977761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.977830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.977869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.980072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.980118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.980149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.980199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.983790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.988084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.988297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.989519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.989661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.989734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.990040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.990095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.990305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.990637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.999185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.999254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.999475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.999527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:10.000055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:10.000106Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:10.000200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.000251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.000300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:10.000338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.000378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:10.000422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:10.000466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:10.000503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:10.000579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:10.000616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:10.000647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:10.010530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.010698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:10.010744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
269323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:11.269458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:11.269574Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:11.269607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-04-04T17:09:11.269659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-04T17:09:11.269957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.269991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-04-04T17:09:11.270078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:11.270113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:11.270154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:11.270184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:11.270321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T17:09:11.270370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:11.270403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:09:11.270436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:09:11.270578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:11.270620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-04-04T17:09:11.270669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:09:11.270753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T17:09:11.271486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:11.271566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:11.271590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:09:11.271631Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:09:11.271738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:11.272309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:11.272380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:11.272414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:09:11.272444Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:11.272469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:11.272516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-04T17:09:11.278617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:11.278700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:11.279885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:09:11.281010Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:09:11.281257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:11.281525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:11.281774Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:09:11.283247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:11.283457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409547 2025-04-04T17:09:11.284162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:11.284262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:11.284364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:11.284796Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:09:11.284899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:11.284942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:11.285009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:11.288688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:09:11.288743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:09:11.288862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:09:11.288895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:11.289462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:11.289560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:09:11.289882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:09:11.289948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:09:11.290531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:09:11.290636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:09:11.290689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2093:3698] TestWaitNotification: OK eventTxId 104 2025-04-04T17:09:11.301298Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.301506Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 258us result status StatusPathDoesNotExist 2025-04-04T17:09:11.301710Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty 
ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:11.302502Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.302745Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 183us result status StatusPathDoesNotExist 2025-04-04T17:09:11.302923Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD]
Test command err: 2025-04-04T17:08:49.644186Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7489506109323664602:2048] with connection to localhost:11565:local 2025-04-04T17:08:49.644316Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-04T17:08:50.266409Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-04T17:08:50.266438Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-04T17:08:51.542877Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7489506118221734273:2048] with connection to localhost:11565:local 2025-04-04T17:08:51.542974Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-04T17:08:51.781881Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-04T17:08:51.781912Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-04T17:08:51.791416Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-04-04T17:08:52.087098Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-04-04T17:08:52.087136Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-04-04T17:08:52.087437Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-04T17:08:52.216893Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2025-04-04T17:08:52.216934Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-04T17:08:53.460073Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7489506126520435384:2048] with connection to localhost:11565:local 2025-04-04T17:08:53.460174Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-04T17:08:53.709422Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-04T17:08:53.709460Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-04T17:08:53.742568Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-04T17:08:55.453935Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-04T17:08:55.453970Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-04T17:08:55.456475Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-04T17:08:56.066502Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-04T17:08:56.066543Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:08:56.070808Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-04T17:08:56.301255Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-04T17:08:56.301312Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-04T17:08:56.301759Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-04T17:08:56.534111Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-04T17:08:56.534140Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:08:56.534419Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-04T17:08:56.658070Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-04T17:08:56.658109Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-04T17:08:56.658489Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:08:56.970719Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:08:58.488394Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7489506149269870109:2048] with connection to localhost:11565:local 2025-04-04T17:08:58.488484Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-04T17:08:58.838898Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-04T17:08:58.838931Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2025-04-04T17:08:58.839476Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-04T17:09:00.070392Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-04T17:09:00.070452Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-04T17:09:00.074784Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-04-04T17:09:00.424184Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-04-04T17:09:00.424262Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-04-04T17:09:02.041637Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7489506165679788953:2048] with connection to localhost:11565:local 2025-04-04T17:09:02.041689Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7489506169974756351:2129] 2025-04-04T17:09:02.041719Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-04-04T17:09:02.346402Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-04-04T17:09:02.346425Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-04-04T17:09:02.350323Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-04-04T17:09:04.035317Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-04-04T17:09:04.035372Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-04-04T17:09:04.036651Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-04T17:09:04.616369Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-04-04T17:09:04.616401Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:09:04.618796Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-04-04T17:09:05.029684Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2025-04-04T17:09:05.029716Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2025-04-04T17:09:05.029745Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-04-04T17:09:05.029810Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2025-04-04T17:09:05.030255Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-04-04T17:09:05.343546Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-04-04T17:09:05.343578Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-04-04T17:09:05.343850Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-04T17:09:05.519672Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-04-04T17:09:05.519702Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send 
TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:09:05.520034Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-04-04T17:09:05.649068Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-04-04T17:09:05.649108Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2025-04-04T17:09:05.649141Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-04-04T17:09:05.649190Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2025-04-04T17:09:05.650342Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-04-04T17:09:05.736466Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2025-04-04T17:09:05.739141Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2025-04-04T17:09:05.830042Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-04-04T17:09:05.830078Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-04-04T17:09:05.830482Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2025-04-04T17:09:05.970424Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2025-04-04T17:09:05.970459Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:09:05.971729Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2025-04-04T17:09:06.174491Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2025-04-04T17:09:06.174524Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2025-04-04T17:09:06.174568Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2025-04-04T17:09:06.176107Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2025-04-04T17:09:06.177239Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:06.217796Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2025-04-04T17:09:06.416930Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:06.517877Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:06.533902Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:06.638369Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:06.658793Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:06.759431Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:06.774634Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:06.875364Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got 
TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:06.894777Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:06.998477Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.009352Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.110007Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.123660Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.226184Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.247626Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.348479Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.361706Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.462679Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.473352Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.574150Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.593406Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.699302Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.713491Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.814430Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.831212Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:07.934470Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:07.962781Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-04-04T17:09:08.063888Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-04-04T17:09:08.083234Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse
>> KqpSysColV0::InnerJoinSelect [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine
>> TSchemeShardSubDomainTest::RestartAtInFly
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:10.957570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:10.957687Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.957738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:10.957791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:10.957860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:10.957891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:10.957948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:10.958026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:10.958357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:11.030255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:11.030301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:11.043878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:11.044146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:11.044310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:11.054774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:11.055121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:11.055903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:11.057303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:11.064780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:11.066179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:11.066266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:11.066465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:11.066517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:11.066559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:11.066672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.073163Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:11.188833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:11.189090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.189326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:11.189532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:11.189591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.195709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:11.195851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:11.196049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.196103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:11.196135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:11.196181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:11.199063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.199112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:11.199136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:11.203432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.203488Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.203530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:11.203610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:11.207101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:11.214236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:11.214551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:11.215631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:11.215773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:11.215829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:11.216132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:11.216183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:11.216425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:11.216537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:11.219011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:11.219148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:11.219372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:11.219417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:11.219931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:11.219974Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:11.220057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:11.220111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:11.220147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:11.220190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:11.220233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:11.220272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:11.220307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:11.220350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:11.220431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:11.220471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-04T17:09:11.220499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:11.222375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:11.222471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:11.222505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-04-04T17:09:11.735765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409550 Forgetting tablet 72075186233409552 2025-04-04T17:09:11.737005Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:09:11.737155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:11.737199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:09:11.737255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:11.737292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:09:11.737317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:09:11.737809Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-04T17:09:11.738751Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409546 2025-04-04T17:09:11.740210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:11.740374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:09:11.741758Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:09:11.742038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:11.742206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 
2025-04-04T17:09:11.742809Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-04T17:09:11.742991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T17:09:11.743147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-04-04T17:09:11.745009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409549 2025-04-04T17:09:11.745330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:11.746009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:11.746171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:11.747003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:09:11.747165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:11.747563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:11.747713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:11.747752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:11.747865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:11.763763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T17:09:11.763838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-04T17:09:11.763958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-04T17:09:11.763979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-04T17:09:11.764196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:11.764589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:11.764636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:11.764710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:11.764996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:09:11.765026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:09:11.765355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:09:11.765380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:09:11.765475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:09:11.765493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-04T17:09:11.771086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:09:11.771139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:11.771244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:09:11.771296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:09:11.771461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:11.773192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:09:11.773423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:09:11.773460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:09:11.773892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:09:11.773981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:09:11.774018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:783:2672] TestWaitNotification: OK eventTxId 103 2025-04-04T17:09:11.774585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.774781Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 219us result status StatusPathDoesNotExist 2025-04-04T17:09:11.774962Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: 
true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:11.775375Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:11.775531Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess 2025-04-04T17:09:11.776007Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> TSchemeShardSubDomainTest::RedefineErrors >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas |85.9%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSystemView::NodesSimple [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 8906, MsgBus: 22367 2025-04-04T17:09:01.630313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506164902512726:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:01.630488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b44/r3tmp/tmpzheWgf/pdisk_1.dat 2025-04-04T17:09:02.116620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:02.116759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.118261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.141871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8906, node 1 2025-04-04T17:09:02.330707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:02.330731Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:02.330737Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:02.330830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22367 TClient is connected to server localhost:22367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:02.991323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:03.005206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:09:03.009746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.164320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.355034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.442178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.211047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506182082383503:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.211230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.447123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.475921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.520792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.552258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.584131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.628387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.730560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506182082384021:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.730673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.730993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506182082384026:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.734520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-04-04T17:09:05.746670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506182082384028:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-04-04T17:09:05.845969Z node 1 :TX_PROXY ERROR: Actor# [1:7489506182082384083:3456] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:06.629655Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506164902512726:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:06.629715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 ydb-cpp-sdk/3.1.0 2025-04-04T17:09:11.194015Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786551186, txId: 281474976710684] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:12.191613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:12.191699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.191748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:12.191778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:12.191818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:12.191845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:12.191897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.191968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:12.192269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:12.277972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:12.278021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:12.297414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:12.297666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:12.297810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-04-04T17:09:12.307126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:12.307651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:12.308243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.310533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:12.316176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.317454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:12.317527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.317704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:12.317753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.317795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:12.317905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.324144Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:12.447604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:12.447894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.448124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:12.448351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:12.448399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.453443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.453557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:12.453752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.453805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:12.453844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-04-04T17:09:12.453897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:12.455851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.455902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:12.455936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:12.457530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.457587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.457628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.457699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.461420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:12.463393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:12.463564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:12.464411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.464541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:12.464592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.464847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:12.464891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.465042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:12.465136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:12.467004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:12.467050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.467202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.467240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:12.467613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.467657Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:12.467738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:12.467781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.467833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:12.467873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.467932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:12.467969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.468000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:12.468031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:12.468095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:12.468140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:12.468169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:12.469874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:12.469968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:12.469997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:12.470030Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:12.470059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:12.470135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:12.480955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:12.481423Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-04T17:09:12.481990Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 
2025-04-04T17:09:12.504209Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:09:12.507414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:12.507667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.507749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-04-04T17:09:12.508493Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:12.514916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:12.515089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-04-04T17:09:12.515629Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-04-04T17:09:12.518507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:12.518776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.518879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-04-04T17:09:12.523365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:12.523564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 
TestWaitNotification wait txId: 100 2025-04-04T17:09:12.523921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:09:12.523972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-04T17:09:12.524075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:12.524093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:09:12.524572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:09:12.524690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:12.524747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:09:12.524823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:286:2277] 2025-04-04T17:09:12.524988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:12.525012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:286:2277] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:12.525358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:12.525481Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 134us result status StatusPathDoesNotExist 2025-04-04T17:09:12.525604Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:12.526030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:12.526167Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 141us result status StatusPathDoesNotExist 2025-04-04T17:09:12.526396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 
72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:12.526857Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:12.527044Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 156us result status StatusSuccess 2025-04-04T17:09:12.527422Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::DeleteAdd ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 13232, MsgBus: 18425 2025-04-04T17:09:06.155013Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506188345746564:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:06.155149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00087e/r3tmp/tmpHsj6qC/pdisk_1.dat 2025-04-04T17:09:06.529329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:06.572383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-04T17:09:06.572525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:06.573738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13232, node 1 2025-04-04T17:09:06.650890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:06.650917Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:06.650924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:06.651064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18425 TClient is connected to server localhost:18425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:07.117899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:07.157732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:07.279872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:07.444434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:07.522880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.534608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506201230650157:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:09.534759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:09.758236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.790391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.825070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.899264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.931171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.972991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.034443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506205525617964:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.034506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.034924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506205525617969:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.042001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:10.060621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506205525617971:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:10.145972Z node 1 :TX_PROXY ERROR: Actor# [1:7489506205525618027:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:11.155330Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506188345746564:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:11.155411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:12.450947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:12.451041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.451081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:12.451111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:12.451147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:12.451167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:12.451209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.451276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:12.451559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:12.530671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:12.530733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:12.547375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:12.547647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:12.547802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:12.558550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:12.558888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 
0 2025-04-04T17:09:12.559570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.560402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:12.566811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.568275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:12.568354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.568564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:12.568614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.568654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:12.568774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.579025Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:12.716204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:12.716433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.716661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:12.716898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:12.716957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.719194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.719339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:12.719506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.719555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:12.719647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:12.719695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:12.721587Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.721647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:12.721682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:12.723431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.723477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.723515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.723604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.727161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:12.728972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:12.729156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:12.730390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.730547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:12.730624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.730957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:12.731011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.731196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:12.731300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:12.733329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:12.733381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.733561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-04T17:09:12.733609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:12.733957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.734005Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:12.734118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:12.734171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.734224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:12.734297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.734344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:12.734387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.734422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:12.734455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:12.734540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:12.734586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:12.734622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:12.736973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:12.737081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:12.737119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:09:12.836982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.837030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:12.837160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-04-04T17:09:12.837409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:12.837482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:12.839067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:12.839225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:12.840904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:12.840963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.841090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:12.841218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.841255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:12.841293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:12.841563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.841617Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-04-04T17:09:12.841710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:12.841778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:12.841835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:12.841869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:12.841908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:09:12.841943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:12.841977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:12.842009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:12.842071Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:12.842108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:09:12.842146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:09:12.842188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T17:09:12.842767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:12.842860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:12.842901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:12.842956Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:09:12.842995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:12.843762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:12.843847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:12.843875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:12.843902Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:12.843929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:12.844028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:09:12.844948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:12.845000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:12.845088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:12.845387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:12.845434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:12.845492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:12.847749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:12.847983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:12.850002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:12.850080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:09:12.850337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:12.850381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:09:12.850780Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:12.850862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:12.850916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:341:2332] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:12.851390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:12.851583Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 237us result status StatusPathDoesNotExist 2025-04-04T17:09:12.851760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:12.852210Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:12.852370Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 159us result status StatusSuccess 2025-04-04T17:09:12.852733Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD]
>> TSchemeShardSubDomainTest::DeclareDefineAndDelete
>> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD]
>> TSchemeShardSubDomainTest::RestartAtInFly [GOOD]
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true
>> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD]
>> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects
>> TSchemeShardSubDomainTest::Delete
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:12.913486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:12.913591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.913653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:12.913683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:12.913723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:12.913749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:12.913798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.913867Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:12.914177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:12.976514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:12.976568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:12.987354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:12.987566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:12.987679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:12.999367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:12.999661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.000278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.001056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.006932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.008149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.008221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.008399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.008440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.008473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.008579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.014453Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.127847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.128069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.128306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.128536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.128587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:09:13.130937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.131073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.131258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.131310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.131343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.131403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.133357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.133406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.133457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.135401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.135447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.135490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.135558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.138801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.140691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.140870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.141769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.141902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.141952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.142233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.142311Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.142474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.142573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.144563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.144611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.144782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.144821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.145131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.145178Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.145262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.145309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.145353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.145387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.145422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.145512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.145562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.145592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.145659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.145695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.145724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.147391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.147494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.147529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
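Note on the operation lifecycle in these traces: it is carried by "Change state for txid <op> <from> -> <to>" lines; for txid 1:0 above the path is 2 -> 3 -> 128 -> 240 before TDone. A small fold over those lines reconstructs the path each operation took. A minimal sketch under the same caveat as before: the message shape is taken from this capture, the state numbers are treated as opaque labels, and state_paths is a hypothetical helper.

import re
from collections import defaultdict

# Matches e.g. "Change state for txid 1:0 128 -> 240" as seen above.
CHANGE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

def state_paths(log_text):
    """Ordered state transitions per operation id,
    e.g. {'1:0': ['2', '3', '128', '240']} for the trace above."""
    paths = defaultdict(list)
    for m in CHANGE.finditer(log_text):
        op, src, dst = m.groups()
        if not paths[op]:
            paths[op].append(src)  # seed with the first observed source state
        paths[op].append(dst)
    return dict(paths)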
2025-04-04T17:09:13.185786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:09:13.186049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:09:13.186433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.186469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.186507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:09:13.186557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-04T17:09:13.186721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.189062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:09:13.189220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:09:13.189552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.189652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.189694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:09:13.189912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:09:13.189970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-04-04T17:09:13.190091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.190142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:13.190174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:09:13.191947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.191985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.192122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:13.192240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.192277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:09:13.192316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-04-04T17:09:13.192585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.192620Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:09:13.192707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:09:13.192741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:09:13.192777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:09:13.192825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:09:13.192899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:09:13.192936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:09:13.192967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:09:13.192995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:09:13.193047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:13.193080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-04T17:09:13.193106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:09:13.193130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:09:13.193753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:13.193834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:13.193865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:09:13.193899Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:13.193930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.194574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:13.194668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:09:13.194716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:09:13.194743Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:13.194789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:13.194849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-04T17:09:13.200208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:09:13.202910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-04-04T17:09:13.203187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:09:13.203225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-04-04T17:09:13.203331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:13.203350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:09:13.203778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:13.203868Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:09:13.203913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:13.203951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:311:2302] 2025-04-04T17:09:13.204097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:09:13.204118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:311:2302] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-04-04T17:09:13.204528Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.204725Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 220us result status StatusSuccess 2025-04-04T17:09:13.205110Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD]
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD]
>> TSchemeShardSubDomainTest::RedefineErrors [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.659852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.659939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.660019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10,
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.759583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.759647Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.774983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.775226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.775385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.792698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.793024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.793785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.794587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.798368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.799946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.800016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.800067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.800183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.806104Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.952667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.952895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.953335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.953397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.955838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.955984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.956164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.956224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.956264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.956319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.958115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.958171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.958210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.959885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.959929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.959966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.960038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.963962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.965823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.965990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.967039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.967166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.967219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.967518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.967577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.967747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.967909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.970026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.970077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.970232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.970292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.970613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.970676Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.970790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.970837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.970888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.970930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.970984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.971047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.971081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.971113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.971186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.971228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.971262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.977181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.977306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.977345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
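Note on the shard cleanup that follows below: the drop releases shards to Hive ("Free shard ..."), Hive confirms with TEvDeleteTablet and "Forgetting tablet", and the schemeshard finishes with "Deleted shardIdx ...". When verifying that a drop cleaned up everything it released, a set difference over those two message shapes is enough. A hedged sketch; leaked_shards is a hypothetical helper, grounded only in the formats visible in this capture.

import re

# "Free shard 72057594046678944:1 hive 72057594037968897 at ss ..." and
# "Deleted shardIdx 72057594046678944:1" as seen in the trace below.
FREED = re.compile(r"Free shard (\d+:\d+) hive \d+")
DELETED = re.compile(r"Deleted shardIdx (\d+:\d+)")

def leaked_shards(log_text):
    """Shards released to Hive with no matching 'Deleted shardIdx'
    record; an empty set means the drop cleaned up every shard."""
    return set(FREED.findall(log_text)) - set(DELETED.findall(log_text))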
1819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2025-04-04T17:09:13.431839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2025-04-04T17:09:13.431867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2025-04-04T17:09:13.433524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-04T17:09:13.433610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-04-04T17:09:13.433642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2025-04-04T17:09:13.433666Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:13.433696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:13.433761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2025-04-04T17:09:13.434581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:13.434628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:13.434665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:13.434688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:13.434709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:13.436118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-04T17:09:13.437098Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:09:13.438072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.438360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:13.438618Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 Forgetting tablet 72075186233409546 2025-04-04T17:09:13.439935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 72057594046678944 
ShardLocalIdx: 15, at schemeshard: 72057594046678944 2025-04-04T17:09:13.440123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409556 2025-04-04T17:09:13.440943Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 2025-04-04T17:09:13.441768Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:09:13.442620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-04-04T17:09:13.442811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2025-04-04T17:09:13.443056Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 2025-04-04T17:09:13.444089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 Forgetting tablet 72075186233409555 2025-04-04T17:09:13.445192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:13.445323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2025-04-04T17:09:13.445943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-04-04T17:09:13.446113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-04-04T17:09:13.446484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:13.446559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-04T17:09:13.446619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:13.446816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-04-04T17:09:13.446901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:13.446941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:13.447058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:13.448911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:1 2025-04-04T17:09:13.448953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:09:13.449030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-04T17:09:13.449053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-04-04T17:09:13.449151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2025-04-04T17:09:13.449175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-04-04T17:09:13.451023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:09:13.451061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:13.451140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-04-04T17:09:13.451173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-04-04T17:09:13.451326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:13.451386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:13.451458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:13.451514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:13.451590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.452895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-04-04T17:09:13.453538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-04-04T17:09:13.453582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2025-04-04T17:09:13.454396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-04-04T17:09:13.454470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-04-04T17:09:13.454498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2281:4054] TestWaitNotification: OK eventTxId 139 2025-04-04T17:09:13.455746Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.455921Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 202us result status StatusSuccess 2025-04-04T17:09:13.456179Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.205034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.205135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.205200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.205236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.205283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.205312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.205372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.205464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:13.205729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.292727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.292786Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-04-04T17:09:13.303601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.303842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.303983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.317688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.317999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.318701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.319828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.324521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.325843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.325906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.326084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.326140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.326181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.326370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.333670Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.466438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.466676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.466900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.467199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.467265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.469404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.469540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.469771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.469831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.469869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.469929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.471816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.471883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.471919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.473498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.473549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.473590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.473664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.487651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.489583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.489772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.490865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.491010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.491066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.491365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.491432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.491664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.491783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.493973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.494025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.494180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.494221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.494573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.494638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.494754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.494791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.494836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.494877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.494923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.494966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.495002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.495037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.495136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.495186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.495219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.497020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.497126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.497162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:13.497201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:13.497240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.497334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:13.500144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:13.500589Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-04-04T17:09:13.501208Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:09:13.517338Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:09:13.519627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.519898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.520033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.520806Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:13.523790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.523966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-04-04T17:09:13.524375Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-04T17:09:13.524619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:09:13.524664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-04T17:09:13.525122Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:09:13.525255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:09:13.525301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 100 2025-04-04T17:09:13.525774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.525945Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 200us result status StatusPathDoesNotExist 2025-04-04T17:09:13.526121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:12.971952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:12.972076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.972135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:12.972172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:12.972212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:12.972238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:12.972291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.972378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:12.972860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.045168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.045231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.056545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.056826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.056990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.065914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.066192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.066859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-04-04T17:09:13.067572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.072462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.073633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.073706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.073881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.073947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.073986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.074096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.080058Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.239085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.239335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.239597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.239910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.239994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.242660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.242823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.243043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.243106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.243151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.243207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.245207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.245260Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.245291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.246887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.246930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.246967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.247037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.255825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.259127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.259346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.260299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.260434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.260493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.260767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.260823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.260989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.261088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.263117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.263174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.263328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.263369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.263701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.263747Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.263828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.263871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.263914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.263946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.263980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.264015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.264068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.264102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.264198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.264249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.264278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.266015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.266105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.266136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4T17:09:13.418039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.418077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.418117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.418158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.418196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.418284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.418358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:13.418629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.434340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.435460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.435603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.435684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.435725Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.435839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.436466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:13.436536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:13.436613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.436676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.436891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:09:13.437155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.437237Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:09:13.437427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.437518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.437607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:13.437654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:13.437684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:13.437702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:13.437789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.437864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.438032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-04T17:09:13.438428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.438578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.438949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.439028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.439224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.439339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.439456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.439610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.439678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.439823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.440032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.440186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.440237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.440274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.447211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.447270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.447405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.447450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.447490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.447809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
TestWaitNotification wait txId: 100 2025-04-04T17:09:13.489836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:09:13.489885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:460:2410] sender: [1:520:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.490506Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:09:13.490597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:09:13.490633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:518:2455] TestWaitNotification: OK eventTxId 100 2025-04-04T17:09:13.491125Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.491393Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 213us result status StatusSuccess 2025-04-04T17:09:13.491777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.492248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.492377Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 133us result status StatusSuccess 2025-04-04T17:09:13.492718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> KqpSystemView::PartitionStatsFollower [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 13730, MsgBus: 15698 2025-04-04T17:09:04.442825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506178148328413:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:04.442940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:04.546983Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506181117946554:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:04.549519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:04.564667Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489506180212098536:2123];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0009f3/r3tmp/tmpbplCav/pdisk_1.dat 2025-04-04T17:09:05.010381Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:05.475925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:05.574385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:05.614977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:05.615393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:05.616499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:05.616568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:05.618271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:05.618343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:05.619833Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:09:05.620022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:05.620796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:05.622958Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:09:05.623539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:05.647524Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13730, node 1 2025-04-04T17:09:05.701095Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:05.706078Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:05.871045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:05.871065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:05.871072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:05.871210Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15698 TClient is connected to server localhost:15698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:06.798178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:06.849154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:07.071749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:07.302721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:07.433035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:09.380844Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506178148328413:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:09.380971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:09.508183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506199623166912:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:09.508288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:09.546968Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506181117946554:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:09.547023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:09.558697Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489506180212098536:2123];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:09.558753Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:09.742987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.797913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.883364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.981673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.045633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.118022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.262696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506203918134896:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.262779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.262970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506203918134901:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.267635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:10.290055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506203918134903:2404], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725668 completed, doublechecking } 2025-04-04T17:09:10.363683Z node 1 :TX_PROXY ERROR: Actor# [1:7489506203918134978:4124] txid# 281474976725669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:11.512366Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786551476, txId: 281474976725671] shutting down 2025-04-04T17:09:11.770373Z node 2 :BS_PROXY_PUT ERROR: [38a311f97a5e53c5] Result# TEvPutResult {Id# [72075186224037900:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037900:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:09:11.784607Z node 3 :BS_PROXY_PUT ERROR: [02c2ce811a028e4f] Result# TEvPutResult {Id# [72075186224037893:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037893:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.133924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.134024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.134076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.134108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.134149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.134174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.134235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.134357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:13.134699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.211365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.211414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.219825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.220086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.220184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.227832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.228167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.228768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.229588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.234107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.235357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.235418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.235589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.235636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.235691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.235804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.240925Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.373326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.373567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.373818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.374037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.374089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.379190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.379352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.379563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.379639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.379672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.379730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.383114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.383170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.383206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.385811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.385875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.385923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.386007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.395433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.397274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.397450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.398428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.398551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.398619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.398898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.398944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-04-04T17:09:13.399108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.399210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.401176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.401240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.401421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.401474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.401780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.401818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.401904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.401948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.401991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.402028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.402068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.402109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.402143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.402172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.402260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.402311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.402358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.404086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.404187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.404243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d: 3], at schemeshard: 72057594046678944 2025-04-04T17:09:13.774078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:09:13.774463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:13.774581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2025-04-04T17:09:13.775354Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:09:13.776001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-04-04T17:09:13.776114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:13.776478Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409551 2025-04-04T17:09:13.777480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:13.777605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-04-04T17:09:13.777842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:09:13.777947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-04-04T17:09:13.779241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:13.780117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:13.780183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:13.780305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:13.781298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-04-04T17:09:13.781352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-04-04T17:09:13.781414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-04-04T17:09:13.781456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-04-04T17:09:13.781619Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:09:13.781644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:09:13.781715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-04-04T17:09:13.784036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:13.784146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:13.784184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:13.784239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.784590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:09:13.784624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:09:13.784674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:09:13.784694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-04T17:09:13.784790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:09:13.784809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:13.785055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:09:13.785102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:09:13.786018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:13.787416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-04T17:09:13.787656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:13.787696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-04T17:09:13.787772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:09:13.787790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:09:13.788181Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:13.788291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:09:13.788348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 
2025-04-04T17:09:13.788382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:727:2614] 2025-04-04T17:09:13.788465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:13.788484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:727:2614] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:13.788929Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.789104Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 192us result status StatusPathDoesNotExist 2025-04-04T17:09:13.789276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:13.789658Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.789835Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 162us result status StatusPathDoesNotExist 2025-04-04T17:09:13.789995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:13.790470Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.790662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 150us result status StatusSuccess 2025-04-04T17:09:13.790985Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::NodesRange1 [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.425598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.425700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.425762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.425802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.425852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.425879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.425931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.426016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-04-04T17:09:13.426363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.504448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.504512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.515595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.515845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.515984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.526272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.526575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.527268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.528095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.533416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.534790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.534879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.535078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.535125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.535164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.535286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.541504Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.669360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.669608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.669840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.670094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.670167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.672264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-04T17:09:13.672403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.672575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.672633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.672674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.672732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.674822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.674885Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.674923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.676765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.676813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.676859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.676937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.680785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.683015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.683229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.684242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.684411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.684475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.684812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.684869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.685047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.685160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.690391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.690450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.690636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.690692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.690986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.691025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.691102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.691151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.691192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.691223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.691263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.691313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.691358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.691384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.691443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.691475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.691502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.692904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.692987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.693016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cationSubscriber for txId 107: satisfy waiter [1:571:2525] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-04T17:09:13.917523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.917756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.917973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:13.918179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.918233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.920295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:13.920428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-04T17:09:13.920627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.920699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.920762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.920817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2025-04-04T17:09:13.922704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.922761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.922814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2025-04-04T17:09:13.924292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.924347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.924432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-04-04T17:09:13.924475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-04-04T17:09:13.924606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.926128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-04-04T17:09:13.926297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2025-04-04T17:09:13.926699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.926805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.926849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-04T17:09:13.927082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2025-04-04T17:09:13.927152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-04-04T17:09:13.927339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:13.927408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2025-04-04T17:09:13.929160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.929205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:13.929389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.929442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 108, path id: 2 2025-04-04T17:09:13.929732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.929786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-04T17:09:13.929874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-04T17:09:13.929907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:09:13.929940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-04T17:09:13.929985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:09:13.930020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2025-04-04T17:09:13.930061Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:09:13.930094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-04T17:09:13.930118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-04T17:09:13.930175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:09:13.930206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-04-04T17:09:13.930231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2025-04-04T17:09:13.930935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:09:13.931033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:09:13.931081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-04-04T17:09:13.931135Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-04T17:09:13.931169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:13.931257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-04-04T17:09:13.934052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-04T17:09:13.934431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-04T17:09:13.934472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-04T17:09:13.935036Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-04T17:09:13.935122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-04T17:09:13.935160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:596:2550] TestWaitNotification: OK eventTxId 108 2025-04-04T17:09:13.935826Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:13.936079Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 258us result status StatusSuccess 2025-04-04T17:09:13.936438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 
72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.659825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.659882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.659961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.754478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.754532Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-04T17:09:09.771078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.771324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:09.771447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.782373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.782789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.783413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.784491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.791787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.798699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.798784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.798947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.799003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.799051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.799172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.805591Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.931292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.931506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.931721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.933594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.933677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.936298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.936473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.936647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.936698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.936787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.936838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.939113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.939179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.939215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.940727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.940865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.940907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.940976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.945602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.952975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.953165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.954997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.955123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.955187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.958667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.958741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.958920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.959033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.963102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.963153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.963337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.963380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.963653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.963721Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.963832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.963878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.963921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.963954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.963993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.964030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.964064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.964092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.964161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.964203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.964235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.966942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.967068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.967126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
6678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2
2025-04-04T17:09:13.867903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0
2025-04-04T17:09:13.868041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2
2025-04-04T17:09:13.868098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944
2025-04-04T17:09:13.870842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.870917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944
2025-04-04T17:09:13.870968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1
2025-04-04T17:09:13.871020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1
2025-04-04T17:09:13.871111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944
2025-04-04T17:09:13.871241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129
2025-04-04T17:09:13.871371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-04T17:09:13.871431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-04T17:09:13.872207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.872355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.875108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:13.875160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:09:13.875381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-04T17:09:13.875571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:13.875619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2
2025-04-04T17:09:13.875681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3
2025-04-04T17:09:13.876149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.876220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944
2025-04-04T17:09:13.876324Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.876367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944
2025-04-04T17:09:13.876407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240
2025-04-04T17:09:13.877289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:09:13.877413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:09:13.877447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-04T17:09:13.877500Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8
2025-04-04T17:09:13.877545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-04-04T17:09:13.878383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:09:13.878463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:09:13.878487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-04T17:09:13.878515Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615
2025-04-04T17:09:13.878540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-04-04T17:09:13.878609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true
2025-04-04T17:09:13.880849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.880907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:09:13.881283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-04-04T17:09:13.881484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-04-04T17:09:13.881522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:09:13.881592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-04-04T17:09:13.881648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:09:13.881687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true
2025-04-04T17:09:13.881749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103
2025-04-04T17:09:13.881801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:09:13.881838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0
2025-04-04T17:09:13.881874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0
2025-04-04T17:09:13.881991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-04-04T17:09:13.883028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:13.883070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:09:13.883765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-04T17:09:13.884046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-04T17:09:13.885006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:13.885054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2
2025-04-04T17:09:13.885164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-04-04T17:09:13.885202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:667:2601]
2025-04-04T17:09:13.885897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0
TestWaitNotification: OK eventTxId 103
2025-04-04T17:09:13.886854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:13.887076Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 201us result status StatusSuccess
2025-04-04T17:09:13.887496Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:13.580065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:13.580183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:13.580234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:13.580266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:13.580308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:13.580337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:13.580387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:13.580460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:13.580727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:13.657602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:13.657648Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:13.666384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:13.666625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:13.666779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:13.677008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:13.677273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:13.677756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:13.681584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:09:13.693106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:13.694370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:13.694436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:13.694633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:09:13.694697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:13.694738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:09:13.694860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.703079Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:09:13.821861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:09:13.822089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.822350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:09:13.822585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:09:13.822669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.824622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:13.824751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:09:13.824933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.824981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:09:13.825019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:09:13.825062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:09:13.827101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.827137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:09:13.827162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:09:13.828644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.828695Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.828742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:13.828819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:09:13.832311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:09:13.834005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:09:13.834193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:09:13.835160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:13.835280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:13.835336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:13.835620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:09:13.835675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:13.835867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:13.835958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:09:13.837801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:13.837844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:13.838016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:13.838051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:09:13.838396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.838461Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:09:13.838557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:13.838607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:13.838669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:13.838705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:13.838740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:09:13.838782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:13.838812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:09:13.838840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:09:13.838906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:13.838943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:09:13.838972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:09:13.840647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:13.840753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:13.840783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
:09:14.135825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:09:14.135863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-04-04T17:09:14.135890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-04-04T17:09:14.136062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 9
2025-04-04T17:09:14.136100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-04-04T17:09:14.136132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 7
2025-04-04T17:09:14.136157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 4], 3
2025-04-04T17:09:14.136871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.136964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.137008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:09:14.137046Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7
2025-04-04T17:09:14.137079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-04T17:09:14.137488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.137558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.137588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:09:14.137612Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3
2025-04-04T17:09:14.137648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 8
2025-04-04T17:09:14.137703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-04-04T17:09:14.140782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-04T17:09:14.140866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 100
2025-04-04T17:09:14.141079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-04-04T17:09:14.141120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
TestWaitNotification wait txId: 101
2025-04-04T17:09:14.141255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-04T17:09:14.141283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
TestWaitNotification wait txId: 102
2025-04-04T17:09:14.141332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-04T17:09:14.141349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-04T17:09:14.141896Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-04-04T17:09:14.141988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-04T17:09:14.142069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-04-04T17:09:14.142106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:930:2764]
2025-04-04T17:09:14.142267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-04T17:09:14.142332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-04T17:09:14.142372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:930:2764]
2025-04-04T17:09:14.142469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-04T17:09:14.142483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:930:2764]
TestWaitNotification: OK eventTxId 100
TestWaitNotification: OK eventTxId 101
TestWaitNotification: OK eventTxId 102
2025-04-04T17:09:14.142811Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:14.143002Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 207us result status StatusSuccess
2025-04-04T17:09:14.143383Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:14.143870Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:14.143996Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 144us result status StatusSuccess
2025-04-04T17:09:14.144270Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:14.144691Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:14.144822Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 121us result status StatusSuccess
2025-04-04T17:09:14.145084Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:13.818712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:13.818820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:13.818886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:13.818924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:13.818972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:13.819004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:13.819067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:13.819165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:13.819519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:13.896907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:13.896954Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:13.907691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:13.907960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:13.908115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:13.917418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:13.917713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:13.918266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:13.919002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:09:13.924114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:13.925440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:13.925517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:13.925705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:09:13.925771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:13.925836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:09:13.925971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:09:13.932385Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:09:14.049805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:09:14.050024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.050305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:09:14.050562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:09:14.050627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.055326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.055468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:09:14.055673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.055730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:09:14.055769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:09:14.055816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:09:14.057969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.058030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:09:14.058067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:09:14.060068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.060122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.060180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.060281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.064178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:09:14.066219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:09:14.066431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:09:14.067465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.067620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:14.067681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.067981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:09:14.068034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.068214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:14.068321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:09:14.071240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.071303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.071489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.071536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:09:14.071869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.071922Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:09:14.072018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:14.072084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.072133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:14.072170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.072204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:09:14.072247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.072288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:09:14.072318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:09:14.072400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:14.072438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:09:14.072474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:09:14.073942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:14.074060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:14.074097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
O: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944
2025-04-04T17:09:14.154164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.154195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:09:14.154319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130
2025-04-04T17:09:14.154450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:14.154509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-04T17:09:14.155821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-04T17:09:14.155976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
FAKE_COORDINATOR: Erasing txId 101
2025-04-04T17:09:14.157555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.157596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.157739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:09:14.157893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.157945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-04-04T17:09:14.157984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2
2025-04-04T17:09:14.158285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.158336Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState
2025-04-04T17:09:14.158386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-04T17:09:14.158420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-04T17:09:14.158458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-04T17:09:14.158492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-04T17:09:14.158531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false
2025-04-04T17:09:14.158567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-04T17:09:14.158600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0
2025-04-04T17:09:14.158634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0
2025-04-04T17:09:14.158722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-04T17:09:14.158773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0
2025-04-04T17:09:14.158825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-04-04T17:09:14.158861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-04-04T17:09:14.159455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101
2025-04-04T17:09:14.159573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101
2025-04-04T17:09:14.159610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101
2025-04-04T17:09:14.159651Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-04-04T17:09:14.159708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:14.160425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101
2025-04-04T17:09:14.160495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101
2025-04-04T17:09:14.160521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101
2025-04-04T17:09:14.160546Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-04T17:09:14.160571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-04T17:09:14.160647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0
2025-04-04T17:09:14.161556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-04T17:09:14.161611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-04T17:09:14.161700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-04T17:09:14.161953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-04T17:09:14.162008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-04T17:09:14.162068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:14.164465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-04T17:09:14.164717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-04-04T17:09:14.166277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-04T17:09:14.166364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 101, wait until txId: 101
TestWaitNotification wait txId: 101
2025-04-04T17:09:14.166597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-04T17:09:14.166687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-04-04T17:09:14.167115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-04T17:09:14.167208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-04T17:09:14.167244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:341:2332]
TestWaitNotification: OK eventTxId 101
2025-04-04T17:09:14.167750Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:14.167949Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 229us result status StatusPathDoesNotExist
2025-04-04T17:09:14.168154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-04T17:09:14.168662Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:14.168829Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 171us result status StatusSuccess
2025-04-04T17:09:14.169208Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD]
>> TSchemeShardSubDomainTest::DeleteAdd [GOOD]
>> TSchemeShardSubDomainTest::Delete [GOOD]
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
>> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:14.137006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:14.137097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:14.137134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:14.137165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:14.137204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:14.137224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:14.137265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:14.137365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:14.137692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:14.210127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:14.210183Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:14.221399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:14.221581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:14.221705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:14.232488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:14.232804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:14.233414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.234263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:09:14.239406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.240710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.240787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.240962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:09:14.241008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.241044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:09:14.241144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.247020Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:09:14.376920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:09:14.377120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.377325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:09:14.377492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:09:14.377544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.379590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.379725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:09:14.379892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.379962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:09:14.379998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:09:14.380053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:09:14.382427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.382481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:09:14.382515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:09:14.384530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.384599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.384636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.384708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.387822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:09:14.389465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:09:14.389638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:09:14.390602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.390728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:14.390785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.391035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:09:14.391097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.391256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:14.391350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:09:14.393112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.393148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.393268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.393331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:09:14.393585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.393617Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:09:14.393679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:14.393715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.393746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:14.393798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.393849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:09:14.393895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.393926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:09:14.393952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:09:14.394015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:14.394059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:09:14.394090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:09:14.395726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:14.395821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:14.395854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
FAKE_COORDINATOR: Erasing txId 102
2025-04-04T17:09:14.564018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.564054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.564194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:09:14.564355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.564396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-04-04T17:09:14.564445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-04-04T17:09:14.564750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.564793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState
2025-04-04T17:09:14.564844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-04T17:09:14.564877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:09:14.564928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-04T17:09:14.564963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:09:14.565002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false
2025-04-04T17:09:14.565042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:09:14.565079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-04-04T17:09:14.565115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-04-04T17:09:14.565280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
2025-04-04T17:09:14.565324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-04-04T17:09:14.565358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-04-04T17:09:14.565396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-04-04T17:09:14.566072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.566174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.566211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:09:14.566409Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-04-04T17:09:14.566459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:14.567300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.567394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:09:14.567436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:09:14.567463Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-04T17:09:14.567497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-04-04T17:09:14.567561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-04-04T17:09:14.572145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944
2025-04-04T17:09:14.572216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944
2025-04-04T17:09:14.572265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944
2025-04-04T17:09:14.573612Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546
2025-04-04T17:09:14.574288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.574706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-04-04T17:09:14.575104Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548
2025-04-04T17:09:14.575831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944
2025-04-04T17:09:14.576019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
Forgetting tablet 72075186233409546
Forgetting tablet 72075186233409548
2025-04-04T17:09:14.577329Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547
2025-04-04T17:09:14.577632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-04-04T17:09:14.577806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
Forgetting tablet 72075186233409547
2025-04-04T17:09:14.579660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-04T17:09:14.579782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-04T17:09:14.579828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-04T17:09:14.579975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-04T17:09:14.580216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-04T17:09:14.580289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-04T17:09:14.580323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-04T17:09:14.580370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:14.582407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1
2025-04-04T17:09:14.582473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546
2025-04-04T17:09:14.584016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3
2025-04-04T17:09:14.584056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548
2025-04-04T17:09:14.584777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-04-04T17:09:14.584830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-04-04T17:09:14.585022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-04-04T17:09:14.585110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-04-04T17:09:14.585394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-04T17:09:14.585435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-04T17:09:14.585873Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-04T17:09:14.585978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-04T17:09:14.586023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:518:2472]
TestWaitNotification: OK eventTxId 102
2025-04-04T17:09:14.586611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:14.586823Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 223us result status StatusPathDoesNotExist
2025-04-04T17:09:14.586995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD]
Test command err:
Trying to start YDB, gRPC: 3515, MsgBus: 1260
2025-04-04T17:08:59.090777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506159332232823:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:59.090834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b87/r3tmp/tmpTmi7eP/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 3515, node 1
2025-04-04T17:08:59.435343Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:08:59.440914Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:08:59.496088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:08:59.506425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:08:59.533010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:08:59.554905Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:08:59.554934Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:08:59.554942Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:08:59.555078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1260
TClient is connected to server localhost:1260
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:00.193657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:00.442359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:00.442401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:00.442500Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7489506159332233238:2187], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:00.442535Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:01.443132Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:01.443165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:01.443214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7489506159332233238:2187], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:01.443237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:02.233350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506172217135379:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:02.233446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:02.443394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:02.443437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:02.443497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7489506159332233238:2187], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:02.443510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:02.449064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489506172217135404:2311], Recipient [1:7489506159332233238:2187]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T17:09:02.449101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T17:09:02.449118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-04-04T17:09:02.449157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489506172217135400:2308], Recipient [1:7489506159332233238:2187]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480}
2025-04-04T17:09:02.449176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction
2025-04-04T17:09:02.506217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480
2025-04-04T17:09:02.506815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:09:02.506956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480
2025-04-04T17:09:02.507454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480
2025-04-04T17:09:02.507487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0
2025-04-04T17:09:02.507855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1
2025-04-04T17:09:02.507961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2
2025-04-04T17:09:02.507972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2
2025-04-04T17:09:02.508661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 2] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false
2025-04-04T17:09:02.508815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:09:02.508831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:09:02.508980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480
2025-04-04T17:09:02.509017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1
2025-04-04T17:09:02.509040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3
2025-04-04T17:09:02.509525Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1
2025-04-04T17:09:02.509736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480
2025-04-04T17:09:02.509860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers
2025-04-04T17:09:02.509882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-04T17:09:02.509893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976710658:0
2025-04-04T17:09:02.510078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:7489506159332233238:2187], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOper ...
ksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82472 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 1743786542517 TableOwnerId: 72057594046644480 FollowerId: 0
2025-04-04T17:09:12.588463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats
2025-04-04T17:09:12.588497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 800 rowCount 4 cpuUsage 0
2025-04-04T17:09:12.588586Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: DataSize: 800 RowCount: 4 IndexSize: 0 InMemSize: 800 LastAccessTime: 1743786542994 LastUpdateTime: 1743786542891 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0
2025-04-04T17:09:12.588611Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099994s, queue# 1
2025-04-04T17:09:12.626386Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7489506172217135478:2338]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
2025-04-04T17:09:12.626420Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup
2025-04-04T17:09:12.626521Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037888 FollowerId 1, TableInfos size = 1
2025-04-04T17:09:12.626584Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 1, tableId 2
2025-04-04T17:09:12.627122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489506215166808743:2527], Recipient [1:7489506159332233238:2187]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T17:09:12.627144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T17:09:12.627162Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480
2025-04-04T17:09:12.627369Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877760, Sender [1:7489506215166808742:2413], Recipient [1:7489506172217135478:2338]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [1:7489506215166808743:2527] Leader: 1 Dead: 0 Generation: 2 VersionInfo: }
2025-04-04T17:09:12.627391Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvClientConnected
2025-04-04T17:09:12.627665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7489506172217135478:2338], Recipient [1:7489506159332233238:2187]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { } ShardState: 3 NodeId: 1 StartTime: 1743786542597 TableOwnerId: 72057594046644480 FollowerId: 1
2025-04-04T17:09:12.627684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats
2025-04-04T17:09:12.627709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 1 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Readonly' dataSize 0 rowCount 0 cpuUsage 0
2025-04-04T17:09:12.627759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 1 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0
2025-04-04T17:09:12.632584Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7489506172217135480:2339]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
2025-04-04T17:09:12.632610Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup
2025-04-04T17:09:12.636858Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7489506172217135481:2340]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup
2025-04-04T17:09:12.636898Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup
2025-04-04T17:09:12.688977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats
2025-04-04T17:09:12.689014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats
2025-04-04T17:09:12.689037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2
2025-04-04T17:09:12.689085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2
2025-04-04T17:09:12.689103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2
2025-04-04T17:09:12.689168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 800 row count 4
2025-04-04T17:09:12.689219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0
2025-04-04T17:09:12.689250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 2: RowCount 4, DataSize 800
2025-04-04T17:09:12.689266Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0
2025-04-04T17:09:12.689323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480
2025-04-04T17:09:12.689415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 0 row count 0
2025-04-04T17:09:12.689435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=1, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0
2025-04-04T17:09:12.689445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=1, pathId 2: RowCount 0, DataSize 0
2025-04-04T17:09:12.689451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 1
2025-04-04T17:09:12.689503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480
2025-04-04T17:09:12.689596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats
2025-04-04T17:09:12.689607Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats
2025-04-04T17:09:12.689620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0
2025-04-04T17:09:12.694405Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1743786542517 AccessTime: 1743786542994 UpdateTime: 1743786542891 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0
2025-04-04T17:09:12.694527Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 1 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1743786542597 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 1
2025-04-04T17:09:13.463137Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:13.463173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:09:13.463226Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7489506159332233238:2187], Recipient [1:7489506159332233238:2187]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:13.463240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:09:13.613834Z node 1 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [1:7489506159332232959:2194], database# /Root, no sysview processor ...
SELECT from partition_stats, attempt 2
2025-04-04T17:09:14.007094Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489506223756743358:2422], owner: [1:7489506223756743354:2420], scan id: 0, table id: [72057594046644480:1:0:partition_stats]
2025-04-04T17:09:14.007660Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489506223756743358:2422], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1
2025-04-04T17:09:14.007926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7489506223756743358:2422], Recipient [1:7489506159332233238:2187]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true
2025-04-04T17:09:14.007946Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats
2025-04-04T17:09:14.008102Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489506223756743358:2422], row count: 2, finished: 1
2025-04-04T17:09:14.008131Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489506223756743358:2422], owner: [1:7489506223756743354:2420], scan id: 0, table id: [72057594046644480:1:0:partition_stats]
2025-04-04T17:09:14.013798Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7489506159332232959:2194], database# /Root, query hash# 14960494650040056739, cpu time# 210639
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:13.993965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:13.994079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:13.994133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:13.994169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:13.994222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:13.994273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:13.994333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:13.994417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:13.994727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:14.083616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:14.083677Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:14.095549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:14.095788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:14.095941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:14.107688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:14.108034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:14.108659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.109420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:09:14.115311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.116633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.116765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.116979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:09:14.117027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.117069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:09:14.117190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.123590Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:09:14.257946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:09:14.258185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.258438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:09:14.258708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:09:14.258793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.260960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.261108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:09:14.261294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.261349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:09:14.261385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:09:14.261440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:09:14.262962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.263022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:09:14.263060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:09:14.264738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.264790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.264828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.264903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.268645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:09:14.270707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:09:14.270906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:09:14.272014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.272148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:14.272208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.272501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:09:14.272574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:14.272800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:14.272895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:09:14.275090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.275143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.275297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.275341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:09:14.275707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.275764Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:09:14.275891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:14.275945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.275993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:14.276033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.276072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:09:14.276144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:14.276194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:09:14.276236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:09:14.276323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:14.276365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:09:14.276399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:09:14.278203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:14.278369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:14.278416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
2025-04-04T17:09:14.669418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 102 at step: 5000004
FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004
2025-04-04T17:09:14.669691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:14.669783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:14.669841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944
2025-04-04T17:09:14.670182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240
2025-04-04T17:09:14.670234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944
2025-04-04T17:09:14.670409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:14.670468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8
2025-04-04T17:09:14.670512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-04-04T17:09:14.672343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:14.672380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:14.672487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-04-04T17:09:14.672601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:14.672643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-04-04T17:09:14.672677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3
FAKE_COORDINATOR: Erasing txId 102
2025-04-04T17:09:14.672784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-04-04T17:09:14.672825Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-04-04T17:09:14.672919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-04T17:09:14.672951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:09:14.673011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-04T17:09:14.673054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102
ready parts: 1/1 2025-04-04T17:09:14.673089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T17:09:14.673126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:14.673164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:14.673205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:14.673385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-04-04T17:09:14.673443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T17:09:14.673477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-04T17:09:14.673503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:09:14.674318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:14.674411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:14.674451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:09:14.674493Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T17:09:14.674535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:14.675090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:14.675170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:14.675203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:09:14.675230Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:09:14.675266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-04-04T17:09:14.675327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:09:14.678905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:14.679913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 
102 TestWaitNotification wait txId: 102 2025-04-04T17:09:14.680173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:09:14.680213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:09:14.680620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:09:14.680736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:14.680775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:981:2804] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:14.681254Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:14.681457Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusSuccess 2025-04-04T17:09:14.681798Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.682366Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:14.682512Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 158us result status StatusSuccess 2025-04-04T17:09:14.682788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:14.508354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:14.508463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:14.508502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:14.508526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:14.508564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:14.508593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:14.508648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:14.508705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:14.508909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:14.585866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:14.585923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:14.597214Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:14.597464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:14.597601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:14.607310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:14.607599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:14.608200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.608930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:14.614026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.615798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.615869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.616064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:14.616109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.616188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:14.616307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.622796Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:14.739321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:14.739514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.739745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:14.739977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:14.740034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.747269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.747419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:14.747610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T17:09:14.747661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:14.747700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:14.747748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:14.749533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.749587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:14.749620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:14.751288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.751334Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.751369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.751441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.754922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:14.756513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:14.756667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:14.757569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.757696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.757741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.757985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:14.758042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.758194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:14.758318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-04T17:09:14.760185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.760235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.760385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.760417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:14.760726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.760778Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:14.760860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.760907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.760950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.760986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.761023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:14.761057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.761096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:14.761131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:14.761223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:14.761263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:14.761290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:14.763047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.763137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.763168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:14.893767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:14.893801Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:14.893830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:14.893891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:09:14.895081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:14.895129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:14.895168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:14.896446Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:09:14.897261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.897539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-04T17:09:14.898522Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-04T17:09:14.898784Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:09:14.898969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:14.899237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-04-04T17:09:14.900123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:14.900284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-04-04T17:09:14.900703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:14.900852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:14.900888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:14.900985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:14.901422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:14.901508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:14.901545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:14.901606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:14.903800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:09:14.903860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:09:14.904120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:09:14.904158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:09:14.905146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:09:14.905188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:14.905295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:14.905429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:09:14.905625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:14.905664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:09:14.905978Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:14.906049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:14.906080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:493:2447] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:14.906574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:14.906740Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 208us result status StatusPathDoesNotExist 2025-04-04T17:09:14.906905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:14.907362Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:14.907536Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 150us result status StatusSuccess 2025-04-04T17:09:14.907846Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-04T17:09:14.908254Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-04T17:09:14.908349Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-04T17:09:14.908391Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-04T17:09:14.908743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:14.908887Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 152us result status StatusSuccess 2025-04-04T17:09:14.909201Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD]
>> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD]
>> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD]
>> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.993546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.993681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.993739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.993774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.993819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.993860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.993927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.994013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s,
IsManualStartup# false 2025-04-04T17:09:13.994369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:14.097061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:14.097116Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:14.107438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:14.107663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:14.107788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:14.118558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:14.118955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:14.119493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.120308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:14.126485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.127943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.128016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.128197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:14.128258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.128304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:14.128422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.136513Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:14.259189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:14.259406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.259600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:14.259814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:14.259877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.261872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-04-04T17:09:14.261966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:14.262116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.262171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:14.262200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:14.262257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:14.264015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.264069Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:14.264104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:14.265844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.265893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.265935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.266001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.269481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:14.271410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:14.271594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:14.272643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.272783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.272838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.273046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:14.273085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.273222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:14.273288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:14.275019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.275073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.275236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.275274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:14.275550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.275587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:14.275674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.275725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.275762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.275792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.275819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:14.275859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.275897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:14.275930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:14.276010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:14.276063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:14.276098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:14.278023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.278170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.278216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:14.675180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:14.675212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:14.675243Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:09:14.675273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:09:14.675373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:09:14.676237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1273 } } 2025-04-04T17:09:14.676282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-04T17:09:14.676409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1273 } } 2025-04-04T17:09:14.676533Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1273 } } 2025-04-04T17:09:14.679694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 620 RawX2: 4294969825 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:14.679746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-04-04T17:09:14.679874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 620 RawX2: 4294969825 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:14.679943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:14.680069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
message: Source { RawX1: 620 RawX2: 4294969825 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:14.680128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.680168Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.680209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-04-04T17:09:14.680403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:14.683580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:14.684505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:14.684676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.685007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.685255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.685305Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:14.685410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:14.685444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:14.685484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:14.685517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:14.685556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:09:14.685622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:273:2264] message: TxId: 101 2025-04-04T17:09:14.685674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:14.685739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:14.685778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:14.685887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:14.689235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:14.689280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:274:2265] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:14.689772Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: 
false }, at schemeshard: 72057594046678944 2025-04-04T17:09:14.689961Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 199us result status StatusSuccess 2025-04-04T17:09:14.690425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.691047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:14.691314Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 231us result status StatusSuccess 2025-04-04T17:09:14.691704Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:14.601172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:14.601271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:14.601328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:14.601361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:14.601406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:14.601433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:14.601484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:14.601565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:14.601874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:14.678748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:14.678808Z node 1 :IMPORT WARN:
Table profiles were not loaded 2025-04-04T17:09:14.690472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:14.690754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:14.690911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:14.699660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:14.699979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:14.700608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.701489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:14.706807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.708160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.708232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.708417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:14.708464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.708506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:14.708634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.719334Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:14.864045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:14.864328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.864640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:14.864908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:14.864979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.872651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.872839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:14.873071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.873139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:14.873180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:14.873234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:14.885298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.885396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:14.885469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:14.894721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.894812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.894876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.894960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.898849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:14.901030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:14.901268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:14.902348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.902492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.902550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.902891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:14.902958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.903155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:14.903303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:14.905611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.905654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.905812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.905843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:14.906122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.906180Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:14.906306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.906360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.906413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.906451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.906499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:14.906542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.906576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:14.906609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:14.906710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:14.906760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:14.906793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:14.908760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.908877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.908914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:14.945413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:14.947201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.947259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.947314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:14.947364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-04T17:09:14.947501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:14.949040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:09:14.949148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-04T17:09:14.949409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.949501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.949559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:14.949743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:09:14.949787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:14.949979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:14.950026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:14.950060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:14.952166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.952212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.952360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 
72057594046678944, LocalPathId: 2] 2025-04-04T17:09:14.952457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.952522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:14.952570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:14.952879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.952925Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:14.953015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:14.953049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:14.953082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:14.953126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:14.953171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:09:14.953207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:14.953258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:14.953292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:14.953348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:14.953384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:09:14.953434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:09:14.953467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:09:14.954028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:14.954104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:14.954137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:14.954174Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:14.954228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:14.956367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 
2025-04-04T17:09:14.956427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:14.956447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:14.956469Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:14.956506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:14.956564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:09:14.958207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:14.959198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T17:09:14.961612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:14.961802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.961981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2025-04-04T17:09:14.963928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:14.964091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-04-04T17:09:14.964336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:09:14.964384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-04T17:09:14.964493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:09:14.964514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:09:14.964932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:09:14.965087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:14.965126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:313:2304] 2025-04-04T17:09:14.965281Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:09:14.965371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:14.965393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102
>> KqpSystemView::Join [GOOD]
>> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD]
>> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.193434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.193539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.193596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.193623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.193661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.193685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.193735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
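The StatusInvalidParameter rejection above (txId 102 in TStoragePoolsQuotasTest::QuoteNonexistentPool) is the behavior under test: a DatabaseQuotas storage quota may only name a kind that one of the subdomain's storage pools actually provides. Below is a minimal C++ sketch of such a validation, assuming hypothetical names (TStorageQuota, ValidateStorageQuotas) rather than YDB's real schemeshard code; it merely reproduces the reason string seen in the log.

#include <cstdint>
#include <iostream>
#include <optional>
#include <set>
#include <sstream>
#include <string>
#include <vector>

struct TStorageQuota {
    std::string UnitKind;        // e.g. "nonexistent_storage_kind"
    uint64_t DataSizeHardQuota;  // bytes
};

// Returns an error reason if some quota names a kind no pool provides,
// std::nullopt if every quota kind is backed by a storage pool.
std::optional<std::string> ValidateStorageQuotas(
        const std::set<std::string>& existingKinds,
        const std::vector<TStorageQuota>& quotas,
        const std::string& subdomainPath) {
    std::vector<std::string> unknown;
    for (const auto& q : quotas) {
        if (existingKinds.count(q.UnitKind) == 0) {
            unknown.push_back(q.UnitKind);
        }
    }
    if (unknown.empty()) {
        return std::nullopt;
    }
    std::ostringstream msg;
    msg << "Malformed subdomain request: cannot set storage quotas of the following kinds: ";
    for (size_t i = 0; i < unknown.size(); ++i) {
        msg << (i ? ", " : "") << unknown[i];
    }
    msg << ", because no storage pool in the subdomain " << subdomainPath
        << " has the specified kinds. Existing storage kinds are: ";
    size_t n = 0;
    for (const auto& kind : existingKinds) {
        msg << (n++ ? ", " : "") << kind;
    }
    return msg.str();
}

int main() {
    // Mirrors the test: pools pool-kind-1/pool-kind-2, quota for a nonexistent kind.
    auto err = ValidateStorageQuotas({"pool-kind-1", "pool-kind-2"},
                                     {{"nonexistent_storage_kind", 1}},
                                     "/MyRoot/SomeDatabase");
    if (err) {
        std::cout << *err << "\n";  // prints the reason carried by the AUDIT record above
    }
}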
2025-04-04T17:09:13.193805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:13.194062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.264361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.264409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.275316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.275572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.275718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.286437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.286746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.287402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.288737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.294052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.295256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.295299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.295425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.295473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.295508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.295617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.301375Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.407841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.408043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.408287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.408511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.408576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.410857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.411031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.411261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.411316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.411349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.411403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.413569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.413620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.413655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.415439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.415487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.415548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.415614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.419396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.421348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.421514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.422431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.422545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.422595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.422886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-04T17:09:13.422938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.423109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.423195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.424993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.425038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.425188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.425222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.425483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.425532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.425656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.425691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.425740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.425776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.425807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.425848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.425877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.425904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.425952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.425984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.426016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.427701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.427819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.427851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6545 cookie: 0:108 msg type: 269090816 2025-04-04T17:09:15.205225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2025-04-04T17:09:15.205722Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:15.205808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:15.205877Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-04-04T17:09:15.206013Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2025-04-04T17:09:15.206151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:15.206219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T17:09:15.206595Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=108;fline=tx_controller.cpp:211;event=finished_tx;tx_id=108; FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-04T17:09:15.208043Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:15.208085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:15.208249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-04T17:09:15.208388Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:15.208419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:337:2313], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-04-04T17:09:15.208454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:337:2313], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-04-04T17:09:15.208789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.208837Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:15.208888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-04-04T17:09:15.209462Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 
Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:09:15.209537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:09:15.209564Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-04T17:09:15.209596Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-04T17:09:15.209631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:09:15.210178Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:09:15.210261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:09:15.210300Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-04T17:09:15.210323Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-04-04T17:09:15.210344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-04T17:09:15.210392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-04-04T17:09:15.211826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-04-04T17:09:15.212995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-04T17:09:15.213087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2025-04-04T17:09:15.225317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2025-04-04T17:09:15.225370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-04-04T17:09:15.225479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2025-04-04T17:09:15.225567Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvColumnShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409549 TxId: 108 MinStep: 0 Step: 5000004 2025-04-04T17:09:15.225901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 
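The txId 108 trace above walks the schemeshard operation state machine: "Change state for txid 108:0 128 -> 129" once the plan step arrives, then "129 -> 240" after the column shard confirms via TEvNotifyTxCompletionResult. A compact sketch of those transitions as a table follows; the numeric codes come straight from the "Change state" lines in these traces (2 -> 3 -> 128 -> 240 and 128 -> 129 -> 240), while the enum labels are inferred from the surrounding ProgressState names (TCreateParts, TConfigureParts, TPropose, TProposedWaitParts, TDone) and are an assumption, not YDB's actual TTxState definition.

#include <cstdint>
#include <map>
#include <set>

// Labels are guesses from the log; the numeric values match the trace.
enum class ETxState : uint8_t {
    CreateParts = 2,
    ConfigureParts = 3,
    Propose = 128,
    ProposedWaitParts = 129,
    Done = 240,
};

bool IsLegalTransition(ETxState from, ETxState to) {
    static const std::map<ETxState, std::set<ETxState>> legal = {
        {ETxState::CreateParts,       {ETxState::ConfigureParts}},
        {ETxState::ConfigureParts,    {ETxState::Propose}},
        // 128 -> 240 when nothing must confirm (the subdomain alters above);
        // 128 -> 129 when the operation waits on shard replies (txId 108).
        {ETxState::Propose,           {ETxState::ProposedWaitParts, ETxState::Done}},
        {ETxState::ProposedWaitParts, {ETxState::Done}},
    };
    const auto it = legal.find(from);
    return it != legal.end() && it->second.count(to) > 0;
}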
2025-04-04T17:09:15.225946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-04-04T17:09:15.226039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-04-04T17:09:15.226088Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-04-04T17:09:15.228274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.229121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.229259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.229314Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-04-04T17:09:15.229442Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-04T17:09:15.229475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:09:15.229538Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-04T17:09:15.229574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:09:15.229607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-04-04T17:09:15.229671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:489:2437] message: TxId: 108 2025-04-04T17:09:15.229713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:09:15.229751Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-04T17:09:15.229782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-04T17:09:15.230077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T17:09:15.231767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-04T17:09:15.231822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:860:2784] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-04-04T17:09:15.235055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:15.235253Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.235629Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:15.238101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:15.238277Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-04-04T17:09:15.238683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-04-04T17:09:15.238732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-04-04T17:09:15.239263Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-04-04T17:09:15.239390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-04T17:09:15.239427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:896:2820] TestWaitNotification: OK eventTxId 109
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.856349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.856456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.856499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.856531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.856575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.856603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.856659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.856748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:13.857022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.933712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.933768Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.944603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.944860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
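TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects exercises the schema limit that produced "Too many columns. new: 4. Limit: 3" above: with a three-column limit, adding comment2 to the default preset would make a fourth column, so txId 109 fails with StatusSchemeError before any shard is touched. A self-contained sketch of that kind of check (CheckColumnLimit is a made-up name for illustration):

#include <cstddef>
#include <optional>
#include <string>

// Returns a rejection reason if the alter would exceed the column limit,
// std::nullopt if the proposal may proceed.
std::optional<std::string> CheckColumnLimit(size_t currentColumns,
                                            size_t addedColumns,
                                            size_t maxColumns) {
    const size_t next = currentColumns + addedColumns;
    if (next <= maxColumns) {
        return std::nullopt;
    }
    // Matches the reason string format seen in the AUDIT record above.
    return "Too many columns. new: " + std::to_string(next) +
           ". Limit: " + std::to_string(maxColumns);
}

For the rejected alter above, CheckColumnLimit(3, 1, 3) yields the exact reason string, while CheckColumnLimit(2, 1, 3) would return std::nullopt and let the operation continue through the propose path.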
2025-04-04T17:09:13.945009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.954307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.954601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.955306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.956031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.960803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.962188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.962265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.962441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.962489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.962532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.962662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.968875Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:14.067654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:14.067885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.068096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:14.068368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:14.068440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.070689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.070801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:14.070945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.070998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:14.071028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:14.071065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:14.074888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.074947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:14.074985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:14.077119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.077169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.077227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.077290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.081015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:14.083057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:14.083241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:14.084300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.084435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.084497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.084802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:14.084861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.085054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:14.085148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:14.087186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:09:14.087264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.087434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.087475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:14.087788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.087836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:14.087963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.088012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.088073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.088183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.088221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:14.088263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.088298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:14.088336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:14.088402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:14.088439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:14.088471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:14.090404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.090515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.090556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-04-04T17:09:15.281390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:09:15.281410Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:09:15.281436Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:09:15.281463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:15.282490Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:09:15.282561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:09:15.282588Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:09:15.282617Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:15.282656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:15.282742Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-04T17:09:15.283667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:15.283717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:15.283745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:15.283771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:15.284834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:09:15.285549Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:09:15.285781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:15.286050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-04T17:09:15.287661Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-04T17:09:15.288591Z node 2 :HIVE INFO: 
[72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:09:15.288740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:15.290754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2025-04-04T17:09:15.291128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:15.291266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:15.291903Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-04T17:09:15.292081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 Forgetting tablet 72075186233409547 2025-04-04T17:09:15.292806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:09:15.292976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2025-04-04T17:09:15.294011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:15.294053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:09:15.294104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:15.294757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:15.294797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:15.294930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:15.295186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:09:15.300623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:09:15.300683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:09:15.300935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:09:15.300965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:09:15.301024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 
2025-04-04T17:09:15.301049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:09:15.301443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:09:15.301485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:09:15.302075Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:15.302175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:15.302228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:15.302326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:15.302546Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:15.303841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:09:15.304095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:09:15.304139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:09:15.304564Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:09:15.304650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:09:15.304691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:649:2601] TestWaitNotification: OK eventTxId 105 2025-04-04T17:09:15.305226Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:15.305404Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 225us result status StatusPathDoesNotExist 2025-04-04T17:09:15.305567Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:09:15.306074Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:15.306224Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 160us result status StatusPathDoesNotExist 2025-04-04T17:09:15.306366Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 61267, MsgBus: 25513 2025-04-04T17:09:00.288480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506160628331277:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.288520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:00.345651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506164741810425:2148];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.345737Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:00.366047Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489506164098097092:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.368054Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:00.414713Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506163160735629:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.414809Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:01.002367Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506163982620880:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/i5tq/000b6b/r3tmp/tmpCtybKV/pdisk_1.dat 2025-04-04T17:09:01.503058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:01.499970Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:01.545769Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:01.545419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:01.545087Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:02.510048Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:02.506924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:02.555804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:02.581581Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:02.579204Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:02.675568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:02.675675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.678355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:02.678444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.686966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:02.687036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.687178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:02.687215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.691405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:02.691467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:02.698676Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-04T17:09:02.698708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:09:02.698733Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:09:02.698745Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T17:09:02.699131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.703664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.703851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.703986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.704084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:02.800719Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61267, node 1 2025-04-04T17:09:02.858520Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:02.858547Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:03.426523Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:03.426544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:03.426552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:03.447023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25513 2025-04-04T17:09:05.290398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506160628331277:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:05.290451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:05.345887Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506164741810425:2148];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:05.345971Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:05.374383Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489506164098097092:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:05.374466Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:05.405204Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506163160735629:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:05.405275Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:05.482356Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506163982620880:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:05.482431Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:25513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:05.894115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:06.016537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:06.039025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:06.622560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:06.945988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:07.216686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:09.366171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506199283038881:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:09.366314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:09.694179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.809107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:09.891541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.000664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.121088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.234530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:10.333982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506203578006842:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.334082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.334588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506203578006847:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:10.338506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:10.398734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506203578006849:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:10.503860Z node 1 :TX_PROXY ERROR: Actor# [1:7489506203578006923:4042] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:11.777574Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786551762, txId: 281474976710671] shutting down 2025-04-04T17:09:12.091955Z node 5 :BS_PROXY_PUT ERROR: [8681c145edbbcd97] Result# TEvPutResult {Id# [72075186224037895:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:09:12.095715Z node 2 :BS_PROXY_PUT ERROR: [7a7492471e93209c] Result# TEvPutResult {Id# [72075186224037897:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037897:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:09:12.096200Z node 3 :BS_PROXY_PUT ERROR: [22618f883e8f1dac] Result# TEvPutResult {Id# [72075186224037889:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:09:12.098802Z node 4 :BS_PROXY_PUT ERROR: [63734ab95b59bfe7] Result# TEvPutResult {Id# [72075186224037916:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037916:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 25828, MsgBus: 19296 2025-04-04T17:08:59.827901Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506157710880527:2227];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:59.828168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b69/r3tmp/tmpHNMCc7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 
25828, node 1 2025-04-04T17:09:00.243446Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:00.244490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:00.244604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:00.248960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:00.416903Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:00.416931Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:00.416939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:00.417064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19296 TClient is connected to server localhost:19296 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:01.088138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:01.114576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:01.243921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:09:01.421194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:01.529935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.480339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506174890751312:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:03.480497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:03.782544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:03.832778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:03.902871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:03.950776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:04.038126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:04.117584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:04.202034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506179185719136:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:04.202125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:04.202390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506179185719141:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:04.205850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:04.218369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506179185719143:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:04.299933Z node 1 :TX_PROXY ERROR: Actor# [1:7489506179185719198:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:04.822708Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506157710880527:2227];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:04.844399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:05.418933Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786545402, txId: 281474976710671] shutting down waiting... 2025-04-04T17:09:06.541692Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786546536, txId: 281474976710673] shutting down waiting... 2025-04-04T17:09:07.741017Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786547692, txId: 281474976710675] shutting down waiting... 2025-04-04T17:09:08.913903Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786548898, txId: 281474976710677] shutting down waiting... 2025-04-04T17:09:10.177169Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786550170, txId: 281474976710679] shutting down waiting... 2025-04-04T17:09:11.359982Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786551350, txId: 281474976710681] shutting down waiting... 2025-04-04T17:09:12.525542Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786552514, txId: 281474976710683] shutting down waiting... 2025-04-04T17:09:13.652564Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786553645, txId: 281474976710685] shutting down waiting... 
2025-04-04T17:09:14.802272Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786554792, txId: 281474976710687] shutting down 2025-04-04T17:09:15.141566Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786555127, txId: 281474976710689] shutting down 2025-04-04T17:09:15.241883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:09:15.241914Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:15.677659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:15.677754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:15.677809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:15.677842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:15.677887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:15.677915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:15.677969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:15.678051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:15.678365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:15.759214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:15.759275Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:15.770752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:15.770946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:15.771087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:15.781347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:15.781684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:15.782422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:15.783323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
at schemeshard: 72057594046678944 2025-04-04T17:09:15.789472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:15.790990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:15.791075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:15.791263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:15.791316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:15.791369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:15.791520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.798335Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:15.906098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:15.906370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.906659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:15.906853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:15.906917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.910195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:15.910383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:15.910600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.910667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:15.910702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:15.910750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:15.912845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.912909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:15.912946Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:15.914810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.914856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.914921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:15.915002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:15.924497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:15.926440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:15.926602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:15.927536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:15.927676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:15.927720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:15.927953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:15.927997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:15.928144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:15.928222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:15.929951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:15.929990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:15.930126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:15.930157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:15.930433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-04T17:09:15.930470Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:15.930543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:15.930581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:15.930623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:15.930667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:15.930697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:15.930731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:15.930771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:15.930805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:15.930891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:15.930933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:15.930979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:15.932279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:15.932354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:15.932428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-04T17:09:15.968099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:15.968330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:15.968423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:15.968459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:15.968604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:09:15.968646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:09:15.968809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:15.968858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:15.968933Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:15.969539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:15.971068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:15.971112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:15.971225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:15.971288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:15.971314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:15.971347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:15.971676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:15.971722Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:15.971826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:15.971868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:15.971905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:15.971951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:15.971993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:09:15.972029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:15.972066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:15.972101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:15.972161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:15.972190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:09:15.972216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:09:15.972249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:09:15.972732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:15.972794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:15.972820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:15.972847Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:15.972879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:15.973444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:15.973525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:15.973546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:15.973564Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:15.973584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:09:15.973632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:09:15.976161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:15.977064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T17:09:15.980139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:15.980420Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-04-04T17:09:15.980473Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-04-04T17:09:15.980619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-04T17:09:15.980667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-04-04T17:09:15.982943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:15.983111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 101
2025-04-04T17:09:15.983400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-04T17:09:15.983442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
TestWaitNotification wait txId: 102
2025-04-04T17:09:15.983582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-04T17:09:15.983608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-04T17:09:15.984045Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-04T17:09:15.984187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-04T17:09:15.984221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:307:2298]
2025-04-04T17:09:15.984396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-04T17:09:15.984496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-04T17:09:15.984540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:307:2298]
TestWaitNotification: OK eventTxId 101
TestWaitNotification: OK eventTxId 102
>> KqpSystemView::NodesRange2 [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD]
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable
>> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true
>> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true
>> TSchemeShardTTLTests::ConditionalErase
>> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings
>> TSchemeShardTTLTestsWithReboots::AlterTable
>> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType
>> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL
>> TSchemeShardTTLTests::BuildIndexShouldSucceed
>> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase
>> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable
>> TSchemeShardTTLTests::TtlTiersValidation
>> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL
>> TSchemeShardTTLTestsWithReboots::CreateTable
>> TSchemeShardTTLUtility::ValidateTiers [GOOD]
>> TSchemeShardTTLTestsWithReboots::CopyTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD]
Test command err: Trying to start YDB, gRPC: 23297, MsgBus: 63687
2025-04-04T17:09:04.365345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506181746609488:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:04.365403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T17:09:04.472488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506180960389314:2073];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:04.484234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T17:09:04.553698Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506181649135192:2206];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:04.554034Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a31/r3tmp/tmpCaaRFo/pdisk_1.dat
2025-04-04T17:09:05.707536Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:09:05.707590Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:09:05.711331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:09:05.730340Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:09:05.758403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:09:06.075686Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:06.078165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:06.078303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:06.080217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:06.080333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:06.080487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:06.080535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:06.081194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:06.081253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:06.083264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:06.083402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:06.088872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:09:06.089731Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4
2025-04-04T17:09:06.089771Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5
2025-04-04T17:09:06.092852Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-04T17:09:06.095093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:09:06.095346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:09:06.097480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:09:06.102076Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3
2025-04-04T17:09:06.109997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23297, node 1
2025-04-04T17:09:06.594899Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:06.594940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:06.594953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:06.595095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:63687
TClient is connected to server localhost:63687
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:07.829872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:07.889223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:08.089253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:08.868391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:09.014286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:09.400255Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506181746609488:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:09.400313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:09.483063Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506180960389314:2073];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:09.483174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:09.538896Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506181649135192:2206];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:09.538964Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:11.903060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506211811382476:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:11.903186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:12.137554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:12.220432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:12.356020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:12.439268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:12.492648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:12.567463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:12.689209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506216106350409:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:12.689337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:12.689557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506216106350414:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:12.693118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:12.721924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506216106350416:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:12.815094Z node 1 :TX_PROXY ERROR: Actor# [1:7489506216106350492:4031] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:14.210695Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786554200, txId: 281474976710671] shutting down
2025-04-04T17:09:14.359525Z node 4 :BS_PROXY_PUT ERROR: [98eb3946cbcd648b] Result# TEvPutResult {Id# [72075186224037894:1:16:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037894:1:16:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-04-04T17:09:14.364161Z node 2 :BS_PROXY_PUT ERROR: [2eea51f0e26bb70d] Result# TEvPutResult {Id# [72075186224037918:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037918:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-04-04T17:09:14.362905Z node 3 :BS_PROXY_PUT ERROR: [681a0c1fd693b40b] Result# TEvPutResult {Id# [72075186224037915:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037915:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-04-04T17:09:14.371596Z node 5 :BS_PROXY_PUT ERROR: [711d7bbe8292834d] Result# TEvPutResult {Id# [72075186224037914:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037914:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
>> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is
[0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:14.367954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:14.368049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:14.368108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:14.368139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:14.368175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:14.368198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:14.368248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:14.368321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:14.368603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:14.439593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:14.439642Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:14.450755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:14.451122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:14.451265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:14.461174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:14.461483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:14.462154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.462946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:14.468861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.470097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.470163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.470352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:14.470403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-04T17:09:14.470437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:14.470557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.476400Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:14.595935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:14.596141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.596373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:14.596619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:14.596679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.598527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.598777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:14.598942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.598994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:14.599033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:14.599076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:14.600734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.600798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:14.600831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:14.602209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.602297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.602337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.602428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.610824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:14.612511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:14.612653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:14.613417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:14.613541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:14.613590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.613806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:14.613853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:14.614026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:14.614205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:14.616372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:14.616419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:14.616548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:14.616587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:14.616843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:14.616895Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:14.616981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.617021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.617064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:14.617095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.617130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:14.617165Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:14.617197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:14.617224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:14.617291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:14.617332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:14.617364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:14.619088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.619189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:14.619223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 6233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 525 RawX2: 4294969768 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:09:18.328780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-04T17:09:18.328896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 525 RawX2: 4294969768 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:09:18.328952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-04T17:09:18.332023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:18.332089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-04T17:09:18.332148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:09:18.332199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-04T17:09:18.332273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-04T17:09:18.332377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-04T17:09:18.332503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-04T17:09:18.332563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-04T17:09:18.333091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 
72075186233409546 2025-04-04T17:09:18.334844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:18.338607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:09:18.338686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:09:18.338855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-04T17:09:18.339026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:09:18.339070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-04T17:09:18.339108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-04T17:09:18.339470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:18.339523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-04T17:09:18.339602Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:18.339637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-04T17:09:18.339670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-04T17:09:18.340571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:18.340662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:18.340697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-04T17:09:18.340726Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-04-04T17:09:18.340758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-04T17:09:18.341543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:18.341614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:18.341636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 
2025-04-04T17:09:18.341661Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:18.341700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-04T17:09:18.341743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T17:09:18.344694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:18.344742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-04T17:09:18.345046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-04T17:09:18.345218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:18.345252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:18.345305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:18.345333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:18.345374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-04T17:09:18.345437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:553:2492] message: TxId: 104 2025-04-04T17:09:18.345482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:18.345520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:09:18.345548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:09:18.345643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-04T17:09:18.346163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:09:18.346196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:09:18.346774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-04T17:09:18.346867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-04T17:09:18.348025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:09:18.348060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-04T17:09:18.348243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:09:18.348275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:753:2670] 2025-04-04T17:09:18.348712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0
TestWaitNotification: OK eventTxId 104
2025-04-04T17:09:18.349639Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546
2025-04-04T17:09:18.349850Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 222us result status StatusSuccess
2025-04-04T17:09:18.350259Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
>> TSchemeShardTTLTests::TtlTiersValidation [GOOD]
>> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn
>> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD]
>> TSchemeShardTTLTests::AlterTableShouldSuccess
>> TSchemeShardTTLTestsWithReboots::MoveTable
>> TSchemeShardTTLUtility::GetExpireAfter [GOOD]
|86.1%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:18.658922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:18.659015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.659054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:18.659084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:18.660046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:18.660089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:18.660164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.660231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:18.661837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:18.750767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:18.750824Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:18.761827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:18.762049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:18.762185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:18.771823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:18.772073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:18.775063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:18.776703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:09:18.793929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:18.795280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard:
72057594046678944 2025-04-04T17:09:18.795342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.795508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.795552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.795594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.795724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.801923Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.920221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.920461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.920666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.920879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.920934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.923029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.923170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.923223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.923257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.923285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.924850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.924903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.924934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.926423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.926470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.926513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.926561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.930315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.931869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.932023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.932988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.933113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.933160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.933425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.933476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.933645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.933723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.935571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.935766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.936165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.936209Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.936305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.936339Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.936377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.936404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.936455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.936494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.936523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.936550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.936599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.936634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.936671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.943420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.943574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.943614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:18.943656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:18.943689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.943784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:18.946574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:18.947073Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743786558.954593 468875 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-04-04T17:09:18.955098Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:09:18.974482Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:09:18.976561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: 
"modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.977047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.977576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-04-04T17:09:18.980085Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:18.983012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.983163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-04T17:09:18.983589Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1743786558.984023 468875 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-04-04T17:09:18.986781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.987043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.987250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2025-04-04T17:09:18.990995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.991127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 |86.1%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.659026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.753485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.753542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.765777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.766031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.766164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.775121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.775369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.776038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.776743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.787575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789329Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.802166Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.963522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.963765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.963967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.964200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.964268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.967961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.968111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.968264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.968325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.968365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.968397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.976748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.976826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.976873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.978857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.978935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T17:09:18.978995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.979050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.982299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.984704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.984889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.986001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.986143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.986197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.986481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.986549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.986733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.986818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.990210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.990271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.990419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.990460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.990754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.990806Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.990931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.990970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.991009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.991043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.991094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.991134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.991175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.991219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.991294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.991336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.991380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.993379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.993506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.993548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 9.203694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:19.203902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:19.203958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:19.203998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:19.205142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.205212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:19.206404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.206502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.206539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:19.206580Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 
2025-04-04T17:09:19.206624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:19.207316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.207410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.207439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:19.207471Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:19.207498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:19.207572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:09:19.208296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1248 } } 2025-04-04T17:09:19.208353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.208480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1248 } } 2025-04-04T17:09:19.208598Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1248 } } 2025-04-04T17:09:19.209270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.209325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.209472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.209527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.209621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.209691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.209739Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.209779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:19.209819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:19.213266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.214011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.214125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.214220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.214510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.214562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:19.214703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:19.214745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:19.214801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:19.214845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:19.214886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:09:19.214949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 101 2025-04-04T17:09:19.214999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:19.215036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:19.215070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:19.215213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:19.217051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:19.217120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2315] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:09:19.220145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:19.220405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.220822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2025-04-04T17:09:19.223035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.223213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-04-04T17:09:19.226271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:19.226537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.226876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2025-04-04T17:09:19.229081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.229239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658859Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.658949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.658990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.765435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.765497Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.776428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.776639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.776788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.785335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.785565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.786151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.786882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.791411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.792539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.792599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.792755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.792795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.792834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.792948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.798702Z node 1 :HIVE INFO: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.917370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.917532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.917670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.917828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.917868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.919480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.919594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.919732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.919794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.919823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.919850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.921294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.921352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.921382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.922910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.923040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.926623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.928072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.928223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.929166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.929280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.929325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.929561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.929609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.929761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.929841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.931805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.931851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.932028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.932072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.932385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.932436Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.932526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.932563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.932598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.932624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.932672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.932705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.932737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.932760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.932815Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.932847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.932881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.934506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.934626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.934677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 78944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-04-04T17:09:19.163456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.163571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.163625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-04T17:09:19.163823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-04T17:09:19.163928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:19.163977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-04T17:09:19.173047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:19.173103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:19.173253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:19.173462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:19.173507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:19.173543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:19.174558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.174617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 
ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:19.175733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.175824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.175856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:19.175892Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:19.175926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:19.176535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.176614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:19.176654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:19.176690Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:19.176714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:19.176768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:09:19.177475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1042 } } 2025-04-04T17:09:19.177529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.177643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1042 } } 2025-04-04T17:09:19.177766Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1042 } } 2025-04-04T17:09:19.178477Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.178520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.178665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.178710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.178783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.178832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.178873Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.178921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:19.178967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:19.182055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.182786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.182912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.183010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.183238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.183281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:19.183404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:19.183439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:19.183528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:19.183558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:19.183592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:09:19.183674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 101 2025-04-04T17:09:19.183729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 
ready parts: 1/1 2025-04-04T17:09:19.183772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:19.183802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:19.183973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:19.185562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:19.185625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2315] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:09:19.188047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:19.188284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.190273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2025-04-04T17:09:19.193092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.193220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.876629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.876703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.876731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.876773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.876818Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.876840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.876918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.876978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.877239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.947476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.947532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.960784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.961024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.961198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.969690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.969931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.970517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.971255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.976217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.977580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.977649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.977836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.977885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.977935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.978088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.987906Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:19.099744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:19.100020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.100228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:19.100477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:19.100538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.103246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.103416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:19.103607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.103677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:19.103715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:19.103748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:19.105705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.105761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:19.105793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:19.107472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.107519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.107564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:19.107621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:19.111283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:19.113700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:19.113877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:19.114989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.115128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.115176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:19.115438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:19.115510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:19.115696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:19.115786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:19.117916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:19.117962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:19.118145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:19.118186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:19.118568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.118624Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:19.118750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:19.118792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:19.118836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:19.118870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:19.118922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:19.118963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:19.118997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:19.119027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:19.119088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:19.119128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:19.119168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:19.121063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:19.121187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:19.121228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:19.121289Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:19.121336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:19.121431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:19.124776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:19.125317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:09:19.126582Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:09:19.146319Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:09:19.149113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:19.149455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.149565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2025-04-04T17:09:19.149976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2025-04-04T17:09:19.150707Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:19.155208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.155386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-04T17:09:19.155793Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.659004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.749419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.749474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.763019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.763271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.763412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.772393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.772640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.775509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.776903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.784626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.790113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.790159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-04-04T17:09:18.790203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.790365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.798065Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.905273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.905476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.905674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.905928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.905998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.909495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.909689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.909851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.909907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.909951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.909995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.912375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.912427Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.912456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.914595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.914652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.914704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.914757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.919138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.920750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.920897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.921984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.923350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.923415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.923606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.923698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.925716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.925755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.925927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.925988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.926344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.926387Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.926492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.926527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.926566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.926594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.926657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2025-04-04T17:09:18.926698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.926734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.926759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.926811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.926848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.926889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.928525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.928630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.928684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 78944, LocalPathId: 4], version: 3 2025-04-04T17:09:19.216795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:09:19.216870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/3, is published: true 2025-04-04T17:09:19.218828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969601 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.218881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-04T17:09:19.219028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969601 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.219081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.219182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 321 RawX2: 4294969601 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.219264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.219301Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:19.219341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:19.219397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 101:2 129 -> 240 2025-04-04T17:09:19.220120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.220161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:19.220266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.220324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.220383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:19.220427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.220455Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.220490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:09:19.220530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:19.228557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.229044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.235169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.235561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:19.235883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.236161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:19.236351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:19.236691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:19.236750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-04T17:09:19.236835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T17:09:19.236862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T17:09:19.236889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 
2025-04-04T17:09:19.236916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T17:09:19.236943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-04T17:09:19.237187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.237442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.237468Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:19.237537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T17:09:19.237559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T17:09:19.237581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T17:09:19.237597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T17:09:19.237625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-04T17:09:19.237673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-04-04T17:09:19.237713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T17:09:19.237753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:19.237786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:19.237889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:19.237918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-04T17:09:19.237933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-04T17:09:19.237953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:19.237975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-04T17:09:19.237993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-04T17:09:19.238066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:09:19.240394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:19.240441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:19.240957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:19.241482Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 298us result status StatusSuccess 2025-04-04T17:09:19.242053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.659042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-04-04T17:09:18.659121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.663802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.755829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.755873Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.766309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.766536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.766676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.777019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.777249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.777800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.778483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.784139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.796764Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.915932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.916166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, 
path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.916360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.916575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.916653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.918803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.918937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.919081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.919147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.919182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.919208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.920803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.920851Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.920879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.922323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922365Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.922441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.931534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.933276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.933440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.934353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-04-04T17:09:18.934475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.934521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.934764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.934815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.934953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.935034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.936918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.936956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.937106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.937147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.937466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.937509Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.937591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.937623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.937655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.937684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.937753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.937793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.937824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.937855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.937913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.937946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.937979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.945275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.945437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.945475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 02 msg type: 269090816 2025-04-04T17:09:19.225067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:09:19.225618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.225718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.225782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:09:19.226035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:09:19.226162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:19.232292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:19.232349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:19.232625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:19.232672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:09:19.233203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.233251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:19.233915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:19.234001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:19.234033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:09:19.234069Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:09:19.234123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:19.234216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:09:19.235164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2519 } } 2025-04-04T17:09:19.235199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.235299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2519 } } 2025-04-04T17:09:19.235430Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2519 } } 2025-04-04T17:09:19.236884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.236937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.237092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.237149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.237236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.237305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.237342Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.237390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at 
schemeshard: 72057594046678944 2025-04-04T17:09:19.237431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:09:19.240120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:19.240264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.241329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.241562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.241598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:09:19.241728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:19.241767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.241801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:19.241828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.241864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:09:19.241914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:09:19.241963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.242005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:19.242034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:19.242139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:19.243780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:19.243825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:393:2365] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:19.244296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:19.244508Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 237us result status StatusSuccess 2025-04-04T17:09:19.244866Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.658988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.659948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.659987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661816Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.750811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.750863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.761723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.761962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.762099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.771939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.772176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.775030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.776791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.784135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.807307Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.923340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.923537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.923708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.923883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.923930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.925716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.925875Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.926014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.926066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.926098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.926133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.927899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.927952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.927978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.929860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.929904Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.929952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.929998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.939734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.941560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.941698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.942858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.942966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.943008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.943224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.943266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.943402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.943497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.945353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.945555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.945893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.946027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.946061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.946094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.946120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.946179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.946217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.946270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.946298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.946347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.946392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.946431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.948032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.948130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.948172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:09:19.320238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:09:19.324967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:19.325048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:09:19.326958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1273 } } 2025-04-04T17:09:19.327007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:19.327109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1273 } } 2025-04-04T17:09:19.327176Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1273 } } 2025-04-04T17:09:19.327906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.327966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:19.328067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.328106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.328180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 408 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.328247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.328290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-04T17:09:19.329903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.331145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.344599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.344684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.344833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.344881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.344948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.345006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.345040Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.345079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:19.345117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:09:19.345143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:09:19.346969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.347339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.347377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-04-04T17:09:19.347429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:09:19.347470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-04-04T17:09:19.347534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-04T17:09:19.347560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 102:0 240 -> 240 2025-04-04T17:09:19.349075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.349142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:09:19.349248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:19.349290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.349327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:19.349358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.349410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:09:19.349489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:09:19.349548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.349614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:19.349646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:19.349773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:19.349800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:19.351479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:19.351525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:434:2395] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:19.352061Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:19.352323Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 231us result status StatusSuccess 2025-04-04T17:09:19.352789Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.659025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.751508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.751556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.763195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.763379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.763473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as
Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.773751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.774045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.775090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.777458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.786577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.802763Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.928905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.929125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.929351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.929579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.929639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.935349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.935418Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.935440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.942992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.943055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.943090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.947894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.947960Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.948018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.948066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.957593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.959450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.959594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.960490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.960605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.960648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.960897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.960947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.961104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.961167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.963369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.963426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.963562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.963610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.963889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.963928Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.964020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.964065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.964097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.964120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.964171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.964204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.964235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.964260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.964309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.964338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.964370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.965977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.966103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.966144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:09:19.228600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:09:19.229154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.229269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.229348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:09:19.229578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:09:19.229689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:19.232993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:19.233054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:19.233655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:19.233714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:09:19.234041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.234095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:19.234705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:19.234815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:19.234850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:09:19.234885Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:09:19.234920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:19.234988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready 
parts: 0/1, is published: true 2025-04-04T17:09:19.238142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:19.251300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1090 } } 2025-04-04T17:09:19.251353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.251478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1090 } } 2025-04-04T17:09:19.251590Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1090 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:09:19.252505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.252556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:19.252676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.252726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:19.252805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:19.252866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.252900Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.252949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:19.252992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:09:19.254813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.255737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.255963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.256004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:09:19.256105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:19.256153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.256196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:19.256239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.256275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:09:19.256333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:09:19.256378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:19.256412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:19.256439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:19.256540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:19.258176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:19.258224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:393:2365] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:19.258756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:19.259013Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 247us result status StatusSuccess 2025-04-04T17:09:19.259518Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false 
IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.658963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.664355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.750582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.750653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.762007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.762330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:18.762480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.776281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.776542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.777164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.777916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.784117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.796110Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.932680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.932924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.933137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.933371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.933430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.935753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.935848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.935881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.937488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.937544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.937579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.939255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.939300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.939339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.939407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.943151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.944632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.944801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.945700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945853Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.946077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.946125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.946306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.946378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.951230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:09:18.951276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.951445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.951507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.951835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.951891Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.951987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.952020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.952065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.952097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.952146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.952187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.952223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.952255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.952325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.952361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.952402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.954144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.954309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.954356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
[OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:09:19.532465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-04T17:09:19.532572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:09:19.532732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-04T17:09:19.532780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-04T17:09:19.532814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-04T17:09:19.533054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:19.533136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:19.533181Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-04T17:09:19.533221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-04T17:09:19.534969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-04T17:09:19.535010Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-04T17:09:19.535079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:09:19.535119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:09:19.535152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:09:19.535175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:09:19.535201Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-04T17:09:19.535252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:122:2148] message: TxId: 281474976710760 2025-04-04T17:09:19.535298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:09:19.535335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-04T17:09:19.535360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-04T17:09:19.535410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-04T17:09:19.536881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-04T17:09:19.536932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-04T17:09:19.536982Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-04T17:09:19.537059Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:09:19.538592Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:09:19.538682Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 
0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:09:19.538741Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T17:09:19.540101Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:09:19.540174Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:384:2356], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:09:19.540211Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-04T17:09:19.540333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:19.540372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:473:2434] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:19.540857Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:19.541063Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 231us result status StatusSuccess 2025-04-04T17:09:19.541626Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 
0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false
>> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD]
>> TLocksTest::Range_IncorrectNullDot2 [GOOD]
>> TSchemeShardTTLTests::CheckCounters
>> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD]
>> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD]
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardColumnTableTTL::AlterColumnTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:20.043179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:20.043275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.043314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:20.043350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:20.043413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:20.043450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:20.043549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout#
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.043622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:20.043989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:20.119595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:20.119658Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:20.130698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:20.130991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:20.131196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:20.141314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:20.141750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:20.142354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.143211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:20.147681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.148765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.148827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.149023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:20.149077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.149120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:20.149256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.156942Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:20.301226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:20.301491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.301702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:20.301946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:20.302018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.304407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.304583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:20.304773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.304843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:20.304881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:20.304915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:20.306830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.306910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:20.306953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:20.308855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.308910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.308950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.309005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.312889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:20.314943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:20.315136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:20.316209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.316361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:20.316418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-04T17:09:20.316693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:20.316746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.316946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:20.317034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:20.319416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.319461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.319637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.319682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:20.320027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.320080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:20.320195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.320248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.320290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.320323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.320377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:20.320427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.320470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:20.320501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:20.320570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:20.320608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:20.320653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:20.322618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:20.322766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:20.322808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:20.322865Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:20.322909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:20.323027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:20.326597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:20.327171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:09:20.328302Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:09:20.346079Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:09:20.348852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:20.349253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.349374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2025-04-04T17:09:20.349853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2025-04-04T17:09:20.350730Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:20.354030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:20.354160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-04T17:09:20.354574Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers >> 
BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreViewQueryText >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:20.198913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:20.198992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.199025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:20.199056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:20.199114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:20.199142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:20.199230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.199307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:20.199647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:20.273728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:20.273778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:20.283832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:20.284025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:20.284182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:20.293270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:20.293527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:20.294141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.294908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:20.299555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.300478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.300519Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.300650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:20.300685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.300716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:20.300815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.306158Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:20.405358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:20.405548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.405680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:20.405871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:20.405931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.407688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.407806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:20.407936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.407993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:20.408022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:20.408061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:20.409610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.409655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:20.409690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:20.411097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.411135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:09:20.411160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.411193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.413455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:20.414595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:20.414718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:20.415423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.415500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:20.415532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.415703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:20.415736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.415851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:20.415921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:20.417661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.417703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.417849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.417898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:20.418195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.418260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:20.418369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.418406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-04-04T17:09:20.418440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.418467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.418512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:20.418549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.418580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:20.418606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:20.418676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:20.418733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:20.418772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:20.420509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:20.420615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:20.420665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4046678944, LocalPathId: 4], version: 3 2025-04-04T17:09:20.652086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:09:20.652137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/3, is published: true 2025-04-04T17:09:20.652550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969601 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:20.652588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-04-04T17:09:20.652703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 4294969601 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:20.652747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:20.652832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 321 RawX2: 4294969601 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:20.652882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 
2025-04-04T17:09:20.652935Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:20.652975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:20.653018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-04-04T17:09:20.653355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:20.653388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:20.653473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:20.653508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:20.653566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:20.653597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.653623Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.653646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:09:20.653676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:20.658842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:20.658927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:20.661229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:20.661331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:20.661397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.661448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:20.661553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:20.661827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.661977Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-04-04T17:09:20.662010Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-04-04T17:09:20.662092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T17:09:20.662120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T17:09:20.662151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-04-04T17:09:20.662178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-04-04T17:09:20.662208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-04-04T17:09:20.662432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.662459Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:20.662528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T17:09:20.662596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T17:09:20.662637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-04-04T17:09:20.662670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T17:09:20.662690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-04-04T17:09:20.662738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-04-04T17:09:20.662783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-04-04T17:09:20.662828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:20.662855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:20.662966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:20.663003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-04-04T17:09:20.663020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-04-04T17:09:20.663042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:20.663058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-04-04T17:09:20.663084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-04-04T17:09:20.663124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:09:20.664955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:20.665008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:20.665447Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:20.665650Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 207us result status StatusSuccess 2025-04-04T17:09:20.666315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:20.175635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:20.175719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.175748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:20.175777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:20.175822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:20.175847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:20.175918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.175974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:20.176230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:20.253475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:20.253538Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:20.265733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:20.266014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:20.266207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:20.276167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:20.276470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:20.277154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.277993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:20.283561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.284942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.285003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.285239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:20.285304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.285357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:20.285512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.292358Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:20.400703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:20.400938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.401108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:20.401298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:20.401363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.403417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.403577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:20.403775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.403836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:20.403874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:20.403908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:20.405542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.405600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:20.405633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:20.407148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.407192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.407230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.407292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.410985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:20.412778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:20.412936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:20.413894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.414024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:20.414071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.414349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:20.414399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.414567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:20.414659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:20.416681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.416739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.416905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.416943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:20.417254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.417307Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:20.417401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.417439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.417473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.417505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.417554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:20.417590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.417624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:20.417652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:20.417712Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:20.417748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:20.417784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:20.419597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:20.419701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:20.419744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-04-04T17:09:20.739199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2025-04-04T17:09:20.739809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.739945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:20.740004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-04T17:09:20.740268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2025-04-04T17:09:20.740395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:20.743238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.743300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:20.743481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.743520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-04T17:09:20.743870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.743910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:20.744239Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:20.744326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:09:20.744356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:09:20.744384Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:09:20.744421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:20.744479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T17:09:20.747010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:09:20.759178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1060 } } 2025-04-04T17:09:20.759238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:20.759399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1060 } } 2025-04-04T17:09:20.759532Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1060 } } FAKE_COORDINATOR: Erasing txId 104 2025-04-04T17:09:20.760305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:09:20.760351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:20.760460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:09:20.760512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:20.760612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts 
operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:09:20.760679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.760715Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.760752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:20.760844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-04T17:09:20.763632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.764054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.764329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.764378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T17:09:20.764486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:20.764538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:20.764578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:20.764608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:20.764648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-04T17:09:20.764713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 104 2025-04-04T17:09:20.764766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:20.764806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:09:20.764838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:09:20.764953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:20.766630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:09:20.766715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:441:2413] TestWaitNotification: OK eventTxId 104 2025-04-04T17:09:20.767287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:20.767538Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 240us result status StatusSuccess 
2025-04-04T17:09:20.767950Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-04-04T17:08:40.752776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506074994388974:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:40.753098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e86/r3tmp/tmptqKcgu/pdisk_1.dat 2025-04-04T17:08:41.264432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:41.268336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:41.268465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-04T17:08:41.277007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20730 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:41.641890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:41.670768Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:41.681860Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:41.687107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:08:41.857203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:41.917416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:44.337984Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506092495404703:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:44.338019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e86/r3tmp/tmpkjKy1O/pdisk_1.dat 2025-04-04T17:08:44.569475Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:44.581988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:44.582077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:44.584440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19497 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:44.800439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:08:44.823807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:44.913353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:45.012343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:47.779099Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489506106182722698:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:47.779156Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e86/r3tmp/tmpWlLh6k/pdisk_1.dat 2025-04-04T17:08:47.884658Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:47.920533Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:47.920603Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:47.921773Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63945 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:48.089149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:08:48.103969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:48.180871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:48.254837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:51.101138Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506122241362855:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e86/r3tmp/tmpBQPjdQ/pdisk_1.dat 2025-04-04T17:08:51.160872Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:08:51.247440Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:51.249300Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:51.249536Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:51.250218Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29700 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:08:51.536643Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubD ... necting -> Connected TClient is connected to server localhost:32058 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:09:04.893717Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.899204Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.912246Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:04.980214Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:09:05.059587Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:08.583105Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489506198584903687:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:08.583177Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e86/r3tmp/tmpRgxVxd/pdisk_1.dat 2025-04-04T17:09:08.736048Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:08.748535Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:08.748600Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:08.751124Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65408 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:09:08.987544Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:08.994711Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:09.007632Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:09:09.012682Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:09.098257Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:09.175155Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:12.753870Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489506213106142010:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:12.753931Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e86/r3tmp/tmpYuYlji/pdisk_1.dat 2025-04-04T17:09:12.946310Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:12.946402Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:12.947255Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:12.961436Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20033 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:09:13.191118Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:13.210941Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:13.279210Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:13.348650Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:16.663303Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506231315442704:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:16.663426Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e86/r3tmp/tmpM5jmSQ/pdisk_1.dat 2025-04-04T17:09:16.760515Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:16.796002Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:16.796101Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:16.797662Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28105 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:09:16.993430Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:17.006908Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:17.078333Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:17.147204Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TSchemeShardTTLTests::ConditionalErase [GOOD] >> TSchemeShardTTLTests::ShouldCheckQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:21.612273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:21.612354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.612388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:21.612419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:21.612483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:21.612512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:21.612609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.612672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:21.613088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:21.685734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:21.685783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:21.696471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:21.696688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:21.696864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:21.704540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:21.704766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:21.705318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.706046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:21.710844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.711899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.711945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-04-04T17:09:21.712086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:21.712126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.712157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:21.712275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.717719Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:21.850681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:21.850924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.851121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:21.851354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:21.851405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.853380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.853543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:21.853754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.853809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:21.853845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:21.853878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:21.855526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.855578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:21.855613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:21.857059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.857097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.857143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.857201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.860833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:21.862496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:21.862677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:21.863610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.863733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:21.863778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.864017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:21.864062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.864230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:21.864307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:21.866283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.866325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.866494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.866533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:21.866884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.866931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:21.867016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.867049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.867083Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.867109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.867164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:21.867203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.867233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:21.867261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:21.867317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:21.867360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:21.867398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:21.869153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.869263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.869303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:21.869334Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:21.869377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:21.869470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:21.872111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:21.872531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:09:21.873784Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:09:21.889751Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:09:21.892185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:21.892539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.892885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: 
Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2025-04-04T17:09:21.893529Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:21.896230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:21.896375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-04-04T17:09:21.896776Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:12.968731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:12.968938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.968992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:12.969028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:12.969070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:12.969100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:12.969158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.969235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:12.969581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.049075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.049137Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.057778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.057976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.058083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.066697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:13.066952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.067601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.068277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.072519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.073686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.073769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.073946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.073989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.074020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.074123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.080107Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.226479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.226722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.226958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.227194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.227259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.229475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.229610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.229794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.229846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.229893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.229968Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.231831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.231886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.231924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.233564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.233612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.233664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.233741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.237645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.239680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.239869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.240932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.241058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.241111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.241378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.241438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.241611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.241721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.243783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.243834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-04-04T17:09:13.244052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.244096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.244383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.244423Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.244523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.244572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.244621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.244667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.244707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.244747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.244780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.244811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.244881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.244924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.244955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.246748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.246870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.246913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
teStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:21.896595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:480:2438] sender: [1:762:2058] recipient: [1:101:2136] Leader for TabletID 72057594046678944 is [1:480:2438] sender: [1:765:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:480:2438] sender: [1:766:2058] recipient: [1:764:2682] Leader for TabletID 72057594046678944 is [1:767:2683] sender: [1:768:2058] recipient: [1:764:2682] 2025-04-04T17:09:21.935340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:21.935426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.935461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:21.935493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:21.935532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:21.935571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:21.935623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.935698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:21.935976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:21.952119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:21.953387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:21.953555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:21.953752Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-04-04T17:09:21.953793Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:21.953938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:21.954599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:21.954700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:09:21.954772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:09:21.954851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.954913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.955286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:09:21.955378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:09:21.955429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:09:21.955502Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:09:21.955731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-04-04T17:09:21.955930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.956025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:21.956057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:09:21.956083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:21.956101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:09:21.956196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:21.956363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.956640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-04-04T17:09:21.956962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.957061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.957459Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.957523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.957708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.957791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.957879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.958024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.958108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.958258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.958477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.958617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.958675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.958724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.966829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.966897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.967719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:21.967775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.967813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:21.968170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:767:2683] sender: [1:827:2058] recipient: [1:15:2062] 2025-04-04T17:09:22.003096Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.003325Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 256us result status StatusSuccess 2025-04-04T17:09:22.003729Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 
SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:21.687450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:21.687533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.687559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:21.687580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:21.687626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:21.687656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:21.687730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.687775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:21.688000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:21.747719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:21.747772Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:21.756591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:21.756753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:21.756897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:21.765320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:21.765540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:21.766112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.766988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:21.771563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.772720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.772769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.772921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:21.772981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.773015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:21.773145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.778963Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:21.886616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:21.886834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.887005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:21.887217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:21.887265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.889197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.889331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:21.889484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.889531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:21.889563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:21.889591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:21.891234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.891285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:21.891317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:21.892759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.892801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.892833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.892906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.896099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:21.897498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:21.897620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:21.898393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.898497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:21.898537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.898783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:21.898825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.898967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:21.899030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:21.900510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.900550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.900693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.900725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:21.900946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.900997Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:21.901075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.901104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.901135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.901155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.901189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:21.901213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.901235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:21.901255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:21.901291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:21.901315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:21.901351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:21.902710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.902787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.902812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
minStep5000002 State->FrontStep: 5000002 2025-04-04T17:09:22.099682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.099738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:22.099897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:22.100096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.100142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:22.100184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:22.101200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.101254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:22.102311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:22.102398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:22.102430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:22.102486Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:22.102527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:22.103171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:22.103259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:22.103297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:22.103327Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:22.103352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:22.103404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:09:22.104039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle 
TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1069 } } 2025-04-04T17:09:22.104090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:22.104219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1069 } } 2025-04-04T17:09:22.104329Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1069 } } 2025-04-04T17:09:22.104995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:22.105039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:22.105191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:22.105235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:22.105318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:22.105370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.105402Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.105437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:22.105476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:22.108314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:22.108978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:22.109112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.109199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.109500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.109542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:22.109644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:22.109680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:22.109714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:22.109755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:22.109798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:09:22.109860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 101 2025-04-04T17:09:22.109903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:22.109944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:22.109971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:22.110088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:22.111674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:22.111730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2315] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:22.112218Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.112416Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 223us result status StatusSuccess 2025-04-04T17:09:22.112804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } 
} IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD]
>> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD]
>> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:22.105537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:22.105610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:22.105643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:22.105671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:22.105723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:22.105755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:22.105838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:22.105901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:22.106181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:22.180790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console
configs 2025-04-04T17:09:22.180838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:22.190693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:22.190893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:22.191031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:22.199734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:22.199972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:22.200516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.201245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:22.205556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.206633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.206705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.206848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:22.206886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:22.206919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:22.207015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.212484Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:22.330299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:22.330532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.330715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:22.330909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:22.330968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.332839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.332965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-04T17:09:22.333098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.333152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:22.333183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:22.333211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:22.334670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.334742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:22.334775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:22.336166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.336204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.336238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.336283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.339525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:22.340848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:22.340995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:22.341850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.341989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:22.342035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.342259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:22.342294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.342409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:22.342459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:22.343933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.343962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:22.344123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.344162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:22.344430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.344467Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:22.344554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:22.344588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.344628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:22.344657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.344710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:22.344742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.344770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:22.344796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:22.344843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:22.344873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:22.344908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:22.346500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:22.346594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:22.346627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:22.346675Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:22.346709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:22.346813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 
2025-04-04T17:09:22.349261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:22.349665Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:09:22.350613Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:09:22.365359Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:09:22.367611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:22.367897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.367979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.368346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-04-04T17:09:22.368953Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:22.371481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:22.371620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-04T17:09:22.371980Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.658973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.659984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.751352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.751405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.763027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.763282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.763450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.776643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.776895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.777454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.778227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.787766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.795792Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.895099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
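
The failed CreateTable above attaches TTLSettings to a String column and schemeshard answers StatusSchemeError with reason "Unsupported column type", while the same schema with a Timestamp column is accepted elsewhere in this run. A sketch of that validation rule; the allowed set is an assumption drawn from the two observed outcomes and the ColumnUnit values (UNIT_AUTO, UNIT_MICROSECONDS) printed by the conditional-erase requests, not a quote from the YDB sources:

ALLOWED_TTL_COLUMN_TYPES = {"Date", "Datetime", "Timestamp",
                            "Uint32", "Uint64", "DyNumber"}  # assumed set

def propose_ttl_column(name: str, type_name: str) -> str:
    # Mirrors the outcomes seen in this log: String -> rejected,
    # Timestamp -> accepted.
    if type_name not in ALLOWED_TTL_COLUMN_TYPES:
        return "StatusSchemeError: Unsupported column type"
    return "StatusAccepted"

print(propose_ttl_column("modified_at", "String"))     # rejected, as above
print(propose_ttl_column("modified_at", "Timestamp"))  # accepted, as in later tests
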
2025-04-04T17:09:18.896776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.900717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.902979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.903154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.907001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.907149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.907336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.907417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.907527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.907561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.913987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.914055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.914103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.919331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.919395Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.919438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.919512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.922970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.930745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.930924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.931923Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.932041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.932087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.932339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.932390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.932548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.932611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.939319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.939370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.939503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.939545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.939829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.939884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.939990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.940024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.940060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.940120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.940178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.940215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.940274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.940303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.940357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.940391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.940426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.942274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.942376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.942418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Size 627 rowCount 2 cpuUsage 0 2025-04-04T17:09:22.264258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-04-04T17:09:22.264500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.264774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.265153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640236000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.265407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640236000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.265503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640236000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.265568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640236000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.265624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040236000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.265682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640236000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.267142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-04T17:09:22.268087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:09:22.268400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-04T17:09:22.269039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 
72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:22.269439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:09:22.269686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-04T17:09:22.270873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.270943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-04-04T17:09:22.271608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.271643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:09:22.278552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.278599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-04-04T17:09:22.279287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.279327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:22.280133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.280166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:09:22.280341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.280654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.281279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.281379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.237000Z, at schemeshard: 72057594046678944 2025-04-04T17:09:22.281495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.281525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.237000Z, at schemeshard: 72057594046678944 2025-04-04T17:09:22.282519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.282583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.282637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.282685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.239000Z, at schemeshard: 72057594046678944 2025-04-04T17:09:22.282734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.282817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 
2020-09-18T23:04:00.239000Z, at schemeshard: 72057594046678944 2025-04-04T17:09:22.283964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.284014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.284084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.284107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.284128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.284187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.284236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.239000Z, at schemeshard: 72057594046678944 2025-04-04T17:09:22.284315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.353756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2025-04-04T17:09:22.353917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2025-04-04T17:09:22.353989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0 2025-04-04T17:09:22.354035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 0, DataSize 0 2025-04-04T17:09:22.354125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2025-04-04T17:09:22.354150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0 2025-04-04T17:09:22.354173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 5: RowCount 1, DataSize 43 2025-04-04T17:09:22.354285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2025-04-04T17:09:22.354329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0 2025-04-04T17:09:22.354365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 2, DataSize 603 2025-04-04T17:09:22.354406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-04-04T17:09:22.354436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 
72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0 2025-04-04T17:09:22.354461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:09:22.354501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 627 row count 2 2025-04-04T17:09:22.354531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0 2025-04-04T17:09:22.354550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 6: RowCount 2, DataSize 627, with borrowed parts 2025-04-04T17:09:22.372609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.372674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-04-04T17:09:22.374687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:09:22.374829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:22.374884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.242000Z, at schemeshard: 72057594046678944 2025-04-04T17:09:22.374931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:21.919865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:21.919947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.919982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:21.920012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:21.920070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:21.920100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:21.920200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.920267Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:21.920597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:21.980690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:21.980728Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:21.989952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:21.990113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:21.990321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:21.997195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:21.997392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:21.997827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.998423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:22.011155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.012458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.012520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.012718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:22.012765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:22.012811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:22.012940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.019152Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:22.133244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:22.133429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.133623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:22.133829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:22.133872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-04T17:09:22.135732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.135858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:22.136024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.136076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:22.136101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:22.136124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:22.137515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.137571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:22.137599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:22.139102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.139145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.139178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.139236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.142548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:22.144015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:22.144157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:22.144910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.145001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:22.145055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.145214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
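
The "Change state for txid" messages above trace the operation through numeric states 2 -> 3 -> 128 -> 240 (and the AlterTable transaction later in this block goes 128 -> 129 -> 240). The labels below are hypothetical, inferred from the class names adjacent to each transition (TCreateParts, TConfigureParts, TPropose, TProposedWaitParts, TDone), not YDB's internal enum:

STATE_LABEL = {  # guessed labels, keyed by the codes the log prints
    2: "CreateParts",
    3: "ConfigureParts",
    128: "Propose",
    129: "ProposedWaitParts",
    240: "Done",
}

def describe(flow):
    for a, b in zip(flow, flow[1:]):
        print(f"{a} ({STATE_LABEL[a]}) -> {b} ({STATE_LABEL[b]})")

describe((2, 3, 128, 240))  # AlterSubDomain path seen above
describe((128, 129, 240))   # AlterTable path seen further down
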
2025-04-04T17:09:22.145249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.145373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:22.145417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:22.147151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.147197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:22.147356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.147384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:22.147676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.147729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:22.147814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:22.147837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.147878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:22.147908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.147956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:22.147992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.148016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:22.148035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:22.148090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:22.148120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:22.148154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:22.158128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:22.158232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:22.158287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-04T17:09:22.426793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.426869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:22.426906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:09:22.427141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:09:22.427250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:09:22.430733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.430793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:22.431077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.431128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:09:22.431206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.431250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:22.432124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:22.432240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:22.432283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:09:22.432322Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:09:22.432383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:22.432458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:09:22.434936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:22.447294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at 
schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1107 } } 2025-04-04T17:09:22.447371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:22.447502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1107 } } 2025-04-04T17:09:22.447609Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1107 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:09:22.448471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:22.448517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:22.448631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:22.448678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:22.448757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:22.448840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.448883Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.448953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:09:22.449006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:09:22.451028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.451476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.451728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.451775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:09:22.451879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:22.451914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:22.451960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:22.452002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:22.452036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:09:22.452108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 102 2025-04-04T17:09:22.452170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:22.452208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:22.452245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:22.452353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:22.453824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:22.453867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:457:2418] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:22.454403Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.454668Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 263us result status StatusSuccess 2025-04-04T17:09:22.455158Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: 
false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:22.083189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:22.083266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:22.083298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:22.083327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:22.083379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:22.083404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:22.083501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:22.083565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:22.083848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:22.144374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:22.144412Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:22.152947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:22.153170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
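
The conditional-erase requests earlier in this section carry WallClockTimestamp values such as 1600466640236000 and 1600463040236000, and the table described above declares ExpireAfterSeconds: 3600. Treating those values as microseconds since the Unix epoch (an inference from this log, not a documented contract) makes the numbers line up, with the two cutoffs exactly 3600 s apart:

from datetime import datetime, timezone

def us_to_utc(us: int) -> datetime:
    # Integer math avoids float rounding in the microsecond part.
    return datetime.fromtimestamp(us // 1_000_000, tz=timezone.utc).replace(
        microsecond=us % 1_000_000)

print(us_to_utc(1600466640236000))  # 2020-09-18 22:04:00.236000+00:00
print(us_to_utc(1600463040236000))  # 2020-09-18 21:04:00.236000+00:00
# The scheduled re-run the log prints ("run at: 2020-09-18T23:04:00.237000Z")
# is about one ExpireAfterSeconds interval after the first cutoff.
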
2025-04-04T17:09:22.153364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:22.162919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:22.163151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:22.163732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.164425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:22.169008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.170170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.170223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.170415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:22.170456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:22.170490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:22.170595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.176298Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:22.320805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:22.321041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.321239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:22.321497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:22.321556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.323773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.323957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:22.324134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.324198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:22.324233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:22.324261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:22.325972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.326024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:22.326058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:22.327753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.327798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.327836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.327905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.331581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:22.333327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:22.333484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:22.334496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.334611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:22.334674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.334928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:22.334978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:22.335136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:22.335216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:22.337123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:09:22.337166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:22.337356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.337396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:22.337706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.337766Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:22.337853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:22.337891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.337924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:22.337954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.338005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:22.338045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:22.338077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:22.338105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:22.338160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:22.338194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:22.338232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:22.339971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:22.340084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:22.340129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
AKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-04-04T17:09:22.621687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.621792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:22.621838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:09:22.622068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:09:22.622181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:22.625039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.625089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:22.625356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.625410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:09:22.626031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.626086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:22.626635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:22.626738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:22.626774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:09:22.626806Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:09:22.626842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:22.626931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:09:22.629898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:22.641795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at 
schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1089 } } 2025-04-04T17:09:22.641829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:22.641909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1089 } } 2025-04-04T17:09:22.642002Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1089 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:09:22.642497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:22.642544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:09:22.642624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:22.642665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:22.642723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969604 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:22.642760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.642785Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.642817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:09:22.642844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:09:22.644344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.645033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.645269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.645298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:09:22.645379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:22.645404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:22.645440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:22.645473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:22.645506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:09:22.645548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 102 2025-04-04T17:09:22.645581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:22.645607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:22.645638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:22.645729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:22.646942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:22.646980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:504:2429] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:22.647320Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:22.647510Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 166us result status StatusSuccess 2025-04-04T17:09:22.647865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:12.008599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:12.008701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.008734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:12.008770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:12.008818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:12.008846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:12.008917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:12.008995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:12.009347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:12.091846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:12.091912Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-04-04T17:09:12.103692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:12.103949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:12.104083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:12.116835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:12.117101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:12.117702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.118538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:12.124563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.125909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:12.125978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.126173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:12.126255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.126306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:12.126437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.133038Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:12.272317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:12.272575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.272783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:12.273008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:12.273069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.275395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.275553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:12.275786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.275859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:12.275904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:12.275947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:12.277722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.277760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:12.277791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:12.279667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.279729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.279793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.279869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.285165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:12.287733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:12.287947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:12.289020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:12.289167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:12.289226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.289562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:12.289625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:12.289826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:12.289940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:12.295625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:12.295707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:12.295926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:12.295969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:12.296314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:12.296363Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:12.296479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:12.296512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.296557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:12.296605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.296643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:12.296685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:12.296740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:12.296773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:12.296859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:12.296921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:12.296966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:12.298829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:12.298990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:12.299041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
RawX2: 4294969768 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:09:22.539025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-04T17:09:22.539161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 525 RawX2: 4294969768 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-04-04T17:09:22.539219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-04-04T17:09:22.539663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:22.539733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-04-04T17:09:22.539790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:09:22.539830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-04-04T17:09:22.539913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-04-04T17:09:22.540036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-04-04T17:09:22.540176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-04-04T17:09:22.540246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-04T17:09:22.542839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:22.544281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:22.544581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:09:22.544652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:09:22.544841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-04-04T17:09:22.545035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:09:22.545078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-04-04T17:09:22.545132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-04-04T17:09:22.545676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:22.545736Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-04-04T17:09:22.545827Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:22.545869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-04-04T17:09:22.545928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-04-04T17:09:22.547005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:22.547109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:22.547149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-04T17:09:22.547191Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-04-04T17:09:22.547240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-04-04T17:09:22.548723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:22.548809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-04-04T17:09:22.548837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-04-04T17:09:22.548869Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:09:22.548901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-04T17:09:22.548965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T17:09:22.551058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-04-04T17:09:22.551116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-04-04T17:09:22.551518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-04-04T17:09:22.551706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:22.551746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:22.551789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:09:22.551824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:22.551865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-04T17:09:22.551931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:553:2492] message: TxId: 104 2025-04-04T17:09:22.551988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:09:22.552029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:09:22.552063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:09:22.552155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-04-04T17:09:22.553310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:09:22.553348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:09:22.554109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-04T17:09:22.555315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-04-04T17:09:22.556759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:09:22.556809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:441:2393], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-04T17:09:22.557250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:09:22.557309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1403:3313] 2025-04-04T17:09:22.557802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-04-04T17:09:22.561820Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-04T17:09:22.562049Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 276us result status StatusSuccess 2025-04-04T17:09:22.562421Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 
ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.242863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.242952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.243004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.243036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.243075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.243102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.243158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.243259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:13.243503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.316076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.316114Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:09:13.324584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.324757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.324894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.336797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.337062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.337670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.338489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.343069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.344136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.344185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.344355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.344391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.344418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.344502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.349481Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.459874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.460112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.460315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.460535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.460609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.462603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.462748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.462932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.462978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.463007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.463055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.464730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.464781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.464814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.466671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.466740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.466792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.466876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.470460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.472519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.472702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.473756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.473882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.473937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.474200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.474270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.474459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.474543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.476365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.476402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.476518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.476544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.476813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.476846Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.476924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.476974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.477016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.477053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.477085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.477129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.477174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.477196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.477258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.477295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.477324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.478667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.478764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.478788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
09548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:09:23.322087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-04-04T17:09:23.322217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 440 RawX2: 4294969700 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:09:23.322292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-04T17:09:23.322822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.322890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-04T17:09:23.322944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:09:23.322978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-04-04T17:09:23.323056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-04T17:09:23.323173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-04-04T17:09:23.323293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:23.323354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:23.326536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.327271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.327918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:23.327974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:23.328165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:09:23.328361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.328412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T17:09:23.328462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-04T17:09:23.328571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.328630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts 
operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:23.328716Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.328761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-04T17:09:23.328807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-04T17:09:23.330147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.330272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.330310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:09:23.330348Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-04T17:09:23.330395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:23.331739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.331814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.331842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:09:23.331867Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:09:23.331897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:09:23.331951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-04T17:09:23.335128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.335212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:23.335598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:09:23.335779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:23.335829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:23.335866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:23.335897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready 
parts: 1/1 2025-04-04T17:09:23.335933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:09:23.335995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103 2025-04-04T17:09:23.336081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:23.336119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:09:23.336151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:09:23.336247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:09:23.336821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:23.336859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:23.337149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:23.342417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:23.343909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.343969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-04T17:09:23.344276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:09:23.344319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1315:3242] 2025-04-04T17:09:23.344975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-04-04T17:09:23.349064Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:23.349288Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 259us result status StatusSuccess 2025-04-04T17:09:23.349747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:13.427001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:13.427100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.427146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:13.427175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:13.427213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:13.427240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:13.427288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:13.427354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:13.427667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:13.499872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:13.499927Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:13.509709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:13.509921Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:13.510096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:13.519419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:13.519742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:13.520351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.521080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:13.525940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.527269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.527336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.527518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:13.527562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.527599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:13.527704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.533780Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:13.653485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:13.653704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.653933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:13.654150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:13.654208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.656438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.656580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:13.656748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.656792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:13.656837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:13.656890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:13.658657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.658708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:13.658742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:13.660314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.660358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.660394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.660464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.664072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:13.666392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:13.666596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:13.667591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:13.667708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:13.667761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.667998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:13.668048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:13.668240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:13.668337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:13.670204Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:13.670267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:13.670419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:13.670470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:13.670837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:13.670898Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:13.670986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.671032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.671084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:13.671128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.671169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:13.671203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:13.671243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:13.671270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:13.671321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:13.671355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:13.671383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:13.673017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.673122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:13.673169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
shard: 72057594046678944, message: Source { RawX1: 765 RawX2: 4294969998 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-04T17:09:23.401086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2025-04-04T17:09:23.401213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 765 RawX2: 4294969998 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-04T17:09:23.401260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-04T17:09:23.403458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.403527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-04T17:09:23.403580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:09:23.403624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-04T17:09:23.403701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-04T17:09:23.403832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-04T17:09:23.403958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:23.404026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:09:23.405356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.405476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.406855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:23.406906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:23.407052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:09:23.407173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.407213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-04T17:09:23.407281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 107, path id: 4 2025-04-04T17:09:23.407659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 
2025-04-04T17:09:23.407709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:23.407796Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.407835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:09:23.407874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-04T17:09:23.408676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:09:23.408777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:09:23.408813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-04T17:09:23.408866Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2025-04-04T17:09:23.408906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:23.409528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:09:23.409603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:09:23.409628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-04T17:09:23.409674Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:09:23.409745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:09:23.409805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-04T17:09:23.412537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.412589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:23.412949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:09:23.413077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-04T17:09:23.413111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:09:23.413153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 
2025-04-04T17:09:23.413188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:09:23.413246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-04T17:09:23.413299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:09:23.413340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-04T17:09:23.413373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-04T17:09:23.413460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:09:23.413832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:23.413871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:23.414855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:09:23.415315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:09:23.415647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.415702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-04-04T17:09:23.416314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-04-04T17:09:23.416806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-04T17:09:23.416848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-04T17:09:23.417329Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-04T17:09:23.417410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-04T17:09:23.417447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1000:2928] TestWaitNotification: OK eventTxId 107 2025-04-04T17:09:23.418054Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:23.418296Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 207us result status StatusSuccess 2025-04-04T17:09:23.418699Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:23.024618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:23.024680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:23.024704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:23.024739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:23.024779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:23.024800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:23.024869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:23.024931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:23.025196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:23.088658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:23.088698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:23.097132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:23.097328Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:23.097455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:23.106842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:23.107119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:23.107689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:23.108455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:23.113326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.114527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:23.114582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.114789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:23.114832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:23.114868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:23.114997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.120988Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:23.223434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:23.223611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.223762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:23.223968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:23.224008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.225849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:23.225973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:23.226107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.226149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:23.226183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:23.226211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:23.227639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.227690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:23.227722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:23.229002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.229031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.229055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:23.229100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:23.232015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:23.233243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:23.233355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:23.234146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:23.234225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:23.234278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:23.234462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:23.234507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:23.234657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:23.234725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:23.236464Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:23.236507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:23.236652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.236681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:23.236907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.236955Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:23.237021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:23.237044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:23.237068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:23.237106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:23.237140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:23.237166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:23.237187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:23.237206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:23.237243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:23.237266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:23.237291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:23.238539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:23.238656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:23.238696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2025-04-04T17:09:23.655160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2025-04-04T17:09:23.655344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:23.655430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:09:23.664525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:23.664602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:23.664833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:09:23.665092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:23.665198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T17:09:23.665274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 4 2025-04-04T17:09:23.666097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.666175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:23.667682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.667811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.667859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:09:23.667906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-04T17:09:23.667989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:09:23.668739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.668856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:23.668891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:09:23.668925Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 
72057594046678944, LocalPathId: 4], version: 3 2025-04-04T17:09:23.668964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:09:23.669041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-04T17:09:23.669787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } } 2025-04-04T17:09:23.669867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-04T17:09:23.670048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } } 2025-04-04T17:09:23.670185Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1440 } } 2025-04-04T17:09:23.671014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 546 RawX2: 4294969787 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:09:23.671077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-04-04T17:09:23.671232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 546 RawX2: 4294969787 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:09:23.671295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:23.671405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 546 RawX2: 4294969787 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:09:23.671479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:23.671548Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.671604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:09:23.671660Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-04T17:09:23.675371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:23.677109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:23.677247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.677380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.677744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.677809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:09:23.677940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:23.677998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:23.678085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:23.678133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:23.678205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:09:23.678301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:408:2375] message: TxId: 103 2025-04-04T17:09:23.678382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:23.678435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:09:23.678477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:09:23.678622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:09:23.680653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:09:23.680709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:572:2509] TestWaitNotification: OK eventTxId 103
W0000 00:00:1743786563.681460 471469 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds"
TestModificationResults wait txId: 104 2025-04-04T17:09:23.685230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:23.685737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:23.685960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path:
/MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2025-04-04T17:09:23.686512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2025-04-04T17:09:23.689182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:23.689362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:09.658368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:09.658511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.658568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:09.658611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:09.659814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:09.659867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:09.659941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:09.660031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:09.661178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:09.753727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:09.753778Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:09.775362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:09.775592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-04T17:09:09.775730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:09.785637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:09.785928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:09.786549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.787324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:09.792312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.797621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:09.797672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.797724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:09.797864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.804592Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:09.948871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:09.949055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.949256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:09.949552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:09.949610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:09.953941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.953998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:09.954046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:09.954122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:09.956085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.956144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:09.956180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:09.957943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.957983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.958018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.958080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.961650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:09.964644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:09.964799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:09.965501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:09.965589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:09.965667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.965876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:09.965911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:09.966029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:09.966088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:09.968293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-04T17:09:09.968362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:09.968539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:09.968588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:09.968860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:09.968901Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:09.968986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.969033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.969095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:09.969137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.969170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:09.969273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:09.969304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:09.969332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:09.969401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:09.969437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:09.969466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:09.976742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.976883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:09.976919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
2025-04-04T17:09:22.772021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:09:22.772070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:09:22.773457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:22.773575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 0 RawX2: 0 } } Step: 3150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:22.773627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3150, at schemeshard: 72057594046678944 2025-04-04T17:09:22.773803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:09:22.773901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-04T17:09:22.774089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:22.774149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:22.775609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:22.775642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:09:22.775974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:22.776007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:22.776103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:22.776147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:09:22.776255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:22.776281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T17:09:22.776311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T17:09:22.776335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-04T17:09:22.776632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:22.776668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:09:22.776770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:22.776812Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:22.776861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:22.776894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:22.776922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-04T17:09:22.776951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:22.776991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:09:22.777016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:09:22.777123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:09:22.777165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-04T17:09:22.777204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-04-04T17:09:22.777246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-04-04T17:09:22.777943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:22.778012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:22.778042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:09:22.778072Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:09:22.778117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:09:22.778710Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-04-04T17:09:22.778829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:09:22.778869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:09:22.778925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:09:22.779433Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-04-04T17:09:22.779525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:22.779566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:09:22.779586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:09:22.779604Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-04-04T17:09:22.779621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:22.779662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T17:09:22.780324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:09:22.780685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:09:22.782888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:22.782957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:09:22.784553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:09:22.784651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:09:22.784730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:09:22.785148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:09:22.785191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:09:22.785628Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:09:22.785743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:09:22.785781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:778:2693] TestWaitNotification: OK eventTxId 103 2025-04-04T17:09:23.181327Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:23.181517Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 238us result status StatusSuccess 2025-04-04T17:09:23.181798Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain 
CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore >> KqpFlipJoin::RightSemi_1 >> KqpJoinOrder::Chain65Nodes >> KqpJoin::RightSemiJoin_SimpleKey >> KqpJoin::LeftJoinWithNull+StreamLookupJoin >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore >> KqpJoin::ExclusionJoin >> KqpJoin::IdxLookupPartialWithTempTable >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup >> KqpIndexLookupJoin::MultiJoins >> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup >> KqpJoin::TwoJoinsWithQueryService >> OlapEstimationRowsCorrectness::TPCH2 >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftOnly+StreamLookup >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore |86.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoin::JoinWithDuplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-04-04T17:03:48.379107Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:03:48.482564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:03:48.507695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:03:48.508154Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:03:48.516616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:03:48.516806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:03:48.517001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:03:48.517075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:03:48.517169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:03:48.517255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:03:48.517347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:03:48.517471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:03:48.517605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:03:48.517707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:03:48.517797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:03:48.517870Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:03:48.543328Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:03:48.543591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:03:48.543642Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:03:48.543833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:48.543993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:03:48.544068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:03:48.544114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:03:48.544200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:03:48.544273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:03:48.544321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:03:48.544349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:03:48.544675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:03:48.544743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:03:48.544780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:03:48.544806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:03:48.544903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:03:48.544987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:03:48.545044Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:03:48.545071Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:03:48.545133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:03:48.545166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:03:48.545194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:03:48.545244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:03:48.545283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:03:48.545308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:03:48.545669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-04T17:03:48.545792Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T17:03:48.545869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-04T17:03:48.545971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-04T17:03:48.546143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:03:48.546200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:03:48.546234Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:03:48.546454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:03:48.546519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:03:48.546567Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:03:48.546765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:03:48.546812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:03:48.546852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:03:48.547038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:03:48.547081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:03:48.547113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:03:48.547226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:03:48.547262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:03:48.547322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BL
OB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLO
B:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_
size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-04T17:09:24.562989Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11455:13082];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:09:24.565299Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11455:13082];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |86.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> BackupRestoreS3::RestoreViewQueryText [GOOD] >> BackupRestoreS3::RestoreViewReferenceTable >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-04-04T17:08:46.971216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506103236714779:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:46.971269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpIACm87/pdisk_1.dat 2025-04-04T17:08:47.423845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:47.423948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:47.436176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:47.459907Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17134, node 1 2025-04-04T17:08:47.496793Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:08:47.496854Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:08:47.646875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:47.646903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:47.646917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:47.647029Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:5389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:48.200575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:50.018810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506120416585012:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:50.018830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506120416585001:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:50.018909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:50.030976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:08:50.061300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506120416585015:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:08:50.168293Z node 1 :TX_PROXY ERROR: Actor# [1:7489506120416585082:2683] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/"Create temporary directory "/Root/~backup_20250404T170850" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view"Write view into "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view/permissions.pb"Remove temporary directory "/Root/~backup_20250404T170850" in database2025-04-04T17:08:50.973141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view"Restore view "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpo4tHDe/view/permissions.pb"2025-04-04T17:08:51.183890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-04T17:08:52.744599Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506126845196035:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:52.744729Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpQlQOzY/pdisk_1.dat 2025-04-04T17:08:53.090916Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:53.130410Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:53.130507Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:53.133316Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28304, node 4 2025-04-04T17:08:53.348886Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:53.348910Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:53.348918Z node 4 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-04T17:08:53.349083Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:53.569311Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:56.139652Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506144025066286:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:08:56.139652Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506144025066281:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:08:56.139723Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:08:56.142749Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-04T17:08:56.160523Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506144025066295:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-04-04T17:08:56.237173Z node 4 :TX_PROXY ERROR: Actor# [4:7489506144025066366:2679] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:08:56.261722Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-04-04T17:08:56.756207Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wdvfcbacehwakm4shwj9e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NGE5YzdmZGEtNGQ1ZDA3ZDYtZDU0ZjAyN2UtN2IzMDNmMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T17:08:57.232010Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wdvyj5ys913k15r3yfask, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NGE5YzdmZGEtNGQ1ZDA3ZDYtZDU ... >: Error: Resource pool default not found or you don't have access permissions }
Restore "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/" to "/Root"
2025-04-04T17:09:19.675202Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037889 not found
2025-04-04T17:09:19.675243Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 13, TabletId: 72075186224037888 not found
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/" to "/Root"
Process "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/table"
Read scheme from "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/table/scheme.pb"
Restore table "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/table" to "/Root/table"
2025-04-04T17:09:19.713703Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
Created "/Root/table"
Read data from "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/table/data_00.csv"
Restore index "byValue" on "/Root/table"
2025-04-04T17:09:19.804378Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480
2025-04-04T17:09:19.851755Z node 13 :CHANGE_EXCHANGE WARN: [AsyncIndexChangeSenderMain][72075186224037892:1][13:7489506243470731149:2423] Failed entry at 'ResolveTargetTable': entry# { Path: TableId: [72057594046644480:11:0] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }
2025-04-04T17:09:19.930586Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480
Restore ACL "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/table" to "/Root/table"
Read ACL from
"/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpPjHpjp/table/permissions.pb"2025-04-04T17:09:20.143131Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-04T17:09:21.451969Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7489506251960426415:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:21.452037Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmpTLiTBz/pdisk_1.dat 2025-04-04T17:09:21.554597Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:21.590108Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:21.590206Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:21.593783Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1955, node 16 2025-04-04T17:09:21.653122Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:21.653148Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:21.653157Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:21.653307Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:21.814390Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:24.864194Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7489506264845329352:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:24.864333Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:24.883673Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/"
Create temporary directory "/Root/~backup_20250404T170924" in database
Process "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table"
Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250404T170924/table" }
Backup table "/Root/~backup_20250404T170924/table" to "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table"
Describe table "/Root/~backup_20250404T170924/table"
Write scheme into "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table/scheme.pb"
Describe table "/Root/table"
Write ACL into "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table/permissions.pb"
Read table "/Root/~backup_20250404T170924/table"
Write data into "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table/data_00.csv"
Drop table "/Root/~backup_20250404T170924/table"
Remove temporary directory "/Root/~backup_20250404T170924" in database
2025-04-04T17:09:25.495523Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037891 not found
2025-04-04T17:09:25.495565Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037892 not found
2025-04-04T17:09:25.495588Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037893 not found
2025-04-04T17:09:25.502721Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480
Backup completed successfully
2025-04-04T17:09:25.536891Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7489506269140297687:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:25.536998Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:25.660648Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037889 not found
Restore "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/" to "/Root"
2025-04-04T17:09:25.665957Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037888 not found
2025-04-04T17:09:25.665984Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037890 not found
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/" to "/Root"
Process "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table"
Read scheme from "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table/scheme.pb"
Restore table "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table" to "/Root/table"
2025-04-04T17:09:25.714317Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
Created "/Root/table"
Read data from "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table/data_00.csv"
Restore index "byValue" on "/Root/table"
2025-04-04T17:09:25.842686Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480
2025-04-04T17:09:25.983227Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480
2025-04-04T17:09:26.059255Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480
2025-04-04T17:09:26.244225Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710761:0, at schemeshard: 72057594046644480
2025-04-04T17:09:26.325360Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037897 not found
2025-04-04T17:09:26.325399Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037898 not found
2025-04-04T17:09:26.452198Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7489506251960426415:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:26.452329Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Restore ACL "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table" to "/Root/table"
Read ACL from "/home/runner/.ya/build/build_root/i5tq/0017a3/r3tmp/tmp567QvK/table/permissions.pb"
2025-04-04T17:09:26.638891Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480
Restore completed successfully
------- [TM]
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:21.517732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:21.517823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.517861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:21.517894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:21.517956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:21.517988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:21.518093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.518165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:21.518536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:21.597031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:21.597091Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:21.608452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:21.608672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:21.608850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:21.618049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:21.618345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:21.619020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.619774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:21.624849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.626052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.626108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.626310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:21.626358Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.626397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:21.626516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.632230Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:21.752157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:21.752397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.752586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:21.752817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:21.752873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.754871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.755038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:21.755198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.755250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:21.755289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:21.755321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:21.756933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.756987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:21.757019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:21.758676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.758737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.758777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.758856Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.762416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:21.764001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:21.764156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:21.765058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.765169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:21.765212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.765448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:21.765493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.765669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:21.765760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:21.769147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.769197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.769403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.769445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:21.769776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.769836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:21.769933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.769971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.770013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.770047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.770096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:21.770135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.770170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:21.770198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:21.770280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:21.770314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:21.770354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:21.772136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.772244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.772285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.123593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.123671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.123758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.124639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.124736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.124881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.124945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.124999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.125093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.125154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.125219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.127380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.127467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.127567Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.127609Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:09:28.127712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:28.127744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:28.127780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:09:28.127809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:28.127856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:09:28.127944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2740:4005] message: TxId: 103 2025-04-04T17:09:28.127990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:09:28.128058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:09:28.128089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:09:28.128949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-04-04T17:09:28.133498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:09:28.133712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4071:5273] TestWaitNotification: OK eventTxId 103 2025-04-04T17:09:28.134352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:28.134656Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 336us result status StatusSuccess 2025-04-04T17:09:28.135217Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 
2025-04-04T17:09:28.138204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:28.138425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:09:28.142291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2025-04-04T17:09:28.145048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:28.145184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:09:28.145551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:09:28.145603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:09:28.146086Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:09:28.146204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:09:28.146258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4503:5703] TestWaitNotification: OK eventTxId 104 |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> KqpJoin::TwoJoinsWithQueryService [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull >> KqpJoin::ExclusionJoin [GOOD] >> KqpJoin::FullOuterJoin >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightSemi_2 >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] >> KqpJoin::LeftJoinWithNull-StreamLookupJoin >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] >> KqpJoin::IdxLookupSelf >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup |86.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD]
>> KqpJoin::RightSemiJoin_SimpleKey [GOOD]
>> KqpJoin::RightSemiJoin_SecondaryIndex
>> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts
>> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD]
>> KqpIndexLookupJoin::LeftOnly-StreamLookup
>> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore
>> KqpJoin::JoinWithDuplicates [GOOD]
>> KqpJoin::LeftJoinPushdownPredicate_NestedJoin
>> KqpIndexLookupJoin::MultiJoins [GOOD]
>> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore
>> KqpIndexLookupJoin::RightSemi
>> KqpJoinOrder::TPCHRandomJoinViewJustWorks+StreamLookupJoin-ColumnStore
|86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD]
>> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore [GOOD]
>> OlapEstimationRowsCorrectness::TPCH10
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:18.658904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:18.659063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.659112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:18.659150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:18.660093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:18.660154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:18.660281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.660373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s,
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.748421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.748481Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.761318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.761571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.761831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.771827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.772079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.775072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.776864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.784542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.796594Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.915869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.916094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.916280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.916491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.916557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.918737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.918876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.919022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.919067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.919090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.919131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.923151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.923239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.923426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.927160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.927217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.927256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.927317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.936860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.938572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.938743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.939744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.939857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.939900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.940155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.940211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:09:18.940367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.940432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.942588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.942654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.942827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.942866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.943171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.943224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.943313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.943347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.943386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.943416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.943464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.943504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.943535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.943561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.943613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.943646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.943685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.945397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.945519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.945563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
{ PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 142 } } 2025-04-04T17:09:34.844620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-04T17:09:34.844730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.844768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-04T17:09:34.846907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.847060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.847105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:34.847171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:09:34.847317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:34.848903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:09:34.849015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:09:34.849672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:34.849777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:34.849831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:09:34.850076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:09:34.850191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:09:34.854775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:34.854825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:34.855015Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:34.855044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:09:34.855504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.855555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:09:34.856006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:34.856081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:09:34.856106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:09:34.856135Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:09:34.856165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:34.856217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:09:34.856639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1147 } } 2025-04-04T17:09:34.856664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:34.856772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1147 } } 2025-04-04T17:09:34.856835Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1147 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:09:34.857273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:34.857311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:34.857416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:34.857483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:34.857553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:09:34.857597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:34.857633Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.857669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:34.857701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:09:34.864855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:09:34.864976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.865087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.865300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:09:34.865338Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:09:34.865442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:34.865498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:34.865563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:09:34.865625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:34.865661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:09:34.865715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:09:34.865757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:09:34.865789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:09:34.865821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:09:34.865928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:34.867601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:09:34.867645Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:614:2569] TestWaitNotification: OK eventTxId 102 2025-04-04T17:09:34.868044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-04-04T17:09:34.868112Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2025-04-04T17:09:34.869719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-04-04T17:09:34.869810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-04-04T17:09:34.869840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.037500Z, at schemeshard: 72057594046678944 2025-04-04T17:09:34.869871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore >> BackupRestoreS3::RestoreViewReferenceTable [GOOD] >> BackupRestoreS3::RestoreViewDependentOnAnotherView >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:20.772244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:20.772317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.772350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:20.772378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:20.772434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:20.772462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:20.772548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.772609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:20.772917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:20.852819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:20.852873Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:20.864066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:20.864274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:20.864445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:20.874026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:20.874283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:20.875012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.876287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:20.880880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.881862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.881914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.882043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:20.882077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.882106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:20.882226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.887739Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:20.998792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:20.998989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.999155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:20.999336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:20.999380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.001119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.001231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:21.001382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.001424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:21.001452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:21.001475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:21.002943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.003005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:21.003028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:21.004330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.004370Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.004407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.004462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.008046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:21.009566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:21.009713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:21.010998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.011106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:21.011158Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.011381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:21.011426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-04-04T17:09:21.011586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:21.011670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:21.013376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.013411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.013529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.013555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:21.013793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.013832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:21.013898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.013927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.013963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.013983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.014021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:21.014050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.014076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:21.014095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:21.014138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:21.014166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:21.014200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:21.015520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.015601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.015628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
h, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:35.700375Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:35.700495Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:35.700524Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:35.700562Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:35.700847Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:35.700884Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:35.702227Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:35.702326Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:35.702357Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:35.702387Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:35.702418Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:35.702951Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:35.703017Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:35.703041Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:35.703068Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:35.703098Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:35.703151Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:09:35.703723Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { 
PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 928 } } 2025-04-04T17:09:35.703765Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:35.703847Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 928 } } 2025-04-04T17:09:35.703911Z node 18 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 928 } } 2025-04-04T17:09:35.704284Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 77309413623 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:35.704310Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:35.704393Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 77309413623 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:35.704448Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:35.704508Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 77309413623 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:35.704552Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:35.704591Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:35.704627Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:35.704665Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:35.706882Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:35.708136Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:35.708230Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:35.708294Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:35.708450Z 
node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:35.708477Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:35.708536Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:35.708559Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:35.708608Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:35.708638Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:35.708672Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:09:35.708722Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:336:2315] message: TxId: 101 2025-04-04T17:09:35.708753Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:35.708781Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:35.708799Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:35.708880Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:35.710224Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:35.710279Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:337:2316] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:35.710694Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:35.710879Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 226us result status StatusSuccess 2025-04-04T17:09:35.711327Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false 
IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoin::IdxLookupLeftPredicate >> KqpJoin::FullOuterJoin [GOOD] >> KqpJoin::CrossJoinCount >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup >> KqpJoin::IdxLookupSelf [GOOD] >> KqpJoin::JoinAggregateSingleRow >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup >> KqpFlipJoin::RightSemi_2 [GOOD] >> KqpFlipJoin::RightSemi_3 >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] >> KqpIndexLookupJoin::Left+StreamLookup >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemi >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin-ColumnStore >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] >> KqpJoin::JoinLeftPureInner >> KqpIndexLookupJoin::RightSemi [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] >> KqpJoin::RightTableKeyPredicate >> 
KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull >> KqpJoin::IdxLookupLeftPredicate [GOOD] >> KqpJoin::IdxLookupPartialLeftPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:21.656767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:21.656828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.656853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:21.656877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:21.656931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:21.656954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:21.657018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:21.657066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:21.657310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:21.721645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:21.721683Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:21.730531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:21.730755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:21.730912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:21.739527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:21.739773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:21.740361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.741130Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:21.745437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.746681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.746736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.746902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:21.746943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.746983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:21.747120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.756970Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:21.870278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:21.870485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.870693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:21.870893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:21.870942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.872702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.872842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:21.872987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.873034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:21.873063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:21.873092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:21.874601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.874659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:21.874691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:21.876052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.876088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.876132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.876179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.879456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:21.880843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:21.880977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:21.881809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:21.881919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:21.881962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.882224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:21.882302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:21.882463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:21.882529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:21.884197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:21.884231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:21.884385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:21.884422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 
1 2025-04-04T17:09:21.884732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:21.884770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:21.884851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.884881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.884917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:21.884944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.884991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:21.885024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:21.885052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:21.885083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:21.885136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:21.885169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:21.885197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:21.886844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.886946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:21.886979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:42.377731Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:42.378015Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.386681Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:123:2149] sender: [27:236:2058] recipient: [27:15:2062] 2025-04-04T17:09:42.398854Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:42.399159Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.399429Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:42.399725Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:42.399810Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.402382Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:42.402532Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:42.402803Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.402900Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:42.402968Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:42.403033Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:42.404782Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.404856Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:42.404927Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:42.406476Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.406540Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.406617Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:42.406707Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 
1 ready parts: 1/1 2025-04-04T17:09:42.406909Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:42.408558Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:42.408816Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:42.409836Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:42.410012Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 115964119146 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:42.410090Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:42.410475Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:42.410562Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:42.410905Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:42.411022Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:42.412895Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:42.412969Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:42.413224Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:42.413302Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:42.413621Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.413699Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:42.413894Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:42.413959Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:42.414027Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:42.414084Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-04T17:09:42.414147Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:42.414225Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:42.414315Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:42.414365Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:42.414449Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:42.414510Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:42.414569Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:42.415507Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:42.415646Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:42.415707Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:42.415786Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:42.415855Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:42.416002Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:42.418438Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:42.419116Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:09:42.420364Z node 27 :TX_PROXY DEBUG: actor# [27:266:2257] Bootstrap 2025-04-04T17:09:42.451656Z node 27 :TX_PROXY DEBUG: actor# [27:266:2257] Become StateWork (SchemeCache [27:271:2262]) 2025-04-04T17:09:42.454629Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:42.455196Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:42.455364Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: 
"modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-04T17:09:42.455994Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-04T17:09:42.457403Z node 27 :TX_PROXY DEBUG: actor# [27:266:2257] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:42.460738Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:42.460998Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-04T17:09:42.461419Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> KqpJoin::CrossJoinCount [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup >> BackupRestoreS3::RestoreViewDependentOnAnotherView [GOOD] >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95-StreamLookupJoin+ColumnStore >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:09:18.658894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.658971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660083Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.750411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.750474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:18.761865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.762120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.762266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.772138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.772426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.775064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.776784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.784242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.790179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.790270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.790451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.790508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.790567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.790706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.802175Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.944485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.944730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.944942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.945170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.945237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.947862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.948169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.948317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.948380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.948418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.948451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.950137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.950189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.950225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.951891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.951945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.952000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.952059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.955834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.957782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.957966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.959019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.959136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-04T17:09:18.959203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.959459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.959514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.959684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.959786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.961674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.961718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.961895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.961936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.962312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.962362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.962469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.962507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.962545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.962577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.962625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.962681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.962714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.962742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.962804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.962839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.962878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.964674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.964791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046678944, cookie: 1 2025-04-04T17:09:18.964835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:43.566366Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:09:43.566573Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:43.566611Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:09:43.566670Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:09:43.567981Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:43.568038Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:09:43.569091Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:43.569176Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:43.569208Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:43.569242Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:09:43.569277Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:43.570072Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:43.570151Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:09:43.570180Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:09:43.570209Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:09:43.570262Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:09:43.570328Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:09:43.570918Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 
5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1109 } } 2025-04-04T17:09:43.570961Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:43.571078Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1109 } } 2025-04-04T17:09:43.571171Z node 28 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1109 } } 2025-04-04T17:09:43.571770Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:43.571810Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:09:43.571913Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:43.571954Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:09:43.572032Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-04-04T17:09:43.572082Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:43.572117Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:43.572153Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:09:43.572192Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-04-04T17:09:43.574160Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:43.575959Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:09:43.576067Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:43.576160Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:43.576373Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:43.576418Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:09:43.576507Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:43.576541Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:43.576580Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:09:43.576611Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:43.576646Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:09:43.576703Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:334:2313] message: TxId: 101 2025-04-04T17:09:43.576743Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:09:43.576780Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:09:43.576808Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:09:43.576911Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:09:43.578500Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:09:43.578544Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:335:2314] TestWaitNotification: OK eventTxId 101 2025-04-04T17:09:43.579006Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:09:43.579204Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 230us result status StatusSuccess 2025-04-04T17:09:43.579684Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: 
"modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore >> KqpJoin::JoinAggregateSingleRow [GOOD] >> KqpJoin::JoinAggregate >> KqpJoin::JoinDupColumnRight ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreViewDependentOnAnotherView [GOOD] Test command err: 2025-04-04T17:08:46.918588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506101517434414:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:46.919357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017a9/r3tmp/tmpO1q8TU/pdisk_1.dat 2025-04-04T17:08:47.422541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:47.423479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:47.429778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:47.451789Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11994, node 1 2025-04-04T17:08:47.497247Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:08:47.498961Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:08:47.646869Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:47.646886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:47.646893Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:47.647016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27071 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:48.200861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:50.171544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506118697304660:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:50.171664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:50.557127Z node 1 :TX_PROXY DEBUG: actor# [1:7489506101517434672:2138] Handle TEvProposeTransaction 2025-04-04T17:08:50.557163Z node 1 :TX_PROXY DEBUG: actor# [1:7489506101517434672:2138] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:08:50.557229Z node 1 :TX_PROXY DEBUG: actor# [1:7489506101517434672:2138] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489506118697304696:2631] 2025-04-04T17:08:50.678791Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-04-04T17:08:50.678858Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:08:50.679288Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:08:50.679399Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:08:50.679746Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:08:50.679960Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:08:50.680010Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:08:50.680144Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:08:50.681583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:08:50.684117Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-04T17:08:50.684200Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304696:2631] txid# 281474976710658 SEND to# [1:7489506118697304695:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-04T17:08:50.890378Z node 1 :TX_PROXY DEBUG: actor# [1:7489506101517434672:2138] Handle TEvNavigate describe path /Root/table 2025-04-04T17:08:50.890453Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304841:2748] HANDLE EvNavigateScheme /Root/table 2025-04-04T17:08:50.890718Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304841:2748] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 
ErrorCount# 0 2025-04-04T17:08:50.890836Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304841:2748] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-04-04T17:08:50.893104Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506118697304841:2748] Handle TEvDescribeSchemeResult Forward to# [1:7489506118697304839:2349] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786530836 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } 
TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-04T17:08:50.932211Z node 1 :TX_PROXY DEBUG: actor# [1:7489506101517434672:2138] Handle TEvNavigate describe path /Root 2025-04-04T17:08:50.93227 ... ath: "/Root/dependentView" } } StartTime { seconds: 1743786581 } } 2025-04-04T17:09:41.842160Z node 22 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [22:7489506339025996878:2197], result# ListObjectsResult { } 2025-04-04T17:09:41.842232Z node 22 :IMPORT INFO: Reply: self# [22:7489506339025996878:2197], success# 1, error# 2025-04-04T17:09:41.842349Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:41.842372Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976710666, itemIdx# 0, success# 1 REQUEST: HEAD /test_bucket/dependentView/permissions.pb HTTP/1.1 HEADERS: Host: localhost:10017 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41B8C789-17A2-408C-A298-01D617190AC0 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250404/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=2542d8a4fb9636e36eb0f6fc3feb0b4258d42198a9713309baf6f3888755abf6 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250404T170941Z 2025-04-04T17:09:41.867454Z node 22 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [22:7489506339025996879:2197], result# No response body. 
2025-04-04T17:09:41.880488Z node 22 :IMPORT DEBUG: TSchemeQueryExecutor HandleCompileResponse, self: [22:7489506339025996921:2907], status: SUCCESS 2025-04-04T17:09:41.880553Z node 22 :IMPORT INFO: TSchemeQueryExecutor Reply, self: [22:7489506339025996921:2907], status: SUCCESS 2025-04-04T17:09:41.880790Z node 22 :IMPORT DEBUG: TSchemeQueryExecutor Reply, self: [22:7489506339025996921:2907], status: SUCCESS, prepared query: "WorkingDir: \"/Root\" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: false CreateView { Name: \"baseView\" QueryText: \"SELECT 1 AS Key\" CapturedContext { PathPrefix: \"/Root\" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: \"AnsiInForEmptyOrNullableItemsCollections\" Pragmas: \"AnsiLike\" Pragmas: \"FlexibleTypes\" Pragmas: \"AnsiCurrentRow\" Pragmas: \"WarnOnAnsiAliasShadowing\" Pragmas: \"AnsiOptionalAs\" Pragmas: \"EmitAggApply\" } }" 2025-04-04T17:09:41.900056Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:41.916571Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:41.916623Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeQueryPreparation: id# 281474976710666, itemIdx# 0, status# SUCCESS, error# 2025-04-04T17:09:41.916789Z node 22 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710666 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 2 }, item# { Idx: 0 DstPathName: '/Root/baseView' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-04T17:09:41.919110Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:41.919249Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:41.919269Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715758, id# 281474976710666 2025-04-04T17:09:41.919326Z node 22 :IMPORT INFO: TImport::TTxProgress: ExecutePreparedQuery: info# { Id: 281474976710666 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 2 }, item# { Idx: 0 DstPathName: '/Root/baseView' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715758 2025-04-04T17:09:41.919401Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:41.921928Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:41.921969Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715758, status# StatusAccepted 2025-04-04T17:09:41.922108Z node 22 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710666 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 2 }, item# { Idx: 0 DstPathName: '/Root/baseView' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 9] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976715758 Issue: '' } REQUEST: GET /test_bucket?prefix=dependentView HTTP/1.1 HEADERS: Host: localhost:10017 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E91073BA-DEAF-4768-8476-5B151BEEA99B amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250404/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, 
Signature=4437ea118e1acb938925558ba5640168992b7ff4b9dd367006b29a48efb3a557 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250404T170941Z S3_MOCK::HttpServeList: dependentView 2025-04-04T17:09:41.926990Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:41.931516Z node 22 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [22:7489506339025996879:2197], result# ListObjectsResult { } 2025-04-04T17:09:41.931584Z node 22 :IMPORT INFO: Reply: self# [22:7489506339025996879:2197], success# 1, error# 2025-04-04T17:09:41.931667Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:41.931690Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976710666, itemIdx# 1, success# 1 2025-04-04T17:09:41.957705Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:41.975785Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:41.975826Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715758 2025-04-04T17:09:41.981971Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:42.018335Z node 22 :IMPORT DEBUG: TSchemeQueryExecutor HandleCompileResponse, self: [22:7489506339025996946:2930], status: SUCCESS 2025-04-04T17:09:42.018402Z node 22 :IMPORT INFO: TSchemeQueryExecutor Reply, self: [22:7489506339025996946:2930], status: SUCCESS 2025-04-04T17:09:42.018595Z node 22 :IMPORT DEBUG: TSchemeQueryExecutor Reply, self: [22:7489506339025996946:2930], status: SUCCESS, prepared query: "WorkingDir: \"/Root\" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: false CreateView { Name: \"dependentView\" QueryText: \"SELECT * FROM `/Root/baseView`\" CapturedContext { PathPrefix: \"/Root\" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: \"AnsiInForEmptyOrNullableItemsCollections\" Pragmas: \"AnsiLike\" Pragmas: \"FlexibleTypes\" Pragmas: \"AnsiCurrentRow\" Pragmas: \"WarnOnAnsiAliasShadowing\" Pragmas: \"AnsiOptionalAs\" Pragmas: \"EmitAggApply\" } }" 2025-04-04T17:09:42.018801Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:42.018822Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeQueryPreparation: id# 281474976710666, itemIdx# 1, status# SUCCESS, error# 2025-04-04T17:09:42.018964Z node 22 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710666 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 2 }, item# { Idx: 1 DstPathName: '/Root/dependentView' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-04-04T17:09:42.023693Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:42.023859Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:42.023880Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715759, id# 281474976710666 2025-04-04T17:09:42.023932Z node 22 :IMPORT INFO: TImport::TTxProgress: ExecutePreparedQuery: info# { Id: 281474976710666 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 2 }, item# { Idx: 1 DstPathName: '/Root/dependentView' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, 
txId# 281474976715759 2025-04-04T17:09:42.024011Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:42.031432Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:42.031468Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715759, status# StatusAccepted 2025-04-04T17:09:42.031636Z node 22 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710666 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 2 }, item# { Idx: 1 DstPathName: '/Root/dependentView' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 10] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976715759 Issue: '' } 2025-04-04T17:09:42.039389Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:42.054464Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:09:42.054497Z node 22 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715759 2025-04-04T17:09:42.058983Z node 22 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:09:42.249493Z node 22 :TX_PROXY DEBUG: [GetImport] [22:7489506343320964315:2380] [0] Resolve database: name# /Root 2025-04-04T17:09:42.254422Z node 22 :TX_PROXY DEBUG: [GetImport] [22:7489506343320964315:2380] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:09:42.254461Z node 22 :TX_PROXY DEBUG: [GetImport] [22:7489506343320964315:2380] [0] Send request: schemeShardId# 72057594046644480 2025-04-04T17:09:42.255407Z node 22 :TX_PROXY DEBUG: [GetImport] [22:7489506343320964315:2380] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710666 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:10017" scheme: HTTP bucket: "test_bucket" items { source_prefix: "baseView" destination_path: "/Root/baseView" } items { source_prefix: "dependentView" destination_path: "/Root/dependentView" } } StartTime { seconds: 1743786581 } EndTime { seconds: 1743786582 } } 2025-04-04T17:09:42.357890Z node 22 :TX_PROXY DEBUG: actor# [22:7489506317551159186:2131] Handle TEvExecuteKqpTransaction 2025-04-04T17:09:42.357923Z node 22 :TX_PROXY DEBUG: actor# [22:7489506317551159186:2131] TxId# 281474976710667 ProcessProposeKqpTransaction 2025-04-04T17:09:42.358229Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0wf87q1n5gdgx30grffqwh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=ZjAzMzA2YjctM2I2YzFjYjEtN2RhMTUzZTQtZTA1MmQxODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown >> KqpFlipJoin::RightSemi_3 [GOOD] >> KqpFlipJoin::RightOnly_1 >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> KqpIndexLookupJoin::Left+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left-StreamLookup >> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore >> KqpJoin::JoinLeftPureInner [GOOD] >> KqpJoin::JoinLeftPureInnerConverted >> KqpIndexLookupJoin::LeftSemi [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore >> KqpJoin::RightSemiJoin_FullScan ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-04-04T17:08:37.003255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506058572932013:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:37.003323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00120a/r3tmp/tmpzJl6n9/pdisk_1.dat 2025-04-04T17:08:37.885122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:37.885207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:37.903449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:37.926437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:38.015362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 20272, node 1 2025-04-04T17:08:38.251015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:38.251040Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:38.251049Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:38.251152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29040 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:38.663306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:38.793686Z node 1 :TICKET_PARSER DEBUG: Ticket 9F4496153121B58E4A6384544B92A2CC495DEF33E7341C83F76C8820618C5E12 (ipv6:[::1]:54006) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:08:38.901365Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54034) has now valid token of root@builtin 2025-04-04T17:08:39.063680Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:08:39.063735Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:08:39.063745Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:08:39.063779Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:08:42.290704Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506083413524751:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:42.290775Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00120a/r3tmp/tmpY7vJKu/pdisk_1.dat 2025-04-04T17:08:42.503587Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2876, node 4 2025-04-04T17:08:42.606506Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:42.606597Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:42.631317Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:42.634972Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:42.634992Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:42.634998Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:42.635158Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21878 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:08:42.865963Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:08:42.940758Z node 4 :TICKET_PARSER DEBUG: Ticket 9F4496153121B58E4A6384544B92A2CC495DEF33E7341C83F76C8820618C5E12 (ipv6:[::1]:53178) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:08:43.030602Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:53204) has now valid token of root@builtin 2025-04-04T17:08:43.116821Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-04-04T17:08:43.116851Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:08:43.116859Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:08:43.116888Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-04-04T17:08:47.379186Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506107984710540:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:08:47.379237Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00120a/r3tmp/tmpkDWMdw/pdisk_1.dat 2025-04-04T17:08:47.512738Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:47.543691Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:47.543772Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:47.546294Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19388, node 7 2025-04-04T17:08:47.615923Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:47.615945Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:47.615953Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:47.616079Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15854 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:08:47.831574Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:08:47.911621Z node 7 :TICKET_PARSER DEBUG: Ticket 4414FADA15D2616623692DED212B15F5EFCF45E693FFDF9EB3FD54F3BAA74B35 (ipv6:[::1]:44610) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert
2025-04-04T17:08:48.011777Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:44630) has now valid token of root@builtin
2025-04-04T17:08:48.082620Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1
2025-04-04T17:08:48.082659Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-04-04T17:08:48.082667Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error
2025-04-04T17:08:48.082696Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator
2025-04-04T17:08:51.750530Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506123027869115:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:08:51.753365Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00120a/r3tmp/tmpyGfvfD/pdisk_1.dat
2025-04-04T17:08:52.047285Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:08:52.0765 ... t schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:13.255301Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7489506199066969752:2207];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:13.255394Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:19.083389Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:33700) has now valid token of root@builtin
2025-04-04T17:09:19.170261Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1
2025-04-04T17:09:19.170300Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-04-04T17:09:19.170310Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error
2025-04-04T17:09:19.170365Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator
2025-04-04T17:09:20.654146Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7489506246584986454:2073];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:20.654205Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00120a/r3tmp/tmpUgwD5O/pdisk_1.dat
2025-04-04T17:09:20.808869Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:20.855532Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:20.855636Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:20.858009Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18311, node 22
2025-04-04T17:09:20.927539Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:20.927567Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:20.927578Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:20.927752Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7596
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:21.300616Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:25.654329Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7489506246584986454:2073];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:25.654417Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:31.471441Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:48362) has now valid token of root@builtin
2025-04-04T17:09:31.547890Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1
2025-04-04T17:09:31.547936Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-04-04T17:09:31.547950Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error
2025-04-04T17:09:31.547997Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator
2025-04-04T17:09:33.204295Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7489506304467492000:2074];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:33.204392Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00120a/r3tmp/tmp1UxhgV/pdisk_1.dat
2025-04-04T17:09:33.499362Z node 25 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:33.546746Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:33.546862Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:33.550721Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12201, node 25
2025-04-04T17:09:33.631266Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:33.631295Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:33.631306Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:33.631469Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11126
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:34.006493Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:34.175713Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:47468) has now valid token of root@builtin
2025-04-04T17:09:34.253448Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1
2025-04-04T17:09:34.253491Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-04-04T17:09:34.253504Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error
2025-04-04T17:09:34.253569Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator
2025-04-04T17:09:39.967357Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7489506329496315640:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:39.986364Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00120a/r3tmp/tmp9elnKq/pdisk_1.dat
2025-04-04T17:09:40.258176Z node 28 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:40.311969Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:40.312089Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:40.316670Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26569, node 28
2025-04-04T17:09:40.463927Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:40.463952Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:40.463959Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:40.464099Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1592
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:40.847776Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:41.078706Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:38126) has now valid token of root@builtin
2025-04-04T17:09:41.187534Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1
2025-04-04T17:09:41.187594Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-04-04T17:09:41.187607Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error
2025-04-04T17:09:41.187660Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator
>> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore
>> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD]
>> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup
>> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD]
>> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin
>> TSchemeShardTTLTests::CheckCounters [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139]
2025-04-04T17:09:21.369734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:21.369806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:21.369851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:21.369875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:21.369931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:21.369958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:21.370006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:21.370049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:21.370341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:21.439584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:21.439635Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:21.450043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:21.450696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:21.450848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:21.464236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:21.464826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:21.465483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:21.465666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:09:21.468442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:21.469583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:21.469636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:21.469779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:09:21.469821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:21.469859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:09:21.470094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.475699Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:09:21.576682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:09:21.576864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.577012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:09:21.577194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:09:21.577231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.579008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:21.579169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:09:21.579357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.579413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:09:21.579447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:09:21.579475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:09:21.581286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.581352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:09:21.581390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:09:21.584406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.584451Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.584483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:21.584532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:09:21.593223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:09:21.595295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:09:21.595461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:09:21.596433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:21.596544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:21.596585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:21.596827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:09:21.596877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:21.597055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:21.597385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:09:21.599534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:21.599580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:21.599742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:21.599776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:09:21.600197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:21.600244Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:09:21.600332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:21.600360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:21.600396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:21.600422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:21.600473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:09:21.600524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:21.600555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:09:21.600580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:09:21.600646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:21.600695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:09:21.600730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:09:21.602323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:21.602429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:21.602484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... IsReadyToDone TxId: 107 ready parts: 1/1
2025-04-04T17:09:47.264149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true
2025-04-04T17:09:47.264237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:339:2318] message: TxId: 107
2025-04-04T17:09:47.264292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1
2025-04-04T17:09:47.264353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0
2025-04-04T17:09:47.264393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0
2025-04-04T17:09:47.264533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-04-04T17:09:47.264575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-04-04T17:09:47.264964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-04-04T17:09:47.265013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-04T17:09:47.265079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:47.274942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult
2025-04-04T17:09:47.275019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1354:3255]
2025-04-04T17:09:47.275444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 107
Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0
2025-04-04T17:09:47.376587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0
2025-04-04T17:09:47.376786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0
2025-04-04T17:09:47.377318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0
2025-04-04T17:09:47.377413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-04-04T17:09:47.377469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0
2025-04-04T17:09:47.377793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0
2025-04-04T17:09:47.377843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-04-04T17:09:47.377873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0
Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0
2025-04-04T17:09:47.466345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944
2025-04-04T17:09:47.466440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944
2025-04-04T17:09:47.466561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
2025-04-04T17:09:47.466723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1743799851784473 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944
2025-04-04T17:09:47.466823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1743799851784473 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944
2025-04-04T17:09:47.467522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944
2025-04-04T17:09:47.467851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944
2025-04-04T17:09:47.468586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944
2025-04-04T17:09:47.468644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944
2025-04-04T17:09:47.469072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944
2025-04-04T17:09:47.469108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944
2025-04-04T17:09:47.472381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944
2025-04-04T17:09:47.472504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944
2025-04-04T17:09:47.472565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-04T21:50:51.784473Z, at schemeshard: 72057594046678944
2025-04-04T17:09:47.473499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944
2025-04-04T17:09:47.473560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
2025-04-04T17:09:47.473687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944
2025-04-04T17:09:47.473725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-04-04T21:50:51.784473Z, at schemeshard: 72057594046678944
2025-04-04T17:09:47.473765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944
2025-04-04T17:09:47.502521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0
2025-04-04T17:09:47.551020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0
2025-04-04T17:09:47.551138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0
2025-04-04T17:09:47.551203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0
2025-04-04T17:09:47.551279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-04-04T17:09:47.551329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0
2025-04-04T17:09:47.551547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0
2025-04-04T17:09:47.551586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-04-04T17:09:47.551616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0
Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0
2025-04-04T17:09:47.593624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0
2025-04-04T17:09:47.642477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001
2025-04-04T17:09:47.642621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0
2025-04-04T17:09:47.642701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0
2025-04-04T17:09:47.642790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-04-04T17:09:47.642844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0
2025-04-04T17:09:47.643090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0
2025-04-04T17:09:47.643131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-04-04T17:09:47.643158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0
Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0
>> KqpIndexLookupJoin::Inner+StreamLookup
>> KqpJoin::RightTableKeyPredicate [GOOD]
>> KqpJoin::RightTableIndexPredicate
>> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD]
>> KqpJoin::IdxLookupPartialLeftPredicate [GOOD]
>> KqpJoin::FullOuterJoin2
>> KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore
>> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull
>> KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:22.120720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:22.120795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:22.120830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:22.120869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:22.120920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:22.120949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:22.121033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:22.121091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:22.121364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:22.180896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:22.180940Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:22.190181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:22.190388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:22.190529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:22.198038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:22.198292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:22.198854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:22.199534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:09:22.204025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:22.204978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:22.205024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:22.205145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:09:22.205175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:22.205199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:09:22.205270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.210097Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:09:22.311609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:09:22.311790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.311928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:09:22.312099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:09:22.312135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.313875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:22.314015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:09:22.314180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.314262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:09:22.314308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:09:22.314354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:09:22.316012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.316085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:09:22.316122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:09:22.317649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.317689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.317726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:22.317783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:09:22.321160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:09:22.322496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:09:22.322632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:09:22.323517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:22.323615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:09:22.323656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:22.323868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:09:22.323913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:09:22.324063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:09:22.324123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:09:22.325712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:09:22.325751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:09:22.325901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:09:22.325946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:09:22.326233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:09:22.326299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:09:22.326386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:22.326412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:22.326446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:09:22.326486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:22.326528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:09:22.326567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:09:22.326595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:09:22.326619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:09:22.326680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:09:22.326714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:09:22.326750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:09:22.328280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:22.328356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:09:22.328413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.534597Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.534698Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.534802Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.535817Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.536770Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.537656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.537772Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.537895Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.537994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.538075Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.538174Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539216Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539304Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539385Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539473Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539575Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539716Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.539790Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.545537Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.545697Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.545785Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.545869Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.545942Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.546051Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.546110Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState
2025-04-04T17:09:48.546232Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-04T17:09:48.546507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-04T17:09:48.546556Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-04-04T17:09:48.546593Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-04T17:09:48.546635Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true
2025-04-04T17:09:48.546725Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2713:3978] message: TxId: 101
2025-04-04T17:09:48.546778Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-04-04T17:09:48.546866Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0
2025-04-04T17:09:48.546913Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0
2025-04-04T17:09:48.547850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66
2025-04-04T17:09:48.548569Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-04-04T17:09:48.551546Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-04T17:09:48.551615Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2714:3979]
TestWaitNotification: OK eventTxId 101
2025-04-04T17:09:48.552205Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:09:48.552476Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 323us result status StatusSuccess
2025-04-04T17:09:48.553039Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD]
>> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull
>> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:09:18.658895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:18.658991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.659044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:18.659080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:18.660049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:18.660120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:18.660211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.660324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:18.661924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:18.753300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:18.753340Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:18.762430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:18.762685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:18.762819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:18.771690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:18.771945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:09:18.775088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:09:18.776753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at
schemeshard: 72057594046678944 2025-04-04T17:09:18.783701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.796349Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.939631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.939854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.940061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.940270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.940333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.942270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.942418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.942571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.942622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.942673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.942704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.944277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.944328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.944358Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.945833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.945990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.949511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.950959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.951124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.952013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.952131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.952174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.952401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.952460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.952629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.952691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.954922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.955005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.955149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.955183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:18.955503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-04-04T17:09:18.955549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.955646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.955681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.955715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.955741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.955790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.955826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.955856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:18.955883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:18.955937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:18.955973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:18.956008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:18.957691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.957797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:18.957831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:50.366383Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:50.366608Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.377941Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:128:2153] sender: [37:235:2058] recipient: [37:15:2062] 2025-04-04T17:09:50.392609Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:50.392973Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.393284Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:50.393570Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:50.393664Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.397096Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:50.397302Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:50.397615Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.397747Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:50.397843Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:50.397917Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:50.403185Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.403294Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:50.403371Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:50.405451Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.405529Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.405622Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:50.405724Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-04-04T17:09:50.405994Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:50.407881Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:50.408202Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:50.409375Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:50.409603Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 121 RawX2: 158913792099 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:50.409701Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:50.410082Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:50.410189Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:50.410588Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:50.410753Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:50.413079Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:50.413174Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:50.413534Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:50.413637Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:202:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:09:50.414102Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.414208Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:50.414515Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:50.414600Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:50.414708Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:50.414787Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2025-04-04T17:09:50.414880Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:50.414972Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:50.415064Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:09:50.415132Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:09:50.415264Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:09:50.415358Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:09:50.415447Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:09:50.416922Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:50.417123Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:09:50.417217Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:09:50.417315Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:09:50.417407Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:50.417596Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:09:50.420663Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:09:50.421492Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:09:50.423123Z node 37 :TX_PROXY DEBUG: actor# [37:265:2256] Bootstrap 2025-04-04T17:09:50.475757Z node 37 :TX_PROXY DEBUG: actor# [37:265:2256] Become StateWork (SchemeCache [37:270:2261]) 2025-04-04T17:09:50.481553Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:50.482352Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:09:50.482591Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" 
ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-04-04T17:09:50.483689Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-04-04T17:09:50.485029Z node 37 :TX_PROXY DEBUG: actor# [37:265:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:09:50.489534Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:50.489844Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-04-04T17:09:50.490485Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin >> KqpJoin::JoinDupColumnRight [GOOD] >> KqpJoin::JoinDupColumnRightPure >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Nulls >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore >> KqpFlipJoin::RightOnly_1 [GOOD] >> KqpFlipJoin::RightOnly_2 >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> KqpJoin::JoinLeftPureInnerConverted [GOOD] >> KqpJoin::JoinMismatchDictKeyTypes >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup >> KqpJoin::RightSemiJoin_FullScan [GOOD] >> KqpJoin::RightSemiJoin_ComplexKey >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> KqpJoin::ComplexJoin >> KqpJoin::JoinAggregate [GOOD] >> KqpJoin::JoinConvert >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore >> KqpFlipJoin::Right_1 >> KqpIndexLookupJoin::Inner+StreamLookup [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup >> KqpJoin::RightTableIndexPredicate [GOOD] >> 
KqpJoin::RightTableValuePredicate >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull >> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore >> KqpJoin::FullOuterJoin2 [GOOD] >> KqpJoin::FullOuterJoinSizeCheck >> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore >> KqpJoin::JoinDupColumnRightPure [GOOD] >> KqpJoin::JoinLeftPureFull >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> KqpSystemView::QueryStatsSimple [GOOD] >> KqpJoinOrder::TPCDS61-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] >> KqpFlipJoin::RightOnly_2 [GOOD] >> KqpFlipJoin::RightOnly_3 >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 1382, MsgBus: 22905 2025-04-04T17:09:00.201590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506163365132669:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.201642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:00.347047Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506162822095417:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:00.347125Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b75/r3tmp/tmpDBrYUt/pdisk_1.dat 2025-04-04T17:09:00.821040Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:01.219753Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:01.239834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:01.245765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-04T17:09:01.248395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:01.248622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:01.248661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:01.252378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:01.252475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:01.263263Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:09:01.263409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:01.263789Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:09:01.264612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:01.267117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1382, node 1 2025-04-04T17:09:01.457991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:01.458009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:01.458019Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:01.458146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22905 TClient is connected to server localhost:22905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:02.496484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:02.558291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:03.056872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.499704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:03.633246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:05.124243Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506163365132669:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:05.124305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:05.342104Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506162822095417:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:05.342176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:05.813360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506184839971100:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.813496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:05.851703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.914581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:05.977129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.070474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.124381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.223609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:06.330323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506189134939087:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.330391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.330602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506189134939092:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:06.333678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:06.350732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506189134939094:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:06.435600Z node 1 :TX_PROXY ERROR: Actor# [1:7489506189134939180:4142] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:07.824205Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786547792, txId: 281474976710673] shutting down 2025-04-04T17:09:08.013872Z node 2 :BS_PROXY_PUT ERROR: [4aa4617396a662a3] Result# TEvPutResult {Id# [72075186224037896:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037896:1:19:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:09:08.016539Z node 3 :BS_PROXY_PUT ERROR: [6ffed067c36fa6a7] Result# TEvPutResult {Id# [72075186224037895:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:19:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YDB, gRPC: 16187, MsgBus: 32762 2025-04-04T17:09:10.200030Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506204523658325:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:10.200112Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initializat ... 
us: 22748 2025-04-04T17:09:49.343255Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7489506373688716382:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:49.343301Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:49.393618Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7489506372891602193:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:49.467406Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7489506374185117473:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:49.501876Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b75/r3tmp/tmpu7DKlL/pdisk_1.dat 2025-04-04T17:09:49.552256Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:49.694311Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:49.800868Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:49.800985Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:49.802064Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:49.802154Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:49.807419Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:49.807514Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:49.822201Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2025-04-04T17:09:49.822367Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:49.823299Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:49.824075Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2025-04-04T17:09:49.824742Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6407, node 16 2025-04-04T17:09:49.927034Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:49.927062Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:49.927072Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:49.927264Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22748 TClient is connected to server localhost:22748 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:50.569507Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:50.620649Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:50.805846Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:51.043357Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:51.203058Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:54.346505Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7489506373688716382:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:54.346631Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:54.374359Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7489506374185117473:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:54.374456Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:54.378893Z node 17 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7489506372891602193:2183];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:54.378981Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:55.443975Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7489506399458522191:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:55.444092Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:55.520796Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.614631Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.738288Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.867395Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.983423Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.092479Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.267965Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7489506403753490194:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:56.268125Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:56.268474Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7489506403753490199:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:56.276973Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:56.355910Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7489506403753490201:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:56.427888Z node 16 :TX_PROXY ERROR: Actor# [16:7489506403753490280:4128] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:58.965779Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786598934, txId: 281474976710673] shutting down 2025-04-04T17:09:59.195684Z node 18 :BS_PROXY_PUT ERROR: [96909a478a82f161] Result# TEvPutResult {Id# [72075186224037892:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037892:1:22:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:09:59.210459Z node 17 :BS_PROXY_PUT ERROR: [a6dae3acc194b428] Result# TEvPutResult {Id# [72075186224037893:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037893:1:22:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16-StreamLookupJoin+ColumnStore >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 5430, MsgBus: 18891 2025-04-04T17:09:24.998189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506264514503234:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.008278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001541/r3tmp/tmpGVmdoZ/pdisk_1.dat 2025-04-04T17:09:25.555125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.555247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.556714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:25.567038Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5430, node 1 2025-04-04T17:09:25.648465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.648488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-04T17:09:25.648496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.648627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18891 TClient is connected to server localhost:18891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.390340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.429221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.612098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.814171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.898415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.369287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281694374054:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.369413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.764424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.791887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.866294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.895188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.970444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.052562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506285989341872:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.052641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.052650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506285989341877:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.056361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.066934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506285989341879:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.132947Z node 1 :TX_PROXY ERROR: Actor# [1:7489506285989341935:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.915627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.943310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.973004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.998423Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506264514503234:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.998516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1350, MsgBus: 22094 2025-04-04T17:09:31.827584Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506295218337021:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001541/r3tmp/tmppYLda0/pdisk_1.dat 2025-04-04T17:09:31.862855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:31.908202Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1350, node 2 2025-04-04T17:09:31.982027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.982159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:32.006890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:32.097903Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:32.097954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:32.097961Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:32.098069Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22094 TClient is connected to server localhost:22094 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:32.447888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.455332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.509251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... lf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.824843Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.886425Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506374433473459:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.886551Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.886634Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506374433473464:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.889896Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:49.908845Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506374433473466:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:50.004651Z node 4 :TX_PROXY ERROR: Actor# [4:7489506378728440819:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:50.724410Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506357253601988:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:50.724513Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:51.128408Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.171793Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.214431Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1507, MsgBus: 1294 2025-04-04T17:09:53.034654Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506388276313059:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001541/r3tmp/tmpTQSBOP/pdisk_1.dat 2025-04-04T17:09:53.080942Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:53.187591Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1507, node 5 2025-04-04T17:09:53.349682Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:53.349839Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:53.351196Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:53.370018Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:53.370049Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:53.370059Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:53.370225Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1294 TClient is connected to server localhost:1294 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:54.175671Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:54.188863Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:54.210679Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:54.303913Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:54.530432Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:54.669130Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.463616Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506405456183832:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.463725Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.515059Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.561237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.612955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.687182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.729357Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.846009Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.949806Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506405456184353:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.949908Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.950738Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506405456184358:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.956504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:57.977576Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506405456184360:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:58.013209Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506388276313059:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:58.013278Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:58.048660Z node 5 :TX_PROXY ERROR: Actor# [5:7489506409751151711:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:59.468947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.592371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.633172Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |86.4%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 2768, MsgBus: 16695 2025-04-04T17:09:26.677965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506272954414555:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:26.681378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014b5/r3tmp/tmpnEOkTA/pdisk_1.dat 2025-04-04T17:09:27.248345Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:27.251886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:27.251981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:27.255487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2768, node 1 2025-04-04T17:09:27.342795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:27.342815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:27.342826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:27.343023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16695 TClient is connected to server localhost:16695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:27.875696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.895327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.902058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.048603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.223742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.294145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:29.974911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506285839318063:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.975024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.214610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.245774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.279539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.311228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.343606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.379639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.442526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506290134285868:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.442677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.443098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506290134285874:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.447148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:30.457180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506290134285876:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:30.532866Z node 1 :TX_PROXY ERROR: Actor# [1:7489506290134285929:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:31.401630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.429871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.667009Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506272954414555:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.679081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61073, MsgBus: 31403 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014b5/r3tmp/tmp6ThNBN/pdisk_1.dat 2025-04-04T17:09:33.190465Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:33.204356Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61073, node 2 2025-04-04T17:09:33.232942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:33.233013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:33.233732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:33.334790Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:33.334819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:33.334828Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:33.334936Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31403 TClient is connected to server localhost:31403 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:33.777477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:33.800570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:33.884772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:34.024632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.130496Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.199676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.252905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.317784Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506383334143343:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.317942Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.319097Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506383334143348:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.366387Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:51.379523Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506383334143350:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:51.457111Z node 4 :TX_PROXY ERROR: Actor# [4:7489506383334143406:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:51.966099Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506361859304751:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:51.966178Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:52.648990Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.729573Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.769267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25761, MsgBus: 7291 2025-04-04T17:09:55.093868Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506398664349921:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:55.093967Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014b5/r3tmp/tmpAQzcdv/pdisk_1.dat 2025-04-04T17:09:55.303476Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:55.327192Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:55.327298Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:55.335550Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25761, node 5 2025-04-04T17:09:55.474060Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:55.474087Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:55.474098Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:55.474224Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7291 TClient is connected to server localhost:7291 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:56.127764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.139128Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:56.157303Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.256540Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.452423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.545551Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:59.387472Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506415844220847:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.387564Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.442279Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.522858Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.567080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.609682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.658177Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.748825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.822424Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506415844221366:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.822540Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.822555Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506415844221371:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.828157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:59.849525Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506415844221373:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:59.916684Z node 5 :TX_PROXY ERROR: Actor# [5:7489506415844221427:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:00.094331Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506398664349921:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:00.094401Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:01.107917Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 24216, MsgBus: 11423 2025-04-04T17:09:25.019945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506272055193413:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.021641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001555/r3tmp/tmpDOThTS/pdisk_1.dat 2025-04-04T17:09:25.426008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.440950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.441059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.443361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24216, node 1 2025-04-04T17:09:25.634034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.634055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.634062Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.634166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11423 TClient is connected to server localhost:11423 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.366408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.412293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.577162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.757019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.875964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.365504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284940096945:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.365609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.712198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.767735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.795701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.822780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.859679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.898936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.951764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284940097454:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.951842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.952078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284940097459:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.956366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.967090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506284940097461:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.022903Z node 1 :TX_PROXY ERROR: Actor# [1:7489506289235064810:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.957777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.988530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.019716Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506272055193413:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:30.019767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.020329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.059775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.089264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.124819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10876, MsgBus: 31744 2025-04-04T17:09:31.981838Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506297785557610:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.981907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001555/r3tmp/tmpa8gRCY/pdisk_1.dat 2025-04-04T17:09:32.159685Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:32.172076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:32.172149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:32.175199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10876, node 2 2025-04-04T17:09:32.266746Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:32.266771Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:32.266779Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:32.266884Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31744 TClient is connected to server localhost:31744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ... pe: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:50.399116Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506378303055077:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:50.489373Z node 4 :TX_PROXY ERROR: Actor# [4:7489506378303055133:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:51.020029Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506361123183608:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:51.020103Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:51.636303Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.676281Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.714620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.750084Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.792698Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.833526Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20764, MsgBus: 18602 2025-04-04T17:09:54.203994Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506395875652204:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001555/r3tmp/tmpQEKZiR/pdisk_1.dat 2025-04-04T17:09:54.329961Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:54.415892Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:54.432643Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:54.432742Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:54.435037Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20764, node 5 2025-04-04T17:09:54.535280Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:54.535308Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:54.535319Z node 5 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:54.535472Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18602 TClient is connected to server localhost:18602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:09:55.226415Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.231803Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:55.252774Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:55.327302Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.574497Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:55.663469Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.663402Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506413055522959:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.663491Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.689376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.728105Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.770032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.850786Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.892878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.942923Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.993779Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506413055523470:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.993872Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.994124Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506413055523475:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.997790Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:59.010448Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506413055523477:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:59.106186Z node 5 :TX_PROXY ERROR: Actor# [5:7489506417350490830:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:59.161673Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506395875652204:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:59.161747Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:00.465383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.524176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.569061Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.612101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.673417Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.739197Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 12488, MsgBus: 5060 2025-04-04T17:09:24.983927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506264091158462:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.983987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001508/r3tmp/tmpIUM9MA/pdisk_1.dat 2025-04-04T17:09:25.583941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.584029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.598985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:25.606583Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12488, node 1 2025-04-04T17:09:25.754756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-04T17:09:25.754777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.754787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.754927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5060 TClient is connected to server localhost:5060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.350676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.391076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.645044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.854035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.973987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.564288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281271029422:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.564434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.810405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.842446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.893308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.923893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.949547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.980178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.042287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506285565997227:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.042381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.042636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506285565997232:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.046112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.056649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506285565997234:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.151636Z node 1 :TX_PROXY ERROR: Actor# [1:7489506285565997290:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.984293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506264091158462:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.984372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.009190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.040038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.071226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.109631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.144189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.212134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26503, MsgBus: 4233 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001508/r3tmp/tmp9jFIdN/pdisk_1.dat 2025-04-04T17:09:33.235394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:09:33.263353Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:33.283230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:33.283307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:33.287651Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26503, node 2 2025-04-04T17:09:33.338695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:33.338715Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:33.338721Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:33.338835Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4233 
TClient is connected to server localhost:4233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:33.764182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.270874Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.313500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.388246Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.476573Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506384821345220:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:52.476657Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:52.476977Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506384821345225:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:52.481288Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:52.496828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506384821345227:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:52.591303Z node 4 :TX_PROXY ERROR: Actor# [4:7489506384821345282:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:52.982790Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506363346506457:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:52.982944Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:53.818331Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.887011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11263, MsgBus: 18396 2025-04-04T17:09:55.676495Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506398496136655:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:55.676538Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001508/r3tmp/tmp8MX1zs/pdisk_1.dat 2025-04-04T17:09:55.892751Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:55.892849Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:55.895388Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:55.910088Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11263, node 5 2025-04-04T17:09:55.986785Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:55.986809Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:55.986820Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:55.986959Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18396 TClient is connected to server localhost:18396 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:56.712379Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.719051Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.730298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.815153Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.034124Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.128929Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:59.815498Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506415676007613:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.815609Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.869236Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.944741Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.995229Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.043413Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.118494Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.191750Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.287677Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506419970975430:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.287782Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.288053Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506419970975435:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.293151Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:00.313521Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506419970975437:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:00.389603Z node 5 :TX_PROXY ERROR: Actor# [5:7489506419970975492:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:00.677033Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506398496136655:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:00.677098Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:01.551328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.612723Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore >> KqpJoin::JoinConvert [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19668, MsgBus: 18739 2025-04-04T17:09:26.200851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506276109808732:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:26.201113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cb/r3tmp/tmpuC5Z7P/pdisk_1.dat 2025-04-04T17:09:26.716603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:26.719025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:26.719105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:26.724418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19668, node 1 2025-04-04T17:09:26.969622Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:26.969643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:26.969653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:26.969743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18739 TClient is connected to server localhost:18739 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:27.601341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.621262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:27.630627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:27.762368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:09:27.932508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.007410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:29.610336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288994712177:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.610417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.834761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.892980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.965778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.989611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.012414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.046433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.103662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506293289679985:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.103737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.103965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506293289679990:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.108019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:30.118738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506293289679992:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:30.193840Z node 1 :TX_PROXY ERROR: Actor# [1:7489506293289680048:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:31.055218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.078237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.103284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.129146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.154668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.180648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.200969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506276109808732:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.201027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11477, MsgBus: 23329 2025-04-04T17:09:32.864947Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506299245518706:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:32.899728Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cb/r3tmp/tmphO8hr6/pdisk_1.dat 2025-04-04T17:09:33.004067Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11477, node 2 2025-04-04T17:09:33.019220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:33.019299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:33.020682Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:33.089783Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:33.089809Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:33.089816Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:33.089929Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23329 TClient is connected to server localhost:23329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 ... pe: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:51.539955Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506380808421796:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:51.595349Z node 4 :TX_PROXY ERROR: Actor# [4:7489506380808421849:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:52.158356Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506363628550468:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:52.158432Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:52.772301Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.809395Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.852627Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.894157Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.972117Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.029143Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13614, MsgBus: 20537 2025-04-04T17:09:55.356917Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506400875983473:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cb/r3tmp/tmpwLigdD/pdisk_1.dat 2025-04-04T17:09:55.428258Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:55.491470Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:55.508218Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:55.508319Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:55.509990Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13614, node 5 2025-04-04T17:09:55.699061Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:55.699096Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:55.699105Z node 5 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:55.699253Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20537 TClient is connected to server localhost:20537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:56.342264Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.352181Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:09:56.365416Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.454860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.669864Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:56.747922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:59.745435Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506418055854229:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.745534Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.800774Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.856778Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.905967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.975406Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.026219Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.112915Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.180440Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506422350822045:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.180530Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.180820Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506422350822050:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.184583Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:00.198818Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506422350822052:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:00.253989Z node 5 :TX_PROXY ERROR: Actor# [5:7489506422350822104:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:00.342372Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506400875983473:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:00.348897Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:01.692918Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.776859Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.833457Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.892449Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.930841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.976398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore >> KqpJoin::RightTableValuePredicate [GOOD] >> KqpJoin::FullOuterJoinSizeCheck [GOOD] >> KqpJoin::FullOuterJoinNotNullJoinKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinConvert [GOOD] Test command err: Trying to start YDB, gRPC: 2870, MsgBus: 24399 2025-04-04T17:09:24.993792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506266389580632:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.993836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00153f/r3tmp/tmp62gdZE/pdisk_1.dat 2025-04-04T17:09:25.444882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.444975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.464370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:25.474081Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 2870, node 1 2025-04-04T17:09:25.642761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.642785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.642791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.642880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24399 TClient is connected to server localhost:24399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.448654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.461062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.480899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.652086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.837833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.958546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.632159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283569451584:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.632268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.952174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.981504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.032077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.056177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.081267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.149365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.234985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506287864419401:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.235084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.235400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506287864419406:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.239564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.252202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506287864419408:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.312415Z node 1 :TX_PROXY ERROR: Actor# [1:7489506287864419460:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.993835Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506266389580632:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.993908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.275510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.303940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.335380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13379, MsgBus: 65438 2025-04-04T17:09:31.805379Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506295666250396:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.805420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00153f/r3tmp/tmpmPmPaI/pdisk_1.dat 2025-04-04T17:09:31.879108Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13379, node 2 2025-04-04T17:09:31.937123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.937455Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.957057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:31.974486Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.974507Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.974514Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.974634Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65438 TClient is connected to server localhost:65438 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:32.507641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.527165Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:09:32.556232Z nod ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.843210Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506371188067532:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.843323Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.843891Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506371188067537:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.848870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:49.860398Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506371188067540:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:49.916817Z node 4 :TX_PROXY ERROR: Actor# [4:7489506371188067595:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:50.345410Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506354008196087:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:50.345472Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:51.225643Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.301334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.347819Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20828, MsgBus: 20055 2025-04-04T17:09:56.339589Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506401883786179:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:56.339639Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00153f/r3tmp/tmpVBaBcP/pdisk_1.dat 2025-04-04T17:09:56.565904Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:56.607832Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:56.607981Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:56.610406Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20828, node 5 2025-04-04T17:09:56.823500Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:56.823547Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:56.823563Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:56.823725Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20055 TClient is connected to server localhost:20055 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:57.544521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.552685Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:09:57.562296Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.659660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.923700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:58.012858Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:00.730492Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506419063657136:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.730613Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.791678Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.842867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.896407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.947109Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.018866Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.068250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.162412Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506423358624946:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:01.162525Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:01.163287Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506423358624951:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:01.167276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:01.184119Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506423358624953:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:01.266289Z node 5 :TX_PROXY ERROR: Actor# [5:7489506423358625009:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:01.342694Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506401883786179:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:01.342805Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:02.463801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.506992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.546017Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:03.108877Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpJoin::ComplexJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3130, MsgBus: 19167 2025-04-04T17:09:24.983981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506267699031686:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.984775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001503/r3tmp/tmpX4whrR/pdisk_1.dat 2025-04-04T17:09:25.484750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.512431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.512575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.514058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3130, node 1 2025-04-04T17:09:25.638478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.638496Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.638502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.638602Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19167 TClient is connected to server localhost:19167 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.393054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.415227Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.429880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.626080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.810515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.884585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.414795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284878902637:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.414923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.739104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.768141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.798719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.826117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.895870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.951473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284878903150:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.951581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.952723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284878903155:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.956008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.970482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506284878903157:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.061981Z node 1 :TX_PROXY ERROR: Actor# [1:7489506289173870508:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.950997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.985373Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506267699031686:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.990439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.041684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.077021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.109470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.144307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.178356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29986, MsgBus: 6245 2025-04-04T17:09:31.632482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506297648919550:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.632551Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001503/r3tmp/tmpW31MvX/pdisk_1.dat 2025-04-04T17:09:31.797958Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:31.802110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.802170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.805739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29986, node 2 2025-04-04T17:09:31.886508Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.886530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.886536Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.886610Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6245 TClient is connected to server localhost:6245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 C ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.359570Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.435737Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506373865288336:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.435834Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.435937Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506373865288341:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.441419Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:49.453684Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506373865288343:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:49.524657Z node 4 :TX_PROXY ERROR: Actor# [4:7489506373865288398:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:50.189180Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506356685417032:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:50.189252Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:50.766018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.818850Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.860353Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.900135Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.956455Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.002042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24243, MsgBus: 20333 2025-04-04T17:09:53.692066Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506391756638064:2193];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001503/r3tmp/tmpj3hAMj/pdisk_1.dat 2025-04-04T17:09:53.791481Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:53.902070Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:53.939377Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:53.939479Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:53.940721Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24243, node 5 2025-04-04T17:09:54.050790Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:54.050815Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:54.050826Z node 5 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:54.050965Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20333 TClient is connected to server localhost:20333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:54.765772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:54.771667Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:54.782412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:54.873150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:55.154484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:55.227843Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.998395Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506408936508856:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.998512Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.035704Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.090309Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.131495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.178886Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.223190Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.301195Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.405957Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506413231476672:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.406075Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.406082Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506413231476677:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.414995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:58.430230Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506413231476679:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:58.500278Z node 5 :TX_PROXY ERROR: Actor# [5:7489506413231476734:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:58.675289Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506391756638064:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:58.675373Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoin::JoinLeftPureFull [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore >> KqpJoin::JoinLeftPureExclusion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 24448, MsgBus: 63805 2025-04-04T17:09:24.992136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506267486593088:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.992203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f3/r3tmp/tmprTAlPR/pdisk_1.dat 2025-04-04T17:09:25.450579Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.454256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.454378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.459003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24448, node 1 2025-04-04T17:09:25.646992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.647016Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.647026Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.647122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63805 TClient is connected to server localhost:63805 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.336408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.395251Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.405967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.570468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.769993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.851550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.245454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284666464034:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.245588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.741839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.771282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.802496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.829919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.861830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.909726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284666464541:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.909802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.910739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284666464546:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.914858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.923980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506284666464548:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.024455Z node 1 :TX_PROXY ERROR: Actor# [1:7489506284666464602:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.929691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.956282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.984610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.991372Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506267486593088:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.991536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.025719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.104558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19267, MsgBus: 1186 2025-04-04T17:09:32.868945Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506301405993666:2249];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:32.869134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f3/r3tmp/tmpMWcLZv/pdisk_1.dat 2025-04-04T17:09:32.993129Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:33.015304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:33.015429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:33.022951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19267, node 2 2025-04-04T17:09:33.063931Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:33.063964Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:33.063975Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:33.064108Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1186 TClient is connected to 
server localhost:1186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootI ... _SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.205088Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.252201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.300619Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.358728Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.453498Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506391961369310:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:53.453596Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:53.453948Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506391961369315:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:53.459514Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:53.472502Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:09:53.474778Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506391961369317:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:53.568547Z node 4 :TX_PROXY ERROR: Actor# [4:7489506391961369375:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:54.340635Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506374781497876:2091];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:54.340711Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:54.792975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2998, MsgBus: 64085 2025-04-04T17:09:57.282302Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506409417075134:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:57.283733Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f3/r3tmp/tmpCvs47H/pdisk_1.dat 2025-04-04T17:09:57.544565Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:57.555713Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:57.555820Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:57.559234Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2998, node 5 2025-04-04T17:09:57.730894Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:57.730920Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:57.730929Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:57.731071Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64085 TClient is connected to server localhost:64085 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:58.329662Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:58.341760Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:09:58.353595Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:58.468384Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:58.669531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:58.756941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:01.698297Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506426596946025:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:01.698399Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:01.758955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.803710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.853956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.921968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.995314Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.061469Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.158351Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506430891913843:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.158460Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.158775Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506430891913848:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.163819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:02.192296Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506430891913850:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:02.242941Z node 5 :TX_PROXY ERROR: Actor# [5:7489506430891913904:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:02.254669Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506409417075134:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:02.254763Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:03.499355Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::Inner_1 >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] Test command err: Trying to start YDB, gRPC: 3637, MsgBus: 11187 2025-04-04T17:09:24.987862Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506265877556279:2160];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.998514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001518/r3tmp/tmpG76s2y/pdisk_1.dat 2025-04-04T17:09:25.466491Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.468943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.469104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.471142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3637, node 1 2025-04-04T17:09:25.638739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.638757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.638763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.638860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11187 TClient is connected to server localhost:11187 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.416263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.431165Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.448424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.657441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.868940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.987562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.484412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283057427127:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.484496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.725355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.758773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.782961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.805673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.836471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.871272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.909899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283057427637:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.910501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.910737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283057427642:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.914811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.928456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506283057427644:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.019488Z node 1 :TX_PROXY ERROR: Actor# [1:7489506287352394995:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.898125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.927526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.974370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.991505Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506265877556279:2160];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.991563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24725, MsgBus: 5479 2025-04-04T17:09:31.384784Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506296424411390:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.384882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001518/r3tmp/tmpOsKEcC/pdisk_1.dat 2025-04-04T17:09:31.471028Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24725, node 2 2025-04-04T17:09:31.518845Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.518865Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.518872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.518976Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:09:31.519951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.520028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.522100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5479 TClient is connected to server localhost:5479 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:31.943953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:31.957134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at ... lf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:47.659740Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:47.702112Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:47.803339Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506363103642906:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:47.803435Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:47.803872Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506363103642911:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:47.808626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:47.818943Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506363103642913:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:47.917090Z node 4 :TX_PROXY ERROR: Actor# [4:7489506363103642968:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:48.882756Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506345923771611:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:48.882817Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8969, MsgBus: 7651 2025-04-04T17:09:56.061641Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506405273443815:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:56.061708Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001518/r3tmp/tmpZHwATz/pdisk_1.dat 2025-04-04T17:09:56.293823Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:56.293931Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:56.297030Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:56.315508Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8969, node 5 2025-04-04T17:09:56.450930Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:56.450969Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:56.450980Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:56.451154Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7651 TClient is connected to server localhost:7651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:09:57.079737Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.111766Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:57.130359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.219310Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.428087Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.514462Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:00.416794Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506422453314764:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.416869Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.465439Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.515669Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.596556Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.668525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.707289Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.785429Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.903207Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506422453315289:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.903329Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.905899Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506422453315294:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.911583Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:00.926154Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506422453315296:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:01.013258Z node 5 :TX_PROXY ERROR: Actor# [5:7489506426748282647:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:01.066335Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506405273443815:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:01.066404Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:02.422583Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.527654Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.571089Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.612031Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.647879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T17:09:18.658901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.659010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.753738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.753793Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.772448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.772897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.773066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.779964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.780113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.780742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.781322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.784104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: 
[1:209:2210] 2025-04-04T17:09:18.796947Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.930715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.930954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.931172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.931421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.931494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.935936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.936053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.936091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.943167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.943231Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.943270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.945221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.945309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.945361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.949078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.951234Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.951425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.952432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.952552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.952600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.952865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.952918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.953055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.953140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.955268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.955324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.955499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.955556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.955943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.955986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.956075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.956110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.956145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.956174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.956216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.956279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.956316Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the p ... t step: 5000004 2025-04-04T17:10:08.097756Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:08.097852Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 219043334249 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:08.097927Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-04T17:10:08.098180Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2025-04-04T17:10:08.098327Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-04T17:10:08.103962Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:08.104011Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:10:08.104250Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:08.104292Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:205:2207], at schemeshard: 72057594046678944, txId: 1003, path id: 3 FAKE_COORDINATOR: Erasing txId 1003 2025-04-04T17:10:08.104867Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:08.104916Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:10:08.105380Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:10:08.105446Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:10:08.105470Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-04T17:10:08.105497Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-04T17:10:08.105525Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:10:08.105582Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-04-04T17:10:08.106806Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 
72075186233409546 CpuTimeUsec: 1011 } } 2025-04-04T17:10:08.106843Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-04T17:10:08.106946Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1011 } } 2025-04-04T17:10:08.107017Z node 51 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1011 } } 2025-04-04T17:10:08.107598Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:10:08.107628Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-04T17:10:08.107724Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:10:08.107757Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:10:08.107813Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 219043334411 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:10:08.107853Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:08.107877Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:08.107906Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:10:08.107938Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-04T17:10:08.110949Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:10:08.111217Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:08.111398Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:08.111764Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 
2025-04-04T17:10:08.111812Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-04-04T17:10:08.111907Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-04T17:10:08.111942Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:10:08.111986Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-04T17:10:08.112015Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:10:08.112051Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-04T17:10:08.112093Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:10:08.112130Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-04T17:10:08.112160Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-04T17:10:08.112290Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-04T17:10:08.115640Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-04T17:10:08.115688Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-04T17:10:08.116063Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-04T17:10:08.116146Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-04T17:10:08.116179Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:456:2429] TestWaitNotification: OK eventTxId 1003 2025-04-04T17:10:08.116593Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:08.116781Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 226us result status StatusSuccess 2025-04-04T17:10:08.117226Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" 
ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 23959, MsgBus: 26708 2025-04-04T17:09:25.022449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506270735890417:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.022820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001516/r3tmp/tmpOvTQSD/pdisk_1.dat 2025-04-04T17:09:25.482856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.487609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.487703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.489443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23959, node 1 2025-04-04T17:09:25.637758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.637790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.637810Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.637925Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26708 TClient is connected to server localhost:26708 WaitRootIsUp 'Root'... 
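The DescribeScheme result above shows what the TTL suite is checking for: TTLEnabledTable carries TTLSettings with ColumnName: "modified_at", ExpireAfterSeconds: 3600 and a Delete tier, meaning rows are erased one hour after their modified_at timestamp. As a hedged sketch (the test builds the table through its own C++ helpers, not through this DDL), an equivalent schema could be declared in YQL as:

CREATE TABLE TTLEnabledTable (
    key Uint64,
    modified_at Timestamp,
    PRIMARY KEY (key)
)
WITH (
    -- ExpireAfterSeconds: 3600 in the log corresponds to a one-hour interval
    TTL = Interval("PT1H") ON modified_at
);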
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.391179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.438604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.641342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:26.842236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:26.915911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.499230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283620793932:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.499324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.750359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.801905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.828869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.861351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.891736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.928344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.977176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283620794441:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.977238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283620794446:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.977242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.980559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.991497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506283620794448:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.090108Z node 1 :TX_PROXY ERROR: Actor# [1:7489506287915761798:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.890861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.941292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.002922Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506270735890417:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:30.002998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29762, MsgBus: 24926 2025-04-04T17:09:31.642231Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506295801928661:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.642404Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001516/r3tmp/tmpeWQJ8g/pdisk_1.dat 2025-04-04T17:09:31.753316Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29762, node 2 2025-04-04T17:09:31.772084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.772153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.775140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:31.810653Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.810673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.810678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.810759Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24926 TClient is connected to server localhost:24926 WaitRootIsUp 'Root'... 
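The repeated KQP_WORKLOAD_SERVICE warnings above ("Resource pool default not found or you don't have access permissions") are startup noise rather than test failures: the workload manager probes for the built-in default pool before it exists, the TPoolCreatorActor retry ("Transaction ... completed, doublechecking") creates it, and the TX_PROXY "path exist, request accepts it" message confirms that the pool at /Root/.metadata/workload_manager/pools/default is already in place on the second attempt. For comparison, a pool can also be declared explicitly; a hedged YQL sketch with an invented name and illustrative limits (the tests rely on the auto-created default pool, not on this statement):

CREATE RESOURCE POOL demo_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10, -- admit at most 10 queries at once
    QUEUE_SIZE = 100             -- queue up to 100 more before rejecting
);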
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:32.295494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.304310Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:09:32.313881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.394557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474 ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.898655Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.940533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.022688Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.124279Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506403727264687:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:56.124393Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:56.124660Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506403727264692:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:56.129399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:56.146431Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506403727264694:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:56.245073Z node 5 :TX_PROXY ERROR: Actor# [5:7489506403727264750:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:56.426331Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506382252425914:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:56.426473Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:57.486925Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.638942Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22727, MsgBus: 1691 2025-04-04T17:09:59.644892Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506414883517249:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:59.689402Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001516/r3tmp/tmpBQ9H4d/pdisk_1.dat 2025-04-04T17:09:59.823761Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:59.842719Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:59.842844Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:59.844786Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22727, node 6 2025-04-04T17:09:59.980480Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:59.980512Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:59.980522Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:59.980658Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1691 TClient is connected to server localhost:1691 WaitRootIsUp 'Root'... 
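The suite running here, KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull, joins two tables whose key columns agree in value range but differ in signedness (Uint64 on one side, Int64 on the other, both NOT NULL), so the lookup-join path has to cast key values correctly when probing the right-hand table. A hedged YQL sketch of the query shape being verified, with invented table and column names:

-- LeftTable.Key is Uint64 NOT NULL, RightTable.Key is Int64 NOT NULL;
-- the planner must cast between the two integer types during the key lookup.
SELECT l.Key, r.Value
FROM LeftTable AS l
INNER JOIN RightTable AS r
    ON l.Key = r.Key;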
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:00.723718Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:00.742681Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:00.760603Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:00.827140Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:01.029471Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:01.143952Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:04.105514Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506436358355338:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:04.105615Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:04.160720Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.232273Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.312038Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.360722Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.408513Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.476660Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.537718Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506436358355857:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:04.537832Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:04.538128Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506436358355862:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:04.543197Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:04.562228Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489506436358355864:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:04.641970Z node 6 :TX_PROXY ERROR: Actor# [6:7489506436358355918:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:04.646520Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489506414883517249:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:04.646592Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:06.020058Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:06.105064Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> 
CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 13998, MsgBus: 12539 2025-04-04T17:09:25.020064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506268864806561:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.020188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ef/r3tmp/tmp4alHbN/pdisk_1.dat 2025-04-04T17:09:25.516657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.516871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.519177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:25.538360Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13998, node 1 2025-04-04T17:09:25.686161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.686176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.686181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.686281Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12539 TClient is connected to server localhost:12539 WaitRootIsUp 'Root'... 
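KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin, whose output follows, joins on a composite key in which some components may be NULL. Under standard SQL equi-join semantics NULL compares equal to nothing, so rows with NULL key components must produce no matches, and this variant pins that behaviour down with the stream-lookup strategy disabled (the -StreamLookupJoin suffix). A hedged YQL sketch of such a query, names invented for illustration:

-- Key2 is nullable on both sides; rows with Key2 IS NULL never match.
SELECT l.Key1, l.Key2, r.Payload
FROM LeftTable AS l
LEFT JOIN RightTable AS r
    ON l.Key1 = r.Key1 AND l.Key2 = r.Key2;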
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.456590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.495165Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.512876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.710684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.919349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:27.003808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.535100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281749710143:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.535242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.740478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.770157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.799452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.829755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.864098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.896222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.941152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281749710654:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.941228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.941489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281749710659:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.944888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.954116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506281749710661:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.056027Z node 1 :TX_PROXY ERROR: Actor# [1:7489506286044678013:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.997044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.019344Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506268864806561:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:30.019412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.033163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.063275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.090805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.126431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.197206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14964, MsgBus: 27101 2025-04-04T17:09:31.682178Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506295785481499:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.682253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ef/r3tmp/tmpHT6hu1/pdisk_1.dat 2025-04-04T17:09:31.807212Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14964, node 2 2025-04-04T17:09:31.882192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.882305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.888753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:31.897941Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.897962Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.897968Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.898071Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27101 TClient is connected to server localhost:27101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.216375Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.267598Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.353146Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.449050Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506408240187177:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.449149Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.449553Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506408240187182:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.459524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:57.510009Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506408240187184:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:57.591960Z node 5 :TX_PROXY ERROR: Actor# [5:7489506408240187240:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:57.894336Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506386765348457:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:57.894406Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:58.868819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.971609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5031, MsgBus: 23824 2025-04-04T17:10:00.826483Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506420496038131:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:00.905655Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ef/r3tmp/tmpELbGpV/pdisk_1.dat 2025-04-04T17:10:01.075341Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:01.076492Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:01.077821Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:01.097028Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5031, node 6 2025-04-04T17:10:01.274463Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:01.274486Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:01.274495Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:01.274649Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23824 TClient is connected to server localhost:23824 WaitRootIsUp 'Root'... 
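The KqpJoinOrder results interleaved above (TPCH11, TPCH21, FiveWayJoin, TestJoinHint1) exercise the cost-based join-order optimizer rather than any single join operator: one logical query admits many join trees, and each suite variant re-runs it with stream lookup joins and column-store tables toggled on or off (the +/- suffixes in the test names). A hedged sketch of the kind of multi-way equi-join these suites plan, over an invented star schema:

-- The optimizer may reorder these joins freely; the KqpJoinOrder tests
-- assert properties of the plan it settles on.
SELECT c.Name, SUM(s.Amount) AS Total
FROM Sales AS s
JOIN Customers AS c ON s.CustomerId = c.Id
JOIN Products AS p ON s.ProductId = p.Id
JOIN Regions AS r ON c.RegionId = r.Id
GROUP BY c.Name;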
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:10:02.034214Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.043190Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:10:02.060314Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:02.175585Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:02.405154Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:02.510356Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:05.452944Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506441970876246:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:05.453060Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:05.498465Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:05.545449Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:05.632802Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:05.687229Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:05.737806Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:05.763190Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489506420496038131:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:05.763247Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:05.788465Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:05.889980Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506441970876766:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:05.890092Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:05.890308Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506441970876771:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:05.894058Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:05.909425Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489506441970876773:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:05.984971Z node 6 :TX_PROXY ERROR: Actor# [6:7489506441970876831:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:07.468679Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.512623Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::Right_2 [GOOD] >> KqpFlipJoin::Right_3 >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] >> KqpFlipJoin::RightOnly_3 [GOOD] |86.5%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.5%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2636, MsgBus: 28361 2025-04-04T17:09:34.534607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506309030160774:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:34.534697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001499/r3tmp/tmpn6q4dC/pdisk_1.dat 2025-04-04T17:09:34.868262Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2636, node 1 2025-04-04T17:09:34.922368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:34.928420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:34.938218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:34.949744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:34.949777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:34.949790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:34.949957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28361 TClient is connected to server localhost:28361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:35.491480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:35.513515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:35.671001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:35.818028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:35.907935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:37.383238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506321915064464:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:37.383393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:37.642811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.706265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.738877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.803765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.832409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.866228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.917584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506321915064979:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:37.917657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:37.917830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506321915064984:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:37.921355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:37.933080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506321915064986:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:38.024986Z node 1 :TX_PROXY ERROR: Actor# [1:7489506326210032335:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:39.083836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.158545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.199846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.273671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.310820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.344165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.534720Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506309030160774:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:39.534770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9888, MsgBus: 1426 2025-04-04T17:09:41.062590Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506338659067414:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:41.062624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001499/r3tmp/tmp5IW95v/pdisk_1.dat 2025-04-04T17:09:41.224070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:41.224156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:41.225673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:41.227669Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9888, node 2 2025-04-04T17:09:41.331533Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:41.331555Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:41.331564Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:41.331696Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1426 TClient is connected to server localhost:1426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Process ... pe: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:00.323926Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506421630412846:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:00.393004Z node 4 :TX_PROXY ERROR: Actor# [4:7489506421630412900:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:00.646605Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506400155574095:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:00.646690Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:01.668427Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.711090Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.776905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.821759Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.867928Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.904073Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23974, MsgBus: 27405 2025-04-04T17:10:03.816070Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506433796047768:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:03.816123Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001499/r3tmp/tmp8c0kKp/pdisk_1.dat 2025-04-04T17:10:03.996741Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:04.023249Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:04.023339Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:04.024703Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23974, node 5 2025-04-04T17:10:04.130750Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:04.130773Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:04.130782Z node 5 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:04.130904Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27405 TClient is connected to server localhost:27405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:10:04.871568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.911345Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:04.930051Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:05.030334Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:05.235700Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:05.309736Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.958862Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506450975918713:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:07.958955Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:08.044702Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.130901Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.174871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.216142Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.292655Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.340288Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.425338Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506455270886526:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:08.425423Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:08.425615Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506455270886531:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:08.429878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:08.446632Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506455270886533:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:08.501536Z node 5 :TX_PROXY ERROR: Actor# [5:7489506455270886587:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:08.816718Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506433796047768:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:08.816844Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:09.630996Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.687129Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.759295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.807856Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.887218Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.004300Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 13466, MsgBus: 12880 2025-04-04T17:09:24.985304Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506267333412254:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.985354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001531/r3tmp/tmpTucZkt/pdisk_1.dat 2025-04-04T17:09:25.413545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.481053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.481149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13466, node 1 2025-04-04T17:09:25.482161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:25.642910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-04T17:09:25.642934Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.642940Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.643036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12880 TClient is connected to server localhost:12880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.353234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.391066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.592893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.791176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.885689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.359244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284513283227:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.359369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.738618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.764423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.798447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.829451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.865446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.917101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284513283741:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.917173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.917383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284513283746:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.920669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.930344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506284513283748:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.028664Z node 1 :TX_PROXY ERROR: Actor# [1:7489506288808251098:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.986131Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506267333412254:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.986205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:29.989396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.027189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.096012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.134038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63553, MsgBus: 5423 2025-04-04T17:09:31.558465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506297279034370:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.558635Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001531/r3tmp/tmpEhFKDv/pdisk_1.dat 2025-04-04T17:09:31.733280Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:31.748948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.749029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.755553Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63553, node 2 2025-04-04T17:09:31.824416Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.824437Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.824446Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.824554Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5423 TClient is connected to server localhost:5423 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:32.279417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.302436Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, ... 5:7489506413675797553:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.834802Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.835020Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506413675797558:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.839740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:58.849876Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506413675797560:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:09:58.921091Z node 5 :TX_PROXY ERROR: Actor# [5:7489506413675797614:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:59.090542Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506396495926111:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:59.090620Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:00.160379Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.201563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.244480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.293009Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16930, MsgBus: 63376 2025-04-04T17:10:03.112313Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506433271668821:2085];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001531/r3tmp/tmpgcXZNX/pdisk_1.dat 2025-04-04T17:10:03.173112Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:10:03.297418Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:03.323968Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:03.324069Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:03.328070Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16930, node 6 2025-04-04T17:10:03.413367Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:03.413384Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:03.413392Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:03.413514Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63376 TClient is connected to server localhost:63376 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:04.086118Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:04.095623Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:04.103151Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:04.203549Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:04.412996Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:04.545381Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.475833Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506450451539738:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:07.475942Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:07.555013Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.597153Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.645481Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.697653Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.778415Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.834504Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.949189Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506450451540258:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:07.949310Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:07.949683Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506450451540263:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:07.963080Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:07.984056Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489506450451540265:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:08.064597Z node 6 :TX_PROXY ERROR: Actor# [6:7489506454746507616:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:08.111303Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489506433271668821:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:08.111371Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:09.445187Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.503428Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.550468Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.597817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] >> KqpFlipJoin::Inner_1 [GOOD] >> KqpFlipJoin::Inner_2 >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore >> KqpJoin::JoinLeftPureExclusion [GOOD] >> KqpJoin::JoinLeftPureCross >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 61060, MsgBus: 26383 2025-04-04T17:09:31.368586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506293791450441:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.381297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ae/r3tmp/tmpk6iAt7/pdisk_1.dat 2025-04-04T17:09:31.706694Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61060, node 1 2025-04-04T17:09:31.767036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.767130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.768445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:31.785003Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.785025Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.785032Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.785133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26383 TClient is connected to server localhost:26383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:32.299243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.330633Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:32.343827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.500047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.654764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.729821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:34.506754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506306676353957:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:34.506854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:34.768534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.793577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.823812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.856353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.888744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.921076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.964210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506306676354464:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:34.964286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:34.964449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506306676354469:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:34.968211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:34.976419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506306676354471:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:35.036935Z node 1 :TX_PROXY ERROR: Actor# [1:7489506310971321820:3434] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:35.959457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.014723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.368785Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506293791450441:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:36.368854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29467, MsgBus: 14179 2025-04-04T17:09:37.577743Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506321887814247:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:37.577814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ae/r3tmp/tmpqj5j86/pdisk_1.dat 2025-04-04T17:09:37.701233Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:37.715476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:37.715555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:37.720196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29467, node 2 2025-04-04T17:09:37.761298Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:37.761317Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:37.761323Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:37.761429Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14179 TClient is connected to server localhost:14179 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:38.146737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:38.154141Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:09:38.160040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:3 ... 474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.161161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.204616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.248103Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.325780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.402721Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506428797440449:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.402794Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.406527Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506428797440454:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.415161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:02.428888Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506428797440456:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:02.520804Z node 5 :TX_PROXY ERROR: Actor# [5:7489506428797440512:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:02.654334Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506407322601849:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:02.654431Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:03.871775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:03.962196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1980, MsgBus: 16330 2025-04-04T17:10:06.043679Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506445674773660:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ae/r3tmp/tmpGgfuZy/pdisk_1.dat 2025-04-04T17:10:06.130031Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:10:06.203464Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:06.233538Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:06.233631Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:06.240782Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1980, node 6 2025-04-04T17:10:06.437625Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:06.437653Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:06.437665Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:06.437832Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16330 TClient is connected to server localhost:16330 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:10:07.148906Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:10:07.166011Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.240362Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.447425Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:10:07.560019Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.366373Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506462854644460:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.366467Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.408696Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.486067Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.564418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.632791Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.676071Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.719814Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.814405Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506462854644981:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.814506Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.814993Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506462854644986:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.820196Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:10.843233Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489506462854644988:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:10.901741Z node 6 :TX_PROXY ERROR: Actor# [6:7489506462854645043:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:11.032313Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489506445674773660:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:11.032392Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:12.219654Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.298164Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN >> TJaegerTracingConfiguratorTests::DefaultConfig >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> TConsoleTests::TestGetUnknownTenantStatus >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> 
TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> TConsoleTests::TestCreateTenant >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] Test command err: Trying to start YDB, gRPC: 15910, MsgBus: 11885 2025-04-04T17:09:36.889875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506318701262276:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:36.889958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00148c/r3tmp/tmpXppo9k/pdisk_1.dat 2025-04-04T17:09:37.221445Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15910, node 1 2025-04-04T17:09:37.295932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:37.296060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:37.297602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:37.309763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:37.309802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:37.309816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:37.309943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11885 TClient is connected to server localhost:11885 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:37.794750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:37.836676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:37.992087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:38.169980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:38.250987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.107473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506335881133234:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:40.107588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:40.410966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.441768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.511443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.549696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.623038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.697854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.757178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506335881133754:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:40.757223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:40.757391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506335881133759:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:40.760183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:40.770176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506335881133761:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:40.828947Z node 1 :TX_PROXY ERROR: Actor# [1:7489506335881133814:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:41.721886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:41.754471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:41.794870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:41.890074Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506318701262276:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:41.890150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2090, MsgBus: 26696 2025-04-04T17:09:43.653616Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506347555504657:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00148c/r3tmp/tmpSWrGp3/pdisk_1.dat 2025-04-04T17:09:43.686057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:43.749222Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:43.780850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:43.780939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:43.783321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2090, node 2 2025-04-04T17:09:43.840809Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:43.840830Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:43.840838Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:43.840940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26696 TClient is connected to server localhost:26696 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:09:44.286780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.299864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:44.371448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is un ... 474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.625254Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.666882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.706700Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.772367Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506430302415481:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.772463Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.772733Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506430302415486:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:02.827485Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:02.839699Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506430302415488:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:02.924923Z node 4 :TX_PROXY ERROR: Actor# [4:7489506430302415547:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:03.254513Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506413122544201:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:03.254574Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:04.088630Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.129018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:04.169222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2671, MsgBus: 15044 2025-04-04T17:10:06.989431Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506445246243484:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00148c/r3tmp/tmppHDWd3/pdisk_1.dat 2025-04-04T17:10:07.040510Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:10:07.121262Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:07.127116Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:07.127209Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:07.129003Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2671, node 5 2025-04-04T17:10:07.294950Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:07.294979Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:07.294992Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:07.295147Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15044 TClient is connected to server localhost:15044 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:10:08.036669Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.064685Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:08.158160Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:10:08.335322Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:10:08.441373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:11.132322Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506466721081551:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.132444Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.195927Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.248620Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.296406Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.339614Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.411358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.453053Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.555719Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506466721082065:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.555805Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.555992Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506466721082070:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.559272Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:11.571330Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506466721082072:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:11.665485Z node 5 :TX_PROXY ERROR: Actor# [5:7489506466721082129:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:11.999340Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506445246243484:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:11.999432Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:12.925764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.981022Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 63483, MsgBus: 18494 2025-04-04T17:09:24.986899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506266228903198:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.986948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001543/r3tmp/tmp2onjof/pdisk_1.dat 2025-04-04T17:09:25.438364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.448613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.448696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.451378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63483, node 1 2025-04-04T17:09:25.642752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.642770Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.642780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.642885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18494 TClient is connected to server localhost:18494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.373725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.426558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.567385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.779312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.855381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.353479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283408774019:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.353641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.778919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.804091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.842595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.876549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.910192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.995826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283408774532:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.995931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.996045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283408774537:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.999597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.022478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506283408774539:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.088797Z node 1 :TX_PROXY ERROR: Actor# [1:7489506287703741892:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.891707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.922351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.958808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.986274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506266228903198:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.986362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.001679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.032116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.102552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21806, MsgBus: 27329 2025-04-04T17:09:31.893286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506295279696581:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.893330Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001543/r3tmp/tmpFgt4Vh/pdisk_1.dat 2025-04-04T17:09:32.026182Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:32.040443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:32.040518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:32.042544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21806, node 2 2025-04-04T17:09:32.114716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:32.114743Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:32.114752Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:32.114872Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27329 TClient is connected to server localhost:27329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.790550Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.839005Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.927267Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.986722Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506417267761030:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.986878Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.987282Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506417267761035:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:59.995127Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:00.011597Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506417267761037:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:00.069370Z node 5 :TX_PROXY ERROR: Actor# [5:7489506421562728386:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:00.135498Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506400087889560:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:00.135553Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:01.320364Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:01.365859Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4456, MsgBus: 24452 2025-04-04T17:10:04.742620Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506435622298574:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:04.742679Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001543/r3tmp/tmpT6hWcp/pdisk_1.dat 2025-04-04T17:10:04.980764Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:04.995183Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:04.995277Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:04.996730Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4456, node 6 2025-04-04T17:10:05.091842Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:05.091869Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:05.091879Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:05.092035Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24452 TClient is connected to server localhost:24452 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:05.811631Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:05.819087Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:05.827479Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:10:05.923941Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:06.128892Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:06.254726Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:09.212934Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506457097136800:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.213057Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.276353Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.313592Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.364134Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.424047Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.513185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.563772Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.628953Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506457097137315:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.629023Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.629337Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506457097137320:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.634116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:09.652249Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489506457097137322:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:09.743489Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489506435622298574:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:09.743510Z node 6 :TX_PROXY ERROR: Actor# [6:7489506457097137378:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:09.743555Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:11.204541Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.260891Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> KqpFlipJoin::Right_3 [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS90+StreamLookupJoin-ColumnStore |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> 
TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> KqpFlipJoin::Inner_2 [GOOD] >> KqpFlipJoin::Inner_3 |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification >> KqpJoin::JoinLeftPureCross [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRestartConsoleAndPools >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T17:09:20.172994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:20.173092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.173133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, 
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:20.173170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:20.173222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:20.173268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:20.173336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:20.173437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:20.173798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:20.251673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:20.251719Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T17:09:20.263170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:20.263500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:20.263648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:20.270754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:20.270929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:20.271603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.272260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:20.275316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.276584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.276644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.276808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:20.276855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.276898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:20.277060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T17:09:20.283266Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 
2025-04-04T17:09:20.403377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:20.403591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.403791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:20.404028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:20.404085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.406387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.406521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:20.406686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.406742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:20.406797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:20.406833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:20.408714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.408772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:20.408816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:20.410618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.410682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.410726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.410777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.414775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:20.416539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:20.416708Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:20.417623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:20.417744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:20.417812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.418062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:20.418119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:20.418315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:20.418415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:20.420454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:20.420504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:20.420667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:20.420710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:20.421122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:20.421164Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:20.421255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.421289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.421328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:20.421360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.421396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:20.421458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:20.421493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the p ... 
, blocked: 1 2025-04-04T17:10:21.576139Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-04T17:10:21.576414Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:10:21.576553Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2025-04-04T17:10:21.576652Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:10:21.576706Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:10:21.576935Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.579687Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.580016Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.582398Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.582671Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:21.582710Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:21.582875Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:10:21.583004Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:21.583042Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:203:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-04-04T17:10:21.583084Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:203:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-04-04T17:10:21.583526Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.583572Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:10:21.583648Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.583681Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:10:21.583717Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-04T17:10:21.584539Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.584621Z 
node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.584654Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-04T17:10:21.584691Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-04T17:10:21.584724Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:10:21.585177Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.585258Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.585287Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-04T17:10:21.585321Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:10:21.585355Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:10:21.585423Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-04-04T17:10:21.588097Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.588154Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:21.588200Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:10:21.588294Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-04T17:10:21.588330Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:10:21.588369Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-04T17:10:21.588400Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:10:21.588439Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-04T17:10:21.588481Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:10:21.588525Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-04T17:10:21.588557Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-04T17:10:21.588675Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 4] was 3 2025-04-04T17:10:21.588711Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:10:21.589219Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:10:21.589263Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:10:21.589320Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:10:21.590715Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.590810Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:10:21.595286Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-04T17:10:21.595658Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-04T17:10:21.595707Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-04T17:10:21.596059Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-04T17:10:21.596142Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-04T17:10:21.596177Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:469:2442] TestWaitNotification: OK eventTxId 1003 2025-04-04T17:10:21.596604Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:21.596799Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 235us result status StatusSuccess 2025-04-04T17:10:21.597257Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: 
"modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:10:20.698414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:10:20.698540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:20.698591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:20.698627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:20.702462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:20.702522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:20.702627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:20.702707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:20.704154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:20.785629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:20.785698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:20.799159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-04T17:10:20.799388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:20.799564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:20.815084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:20.815344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:20.815898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.817368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:20.824350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.835120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:20.835191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.835338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:20.835404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:20.835450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:20.835582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.841293Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:20.965114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:10:20.971764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.972026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:20.978119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:20.978214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.987082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.987221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:20.987380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:10:20.987473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:20.987510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:20.987540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:20.989571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.989623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:20.989657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:20.991268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.991309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.991348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.991394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:20.994481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:20.995731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:20.995868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:10:20.997487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.997615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:20.997662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:10:20.999239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:20.999517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-04T17:10:21.001463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:21.001502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:21.001652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:21.001699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:21.001997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.002048Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:21.002131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.002169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.002258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:21.002335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:21.002397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:21.002445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:21.002480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:21.002514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:10:21.004178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.004280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.004316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
sion: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:10:21.820290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:10:21.820316Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-04T17:10:21.820340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:10:21.820406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-04T17:10:21.820450Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:10:21.820674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:216:2215], Recipient [1:283:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 2025-04-04T17:10:21.820716Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-04-04T17:10:21.820762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:10:21.821021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:216:2215], Recipient [1:283:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 2025-04-04T17:10:21.821048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-04-04T17:10:21.821084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:10:21.822570Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-04-04T17:10:21.822683Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-04T17:10:21.823926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:21.828276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:10:21.828366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:21.828481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:10:21.832595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:10:21.832636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:21.832746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:10:21.832812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:4 2025-04-04T17:10:21.832977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:1026:2889], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1026:2889] ServerId: [1:1027:2890] } 2025-04-04T17:10:21.833010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:10:21.833037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:10:21.833398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:10:21.833442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:10:21.833916Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1042:2905], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:10:21.833962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:10:21.833991Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:10:21.834160Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:548:2482], Recipient [1:283:2270]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-04-04T17:10:21.834201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:10:21.834285Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:10:21.834378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:10:21.834410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1040:2903] 2025-04-04T17:10:21.834619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:1042:2905], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:10:21.834673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:10:21.834707Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-04T17:10:21.835285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1043:2906], Recipient [1:283:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T17:10:21.835327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:10:21.835428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:21.835609Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 182us result status StatusSuccess 2025-04-04T17:10:21.836023Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:21.836774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:1044:2907], Recipient [1:283:2270]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-04-04T17:10:21.836819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-04T17:10:21.836853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2025-04-04T17:10:21.836888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:10:21.837254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1045:2908], Recipient [1:283:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T17:10:21.837296Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:10:21.837415Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:21.840690Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 160us result status StatusSuccess 2025-04-04T17:10:21.841076Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD] Test command err: Trying to start YDB, gRPC: 15928, MsgBus: 4442 2025-04-04T17:09:45.569749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506356313715493:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001465/r3tmp/tmpv52Rai/pdisk_1.dat 2025-04-04T17:09:45.795183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:09:45.962149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:45.962374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:45.964113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15928, node 1 2025-04-04T17:09:46.063458Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:46.063552Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:46.076311Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:46.114603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:46.114617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:46.114647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:46.114771Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4442 TClient is connected to server localhost:4442 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:46.728106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:46.754835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:46.764451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:46.949715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:47.164827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:47.256786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:49.052595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506373493586281:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.052740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.261691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.299232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.336401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.369001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.405063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.479051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.551862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506373493586797:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.551955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.552008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506373493586802:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.562027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:49.580193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506373493586804:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:49.652111Z node 1 :TX_PROXY ERROR: Actor# [1:7489506373493586859:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:50.541799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506356313715493:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:50.545191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:50.639785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.721530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.779211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3345, MsgBus: 6214 2025-04-04T17:09:52.906122Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506385944445287:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:52.906156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001465/r3tmp/tmpnbnqtW/pdisk_1.dat 2025-04-04T17:09:53.084848Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:53.096094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:53.096175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:53.097264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3345, node 2 2025-04-04T17:09:53.190756Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:53.190776Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:53.190783Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:53.190885Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6214 TClient is connected to server localhost:6214 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:53.664261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:53.670 ... {
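Note on the recurring KQP_WORKLOAD_SERVICE warnings in the test output above: on a freshly bootstrapped database the first queries race to create the shared default resource pool, so the NOT_FOUND pool fetches, the "Scheduled retry for error: ... completed, doublechecking" message, and the TX_PROXY "path exist, request accepts it" error form one expected bootstrap sequence rather than three independent failures. Below is a minimal, self-contained sketch of that create-or-lose-the-race-then-doublecheck pattern; the type and function names (EStatus, ProposeCreateResourcePool, DescribePath, EnsureDefaultPool) are hypothetical stand-ins for illustration, not YDB's actual API, and the stubs merely simulate the race.

// Illustrative sketch only -- models the bootstrap race seen in the log:
// a session finds the default pool missing, proposes its creation, may lose
// the race to a concurrent creator ("path exist, request accepts it"), and
// double-checks visibility before proceeding.
#include <atomic>
#include <cstdio>
#include <string>

enum class EStatus { Success, AlreadyExists, NotFound };

static std::atomic<bool> g_poolExists{false};

EStatus ProposeCreateResourcePool(const std::string&) {
    // First creator wins; every racing creator sees "already exists".
    bool expected = false;
    return g_poolExists.compare_exchange_strong(expected, true)
               ? EStatus::Success
               : EStatus::AlreadyExists;
}

EStatus DescribePath(const std::string&) {
    return g_poolExists.load() ? EStatus::Success : EStatus::NotFound;
}

bool EnsureDefaultPool(const std::string& path, int maxRetries = 3) {
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        if (DescribePath(path) == EStatus::NotFound) {
            // NOT_FOUND here mirrors the "Failed to fetch pool default"
            // warnings: the pool does not exist yet, so try to create it.
            EStatus st = ProposeCreateResourcePool(path);
            // Losing the creation race is not an error: the pool now exists.
            if (st != EStatus::Success && st != EStatus::AlreadyExists) {
                continue;  // schedule a retry, as the workload service does
            }
        }
        // "Doublechecking" step: confirm the pool is visible before using it.
        if (DescribePath(path) == EStatus::Success) {
            return true;
        }
    }
    return false;
}

int main() {
    std::printf("pool ready: %s\n",
                EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default")
                    ? "yes" : "no");
}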
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.395439Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.415682Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.468185Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.509773Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.547516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.583102Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.625293Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.687833Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506465693626574:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.687931Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.688130Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506465693626579:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.691755Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:11.706456Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506465693626581:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:11.807543Z node 4 :TX_PROXY ERROR: Actor# [4:7489506465693626639:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:12.480910Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506448513755269:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:12.480975Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62234, MsgBus: 8019 2025-04-04T17:10:14.690431Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506481168554321:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:14.690496Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001465/r3tmp/tmpNF1nM4/pdisk_1.dat 2025-04-04T17:10:14.825700Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:14.839539Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:14.839637Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:14.841209Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62234, node 5 2025-04-04T17:10:14.894887Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:14.894912Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:14.894921Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:14.895059Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8019 TClient is connected to server localhost:8019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:10:15.527552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:15.538830Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:10:15.560697Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:15.665349Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:15.921533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:16.003865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:18.643274Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506498348425282:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:18.643377Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:18.685507Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.727405Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.765258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.802116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.843530Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.879213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.945128Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506498348425795:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:18.945208Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:18.945427Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506498348425800:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:18.953278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:18.967615Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506498348425802:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:19.034842Z node 5 :TX_PROXY ERROR: Actor# [5:7489506502643393151:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:19.692202Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506481168554321:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:19.692278Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:10:20.698416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:10:20.698497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:20.698532Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:20.698564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:20.702443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:20.702503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:20.702571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:20.702660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:20.705992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:20.785853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:20.785916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:20.799192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:20.799429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:20.799584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:20.812915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:20.813213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:20.815704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.817374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:20.824628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.832032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:20.832209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.832388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:20.832459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:20.832516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:20.832695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.838944Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:20.964921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-04-04T17:10:20.970491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.974343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:20.974662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:20.974739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.982984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.983130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:20.983291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.983339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:20.983377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:20.983407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:20.991046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.991106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:20.991140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:20.995058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.995109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.995156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.995202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:20.998930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:21.003012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:21.003154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:10:21.004044Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:21.004157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:21.004200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:21.004437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:10:21.004482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:21.004620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:21.004680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:10:21.006420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:21.006472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:21.006608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:21.006663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:21.006993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.007037Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:21.007170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.007216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.007252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.007278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.007316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:21.007365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.007400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:21.007426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:21.007479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:21.007521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:21.007557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:10:21.009330Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.009439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.009486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:22.763119Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-04T17:10:22.763210Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-04T17:10:22.763916Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-04-04T17:10:22.764014Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-04T17:10:22.764231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-04T17:10:22.779737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:10:22.790156Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:22.790355Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 220us result status StatusSuccess 2025-04-04T17:10:22.790766Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 
ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.334184Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-04T17:10:23.334303Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-04T17:10:23.335184Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-04-04T17:10:23.335309Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-04T17:10:23.335525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-04-04T17:10:23.352717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:10:23.365083Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.365266Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 212us result status StatusSuccess 2025-04-04T17:10:23.365676Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.412074Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:23.412297Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 263us result status StatusSuccess 2025-04-04T17:10:23.412808Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.413598Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:632:2553] connected; active server actors: 1 2025-04-04T17:10:23.429391Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-04-04T17:10:23.430225Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-04-04T17:10:23.439575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.439750Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 195us result status StatusSuccess 2025-04-04T17:10:23.440235Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.440510Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-04-04T17:10:23.441042Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-04-04T17:10:23.471359Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:679:2588] connected; active server actors: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:10:20.698783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:10:20.698860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:20.698901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:20.698933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:20.702857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:20.702915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:20.702993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-04-04T17:10:20.703078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:20.706132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:20.797442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:20.797515Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:20.811329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:20.811551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:20.811710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:20.837139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:20.837427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:20.838058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.839728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:20.844654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.845989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:20.846051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.846211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:20.846293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:20.846347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:20.846489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.852459Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:20.980963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:10:20.981167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.981361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:20.981594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:20.981665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.985868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.986002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:20.986148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.986191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:20.986224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:20.986272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:20.991010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.991081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:20.991125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:20.993177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.993220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.993257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.993314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:20.996627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:20.998231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:20.998418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:10:20.999301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.999404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:20.999481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999707Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-04-04T17:10:20.999751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:20.999987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:10:21.001664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:21.001701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:21.001887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:21.001924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:21.002195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.002260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:21.002351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.002387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.002449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:21.002525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:21.002588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:21.002648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:21.002685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:21.002722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:10:21.004299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.004393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.004430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:22.806475Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-04T17:10:22.806553Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-04-04T17:10:22.806943Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-04T17:10:22.807061Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-04T17:10:22.807128Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-04T17:10:22.807482Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 16975298 2025-04-04T17:10:22.807605Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-04-04T17:10:22.807829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-04T17:10:22.827985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:10:22.838818Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:22.838986Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 202us result status StatusSuccess 2025-04-04T17:10:22.839389Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.355132Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-04T17:10:23.355223Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-04-04T17:10:23.355645Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-04T17:10:23.355751Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-04T17:10:23.355839Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 
UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-04-04T17:10:23.356230Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-04-04T17:10:23.356332Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-04-04T17:10:23.356542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-04-04T17:10:23.378445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:10:23.390619Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.390801Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 220us result status StatusSuccess 2025-04-04T17:10:23.391177Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.438949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:23.439157Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 254us result status StatusSuccess 2025-04-04T17:10:23.439588Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout
>> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore
>> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore
>> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD]
>> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification
>> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD]
>> TJaegerTracingConfiguratorTests::SharedSamplingLimits
>> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled
>> TConsoleTests::TestCreateTenantExtSubdomain [GOOD]
>> TConsoleTests::TestCreateSharedTenant
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:10:23.713589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:10:23.713685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:10:23.713720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:23.713752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:23.713788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:23.713829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:23.713921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:23.714001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:23.714353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:23.792084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:23.792153Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:23.807030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:23.807272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:23.807444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:23.823211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:23.823599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:23.824398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:23.825388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:23.831499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:23.833036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:23.833121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:23.833299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:23.833366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:23.833411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:23.833555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.842930Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:23.968954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:10:23.969221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.969458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:23.969701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:23.969786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.972409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:23.972568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:23.972770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.972837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:23.972869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:23.972904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:23.974983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.975054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:23.975097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:23.976954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.977011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.977060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:23.977117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:23.981042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:23.985184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:23.985399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-04-04T17:10:23.986551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:23.986714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:23.986770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:23.987088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:10:23.987145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:23.987309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:23.987395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:10:23.990336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:23.990401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:23.990585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:23.990629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:23.991024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:23.991075Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:23.991188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:23.991226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:23.991268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:23.991303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:23.991342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:23.991401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:23.991446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:23.991477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:23.991560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:23.991599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:23.991638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-04-04T17:10:23.993644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:23.993766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:23.993811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... schemeshard: 72057594046678944 2025-04-04T17:10:24.425460Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:10:24.425656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.425754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.425874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:24.425926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2025-04-04T17:10:24.425994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:10:24.426035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2025-04-04T17:10:24.426061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:10:24.426153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.426256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.426458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2025-04-04T17:10:24.426619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:10:24.426971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.427116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.427493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.427582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.427838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.427957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.428061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.428240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.428339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.428525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.428745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.428920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.428976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.429082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:24.429336Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:10:24.436978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:24.437148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:10:24.438224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [1:564:2494], Recipient [1:564:2494]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:24.438299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:24.439775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:24.439847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:24.440251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:24.440301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:24.440341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:24.440377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:24.441143Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:600:2494], Recipient [1:564:2494]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:24.441186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:24.441222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:564:2494] sender: [1:621:2058] recipient: [1:15:2062] 2025-04-04T17:10:24.485748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:620:2538], Recipient [1:564:2494]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T17:10:24.485826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:10:24.485956Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:24.486158Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 203us result status StatusSuccess 2025-04-04T17:10:24.486663Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:24.487382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:622:2539], Recipient [1:564:2494]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-04-04T17:10:24.487437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-04T17:10:24.487495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2025-04-04T17:10:24.487540Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:10:24.487607Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-04T17:10:24.487833Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:623:2540], Recipient [1:564:2494]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T17:10:24.487881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:10:24.487998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:24.488209Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 178us result status StatusSuccess 2025-04-04T17:10:24.488615Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:10:20.698045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:10:20.698144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 
0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:20.698185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:20.698223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:20.702821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:20.702879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:20.702953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:20.703035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:20.705542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:20.779204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:20.779256Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:20.799714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:20.799946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:20.800072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:20.814968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:20.815258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:20.815845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.817602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:20.825372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.835566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:20.835643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.835790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:20.835858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:20.835910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:20.836038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.842376Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:20.976103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:10:20.976306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.976493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:20.976696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:20.976783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.979792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.980008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:20.980232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.980305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:20.980426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:20.980465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:20.983372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.983456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:20.983501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:20.987192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.987253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.987297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.987352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:20.993572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:20.997597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:20.997801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:10:20.998997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.999158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:20.999224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:10:20.999616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:20.999894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:10:21.003579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:21.003636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:21.003876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:21.003927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:21.004310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.004372Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:21.004503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.004542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.004580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.004616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.004654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:21.004720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.004761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:21.004791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:21.004887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:21.004926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:21.004973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:10:21.007046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.007178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.007220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:25.180329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:25.180376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:25.180428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:25.180467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:25.180544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:25.180649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:25.181015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:25.196371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:25.197873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:25.198047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:25.198185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:25.198224Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:25.198504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:25.199250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:25.199331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:10:25.199412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.199472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.199848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.200027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:10:25.200108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, 
NumberDataErasureTenantsInRunning# 0 2025-04-04T17:10:25.200324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:25.200454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.200556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.200594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:10:25.200734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.200926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.201202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:10:25.201536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.201657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.202024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.202119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.202376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.202484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.202568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.202773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.202857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.203026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.203264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.203440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.203493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.203537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.213339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:25.213407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:25.213647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:25.213711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-04T17:10:25.213752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:25.214026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:754:2708] sender: [1:806:2058] recipient: [1:15:2062] 2025-04-04T17:10:25.251149Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:25.251409Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 308us result status StatusSuccess 2025-04-04T17:10:25.251819Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 82 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 82 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82472 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4222 DataSize: 4140 IndexSize: 82 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1102 DataSize: 1020 IndexSize: 82 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:25.253897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944
2025-04-04T17:10:25.254053Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess
2025-04-04T17:10:25.254468Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4222 DataSize: 4140 IndexSize: 82 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1102 DataSize: 1020 IndexSize: 82 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> OlapEstimationRowsCorrectness::TPCH10 [GOOD]
>> OlapEstimationRowsCorrectness::TPCH11
>> KqpJoinOrder::TPCDS61+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS61-StreamLookupJoin+ColumnStore
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD]
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending
>> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD]
>> TConfigsDispatcherTests::TestYamlEndToEnd
>> TConsoleTests::TestRestartConsoleAndPools [GOOD]
>> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits
>> TLogSettingsConfiguratorTests::TestNoChanges [GOOD]
>> TLogSettingsConfiguratorTests::TestAddComponentEntries
>> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD]
>> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup
>> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem
>> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD]
>> TConsoleConfigHelpersTests::TestConfigCourier
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient:
[1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T17:09:18.658918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:09:18.659026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.659070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:09:18.659108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:09:18.660072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:09:18.660143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:09:18.660228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:09:18.660327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:09:18.661951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:09:18.747532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:09:18.747596Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.767018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:09:18.767426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:09:18.767623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:09:18.774599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:09:18.774762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:09:18.775395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.776597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.782193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789306Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T17:09:18.796387Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.920157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.920326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.920511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.920688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.920733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.922845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.922895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.922934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.922960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.924317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.924359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-04-04T17:09:18.924388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.925775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.925816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.925852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.925893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.934719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.936421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.936586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.937390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.937502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.937550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.937809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.937864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.938013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.938098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.940084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.940130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.940289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.940330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, 
txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.940738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.940783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.940872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.940909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.940946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.940977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.941018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.941088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.941124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the p ... d: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:10:26.949393Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:26.949437Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2203], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-04-04T17:10:26.949482Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2203], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-04-04T17:10:26.949724Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.949775Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:10:26.951764Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-04T17:10:26.952192Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-04T17:10:26.952239Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-04-04T17:10:26.952275Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:10:26.952315Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 1002 2025-04-04T17:10:26.953883Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-04T17:10:26.953972Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-04-04T17:10:26.954005Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at 
schemeshard: 72057594046678944, txId: 1002 2025-04-04T17:10:26.954040Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:10:26.954077Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:10:26.954150Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-04-04T17:10:26.955737Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1131 } } 2025-04-04T17:10:26.955783Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-04T17:10:26.955939Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1131 } } 2025-04-04T17:10:26.956032Z node 72 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1131 } } 2025-04-04T17:10:26.956577Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 309237647626 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-04T17:10:26.956622Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-04-04T17:10:26.956739Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 309237647626 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-04T17:10:26.956784Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:10:26.956868Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 309237647626 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-04-04T17:10:26.956927Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:26.956963Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 
2025-04-04T17:10:26.957001Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:10:26.957040Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2025-04-04T17:10:26.963234Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-04T17:10:26.963378Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-04-04T17:10:26.964080Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.964241Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.964662Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.964717Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-04-04T17:10:26.964824Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-04-04T17:10:26.964861Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-04T17:10:26.964906Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-04-04T17:10:26.964943Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-04T17:10:26.964981Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-04-04T17:10:26.965025Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-04-04T17:10:26.965066Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-04-04T17:10:26.965099Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-04-04T17:10:26.965261Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-04-04T17:10:26.970007Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-04-04T17:10:26.970056Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-04-04T17:10:26.970434Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-04-04T17:10:26.970532Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-04-04T17:10:26.970571Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:406:2379] TestWaitNotification: OK eventTxId 1002 2025-04-04T17:10:26.971042Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:10:26.971253Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 264us result 
status StatusSuccess
2025-04-04T17:10:26.971746Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD]
>> TLogSettingsConfiguratorTests::TestRemoveComponentEntries
>> KqpFlipJoin::Inner_3 [GOOD]
>> KqpFlipJoin::LeftSemi_1
>> TConsoleConfigHelpersTests::TestConfigCourier [GOOD]
>> TConsoleConfigHelpersTests::TestConfigSubscriber
|86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
>> TransferWriter::Write_ColumnTable
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
>> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD]
>> TLogSettingsConfiguratorTests::TestChangeDefaults
>> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD]
>> TConsoleTests::TestTenantConfigConsistency
>> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD]
>> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD]
>> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD]
>> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD]
>> TModificationsValidatorTests::TestChecksLimitError [GOOD]
>> TModificationsValidatorTests::TestChecksLimitWarning [GOOD]
>> TConsoleTests::TestCreateSharedTenant [GOOD]
>> TConsoleTests::TestCreateServerlessTenant
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
>> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD]
|86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD]
Test command err:
2025-04-04T17:10:16.314293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:10:16.314350Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:16.383476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-04T17:10:17.378597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:10:17.378696Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:17.416153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-04T17:10:18.277200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:10:18.277270Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:18.318159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-04T17:10:19.157636Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:10:19.157705Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:19.240922Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-04T17:10:20.241520Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:10:20.241579Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:20.283343Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-04T17:10:21.338754Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:10:21.338812Z node 6 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:21.381934Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-04-04T17:10:22.338026Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:10:22.338105Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:22.383419Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok,
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:23.435767Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:23.435841Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:23.489882Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:24.647061Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:24.647131Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:24.705264Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:25.743135Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:25.743219Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:25.788012Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:26.865219Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 268637729, Sender [11:144:2157], Recipient [11:356:2295]: {TEvControllerProposeConfigRequest Record# } 2025-04-04T17:10:26.866685Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvBlobStorage::TEvControllerProposeConfigRequest 2025-04-04T17:10:26.879085Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 269877760, Sender [11:317:2284], Recipient [11:316:2281]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936131 Status: OK ServerId: [11:409:2341] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:10:26.879206Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:10:26.895289Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:316:2281], Recipient [11:356:2295]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-mup5cka3jq.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-04T17:10:26.895618Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:316:2281], Recipient [11:360:2307]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-mup5cka3jq.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 
ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-04-04T17:10:26.895682Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionRequest 2025-04-04T17:10:26.895809Z node 11 :CMS_CONFIGS DEBUG: TConfigsProvider registered new subscription [11:316:2281]:1 2025-04-04T17:10:26.895927Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: check if update is required for volatile subscription [11:316:2281]:1 2025-04-04T17:10:26.896017Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: new config found for subscription [11:316:2281]:1 version= 2025-04-04T17:10:26.896158Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:316:2281]) send TEvConfigSubscriptionResponse 2025-04-04T17:10:26.897256Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273286169, Sender [11:410:2307], Recipient [11:316:2281]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionResponse { Generation: 1 Status { Code: SUCCESS } } 2025-04-04T17:10:26.897320Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionResponse 2025-04-04T17:10:26.897560Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:360:2307], Recipient [11:410:2307]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-04-04T17:10:26.897618Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-04T17:10:26.897708Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:316:2281]) send TEvConfigSubscriptionNotificationRequest: Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true 2025-04-04T17:10:26.897881Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:410:2307], Recipient [11:316:2281]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-04-04T17:10:26.897923Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-04-04T17:10:26.905202Z node 11 :CMS_CONFIGS INFO: TLogSettingsConfigurator: got new config: 2025-04-04T17:10:26.905343Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GLOBAL has been changed from WARN to NOTICE 2025-04-04T17:10:26.905421Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GLOBAL has been changed from WARN to DEBUG 2025-04-04T17:10:26.905473Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT has been changed from WARN to NOTICE 2025-04-04T17:10:26.905508Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT has been changed from WARN to DEBUG 2025-04-04T17:10:26.905536Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TEST has been changed from WARN to NOTICE 2025-04-04T17:10:26.905563Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TEST has been changed from WARN to DEBUG 2025-04-04T17:10:26.905590Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component PROTOCOLS has been changed from WARN to NOTICE 2025-04-04T17:10:26.905616Z node 11 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Sampling priority for the component PROTOCOLS has been changed from WARN to DEBUG 2025-04-04T17:10:26.905648Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to NOTICE 2025-04-04T17:10:26.905679Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to DEBUG 2025-04-04T17:10:26.905708Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_STATUS has been changed from WARN to NOTICE 2025-04-04T17:10:26.905735Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_STATUS has been changed from WARN to DEBUG 2025-04-04T17:10:26.905761Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_NETWORK has been changed from WARN to NOTICE 2025-04-04T17:10:26.905788Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_NETWORK has been changed from WARN to DEBUG 2025-04-04T17:10:26.905815Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SESSION has been changed from WARN to NOTICE 2025-04-04T17:10:26.905844Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SESSION has been changed from WARN to DEBUG 2025-04-04T17:10:26.905870Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component HTTP has been changed from WARN to NOTICE 2025-04-04T17:10:26.905897Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component HTTP has been changed from WARN to DEBUG 2025-04-04T17:10:26.905926Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LOGGER has been changed from WARN to NOTICE 2025-04-04T17:10:26.905953Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LOGGER has been changed from WARN to DEBUG 2025-04-04T17:10:26.905986Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOBSTORAGE has been changed from WARN to NOTI ... 
has been changed from 0 to 10 2025-04-04T17:10:29.813868Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_INITIALIZER has been changed from NOTICE to ALERT 2025-04-04T17:10:29.813895Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_INITIALIZER has been changed from DEBUG to ALERT 2025-04-04T17:10:29.813920Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_INITIALIZER has been changed from 0 to 10 2025-04-04T17:10:29.813945Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2025-04-04T17:10:29.813972Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2025-04-04T17:10:29.814011Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2025-04-04T17:10:29.814040Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2025-04-04T17:10:29.814067Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2025-04-04T17:10:29.814090Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2025-04-04T17:10:29.814117Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2025-04-04T17:10:29.814145Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2025-04-04T17:10:29.814170Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2025-04-04T17:10:29.814198Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2025-04-04T17:10:29.814230Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2025-04-04T17:10:29.814733Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2025-04-04T17:10:29.814785Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2025-04-04T17:10:29.814815Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2025-04-04T17:10:29.814838Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2025-04-04T17:10:29.814865Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2025-04-04T17:10:29.814893Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2025-04-04T17:10:29.814918Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2025-04-04T17:10:29.814964Z node 14 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2025-04-04T17:10:29.814994Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815019Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2025-04-04T17:10:29.815049Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815074Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815099Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2025-04-04T17:10:29.815126Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815152Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815177Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2025-04-04T17:10:29.815209Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815237Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815261Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2025-04-04T17:10:29.815288Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815367Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815396Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2025-04-04T17:10:29.815426Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815452Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815476Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2025-04-04T17:10:29.815506Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815534Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815558Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2025-04-04T17:10:29.815587Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815613Z node 14 
:CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815637Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2025-04-04T17:10:29.815663Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815693Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815722Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-04-04T17:10:29.815749Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815775Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815816Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2025-04-04T17:10:29.815847Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815875Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815902Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2025-04-04T17:10:29.815929Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2025-04-04T17:10:29.815955Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2025-04-04T17:10:29.815979Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2025-04-04T17:10:29.816005Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2025-04-04T17:10:29.816033Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2025-04-04T17:10:29.816056Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2025-04-04T17:10:29.816086Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2025-04-04T17:10:29.816112Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from DEBUG to ALERT 2025-04-04T17:10:29.816137Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2025-04-04T17:10:29.816164Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2025-04-04T17:10:29.816193Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: 
Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT
2025-04-04T17:10:29.816215Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10
2025-04-04T17:10:29.816256Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT
2025-04-04T17:10:29.816283Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT
2025-04-04T17:10:29.816306Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10
2025-04-04T17:10:29.816333Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from NOTICE to ALERT
2025-04-04T17:10:29.816360Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from DEBUG to ALERT
2025-04-04T17:10:29.816384Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10
2025-04-04T17:10:29.816509Z node 14 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { }
>> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD]
>> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope
>> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD]
>> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:10:20.698063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:10:20.698152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:10:20.698188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:10:20.698221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:10:20.701927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:10:20.701995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:10:20.702114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:10:20.702200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created:
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:20.704244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:20.786199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:20.786278Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:20.799159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:20.799383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:20.799663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:20.811641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:20.813178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:20.815674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.817319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:20.824349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.832064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:20.832137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:20.832314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:20.832384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:20.832539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:20.832685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.838964Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:20.970377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:10:20.970594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.972087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:20.978037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:20.978132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.983156Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.983279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:20.983548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.983613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:20.983666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:20.983723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:20.985454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.985501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:20.985536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:20.987103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.987149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:20.987184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.987222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:20.992062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:20.995202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:20.995357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:10:20.997534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:20.997669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:20.997716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:10:20.999223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:20.999400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:20.999477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:10:21.001465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:21.001502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:21.001676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:21.001714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:21.002019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:21.002056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:21.002135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.002174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:21.002263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:21.002355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:21.002389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:21.002415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:21.002465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:21.002496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:21.002535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:10:21.004168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.004284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:21.004321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
.069477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:30.069601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:10:30.069660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:10:30.069737Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:10:30.069961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-04-04T17:10:30.070100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.070211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:30.070293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-04T17:10:30.070342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:10:30.070381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-04T17:10:30.070403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:10:30.070509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:30.070665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.070930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-04-04T17:10:30.071238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.071366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.071713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.071800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.072012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.072093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.072181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.072391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.072471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2025-04-04T17:10:30.072635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.072845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.073004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.073048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.073095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:30.073327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:10:30.094197Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:30.094400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:10:30.095998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [1:1134:3066], Recipient [1:1134:3066]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:30.096051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:30.097170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:30.097236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:30.099101Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1134:3066], Recipient [1:1134:3066]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:10:30.099147Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:10:30.099411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:30.099489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:30.099542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:30.099575Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:30.100973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1170:3066], Recipient [1:1134:3066]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:30.101021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:30.101056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1134:3066] sender: [1:1190:2058] recipient: [1:15:2062] 2025-04-04T17:10:30.129999Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1189:3110], Recipient [1:1134:3066]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-04T17:10:30.130064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 
2025-04-04T17:10:30.130173Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:10:30.130552Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 377us result status StatusSuccess 2025-04-04T17:10:30.131307Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 
MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 17803 Memory: 141352 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3510, MsgBus: 2538 2025-04-04T17:09:54.108984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506395182180648:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:54.109197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001399/r3tmp/tmpLxAJev/pdisk_1.dat 2025-04-04T17:09:54.651219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:54.651320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:54.655632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:54.686752Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3510, node 1 2025-04-04T17:09:54.802760Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:54.802780Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:54.802787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T17:09:54.802888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2538 TClient is connected to server localhost:2538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:55.536798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:55.549166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:55.560011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:55.710597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:55.870099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:55.957490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:57.600317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506408067084156:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.600462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:57.894424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.934286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.971427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.053508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.102553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.182789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.256866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506412362051971:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.256947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.257144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506412362051976:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:58.259828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:58.269184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506412362051978:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:58.363973Z node 1 :TX_PROXY ERROR: Actor# [1:7489506412362052034:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:59.127683Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506395182180648:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:59.128101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:59.397001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.459204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.490767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.520210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.545663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:59.585070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20091, MsgBus: 14259 2025-04-04T17:10:01.175458Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506422886248889:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:01.175499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001399/r3tmp/tmp0zUpEl/pdisk_1.dat 2025-04-04T17:10:01.356477Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:01.359571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:01.359663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:01.361296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20091, node 2 2025-04-04T17:10:01.514666Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:01.514690Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:01.514696Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:01.514797Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14259 TClient is connected to server localhost:14259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 C ... ave access permissions } 2025-04-04T17:10:20.147012Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:20.162436Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506506465800148:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:20.235280Z node 4 :TX_PROXY ERROR: Actor# [4:7489506506465800204:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:21.090435Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506489285928659:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:21.090500Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:21.286789Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:21.359289Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:21.399626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:21.439264Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:21.504121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:21.537524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1324, MsgBus: 7044 2025-04-04T17:10:23.327568Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506517461409372:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:23.327630Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001399/r3tmp/tmpwf4PsX/pdisk_1.dat 2025-04-04T17:10:23.453046Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:23.469394Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:23.469494Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:23.471010Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1324, node 5 2025-04-04T17:10:23.542842Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:23.542866Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:23.542876Z node 5 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:23.543023Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7044 TClient is connected to server localhost:7044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:24.087527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:24.103933Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:24.187592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:24.371072Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:24.456592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:27.260230Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506534641280329:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:27.260325Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:27.299407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.334467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.376518Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.413265Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.449626Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.526964Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.591884Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506534641280850:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:27.592002Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:27.592041Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506534641280855:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:27.596415Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:27.607759Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506534641280857:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:27.697107Z node 5 :TX_PROXY ERROR: Actor# [5:7489506534641280912:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:28.330620Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506517461409372:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:28.330711Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:28.835939Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:28.869918Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:28.905606Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:28.945624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:28.980143Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:29.051166Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestListTenants ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 31703, MsgBus: 22996 2025-04-04T17:09:47.571978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506365806372398:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:47.572421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00145c/r3tmp/tmpLwHDYs/pdisk_1.dat 2025-04-04T17:09:48.064010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:48.064119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:48.085810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 31703, node 1 2025-04-04T17:09:48.148759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:48.159366Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:48.159386Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:09:48.242741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:48.242766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:48.242778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:48.242891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22996 TClient is connected to server localhost:22996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:48.924583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:48.955089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:48.972757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:49.120873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:49.283066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:49.372840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:51.113979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506382986243214:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.114095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.371969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.458605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.495395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.536504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.578127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.624521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.683244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506382986243730:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.683341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.683400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506382986243735:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:51.687371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:51.698336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506382986243737:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:51.761269Z node 1 :TX_PROXY ERROR: Actor# [1:7489506382986243790:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:52.559098Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506365806372398:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:52.559168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:52.752015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.790368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.824527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.861660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.890222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32093, MsgBus: 10132 2025-04-04T17:09:55.586780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506399260791567:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:55.622964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00145c/r3tmp/tmp015gtH/pdisk_1.dat 2025-04-04T17:09:55.846940Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:55.863776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:55.863854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:55.865447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32093, node 2 2025-04-04T17:09:56.018788Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:56.018811Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:56.018818Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:56.018928Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10132 TClient is connected to 
server localhost:10132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 1844674407370955 ... } 2025-04-04T17:10:16.809980Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506488755668468:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:16.813405Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:16.826392Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506488755668470:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:16.909972Z node 4 :TX_PROXY ERROR: Actor# [4:7489506488755668524:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:17.690461Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506471575796975:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:17.690519Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:17.889039Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:17.929522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:17.973725Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.015971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:18.049779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23693, MsgBus: 12135 2025-04-04T17:10:21.835548Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506510187069241:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:21.835600Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00145c/r3tmp/tmpjmTpi0/pdisk_1.dat 2025-04-04T17:10:21.969936Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:21.995538Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:21.995652Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:21.997426Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23693, node 5 2025-04-04T17:10:22.072626Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:22.072647Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:22.072659Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:22.072798Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12135 TClient is connected to 
server localhost:12135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:22.720254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:22.730617Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:10:22.740512Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:22.829687Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:23.059866Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:23.147593Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:25.549850Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506527366940211:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:25.549947Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:25.605097Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.647898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.721595Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.796298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.836817Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.876442Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.984300Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506527366940729:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:25.984399Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:25.984675Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506527366940734:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:25.988680Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:26.006924Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506527366940736:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:26.073398Z node 5 :TX_PROXY ERROR: Actor# [5:7489506531661908087:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:26.835688Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506510187069241:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:26.835770Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:27.221412Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.293347Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.376036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.416254Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:27.459168Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> SelfHealActorTest::NoMoreThanOneReplicating |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 >> BsControllerTest::TestLocalBrokenRelocation >> SelfHealActorTest::SingleErrorDisk >> BsControllerTest::SelfHealMirror3dc >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> KqpJoinOrder::TPCDS90+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore >> BsControllerTest::DecommitRejected >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:10:25.889951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:10:25.890035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:25.890066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:25.890096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:25.890134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:25.890175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:25.890304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:25.890377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:25.890697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:25.969676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:25.969739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:25.982754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:25.982996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:25.983162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:25.992728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:25.993046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:25.993643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.994384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:25.999397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:26.000728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:26.000788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:26.000934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:26.001005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:26.001050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:26.001189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.007102Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:26.103707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:10:26.103975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.104217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:26.104453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:26.104528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.107913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:26.108064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:26.108237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.108298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:26.108335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:26.108395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:26.110404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.110463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:26.110496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:26.112136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.112176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.112213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:26.112264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:26.123062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:26.125166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:26.125340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:10:26.126389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:26.126523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:26.126577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:26.126884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:10:26.126945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:26.127105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:26.127206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:10:26.129593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:26.129639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:26.129806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:26.129862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:26.130229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:26.130294Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:26.130390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:26.130428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:26.130466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:26.130512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-04T17:10:26.130564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:26.130619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:26.130679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:26.130711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:26.130779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:26.130833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:26.130874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:10:26.132745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:26.132853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:26.132904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... _TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:10:33.606200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.606301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.606760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:33.606872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:10:33.606944Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:10:33.607210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:33.607348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.607465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:33.607512Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-04T17:10:33.607549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:10:33.607667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:33.607801Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.608017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:10:33.608347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.608465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.608811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.608877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.609042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.609126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.609211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.609385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.609486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.609628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.609841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.610001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.610054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.610104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:33.610359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:10:33.618495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:33.618667Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:10:33.620126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [1:1016:2960], Recipient [1:1016:2960]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:33.620177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:33.621733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:33.621795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:33.622303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1016:2960], Recipient [1:1016:2960]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:10:33.622350Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:10:33.622654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-04-04T17:10:33.622708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:33.622755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:33.622800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:33.623689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1052:2960], Recipient [1:1016:2960]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:33.623728Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:33.623762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1016:2960] sender: [1:1072:2058] recipient: [1:15:2062] 2025-04-04T17:10:33.655785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1071:3004], Recipient [1:1016:2960]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-04T17:10:33.655853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:10:33.655950Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:10:33.656232Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 259us result status StatusSuccess 2025-04-04T17:10:33.657062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 
UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 82472 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> BsControllerTest::DecommitRejected [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> BsControllerTest::TestLocalSelfHeal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2025-04-04T17:10:34.355967Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-04T17:10:34.356017Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-04T17:10:34.356103Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-04T17:10:34.356136Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 
2025-04-04T17:10:34.356187Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-04T17:10:34.356222Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-04T17:10:34.356261Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-04T17:10:34.356281Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-04T17:10:34.356336Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-04T17:10:34.356370Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-04T17:10:34.356425Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-04T17:10:34.356448Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-04T17:10:34.356502Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-04T17:10:34.356525Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-04T17:10:34.356559Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-04T17:10:34.356581Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-04T17:10:34.356620Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-04T17:10:34.356640Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-04T17:10:34.356671Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-04T17:10:34.356689Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-04T17:10:34.356721Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-04T17:10:34.356740Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-04T17:10:34.356777Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-04T17:10:34.356800Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-04T17:10:34.356853Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-04T17:10:34.356890Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-04T17:10:34.356941Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-04T17:10:34.356968Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-04T17:10:34.356999Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-04T17:10:34.357017Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-04T17:10:34.368572Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:508:32] Status# ERROR ClientId# [1:508:32] ServerId# [0:0:0] PipeClient# [1:508:32] 2025-04-04T17:10:34.369161Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:509:20] Status# ERROR ClientId# [2:509:20] ServerId# [0:0:0] PipeClient# [2:509:20] 2025-04-04T17:10:34.369238Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:510:20] Status# ERROR ClientId# [3:510:20] ServerId# [0:0:0] PipeClient# [3:510:20] 2025-04-04T17:10:34.369280Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:511:20] Status# ERROR ClientId# [4:511:20] ServerId# [0:0:0] PipeClient# [4:511:20] 2025-04-04T17:10:34.369332Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:512:20] Status# ERROR ClientId# [5:512:20] ServerId# [0:0:0] PipeClient# [5:512:20] 2025-04-04T17:10:34.369375Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:513:20] Status# ERROR ClientId# [6:513:20] ServerId# [0:0:0] PipeClient# [6:513:20] 2025-04-04T17:10:34.369409Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:514:20] Status# ERROR ClientId# [7:514:20] ServerId# [0:0:0] PipeClient# [7:514:20] 2025-04-04T17:10:34.369445Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:515:20] Status# ERROR ClientId# [8:515:20] ServerId# [0:0:0] PipeClient# [8:515:20] 2025-04-04T17:10:34.369511Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:516:20] Status# ERROR ClientId# 
[9:516:20] ServerId# [0:0:0] PipeClient# [9:516:20] 2025-04-04T17:10:34.369554Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:517:20] Status# ERROR ClientId# [10:517:20] ServerId# [0:0:0] PipeClient# [10:517:20] 2025-04-04T17:10:34.369589Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:518:20] Status# ERROR ClientId# [11:518:20] ServerId# [0:0:0] PipeClient# [11:518:20] 2025-04-04T17:10:34.369647Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:519:20] Status# ERROR ClientId# [12:519:20] ServerId# [0:0:0] PipeClient# [12:519:20] 2025-04-04T17:10:34.369690Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:520:20] Status# ERROR ClientId# [13:520:20] ServerId# [0:0:0] PipeClient# [13:520:20] 2025-04-04T17:10:34.369736Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:521:20] Status# ERROR ClientId# [14:521:20] ServerId# [0:0:0] PipeClient# [14:521:20] 2025-04-04T17:10:34.369770Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:522:20] Status# ERROR ClientId# [15:522:20] ServerId# [0:0:0] PipeClient# [15:522:20] 2025-04-04T17:10:34.415839Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2025-04-04T17:10:34.415906Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2025-04-04T17:10:34.415963Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2025-04-04T17:10:34.416007Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2025-04-04T17:10:34.416044Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2025-04-04T17:10:34.416099Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2025-04-04T17:10:34.416154Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2025-04-04T17:10:34.416206Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2025-04-04T17:10:34.416244Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2025-04-04T17:10:34.416279Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2025-04-04T17:10:34.416350Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2025-04-04T17:10:34.416390Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2025-04-04T17:10:34.416424Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2025-04-04T17:10:34.416463Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2025-04-04T17:10:34.416499Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2025-04-04T17:10:34.418866Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:581:60] Status# OK ClientId# [1:581:60] ServerId# [1:610:61] PipeClient# [1:581:60] 2025-04-04T17:10:34.418973Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2025-04-04T17:10:34.422802Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:582:21] Status# OK ClientId# [2:582:21] ServerId# [1:611:62] PipeClient# [2:582:21] 2025-04-04T17:10:34.422841Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2025-04-04T17:10:34.422887Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:583:21] Status# OK ClientId# [3:583:21] ServerId# [1:612:63] PipeClient# [3:583:21] 2025-04-04T17:10:34.422911Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2025-04-04T17:10:34.422950Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:584:21] Status# OK ClientId# [4:584:21] ServerId# [1:613:64] PipeClient# [4:584:21] 2025-04-04T17:10:34.422990Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2025-04-04T17:10:34.423031Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:585:21] Status# OK ClientId# [5:585:21] ServerId# [1:614:65] PipeClient# [5:585:21] 
2025-04-04T17:10:34.423103Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2025-04-04T17:10:34.423147Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:586:21] Status# OK ClientId# [6:586:21] ServerId# [1:615:66] PipeClient# [6:586:21] 2025-04-04T17:10:34.423171Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2025-04-04T17:10:34.423205Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:587:21] Status# OK ClientId# [7:587:21] ServerId# [1:616:67] PipeClient# [7:587:21] 2025-04-04T17:10:34.423254Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2025-04-04T17:10:34.423293Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:588:21] Status# OK ClientId# [8:588:21] ServerId# [1:617:68] PipeClient# [8:588:21] 2025-04-04T17:10:34.423326Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2025-04-04T17:10:34.423370Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:589:21] Status# OK ClientId# [9:589:21] ServerId# [1:618:69] PipeClient# [9:589:21] 2025-04-04T17:10:34.423393Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2025-04-04T17:10:34.423432Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:590:21] Status# OK ClientId# [10:590:21] ServerId# [1:619:70] PipeClient# [10:590:21] 2025-04-04T17:10:34.423454Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2025-04-04T17:10:34.423492Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:591:21] Status# OK ClientId# [11:591:21] ServerId# [1:620:71] PipeClient# [11:591:21] 2025-04-04T17:10:34.423513Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2025-04-04T17:10:34.423555Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:592:21] Status# OK ClientId# [12:592:21] ServerId# [1:621:72] PipeClient# [12:592:21] 2025-04-04T17:10:34.423579Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2025-04-04T17:10:34.423638Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:593:21] Status# OK ClientId# [13:593:21] ServerId# [1:622:73] PipeClient# [13:593:21] 2025-04-04T17:10:34.423664Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2025-04-04T17:10:34.423700Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:594:21] Status# OK ClientId# [14:594:21] ServerId# [1:623:74] PipeClient# [14:594:21] 2025-04-04T17:10:34.423723Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2025-04-04T17:10:34.423756Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:595:21] Status# OK ClientId# [15:595:21] ServerId# [1:624:75] PipeClient# [15:595:21] 2025-04-04T17:10:34.423778Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2025-04-04T17:10:34.426203Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:34.426287Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-04T17:10:34.448586Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2025-04-04T17:10:34.449669Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-04-04T17:10:34.449722Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-04T17:10:34.449789Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 
2025-04-04T17:10:34.449890Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-04T17:10:34.449926Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-04T17:10:34.449965Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2025-04-04T17:10:34.450067Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-04T17:10:34.450106Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-04T17:10:34.450168Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2025-04-04T17:10:34.450288Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-04T17:10:34.450336Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-04-04T17:10:34.450381Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2025-04-04T1 ... },{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.716674Z 14 00h01m05.563512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to REPLICATING 2025-04-04T17:10:34.717024Z 1 00h01m05.563512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.717218Z 11 00h01m06.614512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to REPLICATING 2025-04-04T17:10:34.717471Z 1 00h01m06.614512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.717796Z 1 00h01m10.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.718134Z 15 00h01m10.357512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2025-04-04T17:10:34.718446Z 1 00h01m10.357512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.718621Z 14 00h01m10.915536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2025-04-04T17:10:34.718931Z 1 00h01m10.915536s :BS_SELFHEAL INFO: 
{BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.719475Z 8 00h01m10.916048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-04T17:10:34.719525Z 8 00h01m10.916048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2025-04-04T17:10:34.720050Z 14 00h01m15.990512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2025-04-04T17:10:34.720449Z 1 00h01m15.990512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.720675Z 10 00h01m18.220512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2025-04-04T17:10:34.720959Z 1 00h01m18.220512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.721151Z 12 00h01m18.909512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2025-04-04T17:10:34.721411Z 1 00h01m18.909512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.721645Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-04-04T17:10:34.721822Z 13 00h01m24.006024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2025-04-04T17:10:34.722218Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648 2025-04-04T17:10:34.722891Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.722940Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2025-04-04T17:10:34.723234Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.723283Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# 
[80000000:3:0:1:0] DiskIsOk# true 2025-04-04T17:10:34.723319Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.723345Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2025-04-04T17:10:34.723377Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.723407Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2025-04-04T17:10:34.723432Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.723459Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2025-04-04T17:10:34.723486Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.723525Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2025-04-04T17:10:34.723575Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.723617Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2025-04-04T17:10:34.723666Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-04-04T17:10:34.723699Z 1 00h01m24.006024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2025-04-04T17:10:34.726047Z 1 00h01m24.006536s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:34.726114Z 1 00h01m24.006536s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2025-04-04T17:10:34.726722Z 1 00h01m24.006536s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2025-04-04T17:10:34.726769Z 1 00h01m24.006536s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483648 Success# true 2025-04-04T17:10:34.726894Z 7 00h01m24.006536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-04T17:10:34.726954Z 7 00h01m24.006536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2025-04-04T17:10:34.727065Z 2 00h01m24.006536s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-04-04T17:10:34.727112Z 2 00h01m24.006536s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2025-04-04T17:10:34.727208Z 3 00h01m24.006536s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-04T17:10:34.727269Z 3 00h01m24.006536s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2025-04-04T17:10:34.727351Z 4 00h01m24.006536s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-04T17:10:34.727404Z 4 
00h01m24.006536s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2025-04-04T17:10:34.727482Z 5 00h01m24.006536s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-04-04T17:10:34.727522Z 5 00h01m24.006536s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2025-04-04T17:10:34.727602Z 6 00h01m24.006536s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-04T17:10:34.727653Z 6 00h01m24.006536s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2025-04-04T17:10:34.727708Z 9 00h01m24.006536s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-04T17:10:34.727788Z 13 00h01m24.006536s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-04T17:10:34.727835Z 13 00h01m24.006536s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2025-04-04T17:10:34.727922Z 14 00h01m24.006536s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-04T17:10:34.728006Z 14 00h01m24.006536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2025-04-04T17:10:34.728087Z 15 00h01m24.006536s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-04T17:10:34.728122Z 15 00h01m24.006536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2025-04-04T17:10:34.728200Z 15 00h01m24.006536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2025-04-04T17:10:34.729195Z 1 00h01m24.724512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2025-04-04T17:10:34.729623Z 2 00h01m24.797512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2025-04-04T17:10:34.730946Z 11 00h01m25.952512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2025-04-04T17:10:34.731327Z 3 00h01m27.383512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2025-04-04T17:10:34.731907Z 15 00h01m29.869536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2025-04-04T17:10:34.732976Z 13 00h01m32.939512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2025-04-04T17:10:34.735835Z 15 00h01m47.649536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2025-04-04T17:10:34.736726Z 9 00h01m47.650048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-04T17:10:34.736795Z 9 00h01m47.650048s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> KqpFlipJoin::LeftSemi_1 [GOOD] >> KqpFlipJoin::LeftSemi_2 >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 8950, MsgBus: 24038 2025-04-04T17:09:49.383406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506374604881095:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:49.383485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001448/r3tmp/tmp6nuCMe/pdisk_1.dat 2025-04-04T17:09:49.847219Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:49.847367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:49.861850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:49.884481Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8950, node 1 2025-04-04T17:09:49.985470Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:49.985505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:49.985512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:49.985660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24038 TClient is connected to server localhost:24038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:50.658455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:50.703751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:50.844849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:51.017865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:51.112713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:52.898909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506387489784754:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:52.899000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:53.189790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.227586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.257255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.289712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.330018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.382940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.486679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506391784752569:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:53.486786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:53.487086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506391784752574:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:53.491691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:53.509597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506391784752576:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:53.578381Z node 1 :TX_PROXY ERROR: Actor# [1:7489506391784752631:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:54.386376Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506374604881095:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:54.386434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:54.631654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:54.671249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:54.746857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:54.802290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:54.831479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:54.862078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15692, MsgBus: 9528 2025-04-04T17:09:56.998711Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506404933600714:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:56.998741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001448/r3tmp/tmp4jZxaT/pdisk_1.dat 2025-04-04T17:09:57.243264Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:57.243690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:57.243749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:57.253868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15692, node 2 2025-04-04T17:09:57.386718Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:57.386738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:57.386748Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:57.386850Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9528 TClient is connected to server localhost:9528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 Proce ... pe: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:23.572288Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506521011477360:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:23.629309Z node 5 :TX_PROXY ERROR: Actor# [5:7489506521011477415:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:24.134508Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506503831605893:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:24.134589Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:24.879918Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:24.915819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:24.956683Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:24.995387Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.041602Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:25.080833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25233, MsgBus: 10668 2025-04-04T17:10:27.326103Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506538296884316:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:27.326148Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001448/r3tmp/tmpLjdsH6/pdisk_1.dat 2025-04-04T17:10:27.508143Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:27.510676Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:27.510773Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:27.512030Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25233, node 6 2025-04-04T17:10:27.577677Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:27.577705Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:27.577715Z node 6 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:27.577855Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10668 TClient is connected to server localhost:10668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:28.108847Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:28.116375Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:28.131938Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:28.210893Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:28.419819Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:28.498785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:31.105940Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506555476755266:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:31.106023Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:31.146995Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:31.240828Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:31.283888Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:31.332890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:31.381844Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:31.457666Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:31.543412Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506555476755790:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:31.543516Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:31.543764Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506555476755795:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:31.547033Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:31.557597Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489506555476755797:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:31.619306Z node 6 :TX_PROXY ERROR: Actor# [6:7489506555476755851:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:32.326532Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489506538296884316:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:32.326608Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:33.048277Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:33.103975Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:33.144645Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:33.185049Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:33.224239Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:33.266752Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TGroupMapperTest::NonUniformCluster >> TGroupMapperTest::MapperSequentialCalls >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> 
TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> BsControllerTest::TestLocalSelfHeal [GOOD] >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> TransferWriter::Write_ColumnTable [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2025-04-04T17:10:33.178859Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-04T17:10:33.178944Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-04T17:10:33.179581Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-04T17:10:33.179620Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-04T17:10:33.179674Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-04T17:10:33.179713Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-04T17:10:33.179761Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-04T17:10:33.179784Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-04T17:10:33.179885Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-04T17:10:33.179911Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-04T17:10:33.179952Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-04T17:10:33.179975Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-04T17:10:33.180011Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-04T17:10:33.180033Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-04T17:10:33.180069Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-04T17:10:33.180093Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-04T17:10:33.180130Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-04T17:10:33.180152Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-04T17:10:33.180215Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-04T17:10:33.180284Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-04T17:10:33.180334Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-04T17:10:33.180356Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-04T17:10:33.180392Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-04T17:10:33.180415Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-04T17:10:33.180473Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-04T17:10:33.180498Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-04T17:10:33.180552Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-04T17:10:33.180578Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-04T17:10:33.180618Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-04T17:10:33.180642Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-04T17:10:33.180682Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-04T17:10:33.180723Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-04T17:10:33.180768Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 
2025-04-04T17:10:33.180790Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-04T17:10:33.180828Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-04T17:10:33.180853Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-04T17:10:33.180908Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-04T17:10:33.180953Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-04T17:10:33.181002Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-04T17:10:33.181029Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-04T17:10:33.181098Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-04T17:10:33.181126Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-04T17:10:33.181175Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-04T17:10:33.181201Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-04T17:10:33.181241Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-04T17:10:33.181262Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-04T17:10:33.181297Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-04T17:10:33.181319Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-04T17:10:33.181358Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-04T17:10:33.181378Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-04T17:10:33.181433Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-04T17:10:33.181458Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-04T17:10:33.181510Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-04T17:10:33.181538Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-04T17:10:33.181587Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-04T17:10:33.181609Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-04T17:10:33.181671Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-04T17:10:33.181700Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-04T17:10:33.181739Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-04T17:10:33.181761Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-04T17:10:33.181797Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-04T17:10:33.181818Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-04T17:10:33.181881Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-04T17:10:33.181904Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-04T17:10:33.181942Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-04T17:10:33.181969Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-04T17:10:33.182006Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-04T17:10:33.182027Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-04T17:10:33.182065Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-04T17:10:33.182101Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-04T17:10:33.182159Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-04T17:10:33.182183Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-04-04T17:10:33.204328Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-04T17:10:33.205789Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-04T17:10:33.205865Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# 
[0:0:0] PipeClient# [3:2715:41] 2025-04-04T17:10:33.205941Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-04T17:10:33.205977Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-04T17:10:33.206013Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-04T17:10:33.206047Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-04T17:10:33.206088Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-04T17:10:33.206123Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-04T17:10:33.206160Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-04T17:10:33.206217Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-04T17:10:33.206281Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-04T17:10:33.206318Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-04T17:10:33.206384Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-04T17:10:33.206434Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-04T17:10:33.206480Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-04T17:10:33.206539Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-04T17:10:33.206576Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-04T17:10:33.206613Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-04T17:10:33.206689Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-04T17:10:33.206733Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-04T17:10:33.206776Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 
2025-04-04T17:10:33.206812Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-04T17:10:33.206847Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-04T17:10:33.206885Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-04T17:10:33.206923Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-04T17:10:33.206957Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-04T17:10:33.207007Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-04T17:10:33.207055Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-04T17:10:33.207094Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-04T17:10:33.207129Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-04T17:10:33.207164Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-04T17:10:33.207204Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-04T17:10:33.207240Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-04T17:10:33.207275Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2025-04-04T17:10:37.015163Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2025-04-04T17:10:37.015203Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2025-04-04T17:10:37.015242Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2025-04-04T17:10:37.015279Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2025-04-04T17:10:37.015887Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-04T17:10:37.015950Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2025-04-04T17:10:37.015990Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2025-04-04T17:10:37.016035Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2025-04-04T17:10:37.016082Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2025-04-04T17:10:37.016111Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2025-04-04T17:10:37.016134Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2025-04-04T17:10:37.016157Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2025-04-04T17:10:37.016180Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2025-04-04T17:10:37.016204Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2025-04-04T17:10:37.016247Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2025-04-04T17:10:37.016280Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2025-04-04T17:10:37.016308Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2025-04-04T17:10:37.016333Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2025-04-04T17:10:37.016356Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2025-04-04T17:10:37.016381Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2025-04-04T17:10:37.016413Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2025-04-04T17:10:37.016755Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-04T17:10:37.016787Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2025-04-04T17:10:37.016811Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2025-04-04T17:10:37.016846Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2025-04-04T17:10:37.016870Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2025-04-04T17:10:37.016902Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2025-04-04T17:10:37.016925Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2025-04-04T17:10:37.016948Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2025-04-04T17:10:37.016971Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2025-04-04T17:10:37.017003Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2025-04-04T17:10:37.017038Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2025-04-04T17:10:37.017080Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2025-04-04T17:10:37.017327Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-04T17:10:37.017361Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2025-04-04T17:10:37.017384Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2025-04-04T17:10:37.017406Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2025-04-04T17:10:37.017430Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2025-04-04T17:10:37.017454Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2025-04-04T17:10:37.017479Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2025-04-04T17:10:37.017505Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2025-04-04T17:10:37.017538Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2025-04-04T17:10:37.019886Z 7 01h25m01.796560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2025-04-04T17:10:37.020280Z 7 01h25m02.014560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2025-04-04T17:10:37.020556Z 4 01h25m02.049560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.020840Z 7 01h25m02.198560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2025-04-04T17:10:37.021186Z 5 01h25m02.483560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.021545Z 10 01h25m03.388560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2025-04-04T17:10:37.021868Z 10 01h25m03.734560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2025-04-04T17:10:37.022323Z 4 01h25m03.806560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.022713Z 4 01h25m04.470560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.023064Z 2 01h25m04.949560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.024362Z 10 01h25m05.354560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2025-04-04T17:10:37.024743Z 2 01h25m05.389560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.025013Z 4 01h25m05.835560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.025237Z 7 01h25m05.851560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2025-04-04T17:10:37.025497Z 5 01h25m05.861560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2025-04-04T17:10:37.025799Z 8 01h25m06.012560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2025-04-04T17:10:37.026171Z 7 01h25m09.393560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] 
status changed to READY 2025-04-04T17:10:37.026888Z 1 01h25m09.394072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.026950Z 1 01h25m09.394072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2025-04-04T17:10:37.027475Z 7 01h25m11.708560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2025-04-04T17:10:37.028227Z 1 01h25m11.709072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.028274Z 1 01h25m11.709072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2025-04-04T17:10:37.028409Z 10 01h25m13.191560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to READY 2025-04-04T17:10:37.029199Z 1 01h25m13.192072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.029245Z 1 01h25m13.192072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2025-04-04T17:10:37.029347Z 10 01h25m14.714560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2025-04-04T17:10:37.030087Z 1 01h25m14.715072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.030140Z 1 01h25m14.715072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2025-04-04T17:10:37.031193Z 4 01h25m19.436560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2025-04-04T17:10:37.031902Z 1 01h25m19.437072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.031961Z 1 01h25m19.437072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2025-04-04T17:10:37.032060Z 4 01h25m19.738560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2025-04-04T17:10:37.032786Z 1 01h25m19.739072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.032834Z 1 01h25m19.739072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2025-04-04T17:10:37.033227Z 8 01h25m22.266560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2025-04-04T17:10:37.034003Z 1 01h25m22.267072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.034049Z 1 01h25m22.267072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2025-04-04T17:10:37.034669Z 10 01h25m25.802560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2025-04-04T17:10:37.035419Z 1 01h25m25.803072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.035465Z 1 01h25m25.803072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2025-04-04T17:10:37.035570Z 5 01h25m26.321560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2025-04-04T17:10:37.036219Z 1 01h25m26.322072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.036261Z 1 01h25m26.322072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2025-04-04T17:10:37.036612Z 7 01h25m27.224560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2025-04-04T17:10:37.037350Z 1 01h25m27.225072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.037389Z 1 01h25m27.225072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2025-04-04T17:10:37.037464Z 4 01h25m27.239560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2025-04-04T17:10:37.038137Z 1 01h25m27.240072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.038180Z 1 01h25m27.240072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2025-04-04T17:10:37.039442Z 7 01h25m30.265560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2025-04-04T17:10:37.040135Z 1 01h25m30.266072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2025-04-04T17:10:37.040168Z 1 01h25m30.266072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2025-04-04T17:10:37.040308Z 5 01h25m31.435560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2025-04-04T17:10:37.040980Z 1 01h25m31.436072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.041022Z 1 01h25m31.436072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2025-04-04T17:10:37.041141Z 2 01h25m34.306560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2025-04-04T17:10:37.041825Z 1 01h25m34.307072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.041876Z 1 01h25m34.307072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2025-04-04T17:10:37.042766Z 4 01h25m36.491560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2025-04-04T17:10:37.043310Z 1 01h25m36.492072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.043352Z 1 01h25m36.492072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2025-04-04T17:10:37.043862Z 2 01h25m40.039560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2025-04-04T17:10:37.044494Z 1 01h25m40.040072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-04-04T17:10:37.044526Z 1 01h25m40.040072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2025-04-04T17:10:35.303148Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-04T17:10:35.303212Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-04T17:10:35.303318Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-04T17:10:35.303340Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-04T17:10:35.303378Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-04T17:10:35.303408Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-04T17:10:35.303448Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-04T17:10:35.303465Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-04T17:10:35.303514Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-04T17:10:35.303534Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-04T17:10:35.303568Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-04T17:10:35.303585Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-04T17:10:35.303615Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-04T17:10:35.303634Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-04T17:10:35.303665Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-04T17:10:35.303684Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-04T17:10:35.303714Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-04T17:10:35.303733Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-04T17:10:35.303789Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-04T17:10:35.303821Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-04T17:10:35.303863Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-04T17:10:35.303882Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-04T17:10:35.303913Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-04T17:10:35.303931Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-04T17:10:35.303980Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-04T17:10:35.304001Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-04T17:10:35.304044Z 14 
00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-04T17:10:35.304065Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-04T17:10:35.304098Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-04T17:10:35.304115Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-04T17:10:35.304149Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-04T17:10:35.304172Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-04T17:10:35.304215Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-04T17:10:35.304235Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-04T17:10:35.304265Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-04T17:10:35.304283Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-04T17:10:35.304317Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-04T17:10:35.304349Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-04T17:10:35.304386Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-04T17:10:35.304406Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-04T17:10:35.304452Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-04T17:10:35.304487Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-04T17:10:35.304530Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-04T17:10:35.304551Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-04T17:10:35.304585Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-04T17:10:35.304602Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-04T17:10:35.304634Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-04T17:10:35.304652Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-04T17:10:35.304683Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-04T17:10:35.304700Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-04T17:10:35.304743Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-04T17:10:35.304764Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-04T17:10:35.304809Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-04T17:10:35.304831Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-04T17:10:35.304868Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-04T17:10:35.304889Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-04T17:10:35.304938Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-04T17:10:35.304958Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-04T17:10:35.304993Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-04T17:10:35.305011Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-04T17:10:35.305040Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-04T17:10:35.305059Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-04T17:10:35.305115Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-04T17:10:35.305133Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-04T17:10:35.305163Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-04T17:10:35.305184Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-04T17:10:35.305214Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-04T17:10:35.305233Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-04T17:10:35.305268Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-04T17:10:35.305297Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-04T17:10:35.305334Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-04T17:10:35.305369Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] 
Connect 2025-04-04T17:10:35.320159Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-04T17:10:35.321394Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-04T17:10:35.321453Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-04T17:10:35.321516Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-04T17:10:35.321554Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-04T17:10:35.321587Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-04T17:10:35.321620Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-04T17:10:35.321656Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-04T17:10:35.321690Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-04T17:10:35.321727Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-04T17:10:35.321778Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-04T17:10:35.321819Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-04T17:10:35.321854Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-04T17:10:35.321887Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-04T17:10:35.321940Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-04T17:10:35.321980Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-04T17:10:35.322036Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-04T17:10:35.322069Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-04-04T17:10:35.322106Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-04T17:10:35.322156Z 20 00h00m00.000000s :BS_NODE 
DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-04T17:10:35.322198Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-04T17:10:35.322256Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-04T17:10:35.322293Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-04T17:10:35.322326Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-04T17:10:35.322363Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-04T17:10:35.322399Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-04T17:10:35.322431Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-04T17:10:35.322464Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-04T17:10:35.322519Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-04T17:10:35.322554Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-04T17:10:35.322586Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-04T17:10:35.322620Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-04T17:10:35.322668Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-04T17:10:35.322702Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-04T17:10:35.322735Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
8476Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-04T17:10:37.208518Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000001c:1:1:0:0] -> [8000001c:2:1:0:0] 2025-04-04T17:10:37.208615Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2025-04-04T17:10:37.208669Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000001c:1:2:2:0] -> [8000001c:2:2:2:0] 2025-04-04T17:10:37.208811Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.208850Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000000c:2:1:1:0] PDiskId# 1002 VSlotId# 1009 created 2025-04-04T17:10:37.208907Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000000c:2:1:1:0] status changed to INIT_PENDING 2025-04-04T17:10:37.209007Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-04T17:10:37.209056Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000000c:1:0:0:0] -> [8000000c:2:0:0:0] 2025-04-04T17:10:37.209138Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-04T17:10:37.209177Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000000c:1:1:2:0] -> [8000000c:2:1:2:0] 2025-04-04T17:10:37.209263Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-04T17:10:37.209311Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000000c:1:0:1:0] -> [8000000c:2:0:1:0] 2025-04-04T17:10:37.209397Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-04T17:10:37.209447Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000000c:1:0:2:0] -> [8000000c:2:0:2:0] 2025-04-04T17:10:37.209564Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-04T17:10:37.209615Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000000c:1:2:0:0] -> [8000000c:2:2:0:0] 2025-04-04T17:10:37.209706Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:37.209761Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000000c:1:2:1:0] -> [8000000c:2:2:1:0] 2025-04-04T17:10:37.209836Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-04T17:10:37.209876Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000000c:1:1:0:0] -> [8000000c:2:1:0:0] 2025-04-04T17:10:37.209960Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2025-04-04T17:10:37.210012Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000000c:1:2:2:0] -> [8000000c:2:2:2:0] 2025-04-04T17:10:37.210140Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.210176Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000007c:2:1:1:0] PDiskId# 1003 VSlotId# 1009 created 2025-04-04T17:10:37.210267Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000007c:2:1:1:0] status changed to INIT_PENDING 2025-04-04T17:10:37.210378Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-04T17:10:37.210430Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000007c:1:0:0:0] -> [8000007c:2:0:0:0] 2025-04-04T17:10:37.210520Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-04T17:10:37.210565Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000007c:1:1:2:0] -> [8000007c:2:1:2:0] 2025-04-04T17:10:37.210669Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-04T17:10:37.210708Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000007c:1:0:1:0] -> [8000007c:2:0:1:0] 2025-04-04T17:10:37.210808Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-04T17:10:37.210851Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# 
[8000007c:1:0:2:0] -> [8000007c:2:0:2:0] 2025-04-04T17:10:37.210930Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-04T17:10:37.210974Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000007c:1:2:0:0] -> [8000007c:2:2:0:0] 2025-04-04T17:10:37.211055Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:37.211101Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000007c:1:2:1:0] -> [8000007c:2:2:1:0] 2025-04-04T17:10:37.211195Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-04T17:10:37.211235Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000007c:1:1:0:0] -> [8000007c:2:1:0:0] 2025-04-04T17:10:37.211333Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2025-04-04T17:10:37.211389Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000007c:1:2:2:0] -> [8000007c:2:2:2:0] 2025-04-04T17:10:37.211519Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.211551Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000006c:2:1:1:0] PDiskId# 1000 VSlotId# 1010 created 2025-04-04T17:10:37.211608Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000006c:2:1:1:0] status changed to INIT_PENDING 2025-04-04T17:10:37.211711Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-04T17:10:37.211758Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000006c:1:0:0:0] -> [8000006c:2:0:0:0] 2025-04-04T17:10:37.211846Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-04-04T17:10:37.211910Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000006c:1:1:2:0] -> [8000006c:2:1:2:0] 2025-04-04T17:10:37.211996Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-04T17:10:37.212039Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000006c:1:0:1:0] -> [8000006c:2:0:1:0] 2025-04-04T17:10:37.212127Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-04T17:10:37.212174Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000006c:1:0:2:0] -> [8000006c:2:0:2:0] 2025-04-04T17:10:37.212265Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-04T17:10:37.212309Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000006c:1:2:0:0] -> [8000006c:2:2:0:0] 2025-04-04T17:10:37.212404Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:37.212495Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000006c:1:2:1:0] -> [8000006c:2:2:1:0] 2025-04-04T17:10:37.212578Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-04T17:10:37.212622Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000006c:1:1:0:0] -> [8000006c:2:1:0:0] 2025-04-04T17:10:37.212727Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2025-04-04T17:10:37.212769Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000006c:1:2:2:0] -> [8000006c:2:2:2:0] 2025-04-04T17:10:37.212932Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.212979Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000005c:2:1:1:0] PDiskId# 1002 VSlotId# 1010 created 2025-04-04T17:10:37.213058Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000005c:2:1:1:0] status changed to INIT_PENDING 2025-04-04T17:10:37.213160Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-04-04T17:10:37.213204Z 3 00h05m00.102048s :BS_NODE DEBUG: [3] VDiskId# [8000005c:1:0:0:0] -> [8000005c:2:0:0:0] 2025-04-04T17:10:37.213299Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 
2025-04-04T17:10:37.213352Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000005c:1:1:2:0] -> [8000005c:2:1:2:0] 2025-04-04T17:10:37.213435Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-04T17:10:37.213475Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000005c:1:0:1:0] -> [8000005c:2:0:1:0] 2025-04-04T17:10:37.213562Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-04-04T17:10:37.213606Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000005c:1:0:2:0] -> [8000005c:2:0:2:0] 2025-04-04T17:10:37.213684Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-04T17:10:37.213723Z 27 00h05m00.102048s :BS_NODE DEBUG: [27] VDiskId# [8000005c:1:2:0:0] -> [8000005c:2:2:0:0] 2025-04-04T17:10:37.213824Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:37.213870Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000005c:1:2:1:0] -> [8000005c:2:2:1:0] 2025-04-04T17:10:37.213951Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-04T17:10:37.213992Z 15 00h05m00.102048s :BS_NODE DEBUG: [15] VDiskId# [8000005c:1:1:0:0] -> [8000005c:2:1:0:0] 2025-04-04T17:10:37.214079Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2025-04-04T17:10:37.214121Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000005c:1:2:2:0] -> [8000005c:2:2:2:0] 2025-04-04T17:10:37.219136Z 18 00h05m01.413048s :BS_NODE DEBUG: [18] VDiskId# [8000000c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.219855Z 18 00h05m02.591048s :BS_NODE DEBUG: [18] VDiskId# [8000004c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.220654Z 18 00h05m04.351048s :BS_NODE DEBUG: [18] VDiskId# [8000006c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.221563Z 18 00h05m04.424048s :BS_NODE DEBUG: [18] VDiskId# [8000007c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.222591Z 18 00h05m04.776048s :BS_NODE DEBUG: [18] VDiskId# [8000002c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.223392Z 18 00h05m04.945048s :BS_NODE DEBUG: [18] VDiskId# [8000005c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.224903Z 18 00h05m05.819048s :BS_NODE DEBUG: [18] VDiskId# [8000003c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.225776Z 18 00h05m05.985048s :BS_NODE DEBUG: [18] VDiskId# [8000001c:2:1:1:0] status changed to REPLICATING 2025-04-04T17:10:37.226982Z 18 00h05m14.457048s :BS_NODE DEBUG: [18] VDiskId# [8000003c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.228405Z 18 00h05m14.457560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.228464Z 18 00h05m14.457560s :BS_NODE DEBUG: [18] VDiskId# [8000003c:1:1:1:0] destroyed 2025-04-04T17:10:37.228629Z 18 00h05m14.827048s :BS_NODE DEBUG: [18] VDiskId# [8000005c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.229878Z 18 00h05m14.827560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.229943Z 18 00h05m14.827560s :BS_NODE DEBUG: [18] VDiskId# [8000005c:1:1:1:0] destroyed 2025-04-04T17:10:37.230965Z 18 00h05m20.050048s :BS_NODE DEBUG: [18] VDiskId# [8000001c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.232334Z 18 00h05m20.050560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.232382Z 18 00h05m20.050560s :BS_NODE DEBUG: [18] VDiskId# [8000001c:1:1:1:0] destroyed 2025-04-04T17:10:37.232726Z 18 00h05m25.555048s :BS_NODE DEBUG: [18] VDiskId# [8000000c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.234063Z 18 00h05m25.555560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 
2025-04-04T17:10:37.234108Z 18 00h05m25.555560s :BS_NODE DEBUG: [18] VDiskId# [8000000c:1:1:1:0] destroyed 2025-04-04T17:10:37.234335Z 18 00h05m28.637048s :BS_NODE DEBUG: [18] VDiskId# [8000004c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.235820Z 18 00h05m28.637560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.235865Z 18 00h05m28.637560s :BS_NODE DEBUG: [18] VDiskId# [8000004c:1:1:1:0] destroyed 2025-04-04T17:10:37.236006Z 18 00h05m28.842048s :BS_NODE DEBUG: [18] VDiskId# [8000007c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.237502Z 18 00h05m28.842560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.237548Z 18 00h05m28.842560s :BS_NODE DEBUG: [18] VDiskId# [8000007c:1:1:1:0] destroyed 2025-04-04T17:10:37.238859Z 18 00h05m30.230048s :BS_NODE DEBUG: [18] VDiskId# [8000002c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.240457Z 18 00h05m30.230560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.240503Z 18 00h05m30.230560s :BS_NODE DEBUG: [18] VDiskId# [8000002c:1:1:1:0] destroyed 2025-04-04T17:10:37.240678Z 18 00h05m32.966048s :BS_NODE DEBUG: [18] VDiskId# [8000006c:2:1:1:0] status changed to READY 2025-04-04T17:10:37.242259Z 18 00h05m32.966560s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-04-04T17:10:37.242306Z 18 00h05m32.966560s :BS_NODE DEBUG: [18] VDiskId# [8000006c:1:1:1:0] destroyed >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TransferWriter::Write_ColumnTable [GOOD] Test command err: 2025-04-04T17:10:29.341266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506543714216691:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:29.341344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001025/r3tmp/tmpDt0Pih/pdisk_1.dat 2025-04-04T17:10:29.803003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:29.817510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:29.821110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:29.829049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13684 TServer::EnableGrpc on GrpcPort 4102, node 1 2025-04-04T17:10:30.325930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:30.325974Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2025-04-04T17:10:30.325986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:30.326146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:31.030401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:31.054909Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:31.107131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:10:31.778717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:10:31.779028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:10:31.779326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:10:31.779443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:10:31.779542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:10:31.779691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:10:31.779795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:10:31.779929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:10:31.780103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:10:31.780236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:10:31.780360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:10:31.780476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7489506552304152228:2317];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:10:31.819707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:10:31.819775Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:10:31.819992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:10:31.820160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:10:31.820389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:10:31.820509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:10:31.820612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:10:31.820726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:10:31.820820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:10:31.820952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:10:31.821035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:10:31.821137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[1:7489506552304152237:2319];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:10:31.855688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:10:31.855738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:10:31.855880Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:10:31.855998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:10:31.856058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:10:31.856127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:10:31.856199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:10:31.856262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489506552304152286:2328];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:10:31.856337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[ ... 1474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.743025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.751105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.754430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.755705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.760928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.763353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.770684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.776091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 
2025-04-04T17:10:33.780702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.781978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.784930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.790391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.796831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.797840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.801141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.802704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.805608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.807789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.810374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.813141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.815082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.817634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.819667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.822783Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.824917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.827537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.829679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.832315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.834382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.836981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.839000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.842115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.843873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.847069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.849067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.851965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.854448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:10:33.857032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 
281474976710658 CreateStep: 1743786633694 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTi... (TRUNCATED) 2025-04-04T17:10:33.889850Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7489506560894088985:3708] GetTableScheme: worker# [0:0:0] 2025-04-04T17:10:33.890178Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7489506560894088985:3708] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindColumnTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:10:33.891584Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7489506560894088985:3708] CompileTransferLambda: worker# [0:0:0] 2025-04-04T17:10:34.343806Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506543714216691:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:34.344331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:37.308300Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7489506560894088985:3708] Handle TEvPurecalcCompileResponse: result# 2025-04-04T17:10:37.308332Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7489506560894088985:3708] Handshake: worker# [1:7489506552304151743:2295] 2025-04-04T17:10:37.308824Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7489506560894088985:3708] Handle TEvData NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 36b CreateTime: 1970-01-01T00:00:00Z }] } 2025-04-04T17:10:37.582074Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7489506560894088985:3708] Handle TEvents::TEvCompleted: worker# [1:7489506552304151743:2295] status# 400000 >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small+StreamLookupJoin-ColumnStore |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-04-04T17:10:16.322579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:16.322672Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-04T17:10:16.384815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:17.583571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:17.583635Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:17.625119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:18.449809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:18.449876Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:18.492207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:19.537016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:19.537079Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:19.581504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:20.870997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:20.871069Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:20.911311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:29.013105Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:29.013188Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:29.071598Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:30.171500Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:30.171571Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:30.210290Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:34.996591Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:34.996675Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:35.043799Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:36.178028Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:36.178105Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:36.229812Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:37.352977Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:37.353068Z node 23 :IMPORT WARN: Table 
profiles were not loaded 2025-04-04T17:10:37.403682Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:38.550526Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:38.550628Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:38.602416Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> TGroupMapperTest::Mirror3dc3Nodes |86.7%| [TA] $(B)/ydb/core/tx/replication/service/ut_transfer_writer/test-results/unittest/{meta.json ... results_accumulator.log} |86.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> TGroupMapperTest::ReassignGroupTest3dc |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> TMultiversionObjectMap::MonteCarlo |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2025-04-04T17:10:33.178378Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-04T17:10:33.178447Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-04T17:10:33.179601Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-04T17:10:33.179662Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-04T17:10:33.179755Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-04T17:10:33.179783Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-04T17:10:33.179824Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-04T17:10:33.179846Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-04T17:10:33.179905Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-04T17:10:33.179932Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-04T17:10:33.180007Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-04T17:10:33.180032Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-04T17:10:33.180071Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 
2025-04-04T17:10:33.180094Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-04T17:10:33.180131Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-04T17:10:33.180153Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-04T17:10:33.180200Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-04T17:10:33.180222Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-04T17:10:33.180260Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-04T17:10:33.180282Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-04T17:10:33.180333Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-04T17:10:33.180361Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-04T17:10:33.180412Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-04-04T17:10:33.180434Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-04T17:10:33.180497Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-04T17:10:33.180522Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-04T17:10:33.180567Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-04T17:10:33.180591Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-04T17:10:33.180639Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-04T17:10:33.180666Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-04T17:10:33.180708Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-04T17:10:33.180732Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-04T17:10:33.180769Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-04T17:10:33.180815Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-04T17:10:33.180867Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-04T17:10:33.180893Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-04T17:10:33.180935Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-04T17:10:33.180958Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-04T17:10:33.180996Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-04T17:10:33.181019Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-04T17:10:33.181076Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-04T17:10:33.181102Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-04T17:10:33.181181Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-04T17:10:33.181206Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-04T17:10:33.181251Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-04T17:10:33.181274Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-04T17:10:33.181322Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-04T17:10:33.181343Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-04T17:10:33.181386Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-04T17:10:33.181411Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-04T17:10:33.181452Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-04T17:10:33.181477Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-04T17:10:33.181520Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-04T17:10:33.181546Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-04T17:10:33.181590Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-04T17:10:33.181615Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-04T17:10:33.181690Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-04T17:10:33.181719Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-04T17:10:33.181802Z 30 00h00m00.000000s 
:BS_NODE DEBUG: [30] Bootstrap 2025-04-04T17:10:33.181826Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-04T17:10:33.181864Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-04T17:10:33.181888Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-04T17:10:33.181929Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-04T17:10:33.181973Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-04T17:10:33.204525Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2157:49] Status# ERROR ClientId# [1:2157:49] ServerId# [0:0:0] PipeClient# [1:2157:49] 2025-04-04T17:10:33.205902Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2158:37] Status# ERROR ClientId# [2:2158:37] ServerId# [0:0:0] PipeClient# [2:2158:37] 2025-04-04T17:10:33.205970Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2159:37] Status# ERROR ClientId# [3:2159:37] ServerId# [0:0:0] PipeClient# [3:2159:37] 2025-04-04T17:10:33.206037Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2160:37] Status# ERROR ClientId# [4:2160:37] ServerId# [0:0:0] PipeClient# [4:2160:37] 2025-04-04T17:10:33.206088Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2161:37] Status# ERROR ClientId# [5:2161:37] ServerId# [0:0:0] PipeClient# [5:2161:37] 2025-04-04T17:10:33.206132Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2162:37] Status# ERROR ClientId# [6:2162:37] ServerId# [0:0:0] PipeClient# [6:2162:37] 2025-04-04T17:10:33.206177Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2163:37] Status# ERROR ClientId# [7:2163:37] ServerId# [0:0:0] PipeClient# [7:2163:37] 2025-04-04T17:10:33.206226Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2164:37] Status# ERROR ClientId# [8:2164:37] ServerId# [0:0:0] PipeClient# [8:2164:37] 2025-04-04T17:10:33.206287Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2165:37] Status# ERROR ClientId# [9:2165:37] ServerId# [0:0:0] PipeClient# [9:2165:37] 2025-04-04T17:10:33.206333Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2166:37] Status# ERROR ClientId# [10:2166:37] ServerId# [0:0:0] PipeClient# [10:2166:37] 2025-04-04T17:10:33.206377Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2167:37] Status# ERROR ClientId# [11:2167:37] ServerId# [0:0:0] PipeClient# [11:2167:37] 2025-04-04T17:10:33.206427Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2168:37] Status# ERROR ClientId# [12:2168:37] ServerId# [0:0:0] PipeClient# [12:2168:37] 2025-04-04T17:10:33.206509Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2169:37] Status# ERROR ClientId# [13:2169:37] ServerId# [0:0:0] PipeClient# [13:2169:37] 2025-04-04T17:10:33.206561Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2170:37] Status# ERROR ClientId# [14:2170:37] ServerId# [0:0:0] PipeClient# [14:2170:37] 2025-04-04T17:10:33.206603Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2171:37] Status# ERROR ClientId# [15:2171:37] ServerId# [0:0:0] PipeClient# [15:2171:37] 2025-04-04T17:10:33.206681Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2172:37] Status# ERROR ClientId# [16:2172:37] ServerId# [0:0:0] PipeClient# [16:2172:37] 2025-04-04T17:10:33.206723Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2173:37] Status# ERROR ClientId# [17:2173:37] ServerId# [0:0:0] PipeClient# [17:2173:37] 
2025-04-04T17:10:33.206769Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2174:37] Status# ERROR ClientId# [18:2174:37] ServerId# [0:0:0] PipeClient# [18:2174:37] 2025-04-04T17:10:33.206812Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2175:37] Status# ERROR ClientId# [19:2175:37] ServerId# [0:0:0] PipeClient# [19:2175:37] 2025-04-04T17:10:33.206872Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2176:37] Status# ERROR ClientId# [20:2176:37] ServerId# [0:0:0] PipeClient# [20:2176:37] 2025-04-04T17:10:33.206919Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2177:37] Status# ERROR ClientId# [21:2177:37] ServerId# [0:0:0] PipeClient# [21:2177:37] 2025-04-04T17:10:33.206986Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2178:37] Status# ERROR ClientId# [22:2178:37] ServerId# [0:0:0] PipeClient# [22:2178:37] 2025-04-04T17:10:33.207044Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2179:37] Status# ERROR ClientId# [23:2179:37] ServerId# [0:0:0] PipeClient# [23:2179:37] 2025-04-04T17:10:33.207100Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2180:37] Status# ERROR ClientId# [24:2180:37] ServerId# [0:0:0] PipeClient# [24:2180:37] 2025-04-04T17:10:33.207143Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2181:37] Status# ERROR ClientId# [25:2181:37] ServerId# [0:0:0] PipeClient# [25:2181:37] 2025-04-04T17:10:33.207170Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2182:37] Status# ERROR ClientId# [26:2182:37] ServerId# [0:0:0] PipeClient# [26:2182:37] 2025-04-04T17:10:33.207218Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2183:37] Status# ERROR ClientId# [27:2183:37] ServerId# [0:0:0] PipeClient# [27:2183:37] 2025-04-04T17:10:33.207247Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2184:37] Status# ERROR ClientId# [28:2184:37] ServerId# [0:0:0] PipeClient# [28:2184:37] 2025-04-04T17:10:33.207273Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2185:37] Status# ERROR ClientId# [29:2185:37] ServerId# [0:0:0] PipeClient# [29:2185:37] 2025-04-04T17:10:33.207304Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2186:37] Status# ERROR ClientId# [30:2186:37] ServerId# [0:0:0] PipeClient# [30:2186:37] 2025-04-04T17:10:33.207355Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2187:37] Status# ERROR ClientId# [31:2187:37] ServerId# [0:0:0] PipeClient# [31:2187:37] 2025-04-04T17:10:33.207406Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2188:37] Status# ERROR ClientId# [32:2188:37] ServerId# [0:0:0] PipeClient# [32:2188:37] 2025-04-04T17:10:33.392003Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.123949s 2025-04-04T17:10:33.392237Z 1 00h00m00.002048s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.124203s 2025-04-04T17:10:33.401137Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2257:73] expected 1 current 0 2025-04-04T17:10:33.401203Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2258:38] expected 1 current 0 2025-04-04T17:10:33.401233Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2259:38] expected 1 current 0 2025-04-04T17:10:33.401265Z 4 00h00m00.002560s :BS_NODE 
DEBUG: [4] CheckState from [4:2260:38] expected 1 current 0 2025-04-04T17:10:33.401307Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2261:38] expected 1 current 0 2025-04-04T17:10:33.401328Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2262:38] expected 1 current 0 2025-04-04T17:10:33.401347Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [7 ... 05h15m00.117920s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2025-04-04T17:10:40.925838Z 12 05h15m00.117920s :BS_NODE DEBUG: [12] VDiskId# [8000000e:5:0:4:0] -> [8000000e:6:0:4:0] 2025-04-04T17:10:40.925906Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:40.925947Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] VDiskId# [8000000e:5:0:1:0] -> [8000000e:6:0:1:0] 2025-04-04T17:10:40.926048Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-04T17:10:40.926087Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [80000036:5:0:0:0] -> [80000036:6:0:0:0] 2025-04-04T17:10:40.926153Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:40.926190Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] VDiskId# [80000036:5:0:2:0] -> [80000036:6:0:2:0] 2025-04-04T17:10:40.926255Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.926316Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-04T17:10:40.926355Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000036:5:0:6:0] -> [80000036:6:0:6:0] 2025-04-04T17:10:40.926420Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-04-04T17:10:40.926462Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000036:5:0:5:0] -> [80000036:6:0:5:0] 2025-04-04T17:10:40.926530Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-04T17:10:40.926562Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000036:6:0:3:0] PDiskId# 1000 VSlotId# 1014 created 2025-04-04T17:10:40.926609Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000036:6:0:3:0] status changed to INIT_PENDING 2025-04-04T17:10:40.926704Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-04T17:10:40.926744Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000036:5:0:7:0] -> [80000036:6:0:7:0] 2025-04-04T17:10:40.926810Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:40.926846Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] VDiskId# [80000036:5:0:1:0] -> [80000036:6:0:1:0] 2025-04-04T17:10:40.926913Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-04T17:10:40.926953Z 13 05h15m00.117920s :BS_NODE DEBUG: [13] VDiskId# [80000036:5:0:4:0] -> [80000036:6:0:4:0] 2025-04-04T17:10:40.927055Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-04T17:10:40.927094Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [80000026:5:0:0:0] -> [80000026:6:0:0:0] 2025-04-04T17:10:40.927161Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:40.927200Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] VDiskId# [80000026:5:0:2:0] -> [80000026:6:0:2:0] 2025-04-04T17:10:40.927252Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.927313Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-04T17:10:40.927353Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000026:5:0:6:0] -> [80000026:6:0:6:0] 2025-04-04T17:10:40.927421Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-04T17:10:40.927450Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# 
[80000026:6:0:3:0] PDiskId# 1000 VSlotId# 1015 created 2025-04-04T17:10:40.927500Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000026:6:0:3:0] status changed to INIT_PENDING 2025-04-04T17:10:40.927574Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-04T17:10:40.927611Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000026:5:0:7:0] -> [80000026:6:0:7:0] 2025-04-04T17:10:40.927682Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-04-04T17:10:40.927724Z 8 05h15m00.117920s :BS_NODE DEBUG: [8] VDiskId# [80000026:5:0:5:0] -> [80000026:6:0:5:0] 2025-04-04T17:10:40.927793Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:40.927834Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] VDiskId# [80000026:5:0:1:0] -> [80000026:6:0:1:0] 2025-04-04T17:10:40.927903Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-04-04T17:10:40.927946Z 14 05h15m00.117920s :BS_NODE DEBUG: [14] VDiskId# [80000026:5:0:4:0] -> [80000026:6:0:4:0] 2025-04-04T17:10:40.928047Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-04T17:10:40.928089Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [80000016:6:0:0:0] -> [80000016:7:0:0:0] 2025-04-04T17:10:40.928156Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:40.928192Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] VDiskId# [80000016:6:0:2:0] -> [80000016:7:0:2:0] 2025-04-04T17:10:40.928242Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.928302Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-04-04T17:10:40.928342Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000016:6:0:6:0] -> [80000016:7:0:6:0] 2025-04-04T17:10:40.928409Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-04T17:10:40.928440Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000016:7:0:3:0] PDiskId# 1000 VSlotId# 1016 created 2025-04-04T17:10:40.928491Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000016:7:0:3:0] status changed to INIT_PENDING 2025-04-04T17:10:40.928565Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-04T17:10:40.928605Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000016:6:0:7:0] -> [80000016:7:0:7:0] 2025-04-04T17:10:40.928674Z 10 05h15m00.117920s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-04T17:10:40.928717Z 10 05h15m00.117920s :BS_NODE DEBUG: [10] VDiskId# [80000016:6:0:5:0] -> [80000016:7:0:5:0] 2025-04-04T17:10:40.928787Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:40.928826Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] VDiskId# [80000016:6:0:1:0] -> [80000016:7:0:1:0] 2025-04-04T17:10:40.928893Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-04-04T17:10:40.928933Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] VDiskId# [80000016:6:0:4:0] -> [80000016:7:0:4:0] 2025-04-04T17:10:40.929039Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-04-04T17:10:40.929080Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [80000006:5:0:0:0] -> [80000006:6:0:0:0] 2025-04-04T17:10:40.929147Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:40.929187Z 19 05h15m00.117920s :BS_NODE DEBUG: [19] VDiskId# [80000006:5:0:2:0] -> [80000006:6:0:2:0] 2025-04-04T17:10:40.929238Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.929302Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-04-04T17:10:40.929336Z 23 
05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000006:6:0:3:0] PDiskId# 1000 VSlotId# 1017 created 2025-04-04T17:10:40.929383Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000006:6:0:3:0] status changed to INIT_PENDING 2025-04-04T17:10:40.929458Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-04-04T17:10:40.929497Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000006:5:0:7:0] -> [80000006:6:0:7:0] 2025-04-04T17:10:40.929567Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-04-04T17:10:40.929609Z 26 05h15m00.117920s :BS_NODE DEBUG: [26] VDiskId# [80000006:5:0:6:0] -> [80000006:6:0:6:0] 2025-04-04T17:10:40.929679Z 10 05h15m00.117920s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-04-04T17:10:40.929721Z 10 05h15m00.117920s :BS_NODE DEBUG: [10] VDiskId# [80000006:5:0:5:0] -> [80000006:6:0:5:0] 2025-04-04T17:10:40.929791Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-04-04T17:10:40.929831Z 30 05h15m00.117920s :BS_NODE DEBUG: [30] VDiskId# [80000006:5:0:1:0] -> [80000006:6:0:1:0] 2025-04-04T17:10:40.929896Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-04T17:10:40.929938Z 16 05h15m00.117920s :BS_NODE DEBUG: [16] VDiskId# [80000006:5:0:4:0] -> [80000006:6:0:4:0] 2025-04-04T17:10:40.934766Z 23 05h15m01.616920s :BS_NODE DEBUG: [23] VDiskId# [80000006:6:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.935217Z 23 05h15m02.154920s :BS_NODE DEBUG: [23] VDiskId# [80000026:6:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.935648Z 23 05h15m02.870920s :BS_NODE DEBUG: [23] VDiskId# [80000016:7:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.936077Z 23 05h15m03.926920s :BS_NODE DEBUG: [23] VDiskId# [80000036:6:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.936570Z 23 05h15m04.825920s :BS_NODE DEBUG: [23] VDiskId# [8000000b:4:0:1:0] status changed to REPLICATING 2025-04-04T17:10:40.938051Z 23 05h15m05.438920s :BS_NODE DEBUG: [23] VDiskId# [8000001e:8:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.938839Z 23 05h15m05.465920s :BS_NODE DEBUG: [23] VDiskId# [8000002e:7:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.939546Z 23 05h15m05.637920s :BS_NODE DEBUG: [23] VDiskId# [8000000e:6:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.940282Z 23 05h15m06.102920s :BS_NODE DEBUG: [23] VDiskId# [8000003e:6:0:3:0] status changed to REPLICATING 2025-04-04T17:10:40.941235Z 23 05h15m12.549920s :BS_NODE DEBUG: [23] VDiskId# [80000026:6:0:3:0] status changed to READY 2025-04-04T17:10:40.942499Z 20 05h15m12.550432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.942544Z 20 05h15m12.550432s :BS_NODE DEBUG: [20] VDiskId# [80000026:5:0:3:0] destroyed 2025-04-04T17:10:40.943083Z 23 05h15m19.494920s :BS_NODE DEBUG: [23] VDiskId# [80000016:7:0:3:0] status changed to READY 2025-04-04T17:10:40.944227Z 20 05h15m19.495432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.944283Z 20 05h15m19.495432s :BS_NODE DEBUG: [20] VDiskId# [80000016:6:0:3:0] destroyed 2025-04-04T17:10:40.944537Z 23 05h15m20.324920s :BS_NODE DEBUG: [23] VDiskId# [8000003e:6:0:3:0] status changed to READY 2025-04-04T17:10:40.945256Z 20 05h15m20.325432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.945306Z 20 05h15m20.325432s :BS_NODE DEBUG: [20] VDiskId# [8000003e:5:0:3:0] destroyed 2025-04-04T17:10:40.945459Z 23 05h15m23.569920s :BS_NODE DEBUG: [23] VDiskId# [80000036:6:0:3:0] status changed to READY 2025-04-04T17:10:40.946168Z 20 05h15m23.570432s :BS_NODE 
DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.946199Z 20 05h15m23.570432s :BS_NODE DEBUG: [20] VDiskId# [80000036:5:0:3:0] destroyed 2025-04-04T17:10:40.954493Z 23 05h15m30.796920s :BS_NODE DEBUG: [23] VDiskId# [8000002e:7:0:3:0] status changed to READY 2025-04-04T17:10:40.956019Z 20 05h15m30.797432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.956081Z 20 05h15m30.797432s :BS_NODE DEBUG: [20] VDiskId# [8000002e:6:0:3:0] destroyed 2025-04-04T17:10:40.956259Z 23 05h15m32.584920s :BS_NODE DEBUG: [23] VDiskId# [80000006:6:0:3:0] status changed to READY 2025-04-04T17:10:40.957327Z 20 05h15m32.585432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.957375Z 20 05h15m32.585432s :BS_NODE DEBUG: [20] VDiskId# [80000006:5:0:3:0] destroyed 2025-04-04T17:10:40.957514Z 23 05h15m33.452920s :BS_NODE DEBUG: [23] VDiskId# [8000001e:8:0:3:0] status changed to READY 2025-04-04T17:10:40.958528Z 20 05h15m33.453432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.958573Z 20 05h15m33.453432s :BS_NODE DEBUG: [20] VDiskId# [8000001e:7:0:3:0] destroyed 2025-04-04T17:10:40.959179Z 23 05h15m35.195920s :BS_NODE DEBUG: [23] VDiskId# [8000000b:4:0:1:0] status changed to READY 2025-04-04T17:10:40.960180Z 20 05h15m35.196432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.960227Z 20 05h15m35.196432s :BS_NODE DEBUG: [20] VDiskId# [8000000b:3:0:1:0] destroyed 2025-04-04T17:10:40.960938Z 23 05h15m40.128920s :BS_NODE DEBUG: [23] VDiskId# [8000000e:6:0:3:0] status changed to READY 2025-04-04T17:10:40.961928Z 20 05h15m40.129432s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-04-04T17:10:40.961974Z 20 05h15m40.129432s :BS_NODE DEBUG: [20] VDiskId# [8000000e:5:0:3:0] destroyed
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD]
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1]
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD]
>> TGroupMapperTest::Block42_1disk
>> TGroupMapperTest::NonUniformClusterMirror3dc
>> TConsoleTests::TestCreateTenantWrongName [GOOD]
>> TConsoleTests::TestCreateTenantWrongNameExtSubdomain
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD]
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest
>> TGroupMapperTest::SanitizeGroupTest3dc
>> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD]
>> KqpFlipJoin::LeftSemi_2 [GOOD]
>> KqpFlipJoin::LeftSemi_3
>> TGroupMapperTest::NonUniformCluster2
>> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD]
>> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD]
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD]
|86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD]
>> TGroupMapperTest::CheckNotToBreakFailModel [GOOD]
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD]
>> KqpJoinOrder::TPCDS95-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD]
>> TGroupMapperTest::MapperSequentialCalls [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer
>> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore
>> TConsoleTests::TestModifyUsedZoneKind [GOOD]
>> TConsoleTests::TestMergeConfig
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest
>> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD]
>> TConsoleTests::TestCreateTenantWrongPool
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD]
>> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore
>> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD]
>> TConsoleTests::TestSchemeShardErrorForwarding
>> TGroupMapperTest::MakeDisksNonoperational [GOOD]
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD]
>> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD]
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD]
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly
>> BsControllerTest::SelfHealMirror3dc [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:10:25.009875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:10:25.009970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:25.010010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:10:25.010053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:10:25.010116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:10:25.010161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:10:25.010312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:10:25.010398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:10:25.010765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:10:25.088214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:25.088286Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:25.103825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:10:25.104110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:10:25.104314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:10:25.115275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:10:25.115632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:10:25.116321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.117197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:10:25.127380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:25.128916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:25.128984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:25.129146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:25.129208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:25.129253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:25.129403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.137384Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:10:25.279043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:10:25.279302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.279528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:10:25.279760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:10:25.279828Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.283914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.284057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:10:25.284243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.284319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:10:25.284378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:10:25.284415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:10:25.286500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.286552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:10:25.286583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:10:25.288299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.288342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.288378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:25.288420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:10:25.291579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:10:25.293208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:10:25.293352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:10:25.294296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:25.294423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:10:25.294515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:10:25.294787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:10:25.294837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:10:25.294970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:10:25.295053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:10:25.301195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:25.301600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:25.302824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:25.303164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:10:25.305364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:10:25.305653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:10:25.305776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:25.305813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:25.305862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:10:25.305914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:25.305950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:10:25.306008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:10:25.306046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:10:25.306074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:10:25.306158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:10:25.306201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:10:25.306252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:10:25.315742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:25.315894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:10:25.315933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
CHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:10:46.901238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.901305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.901671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:46.901773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:10:46.901850Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:10:46.902063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-04-04T17:10:46.902184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.910233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:46.910363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-04T17:10:46.910425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:10:46.910586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:10:46.910688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.910842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.911985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2025-04-04T17:10:46.912083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.912221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.912317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.912371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.912404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:10:46.912556Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:10:46.927092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:46.927291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:10:46.928953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [1:1751:3674], Recipient [1:1751:3674]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:46.929007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:10:46.934361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:10:46.934458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:10:46.934837Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1751:3674], Recipient [1:1751:3674]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:10:46.934890Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:10:46.935124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:10:46.935194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:10:46.935243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:10:46.935282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:10:46.935738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1789:3674], Recipient [1:1751:3674]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:46.935783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:10:46.935823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1751:3674] sender: [1:1807:2058] recipient: [1:15:2062] 2025-04-04T17:10:46.976171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1806:3718], Recipient [1:1751:3674]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-04T17:10:46.976239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 
2025-04-04T17:10:46.976351Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:10:46.976632Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 272us result status StatusSuccess 2025-04-04T17:10:46.977391Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 
MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 17023 Memory: 156712 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TGroupMapperTest::NonUniformCluster2 [GOOD]
|86.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log}
|86.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD]
Test command err:
2025-04-04T17:10:33.178547Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-04-04T17:10:33.178645Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-04-04T17:10:33.180186Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-04-04T17:10:33.180238Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-04-04T17:10:33.180281Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-04-04T17:10:33.180322Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-04-04T17:10:33.180368Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-04-04T17:10:33.180387Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-04-04T17:10:33.180444Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-04-04T17:10:33.180466Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-04-04T17:10:33.180497Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-04-04T17:10:33.180532Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-04-04T17:10:33.180562Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-04-04T17:10:33.180582Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-04-04T17:10:33.180616Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-04-04T17:10:33.180636Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-04-04T17:10:33.180681Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-04-04T17:10:33.180703Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-04-04T17:10:33.180736Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-04-04T17:10:33.180760Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-04-04T17:10:33.180818Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-04-04T17:10:33.180840Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-04-04T17:10:33.180881Z 12 00h00m00.000000s :BS_NODE DEBUG: [12]
Bootstrap 2025-04-04T17:10:33.180900Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-04-04T17:10:33.180952Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-04-04T17:10:33.180975Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-04-04T17:10:33.181021Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-04-04T17:10:33.181043Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-04-04T17:10:33.181083Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-04-04T17:10:33.181102Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-04-04T17:10:33.181136Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-04-04T17:10:33.181158Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-04-04T17:10:33.181201Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-04-04T17:10:33.181224Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-04-04T17:10:33.181262Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-04-04T17:10:33.181281Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-04-04T17:10:33.181314Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-04-04T17:10:33.181345Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-04-04T17:10:33.181387Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-04-04T17:10:33.181410Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-04-04T17:10:33.181465Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-04-04T17:10:33.181496Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-04-04T17:10:33.181544Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-04-04T17:10:33.181567Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-04-04T17:10:33.181601Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-04-04T17:10:33.181622Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-04-04T17:10:33.181658Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-04-04T17:10:33.181679Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-04-04T17:10:33.181726Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-04-04T17:10:33.181747Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-04-04T17:10:33.181785Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-04-04T17:10:33.181805Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-04-04T17:10:33.181851Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-04-04T17:10:33.181876Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-04-04T17:10:33.181921Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-04-04T17:10:33.181942Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-04-04T17:10:33.181998Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-04-04T17:10:33.182022Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-04-04T17:10:33.182064Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-04-04T17:10:33.182084Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-04-04T17:10:33.182122Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-04-04T17:10:33.182145Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-04-04T17:10:33.182180Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-04-04T17:10:33.182212Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-04-04T17:10:33.182274Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-04-04T17:10:33.182299Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-04-04T17:10:33.182336Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-04-04T17:10:33.182357Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-04-04T17:10:33.182389Z 35 
00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-04-04T17:10:33.182422Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-04-04T17:10:33.182461Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-04-04T17:10:33.182482Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-04-04T17:10:33.204512Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-04-04T17:10:33.206113Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-04-04T17:10:33.206177Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-04-04T17:10:33.206275Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-04-04T17:10:33.206329Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-04-04T17:10:33.206370Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-04-04T17:10:33.206410Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-04-04T17:10:33.206456Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-04-04T17:10:33.206497Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-04-04T17:10:33.206539Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-04-04T17:10:33.206599Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-04-04T17:10:33.206663Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-04-04T17:10:33.206704Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-04-04T17:10:33.206745Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-04-04T17:10:33.206800Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-04-04T17:10:33.206852Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-04-04T17:10:33.206920Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-04-04T17:10:33.206960Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# 
[0:0:0] PipeClient# [18:2730:41] 2025-04-04T17:10:33.207000Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-04-04T17:10:33.207059Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-04-04T17:10:33.207112Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-04-04T17:10:33.207169Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-04-04T17:10:33.207209Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-04-04T17:10:33.207248Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-04-04T17:10:33.207294Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-04-04T17:10:33.207337Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-04-04T17:10:33.207377Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-04-04T17:10:33.207418Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-04-04T17:10:33.207475Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-04-04T17:10:33.207528Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-04-04T17:10:33.207567Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-04-04T17:10:33.207608Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-04-04T17:10:33.207655Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-04-04T17:10:33.207696Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-04-04T17:10:33.207734Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
45m00.119456s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:47.008692Z 19 05h45m00.119456s :BS_NODE DEBUG: [19] VDiskId# [80000034:1:1:2:0] -> [80000034:2:1:2:0] 2025-04-04T17:10:47.008772Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-04T17:10:47.008815Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] VDiskId# [80000034:1:0:1:0] -> [80000034:2:0:1:0] 2025-04-04T17:10:47.008893Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-04T17:10:47.008937Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] VDiskId# [80000034:1:0:2:0] -> [80000034:2:0:2:0] 2025-04-04T17:10:47.008995Z 25 05h45m00.119456s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.009073Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-04-04T17:10:47.009111Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] VDiskId# [80000034:2:2:0:0] PDiskId# 1000 VSlotId# 1010 created 2025-04-04T17:10:47.009168Z 27 05h45m00.119456s :BS_NODE DEBUG: [27] VDiskId# [80000034:2:2:0:0] status changed to INIT_PENDING 2025-04-04T17:10:47.009257Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-04T17:10:47.009303Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] VDiskId# [80000034:1:2:1:0] -> [80000034:2:2:1:0] 2025-04-04T17:10:47.009385Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-04T17:10:47.009428Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] VDiskId# [80000034:1:1:0:0] -> [80000034:2:1:0:0] 2025-04-04T17:10:47.009507Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-04T17:10:47.009550Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] VDiskId# [80000034:1:2:2:0] -> [80000034:2:2:2:0] 2025-04-04T17:10:47.009631Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-04T17:10:47.009673Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] VDiskId# [80000034:1:1:1:0] -> [80000034:2:1:1:0] 2025-04-04T17:10:47.009794Z 34 05h45m00.119456s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-04-04T17:10:47.009828Z 34 05h45m00.119456s :BS_NODE DEBUG: [34] VDiskId# [80000024:2:2:0:0] PDiskId# 1003 VSlotId# 1010 created 2025-04-04T17:10:47.009887Z 34 05h45m00.119456s :BS_NODE DEBUG: [34] VDiskId# [80000024:2:2:0:0] status changed to INIT_PENDING 2025-04-04T17:10:47.009981Z 19 05h45m00.119456s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:47.010025Z 19 05h45m00.119456s :BS_NODE DEBUG: [19] VDiskId# [80000024:1:1:2:0] -> [80000024:2:1:2:0] 2025-04-04T17:10:47.010102Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-04T17:10:47.010145Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] VDiskId# [80000024:1:0:1:0] -> [80000024:2:0:1:0] 2025-04-04T17:10:47.010226Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-04T17:10:47.010295Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] VDiskId# [80000024:1:0:2:0] -> [80000024:2:0:2:0] 2025-04-04T17:10:47.010357Z 25 05h45m00.119456s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.010427Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-04T17:10:47.010472Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] VDiskId# [80000024:1:2:1:0] -> [80000024:2:2:1:0] 2025-04-04T17:10:47.010552Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-04T17:10:47.010593Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] VDiskId# [80000024:1:1:0:0] -> [80000024:2:1:0:0] 2025-04-04T17:10:47.010689Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-04T17:10:47.010734Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] VDiskId# [80000024:1:2:2:0] -> [80000024:2:2:2:0] 
2025-04-04T17:10:47.010810Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-04T17:10:47.010853Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] VDiskId# [80000024:1:1:1:0] -> [80000024:2:1:1:0] 2025-04-04T17:10:47.010978Z 19 05h45m00.119456s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:47.011023Z 19 05h45m00.119456s :BS_NODE DEBUG: [19] VDiskId# [80000014:1:1:2:0] -> [80000014:2:1:2:0] 2025-04-04T17:10:47.011103Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-04T17:10:47.011145Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] VDiskId# [80000014:1:0:1:0] -> [80000014:2:0:1:0] 2025-04-04T17:10:47.011222Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-04T17:10:47.011265Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] VDiskId# [80000014:1:0:2:0] -> [80000014:2:0:2:0] 2025-04-04T17:10:47.011322Z 25 05h45m00.119456s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.011396Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-04-04T17:10:47.011433Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000014:2:2:0:0] PDiskId# 1003 VSlotId# 1010 created 2025-04-04T17:10:47.011496Z 26 05h45m00.119456s :BS_NODE DEBUG: [26] VDiskId# [80000014:2:2:0:0] status changed to INIT_PENDING 2025-04-04T17:10:47.011583Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-04T17:10:47.011622Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] VDiskId# [80000014:1:2:1:0] -> [80000014:2:2:1:0] 2025-04-04T17:10:47.011699Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-04T17:10:47.011739Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] VDiskId# [80000014:1:1:0:0] -> [80000014:2:1:0:0] 2025-04-04T17:10:47.011817Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-04T17:10:47.011859Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] VDiskId# [80000014:1:2:2:0] -> [80000014:2:2:2:0] 2025-04-04T17:10:47.011934Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-04T17:10:47.011974Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] VDiskId# [80000014:1:1:1:0] -> [80000014:2:1:1:0] 2025-04-04T17:10:47.012093Z 36 05h45m00.119456s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2025-04-04T17:10:47.012130Z 36 05h45m00.119456s :BS_NODE DEBUG: [36] VDiskId# [80000004:2:2:0:0] PDiskId# 1003 VSlotId# 1010 created 2025-04-04T17:10:47.012187Z 36 05h45m00.119456s :BS_NODE DEBUG: [36] VDiskId# [80000004:2:2:0:0] status changed to INIT_PENDING 2025-04-04T17:10:47.012275Z 19 05h45m00.119456s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-04-04T17:10:47.012317Z 19 05h45m00.119456s :BS_NODE DEBUG: [19] VDiskId# [80000004:1:1:2:0] -> [80000004:2:1:2:0] 2025-04-04T17:10:47.012396Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-04-04T17:10:47.012435Z 4 05h45m00.119456s :BS_NODE DEBUG: [4] VDiskId# [80000004:1:0:1:0] -> [80000004:2:0:1:0] 2025-04-04T17:10:47.012514Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-04-04T17:10:47.012555Z 7 05h45m00.119456s :BS_NODE DEBUG: [7] VDiskId# [80000004:1:0:2:0] -> [80000004:2:0:2:0] 2025-04-04T17:10:47.012611Z 25 05h45m00.119456s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.012677Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-04-04T17:10:47.012717Z 28 05h45m00.119456s :BS_NODE DEBUG: [28] VDiskId# [80000004:1:2:1:0] -> [80000004:2:2:1:0] 2025-04-04T17:10:47.012796Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-04-04T17:10:47.012838Z 13 05h45m00.119456s :BS_NODE DEBUG: [13] VDiskId# 
[80000004:1:1:0:0] -> [80000004:2:1:0:0] 2025-04-04T17:10:47.012912Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-04-04T17:10:47.012954Z 31 05h45m00.119456s :BS_NODE DEBUG: [31] VDiskId# [80000004:1:2:2:0] -> [80000004:2:2:2:0] 2025-04-04T17:10:47.013031Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-04-04T17:10:47.013072Z 16 05h45m00.119456s :BS_NODE DEBUG: [16] VDiskId# [80000004:1:1:1:0] -> [80000004:2:1:1:0] 2025-04-04T17:10:47.018873Z 34 05h45m01.140456s :BS_NODE DEBUG: [34] VDiskId# [80000054:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.019467Z 34 05h45m01.418456s :BS_NODE DEBUG: [34] VDiskId# [80000023:3:2:2:0] status changed to REPLICATING 2025-04-04T17:10:47.019937Z 36 05h45m02.436456s :BS_NODE DEBUG: [36] VDiskId# [80000004:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.020295Z 35 05h45m02.789456s :BS_NODE DEBUG: [35] VDiskId# [80000044:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.020655Z 26 05h45m02.852456s :BS_NODE DEBUG: [26] VDiskId# [80000014:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.021009Z 27 05h45m03.531456s :BS_NODE DEBUG: [27] VDiskId# [80000034:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.021376Z 34 05h45m04.785456s :BS_NODE DEBUG: [34] VDiskId# [80000024:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.021848Z 27 05h45m04.917456s :BS_NODE DEBUG: [27] VDiskId# [80000064:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.022289Z 34 05h45m04.925456s :BS_NODE DEBUG: [34] VDiskId# [80000074:2:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.024070Z 26 05h45m05.039456s :BS_NODE DEBUG: [26] VDiskId# [8000000c:4:2:0:0] status changed to REPLICATING 2025-04-04T17:10:47.024848Z 26 05h45m10.482456s :BS_NODE DEBUG: [26] VDiskId# [80000014:2:2:0:0] status changed to READY 2025-04-04T17:10:47.025829Z 25 05h45m10.482968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.025896Z 25 05h45m10.482968s :BS_NODE DEBUG: [25] VDiskId# [80000014:1:2:0:0] destroyed 2025-04-04T17:10:47.026044Z 34 05h45m10.923456s :BS_NODE DEBUG: [34] VDiskId# [80000023:3:2:2:0] status changed to READY 2025-04-04T17:10:47.027014Z 25 05h45m10.923968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.027072Z 25 05h45m10.923968s :BS_NODE DEBUG: [25] VDiskId# [80000023:2:2:2:0] destroyed 2025-04-04T17:10:47.027224Z 27 05h45m11.511456s :BS_NODE DEBUG: [27] VDiskId# [80000064:2:2:0:0] status changed to READY 2025-04-04T17:10:47.028019Z 25 05h45m11.511968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.028073Z 25 05h45m11.511968s :BS_NODE DEBUG: [25] VDiskId# [80000064:1:2:0:0] destroyed 2025-04-04T17:10:47.028238Z 34 05h45m14.470456s :BS_NODE DEBUG: [34] VDiskId# [80000024:2:2:0:0] status changed to READY 2025-04-04T17:10:47.029139Z 25 05h45m14.470968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.029192Z 25 05h45m14.470968s :BS_NODE DEBUG: [25] VDiskId# [80000024:1:2:0:0] destroyed 2025-04-04T17:10:47.030186Z 34 05h45m17.504456s :BS_NODE DEBUG: [34] VDiskId# [80000074:2:2:0:0] status changed to READY 2025-04-04T17:10:47.031164Z 25 05h45m17.504968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.031215Z 25 05h45m17.504968s :BS_NODE DEBUG: [25] VDiskId# [80000074:1:2:0:0] destroyed 2025-04-04T17:10:47.031359Z 27 05h45m18.996456s :BS_NODE DEBUG: [27] VDiskId# [80000034:2:2:0:0] status changed to READY 2025-04-04T17:10:47.032125Z 25 05h45m18.996968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 
2025-04-04T17:10:47.032177Z 25 05h45m18.996968s :BS_NODE DEBUG: [25] VDiskId# [80000034:1:2:0:0] destroyed 2025-04-04T17:10:47.032652Z 35 05h45m23.154456s :BS_NODE DEBUG: [35] VDiskId# [80000044:2:2:0:0] status changed to READY 2025-04-04T17:10:47.033475Z 25 05h45m23.154968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.033523Z 25 05h45m23.154968s :BS_NODE DEBUG: [25] VDiskId# [80000044:1:2:0:0] destroyed 2025-04-04T17:10:47.033654Z 36 05h45m23.181456s :BS_NODE DEBUG: [36] VDiskId# [80000004:2:2:0:0] status changed to READY 2025-04-04T17:10:47.034380Z 25 05h45m23.181968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.034426Z 25 05h45m23.181968s :BS_NODE DEBUG: [25] VDiskId# [80000004:1:2:0:0] destroyed 2025-04-04T17:10:47.035107Z 26 05h45m25.587456s :BS_NODE DEBUG: [26] VDiskId# [8000000c:4:2:0:0] status changed to READY 2025-04-04T17:10:47.035869Z 25 05h45m25.587968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.035921Z 25 05h45m25.587968s :BS_NODE DEBUG: [25] VDiskId# [8000000c:3:2:0:0] destroyed 2025-04-04T17:10:47.037991Z 34 05h45m34.220456s :BS_NODE DEBUG: [34] VDiskId# [80000054:2:2:0:0] status changed to READY 2025-04-04T17:10:47.038974Z 25 05h45m34.220968s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-04-04T17:10:47.039025Z 25 05h45m34.220968s :BS_NODE DEBUG: [25] VDiskId# [80000054:1:2:0:0] destroyed >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] |86.8%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |86.8%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS23+StreamLookupJoin-ColumnStore
>> TConsoleTests::TestMergeConfig [GOOD]
>> TConsoleTests::TestRemoveTenant
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestCreateSemaphore
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TConsoleTests::TestCreateTenantWrongPool [GOOD]
>> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TConsoleTests::TestSchemeShardErrorForwarding [GOOD]
>> TConsoleTests::TestScaleRecommenderPolicies
>> TProxyActorTest::TestAttachSession
>> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore
>> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestCreateSemaphore [GOOD]
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestAttachSession [GOOD]
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestCreateSemaphore [GOOD]
>> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD]
>> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency
>> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestAttachSession [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore
>> TConsoleTests::TestAlterUnknownTenant [GOOD]
>> TConsoleTests::TestAlterUnknownTenantExtSubdomain
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestCreateSemaphoreInterrupted
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore
>> KqpFlipJoin::LeftSemi_3 [GOOD]
>> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore
>> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD]
>> TConsoleTests::TestCreateTenantAlreadyExists
>> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD]
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD]
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest
>> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD]
>> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries
|86.9%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|86.9%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 26343, MsgBus: 9929
2025-04-04T17:10:07.839373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506449270202051:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:10:07.840826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001361/r3tmp/tmpzlbVUp/pdisk_1.dat
2025-04-04T17:10:08.387892Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:10:08.392842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:10:08.392968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:10:08.396341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26343, node 1
2025-04-04T17:10:08.473716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:10:08.473732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:10:08.473746Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:10:08.473869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9929
TClient is connected to server localhost:9929
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:10:09.043676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:10:09.056780Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:09.063819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:09.179658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:09.333838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:09.397233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:11.165326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506466450072975:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.165427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.434417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.498000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.528870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.569318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.641078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.710969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.771179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506466450073495:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.771266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.771386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506466450073500:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:11.775012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:11.785289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506466450073502:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:11.875408Z node 1 :TX_PROXY ERROR: Actor# [1:7489506466450073558:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:12.750598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.792390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.829783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.840204Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506449270202051:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:12.840418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:12.902126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"Tables":["FJ_Table_1"],"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1743786613639,"Host":"ghrun-mup5cka3jq","OutputRows":2,"StartTimeMs":1743786613639,"IngressRows":2,"ComputeTimeUs":115,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":1,"Bytes":24}],"TaskId":1,"OutputBytes":24}],"PeakMemoryUsageBytes":65536,"CpuTimeUs":1684}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[3,24]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":2415,"Max":2415,"Min":2415,"History":[3,2415]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[3,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/FJ_Table_2","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":22,"Max":22,"Min":22}}],"BaseTimeMs":1743786613637,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":735,"Max":735,"Min":735,"History":[3,735]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64,"History":[3,64]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum" ... [5:7489506594096585206:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:40.094767Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:40.095064Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489506594096585211:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:40.099286Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:40.111029Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489506594096585213:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:40.187617Z node 5 :TX_PROXY ERROR: Actor# [5:7489506594096585267:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:40.730402Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489506572621746440:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:40.730484Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:41.461589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:41.504543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:41.590393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:41.634514Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8445, MsgBus: 11044 2025-04-04T17:10:44.005610Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506608321017405:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:44.005689Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001361/r3tmp/tmpqkxuzA/pdisk_1.dat 2025-04-04T17:10:44.199306Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:44.216360Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:44.216449Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:44.217974Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8445, node 6 2025-04-04T17:10:44.378977Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:44.379003Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:44.379014Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:44.379196Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11044 TClient is connected to server localhost:11044 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:45.027783Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:45.034905Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:10:45.049821Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:45.136090Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:45.337381Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:45.442683Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:48.476406Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506625500888349:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:48.476510Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:48.520640Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:48.556865Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:48.591626Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:48.629724Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:48.669801Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:48.707106Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:48.807847Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506625500888863:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:48.808006Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:48.808269Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489506625500888868:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:48.812847Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:48.828773Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489506625500888870:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:10:48.883685Z node 6 :TX_PROXY ERROR: Actor# [6:7489506625500888925:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:49.005994Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489506608321017405:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:49.006079Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:50.356141Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:50.403814Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:50.442731Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:50.481643Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes >> AggregateStatistics::ShouldBePings >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> AggregateStatistics::ShouldBePings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-04-04T17:10:56.233463Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.240190Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = 
[1:9:2056], tablet id = 2 2025-04-04T17:10:56.240248Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-04-04T17:10:56.240358Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-04T17:10:56.241033Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2025-04-04T17:10:56.241088Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2025-04-04T17:10:56.241210Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-04T17:10:56.241233Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.241272Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2025-04-04T17:10:56.241290Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-04-04T17:10:56.241349Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2025-04-04T17:10:56.241370Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2025-04-04T17:10:56.241435Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-04-04T17:10:56.241502Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2025-04-04T17:10:56.241523Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 2025-04-04T17:10:56.241548Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2025-04-04T17:10:56.241566Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 2025-04-04T17:10:56.241589Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:10:56.241639Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-04T17:10:56.241655Z node 1 :STATISTICS DEBUG: Skip EvClientConnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2025-04-04T17:10:56.233166Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.234275Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-04-04T17:10:56.234592Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.235841Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2025-04-04T17:10:56.235894Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.236140Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.236290Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-04T17:10:56.236545Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.236682Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2025-04-04T17:10:56.236735Z node 1 :STATISTICS 
DEBUG: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.236927Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [2:44:2057], tablet id = 4, status = OK 2025-04-04T17:10:56.236962Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.236996Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-04-04T17:10:56.237120Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:46:2057], server id = [3:46:2057], tablet id = 5, status = OK 2025-04-04T17:10:56.237162Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:46:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.237207Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-04T17:10:56.237310Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-04T17:10:56.237330Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.237437Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-04T17:10:56.237505Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-04T17:10:56.237521Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.237539Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-04T17:10:56.237589Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-04-04T17:10:56.237644Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:10:56.237736Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-04T17:10:56.237752Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.237799Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:46:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2025-04-04T17:10:56.237820Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.237850Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-04T17:10:56.237866Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.237904Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2025-04-04T17:10:56.237964Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.238083Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-04-04T17:10:56.238134Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:10:56.238285Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2025-04-04T17:10:56.238306Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.238333Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-04T17:10:56.238497Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-04T17:10:56.238551Z node 2 :STATISTICS DEBUG: Send aggregate statistics response 
to node: 1 2025-04-04T17:10:56.238784Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-04-04T17:10:56.238831Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-04-04T17:10:56.224379Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.225086Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-04-04T17:10:56.229535Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.229764Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-04T17:10:56.232401Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.232640Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-04T17:10:56.232677Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.232820Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.233037Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 3, status = OK 2025-04-04T17:10:56.233109Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.233205Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-04-04T17:10:56.233252Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:10:56.233409Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-04-04T17:10:56.233486Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-04-04T17:10:56.233510Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.233632Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-04-04T17:10:56.233685Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-04-04T17:10:56.233822Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-04-04T17:10:56.233865Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-04-04T17:10:56.233923Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:10:56.234019Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-04-04T17:10:56.234041Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.234156Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-04-04T17:10:56.244509Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:10:56.244579Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:10:56.244678Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 
2025-04-04T17:10:56.244704Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:10:56.255324Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-04T17:10:56.255401Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-04-04T17:10:56.255430Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:10:56.255521Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:10:56.255560Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-04-04T17:10:56.255628Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:10:56.255659Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:10:56.255810Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-04-04T17:10:56.255839Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-04-04T17:10:56.221105Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.224266Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-04-04T17:10:56.333043Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-04-04T17:10:56.333108Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-04-04T17:10:56.333160Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-04-04T17:10:56.333664Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-04-04T17:10:56.333703Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.333748Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-04-04T17:10:56.333760Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-04-04T17:10:56.333804Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-04-04T17:10:56.333833Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 |86.9%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> TConsoleTests::TestRemoveTenant [GOOD]
>> TConsoleTests::TestRemoveTenantExtSubdomain
>> TConsoleTests::TestCreateTenantAlreadyExists [GOOD]
>> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain
>> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore
>> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorSingle
>> TDataShardTrace::TestTraceDistributedSelect
>> TDataShardTrace::TestTraceDistributedSelectViaReadActors
>> TDataShardTrace::TestTraceDistributedUpsert+UseSink
>> TDataShardTrace::TestTraceWriteImmediateOnShard
>> TDataShardTrace::TestTraceDistributedUpsert-UseSink
>> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore
>> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore
>> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD]
>> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD]
>> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD]
>> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit
>> ApplyClusterEndpointTest::NoPorts [GOOD]
>> ApplyClusterEndpointTest::PortFromCds [GOOD]
>> ApplyClusterEndpointTest::PortFromDriver [GOOD]
>> BasicUsage::MaxByteSizeEqualZero
>> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression
>> TConsoleTests::TestAlterBorrowedStorage [GOOD]
>> TConsoleTests::TestAlterStorageUnitsOfSharedTenant
>> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly
>> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD]
>> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD]
>> ReadSessionImplTest::HoleBetweenOffsets [GOOD]
>> ReadSessionImplTest::LOGBROKER_7702 [GOOD]
>> ReadSessionImplTest::SuccessfulInit [GOOD]
>> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD]
>> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD]
>> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD]
>> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD]
>> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease
>> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD]
>> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit
>> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD]
Test command err:
2025-04-04T17:11:00.433548Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-04T17:11:00.433582Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-04T17:11:00.433604Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-04-04T17:11:00.434035Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-04-04T17:11:00.436965Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321
2025-04-04T17:11:00.452086Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-04T17:11:00.453116Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster".
Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:00.454671Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.454692Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.454710Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.455063Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.455578Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.455730Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.455911Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:00.456188Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T17:11:00.458373Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.458394Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.458416Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.458674Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.461953Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.462043Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.462272Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:00.471147Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.473554Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:00.473665Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:00.473710Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T17:11:00.475056Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.475083Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.475103Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.486344Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.488217Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.488439Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.488955Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-04-04T17:11:00.491067Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:00.491323Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T17:11:00.491665Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T17:11:00.491861Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T17:11:00.491974Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:00.492020Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:11:00.492055Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:11:00.492219Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-04T17:11:00.492312Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:11:00.492331Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T17:11:00.492353Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:00.492461Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-04T17:11:00.492522Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-04T17:11:00.492552Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-04T17:11:00.492572Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:11:00.492646Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-04T17:11:00.492695Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-04T17:11:00.492727Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-04T17:11:00.492749Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:00.492838Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-04-04T17:11:00.493987Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.494019Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.494038Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.494362Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.494959Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.495088Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.495264Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-04T17:11:00.496076Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:00.497015Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T17:11:00.497346Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T17:11:00.497595Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T17:11:00.497710Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:00.497739Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2025-04-04T17:11:00.497759Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:11:00.497774Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T17:11:00.497804Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:00.498019Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-04-04T17:11:00.498098Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-04T17:11:00.498120Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-04T17:11:00.498137Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-04T17:11:00.498156Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-04T17:11:00.498177Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:00.498329Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2025-04-04T17:11:00.499639Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.499703Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.499742Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.500068Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.500548Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.500687Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.500882Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:00.501821Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:00.502569Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:00.502947Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-04-04T17:11:00.503049Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T17:11:00.503139Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:00.503168Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:11:00.503189Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-04-04T17:11:00.503208Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-04-04T17:11:00.503240Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-04-04T17:11:00.503261Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-04T17:11:00.503404Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-04-04T17:11:00.503517Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-04-04T17:11:00.486226Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.486282Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.486305Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.486892Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.487405Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T17:11:00.487469Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.488470Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.488491Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.488509Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.488821Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.489266Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-04-04T17:11:00.489326Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.490121Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.490142Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.490160Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.490500Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T17:11:00.490559Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.490591Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.490739Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-04-04T17:11:00.491429Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.491448Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.491468Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.491970Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-04-04T17:11:00.492002Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.492031Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.492099Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-04-04T17:11:00.493261Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T17:11:00.493287Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T17:11:00.493315Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.493682Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.494229Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.505563Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T17:11:00.505901Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:00.506266Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-04-04T17:11:00.510072Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-04-04T17:11:00.510776Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:00.510829Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:11:00.510865Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:11:00.510883Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-04T17:11:00.510913Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-04T17:11:00.510930Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-04T17:11:00.510950Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-04-04T17:11:00.510967Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-04-04T17:11:00.511004Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-04-04T17:11:00.511035Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-04-04T17:11:00.511054Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-04-04T17:11:00.511073Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-04-04T17:11:00.511088Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-04-04T17:11:00.511107Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-04-04T17:11:00.511124Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-04-04T17:11:00.511158Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-04-04T17:11:00.511208Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-04-04T17:11:00.511226Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-04-04T17:11:00.511243Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-04-04T17:11:00.511274Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-04-04T17:11:00.511297Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-04-04T17:11:00.511312Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-04-04T17:11:00.511334Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-04-04T17:11:00.511350Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-04-04T17:11:00.511369Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-04-04T17:11:00.511387Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-04-04T17:11:00.511404Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-04-04T17:11:00.511421Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-04-04T17:11:00.511437Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-04-04T17:11:00.511457Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-04-04T17:11:00.511474Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-04-04T17:11:00.511502Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-04-04T17:11:00.511624Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-04-04T17:11:00.511643Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-04-04T17:11:00.511662Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-04-04T17:11:00.511690Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-04-04T17:11:00.511711Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-04-04T17:11:00.511727Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-04-04T17:11:00.511747Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-04-04T17:11:00.511778Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-04-04T17:11:00.511798Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-04-04T17:11:00.511814Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-04-04T17:11:00.511831Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-04-04T17:11:00.511851Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-04-04T17:11:00.511869Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-04-04T17:11:00.511886Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-04-04T17:11:00.511904Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-04-04T17:11:00.511928Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-04-04T17:11:00.511949Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-04-04T17:11:00.511967Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-04-04T17:11:00.512018Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-04T17:11:00.514188Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-04-04T17:11:00.517382Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-04-04T17:11:00.517436Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-04-04T17:11:00.517459Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-04-04T17:11:00.517478Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-04-04T17:11:00.517508Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-04-04T17:11:00.517539Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-04-04T17:11:00.517558Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-04-04T17:11:00.517575Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-04-04T17:11:00.517605Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-04-04T17:11:00.517640Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-04-04T17:11:00.517657Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-04-04T17:11:00.517674Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-04-04T17:11:00.517702Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-04-04T17:11:00.517734Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-04-04T17:11:00.517754Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-04-04T17:11:00.517771Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-04-04T17:11:00.517829Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-04-04T17:11:00.517850Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-04-04T17:11:00.517867Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-04-04T17:11:00.517884Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-04-04T17:11:00.517903Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-04-04T17:11:00.517918Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-04-04T17:11:00.517938Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-04-04T17:11:00.517960Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-04-04T17:11:00.517977Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-04-04T17:11:00.517993Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-04-04T17:11:00.518010Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-04-04T17:11:00.518028Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-04-04T17:11:00.518062Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-04-04T17:11:00.518090Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-04-04T17:11:00.518110Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-04-04T17:11:00.518127Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-04-04T17:11:00.518197Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-04-04T17:11:00.518218Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-04-04T17:11:00.518235Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-04-04T17:11:00.518284Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-04-04T17:11:00.518301Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-04-04T17:11:00.518318Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-04-04T17:11:00.518335Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-04-04T17:11:00.518351Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-04-04T17:11:00.518374Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-04-04T17:11:00.518396Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-04-04T17:11:00.518416Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-04-04T17:11:00.518432Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-04-04T17:11:00.518456Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-04-04T17:11:00.518489Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-04-04T17:11:00.518508Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-04-04T17:11:00.518528Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-04-04T17:11:00.518558Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-04-04T17:11:00.518579Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-04-04T17:11:00.518646Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-04-04T17:11:00.518804Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-04-04T17:11:00.520601Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.520630Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.520650Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.521000Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-04T17:11:00.521463Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.521615Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.523248Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:00.624197Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.624522Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T17:11:00.624577Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:00.624611Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-04T17:11:00.624676Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T17:11:00.826351Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-04T17:11:00.930478Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-04T17:11:00.934332Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-04-04T17:11:00.934480Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-04-04T17:11:00.939560Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.939592Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.939615Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:00.950458Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:00.958563Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:00.958791Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:00.966411Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:01.063790Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:01.066418Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-04-04T17:11:01.066500Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:01.066537Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-04-04T17:11:01.066662Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-04-04T17:11:01.066760Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-04-04T17:11:01.066977Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-04-04T17:11:01.067065Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1
2025-04-04T17:11:01.067178Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster
>> TGroupMapperTest::NonUniformCluster [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD]
Test command err:
==== RunWithTabletReboots
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138]
Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140]
2025-04-04T17:09:18.658823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:09:18.658955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.659000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:09:18.659032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:09:18.660019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:09:18.660076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:09:18.660169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:09:18.660262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:09:18.661892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:09:18.754151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:09:18.754196Z node 1 :IMPORT WARN: Table profiles were not loaded
Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062]
2025-04-04T17:09:18.769178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:09:18.769656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:09:18.769814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:09:18.777958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:09:18.778122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear
TempDirsState with owners number: 0 2025-04-04T17:09:18.778714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.779320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.783149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.789545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:09:18.789592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.789644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:09:18.789821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T17:09:18.796844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T17:09:18.921380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:09:18.921545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.921688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:09:18.921886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:09:18.921948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.924237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.924385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:09:18.924537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.924596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:09:18.924650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:09:18.924684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:09:18.926329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.926370Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:09:18.926399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:09:18.927729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.927763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.927795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.927833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.930684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:09:18.932322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:09:18.932490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:09:18.933363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:09:18.933485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:09:18.933538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.933817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:09:18.933877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:09:18.934028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:09:18.934115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:09:18.935795Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:09:18.935957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:09:18.935989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:09:18.936376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:09:18.936419Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:09:18.936509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.936534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.936564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:09:18.936587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.936619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:09:18.936688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:09:18.936723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the p ... : 1003 Step: 0 Generation: 2 2025-04-04T17:11:00.918413Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-04-04T17:11:00.918544Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 416611830027 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:11:00.918591Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:11:00.918679Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 328 RawX2: 416611830027 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:11:00.918731Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:11:00.918767Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-04T17:11:00.923341Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:11:00.923698Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:11:00.925284Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 
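
The CopyTable trace around this point shows schemeshard's standard operation lifecycle for txId 1003: propose, coordinator planning, per-shard TEvSchemaChanged acknowledgements from datashards 72075186233409546 and 72075186233409547, the CopyTableBarrier, and the final 129 -> 240 state transition once all shards have reported. The table being copied is TTL-enabled: the describe result further below reports TTLSettings with ColumnName: "modified_at" and ExpireAfterSeconds: 3600. For orientation, that schema matches what the public C++ SDK would create with a one-hour TTL clause — a minimal sketch under stated assumptions (in-tree SDK include paths, placeholder endpoint), not the internal test harness this unittest actually drives:

    // Illustrative sketch only. Assumptions: in-tree YDB C++ SDK headers,
    // placeholder endpoint. Produces the same shape of table the test
    // describes below: key Uint64, modified_at Timestamp, TTL of 3600s.
    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    int main() {
        auto driver = NYdb::TDriver(NYdb::TDriverConfig()
            .SetEndpoint("localhost:2136")   // placeholder endpoint
            .SetDatabase("/MyRoot"));        // root path used by the test
        NYdb::NTable::TTableClient client(driver);
        auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) {
            // Interval("PT1H") corresponds to ExpireAfterSeconds: 3600
            // in the schemeshard describe output.
            return session.ExecuteSchemeQuery(R"(
                CREATE TABLE TTLEnabledTableCopy (
                    key Uint64,
                    modified_at Timestamp,
                    PRIMARY KEY (key)
                ) WITH (TTL = Interval("PT1H") ON modified_at);
            )").GetValueSync();
        });
        driver.Stop(true);
        return status.IsSuccess() ? 0 : 1;
    }

The copy operation itself only carries the TTLSettings across to the new table; preserving them is exactly what the final describe check in this test verifies.
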
2025-04-04T17:11:00.925676Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1194 } } 2025-04-04T17:11:00.925721Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-04-04T17:11:00.925875Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1194 } } 2025-04-04T17:11:00.925976Z node 97 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1194 } } 2025-04-04T17:11:00.926674Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 432 RawX2: 416611830114 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:11:00.926715Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-04-04T17:11:00.926827Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 432 RawX2: 416611830114 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:11:00.926870Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:11:00.926946Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 432 RawX2: 416611830114 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-04-04T17:11:00.927003Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:11:00.927039Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:11:00.927075Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:11:00.927116Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:11:00.927142Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-04-04T17:11:00.933898Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:11:00.935046Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:11:00.935513Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:11:00.935558Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-04-04T17:11:00.935596Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:11:00.935625Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-04-04T17:11:00.935679Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-04-04T17:11:00.935705Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2025-04-04T17:11:00.939376Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:11:00.939425Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-04-04T17:11:00.939520Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-04T17:11:00.939555Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:11:00.939591Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-04-04T17:11:00.939619Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:11:00.939655Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-04-04T17:11:00.939695Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-04-04T17:11:00.939732Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-04T17:11:00.939762Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-04T17:11:00.939894Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:11:00.939932Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-04T17:11:00.941646Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-04T17:11:00.941686Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-04T17:11:00.941998Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-04T17:11:00.942071Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-04T17:11:00.942100Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:528:2489] 
TestWaitNotification: OK eventTxId 1003 2025-04-04T17:11:00.942525Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:11:00.942720Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 231us result status StatusSuccess 2025-04-04T17:11:00.943144Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] >> CompressExecutor::TestReorderedExecutor >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks |87.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase
>> TMultiversionObjectMap::MonteCarlo [GOOD]
>> ReadSessionImplTest::DecompressRaw
>> ReadSessionImplTest::DecompressRaw [GOOD]
>> ReadSessionImplTest::DecompressGzip [GOOD]
>> ReadSessionImplTest::DecompressZstd [GOOD]
>> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD]
>> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD]
>> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD]
>> ReadSessionImplTest::DataReceivedCallbackReal
>> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD]
>> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants
>> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorTemporary
>> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD]
>> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD]
>> TConsoleTests::TestAlterServerlessTenant
>> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD]
>> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime
>> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD]
>> ReadSessionImplTest::PartitionStreamStatus
>> ReadSessionImplTest::PartitionStreamStatus [GOOD]
>> ReadSessionImplTest::PartitionStreamCallbacks [GOOD]
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD]
Test command err:
2025-04-04T17:11:03.687615Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-04T17:11:03.687651Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-04T17:11:03.687675Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s
2025-04-04T17:11:03.688225Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session
2025-04-04T17:11:03.688771Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321
2025-04-04T17:11:03.706894Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
2025-04-04T17:11:03.710481Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL)
2025-04-04T17:11:03.711141Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
Post function
2025-04-04T17:11:03.711460Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0
Post function
2025-04-04T17:11:03.714530Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2)
2025-04-04T17:11:03.714673Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1)
2025-04-04T17:11:03.715108Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1)
2025-04-04T17:11:03.715139Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2)
2025-04-04T17:11:03.715167Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes
2025-04-04T17:11:03.715188Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client.
Number of messages 1, size 8 bytes 2025-04-04T17:11:03.722203Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:03.722256Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:03.722280Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:03.726859Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:03.730533Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:03.730692Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:03.731052Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-04T17:11:03.731736Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:03.731868Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T17:11:03.732952Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T17:11:03.733099Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T17:11:03.733189Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:03.733221Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:11:03.733244Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:11:03.733378Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-04-04T17:11:03.733457Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:11:03.733484Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T17:11:03.733505Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:03.733622Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-04-04T17:11:03.733664Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-04T17:11:03.733679Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-04T17:11:03.733698Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:11:03.733744Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-04-04T17:11:03.733778Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-04T17:11:03.733792Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-04T17:11:03.733805Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:03.733865Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-04-04T17:11:03.735289Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:03.735343Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:03.735366Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:03.735751Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:03.736179Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:03.736316Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:03.736516Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-04-04T17:11:03.737378Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:03.737556Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T17:11:03.737861Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T17:11:03.738108Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T17:11:03.738235Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:03.738285Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:11:03.738390Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-04-04T17:11:03.738469Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:11:03.738489Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:11:03.738561Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-04-04T17:11:03.738674Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:11:03.738703Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:11:03.738767Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-04-04T17:11:03.738815Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T17:11:03.738833Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:05.672124Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). 
Partition stream id: 1 GOT RANGE 0 201 2025-04-04T17:11:05.784501Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-04T17:11:05.784773Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-04T17:11:05.784938Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.785557Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.786935Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.787357Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-04T17:11:05.788075Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-04T17:11:05.911773Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-04T17:11:05.915214Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.917055Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:11:05.919758Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:11:05.920583Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-04T17:11:05.925337Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-04T17:11:05.926151Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-04T17:11:05.926972Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-04T17:11:05.927764Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-04T17:11:05.936196Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-04-04T17:11:05.937082Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-04T17:11:05.937187Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-04-04T17:11:05.937436Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:11:05.946484Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-04-04T17:11:05.950201Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.950314Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.950361Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.950713Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.962502Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.962700Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.963109Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-04T17:11:05.963741Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-04T17:11:05.968159Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.968231Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.968269Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.968708Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.969134Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.969265Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.972516Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.974126Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.974321Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.974372Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:11:05.980035Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-04-04T17:11:02.758079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:11:02.758598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:02.758703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f25/r3tmp/tmpNo27VW/pdisk_1.dat 2025-04-04T17:11:03.161728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:11:03.195261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:03.240002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:03.240154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:03.255536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:03.345346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> KqpJoinOrder::TPCDS16-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug4 >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> TDataShardTrace::TestTraceDistributedSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: 2025-04-04T17:11:02.571796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:11:02.572281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:02.572351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ef3/r3tmp/tmpT80CCe/pdisk_1.dat 2025-04-04T17:11:03.017781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:11:03.085308Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:03.127698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:03.128002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:03.139629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:03.242126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.499433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:932:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.499632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:942:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.499761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.506510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:05.534605Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:11:05.748285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:946:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:11:05.836533Z node 1 :TX_PROXY ERROR: Actor# [1:1015:2832] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:06.742296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0whsgf95reqdmj28h4wk94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE1N2E5NjAtNGY2YzFkM2ItNmViNWJhNGUtNzc4MzI3MDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: 2025-04-04T17:11:02.405700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:11:02.406209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:02.406296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f01/r3tmp/tmpaVOd23/pdisk_1.dat 2025-04-04T17:11:03.028723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:11:03.070336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:03.121991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:03.125169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:03.137134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:03.250462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.502715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:932:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.502864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:942:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.503005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.508500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:05.534617Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:11:05.734189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:946:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:11:05.819064Z node 1 :TX_PROXY ERROR: Actor# [1:1015:2832] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:06.705293Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0whsgn0jp1mw8m3ncndzkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ5ZGE4MC0yNWJiMDA4ZS1jY2ZhYzgzNC1jYzY2ZGVkMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(TKqpForwardWriteActor)]) , (RunTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin >> YdbYqlClient::TestTzTypesFullStack >> TGRpcAuthentication::ValidCredentials ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-04-04T17:11:02.533176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:11:02.533661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:02.533732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ee9/r3tmp/tmp0yBz1e/pdisk_1.dat 2025-04-04T17:11:03.015656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:11:03.059291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:03.121988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:03.124503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:03.136898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:03.242568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.490304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:932:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.490423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:942:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.490544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.500369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:05.536863Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:11:05.745988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:946:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:11:05.828402Z node 1 :TX_PROXY ERROR: Actor# [1:1015:2832] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:06.742360Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0whsggek90py5r1gvy045v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZiMmIwMGUtZWJmZjJmMjUtM2UwOWVjY2UtYTAxYjg5ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:06.874619Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0whts06ey1t9pxdcdrenb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA3Y2E2NmYtZWYyMzhjN2MtYTY3YmIzYjgtYWNlYWVkYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:07.149822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0whtwf5xxr2789wgqfmfd0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE1ZmRlYTQtODU5YWU1YzEtNjM2NjEwOGItNjBiNGZiMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2025-04-04T17:11:02.470803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:11:02.471276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:02.471342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f12/r3tmp/tmpKDTJbh/pdisk_1.dat 2025-04-04T17:11:03.010666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:11:03.063232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:03.122654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:03.122817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:03.137734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:03.247909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.510852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:932:2775], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.510980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:942:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.511084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.518465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:05.545472Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:11:05.751674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:946:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:11:05.824040Z node 1 :TX_PROXY ERROR: Actor# [1:1015:2832] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:06.742036Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0whsh3d3xgeqyv9tv52t4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM0ZTZkNmQtZDY5YjZiZi02N2U0MDRjZC1jMTgyYTMwYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:06.881418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0whtrs1ykrr7ek2kkr7vjz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWFhZjc2ZDUtMWNkOGIxZjAtNTI5MzQwNDMtZWI3ZTY0NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:07.557610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0whv3y1w9zbr96wjscabp7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNlMjA5MzktMzhiMTdlODgtODVkNDdmYjQtZGJjZTQ4NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom |87.0%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore >> YdbImport::Simple >> YdbTableBulkUpsert::Nulls >> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull >> TGRpcYdbTest::RemoveNotExistedDirectory >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore [GOOD] >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAttributes >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 31675, MsgBus: 15560 2025-04-04T17:09:40.453026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506335835740876:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:40.453823Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001472/r3tmp/tmpe5mACX/pdisk_1.dat 2025-04-04T17:09:40.814545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:40.841880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:40.841995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 31675, node 1 2025-04-04T17:09:40.844693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:40.971440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:40.971464Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:40.971472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:40.971606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15560 TClient is connected to server localhost:15560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:41.594026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:41.607998Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:41.623056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:41.768962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:41.941598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:42.020081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:43.699156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506348720644459:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:43.699242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:43.911257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:43.946701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:43.981896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.019196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.052904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.088070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.165476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506353015612273:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:44.165527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:44.165668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506353015612278:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:44.169837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:44.181472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506353015612280:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:44.261909Z node 1 :TX_PROXY ERROR: Actor# [1:7489506353015612335:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:45.230350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.296305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.368267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.403382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.443889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.464725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506335835740876:2133];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:45.465007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:45.606132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.681026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.736186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.769627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.800692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.840138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.873339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:45.900177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-04-04T17:09:46.403675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480
2025-04-04T17:09:46.444616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-04-04T17:09:46.497111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-04-04T17:09:46.529919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
202 ... : { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:11:04.898293Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:11:04.939736Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-04T17:11:04.990827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.044765Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.080400Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.266524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.347193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.476330Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.538373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.594185Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.634752Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.714327Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480
2025-04-04T17:11:05.766064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480
2025-04-04T17:11:06.686876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480
2025-04-04T17:11:06.763822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480
2025-04-04T17:11:06.817157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480
2025-04-04T17:11:06.860601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480
2025-04-04T17:11:06.937226Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.024950Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.086606Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.132336Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.173967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.223360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.263607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.344443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.410371Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.457655Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.528495Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.568891Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.606274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.657880Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.703286Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.744358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.786978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.861326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.909387Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.980926Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.330182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480
2025-04-04T17:11:08.375137Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.416653Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.488898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.538399Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.589319Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.661270Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.716109Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.794903Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.038971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.096083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.191615Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.271951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.390804Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715721:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.452278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore [GOOD]
>> YdbYqlClient::TestTzTypesFullStack [GOOD]
>> YdbYqlClient::TestVariant
>> TGRpcAuthentication::ValidCredentials [GOOD]
>> TGRpcAuthentication::NoDescribeRights
>> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD]
>> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression
>> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD]
>> PersQueueSdkReadSessionTest::StopResumeReadingData
>> BasicUsage::MaxByteSizeEqualZero [GOOD]
>> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage
>> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD]
>> YdbYqlClient::ColumnFamiliesWithStorageAndIndex
>> TGRpcNewClient::YqlQueryWithParams
>> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD]
>> TGRpcYdbTest::SdkUuid
>> YdbImport::Simple [GOOD]
>> YdbImport::EmptyData
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 13385, MsgBus: 18192
2025-04-04T17:09:37.624403Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506323207863283:2145];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:37.625887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001488/r3tmp/tmpMCcMVv/pdisk_1.dat
2025-04-04T17:09:38.019449Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:38.024996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:38.025082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:38.027945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13385, node 1
2025-04-04T17:09:38.201536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:38.201559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:38.201569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:38.201679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18192
TClient is connected to server localhost:18192
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:38.880239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:38.898507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:09:38.906295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:39.108701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:39.298188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:39.389388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:40.949172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506336092766850:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:40.949271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:41.132110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:41.195269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:41.228812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:41.259637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:41.290287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:41.338226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:41.394188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506340387734659:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:41.394360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:41.394737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506340387734665:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:41.398132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:41.419853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-04T17:09:41.425978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506340387734667:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:41.489210Z node 1 :TX_PROXY ERROR: Actor# [1:7489506340387734720:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:42.354741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.392614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.423954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.453926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.485515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.593811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.625354Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506323207863283:2145];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:42.630791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.632235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:42.670602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.703311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.729490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.754712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.791086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-04-04T17:09:42.840241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-04-04T17:09:43.316369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480
2025-04-04T17:09:43.347504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-04-04T17:09:43.376155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-04-04T17:09:43.420145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operati ... : { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:11:07.021183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.103583Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.140588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.183088Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.241973Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.501090Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.561928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.614977Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.677136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.741088Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.789682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.876414Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480
2025-04-04T17:11:07.990146Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480
2025-04-04T17:11:08.869500Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480
2025-04-04T17:11:08.947733Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.101180Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.194114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.274084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.334887Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.377185Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.452207Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.508686Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.564088Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.625162Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.661319Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.738083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.796877Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.868728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.918681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480
2025-04-04T17:11:09.964660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.003356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.048288Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.093879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.137209Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.190950Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.242141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.298104Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.551095Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480
2025-04-04T17:11:10.599186Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.701077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.759062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.809729Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.863590Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.920581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480
2025-04-04T17:11:10.975018Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480
2025-04-04T17:11:11.047107Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480
2025-04-04T17:11:11.208392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480
2025-04-04T17:11:11.257287Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480
2025-04-04T17:11:11.320380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480
2025-04-04T17:11:11.379542Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480
2025-04-04T17:11:11.484275Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715721:0, at schemeshard: 72057594046644480
2025-04-04T17:11:11.551898Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480
>> TGroupMapperTest::ReassignGroupTest3dc [GOOD]
>> TAuthenticationWithSqlExecution::CreateAlterUserWithHash [GOOD]
>> TDatabaseQuotas::DisableWritesToDatabase
>> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD]
>> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant
>> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS88-StreamLookupJoin-ColumnStore
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD]
>> TConsoleTests::TestAttributes [GOOD]
>> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning
>> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter
>> YdbYqlClient::SecurityTokenAuth
>> YdbYqlClient::TestVariant [GOOD]
>> YdbYqlClient::TestTransactionQueryError
>> CompressExecutor::TestReorderedExecutor [GOOD]
>> PersQueueSdkReadSessionTest::ReadSessionWithAbort
>> TConsoleTests::TestAuthorization [GOOD]
>> TConsoleTests::TestAuthorizationExtSubdomain
>> OlapEstimationRowsCorrectness::TPCH3 [GOOD]
>> OlapEstimationRowsCorrectness::TPCH5
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD]
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags
>> TGRpcAuthentication::NoDescribeRights [GOOD]
>> TGRpcClientLowTest::BiStreamPing
>> YdbIndexTable::AlterIndexImplBySuperUser
>> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore
>> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore
>> TGRpcYdbTest::SdkUuid [GOOD]
>> TGRpcYdbTest::SdkUuidViaParams
>> YdbImport::EmptyData [GOOD]
>> YdbImport::ImportFromS3ToExistingTable
>> TGRpcNewClient::YqlQueryWithParams [GOOD]
>> TGRpcNewClient::YqlExplainDataQuery
>> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD]
>> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF
>> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD]
>> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex
>> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin
>> TGRpcClientLowTest::BiStreamPing [GOOD]
>> TGRpcClientLowTest::BiStreamCancelled
>> YdbYqlClient::TestTransactionQueryError [GOOD]
>> YdbYqlClient::TestReadWrongTable
>> YdbYqlClient::SecurityTokenAuth [GOOD]
>> YdbYqlClient::RetryOperationTemplate
>> TConsoleTests::TestAuthorizationExtSubdomain [GOOD]
>> TConsoleTests::TestAttributesExtSubdomain
>> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD]
>> TConsoleTests::TestRemoveServerlessTenant
>> TGRpcYdbTest::SdkUuidViaParams [GOOD]
>> TGRpcYdbTest::ReadTablePg
>> YdbImport::ImportFromS3ToExistingTable [GOOD]
>> TYqlDecimalTests::SimpleUpsertSelect
>> YdbIndexTable::AlterIndexImplBySuperUser [GOOD]
>> YdbIndexTable::CreateTableAddIndex
>> TGRpcNewClient::YqlExplainDataQuery [GOOD]
>> TGRpcNewCoordinationClient::CheckUnauthorized
>> TGroupMapperTest::Block42_1disk [GOOD]
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD]
>> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore
>> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest
>> TGRpcClientLowTest::BiStreamCancelled [GOOD]
>> TGRpcClientLowTest::ChangeAcl
>> YdbYqlClient::RetryOperationTemplate [GOOD]
>> YdbYqlClient::RetryOperationSync
>> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD]
>> BasicUsage::WriteAndReadSomeMessagesWithNoCompression
>> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD]
>> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile
>> YdbYqlClient::TestReadWrongTable [GOOD]
>> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD]
>> BasicUsage::TWriteSession_AutoBatching [GOOD]
>> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD]
>> BasicUsage::BrokenCredentialsProvider
>> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore
>> YdbIndexTable::CreateTableAddIndex [GOOD]
>> YdbIndexTable::AlterTableAddIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD]
Test command err:
2025-04-04T17:11:09.267759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506715510471690:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:09.267822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00134b/r3tmp/tmp4luoP2/pdisk_1.dat
2025-04-04T17:11:09.953254Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 16766, node 1
2025-04-04T17:11:10.072241Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:11:10.072743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:10.072773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:10.072779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:10.072881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:11:10.073131Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:11:10.126947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:10.127063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:10.132022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:14743
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:11:10.485745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:12.859717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506728395374645:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:12.859793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:12.860682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506728395374653:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:12.864334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-04T17:11:12.888942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506728395374659:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-04T17:11:12.987660Z node 1 :TX_PROXY ERROR: Actor# [1:7489506728395374732:2679] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:11:15.206062Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506744197782945:2073];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:15.206174Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00134b/r3tmp/tmpvaAhQA/pdisk_1.dat
2025-04-04T17:11:15.534411Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:11:15.578494Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:15.578572Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:15.585296Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7342, node 4
2025-04-04T17:11:15.792402Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:15.792421Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:15.792427Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:15.792550Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1673
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:11:16.017733Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:18.332276Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506757082685888:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:18.332373Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:18.332653Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506757082685900:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:18.336339Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-04T17:11:18.385167Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506757082685902:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-04T17:11:18.499044Z node 4 :TX_PROXY ERROR: Actor# [4:7489506757082685979:2676] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:11:20.256262Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506765308353399:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:20.258723Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00134b/r3tmp/tmpSXk8aw/pdisk_1.dat
2025-04-04T17:11:20.549423Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:11:20.630717Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:20.630800Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:20.637692Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18798, node 7
2025-04-04T17:11:20.755775Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:20.755801Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:20.755809Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:20.755954Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21457
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingPa ... info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:23.646460Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:23.671322Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-04T17:11:23.887391Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489506778193256534:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:23.887503Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:23.887857Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489506778193256539:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:11:23.891626Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-04-04T17:11:23.915772Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489506778193256541:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-04-04T17:11:23.999535Z node 7 :TX_PROXY ERROR: Actor# [7:7489506778193256616:2805] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:11:24.278388Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wjbfecw22bbkx1dkhk42j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc3M2Y2NTYtZDA4MTkxODUtMjY4MzVkMC01NGZhZTQ0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T17:11:24.293319Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wjbfecw22bbkx1dkhk42j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc3M2Y2NTYtZDA4MTkxODUtMjY4MzVkMC01NGZhZTQ0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T17:11:24.296770Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wjbfecw22bbkx1dkhk42j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTc3M2Y2NTYtZDA4MTkxODUtMjY4MzVkMC01NGZhZTQ0Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T17:11:24.430931Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wjbycd5j42dek38z7mgj8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWMzZDgzMDMtZjA5NWYzZTctZDNlYzdmYi0xMjQ3M2Q3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-04-04T17:11:24.509868Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YTc3M2Y2NTYtZDA4MTkxODUtMjY4MzVkMC01NGZhZTQ0Mw==, ActorId: [7:7489506778193256328:2334], ActorState: ExecuteState, TraceId: 01jr0wjc0y1b4xmfxt29gstdjr, Create QueryResponse for error on request, msg:
2025-04-04T17:11:26.319556Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506788777168484:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:26.319611Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00134b/r3tmp/tmpZXtzFp/pdisk_1.dat
2025-04-04T17:11:26.786800Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:11:26.816889Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:26.816978Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:26.821444Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15720, node 10
2025-04-04T17:11:27.010985Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:27.011013Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:27.011021Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:27.011181Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:27.411557Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:27.514532Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jr0wjf0r0d3366335w6a1h2f, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37482, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.995749s 2025-04-04T17:11:27.538430Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jr0wjf1f320pja0k79j53qge, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37494, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:11:29.965183Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0wjhdd1vnfthsxgdk2exze, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37510, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:11:29.969184Z node 10 :TX_PROXY ERROR: [ReadTable [10:7489506801662071450:2336] TxId# 281474976710658] Navigate request failed for table 'Root/NoTable' 2025-04-04T17:11:29.969292Z node 10 :TX_PROXY ERROR: [ReadTable [10:7489506801662071450:2336] TxId# 281474976710658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-04-04T17:11:29.969737Z node 10 :READ_TABLE_API NOTICE: [10:7489506801662071449:2336] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-04-04T17:11:29.979832Z node 10 :GRPC_SERVER DEBUG: [0x51a000103e80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.979945Z node 10 :GRPC_SERVER DEBUG: [0x51a000106880] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980034Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a3880] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980139Z node 10 :GRPC_SERVER DEBUG: [0x51a000106280] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980197Z node 10 :GRPC_SERVER DEBUG: [0x51a000105680] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980320Z node 10 :GRPC_SERVER DEBUG: [0x51a000105080] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980347Z node 10 :GRPC_SERVER DEBUG: [0x51a000104a80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980488Z node 10 :GRPC_SERVER DEBUG: [0x51a000105c80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980496Z node 10 :GRPC_SERVER DEBUG: [0x51a000104480] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980640Z node 10 :GRPC_SERVER DEBUG: [0x51a000072680] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980649Z node 10 :GRPC_SERVER DEBUG: [0x51a0000bb880] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980795Z node 10 :GRPC_SERVER DEBUG: [0x51b0001b2e80] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980796Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b7680] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980939Z node 10 :GRPC_SERVER DEBUG: [0x51a00004e680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.980944Z node 10 :GRPC_SERVER DEBUG: [0x51a000108680] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.981085Z node 10 :GRPC_SERVER DEBUG: [0x51a000107a80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.981096Z node 10 :GRPC_SERVER DEBUG: [0x51a000106e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.981229Z node 10 :GRPC_SERVER DEBUG: [0x51a00002b880] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.981245Z node 10 :GRPC_SERVER DEBUG: [0x51a000087080] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.981374Z node 10 :GRPC_SERVER DEBUG: [0x51a000108080] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.981393Z node 10 :GRPC_SERVER DEBUG: [0x51a00004da80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:11:29.981503Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f1880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> 
ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::CreateAlter >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] >> OlapEstimationRowsCorrectness::TPCDS78 >> TGRpcYdbTest::ReadTablePg [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TYqlDecimalTests::NegativeValues >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> TDatabaseQuotas::DisableWritesToDatabase [GOOD] >> TGRpcAuthentication::InvalidPassword >> KqpJoinOrder::TPCDS61-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-04T17:11:00.497370Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1743786660497343 2025-04-04T17:11:00.969067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506677041298741:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:00.969132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:01.106718Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506681051003277:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:01.335463Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:01.335818Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/001003/r3tmp/tmp4HDGxS/pdisk_1.dat 2025-04-04T17:11:01.376751Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:11:01.752107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:01.752273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:01.755729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:01.757215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:01.763750Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:11:01.763877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:01.772601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:01.860648Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62943, node 1 2025-04-04T17:11:01.986452Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:01.986513Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:02.058430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:02.289019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001003/r3tmp/yandexntpxgk.tmp 2025-04-04T17:11:02.289064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001003/r3tmp/yandexntpxgk.tmp 2025-04-04T17:11:02.289224Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001003/r3tmp/yandexntpxgk.tmp 2025-04-04T17:11:02.289358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:11:02.365349Z INFO: TTestServer started on Port 62008 GrpcPort 62943 TClient is connected to server localhost:62008 PQClient connected to localhost:62943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:02.661843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
waiting... 2025-04-04T17:11:02.810743Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:05.031364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506698230872616:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.031450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506698230872628:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.031519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:05.048915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:11:05.067483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506698230872631:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:11:05.334313Z node 2 :TX_PROXY ERROR: Actor# [2:7489506698230872659:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:05.355832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.358303Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489506698516136301:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:05.359978Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTIxZmFjNDUtZTMxZTYwOTEtNGU4NjlmZTctYTA4MTI3ODM=, ActorId: [1:7489506698516136267:2337], ActorState: ExecuteState, TraceId: 01jr0whs5g4xyk1qx8gqb5gve7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:05.362103Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:05.369412Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489506698230872666:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:05.369688Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjM0M2JmNzMtOTIwNmNlZjAtMWI1YWU5MTItNDZjYzJkOWE=, ActorId: [2:7489506698230872614:2308], ActorState: ExecuteState, TraceId: 01jr0whs1nemap69d9wkve4c4c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:05.370105Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:05.509405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.718763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:62943", true, true, 1000); 2025-04-04T17:11:05.968987Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506677041298741:2085];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:05.969040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:06.000295Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0whsvmerbf69dp7mddfksd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVhOWZiYTQtNTczOGM4ODMtNjA5MGMwMmUtMTg1NzAwZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:06.036425Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506681051003277:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:06.036471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7489506702811103993:2975] === CheckClustersList. Ok 2025-04-04T17:11:12.124880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:62943 ... 
3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11184268552663650528_v1 grpc read done: success# 1, data# { read { } } 2025-04-04T17:11:31.052678Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11184268552663650528_v1 got read request: guid# 9320c8-32085871-a8020a72-cc9e08df 2025-04-04T17:11:31.040092Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 4 added 0 blobs, size 0 count 0 last offset 2, current partition end offset: 3 2025-04-04T17:11:31.054084Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_11184268552663650528_v1 grpc read done: success# 0, data# { } 2025-04-04T17:11:31.040127Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 4. All data is from uncompacted head. 2025-04-04T17:11:31.054098Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11184268552663650528_v1 grpc read failed 2025-04-04T17:11:31.054120Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11184268552663650528_v1 grpc closed 2025-04-04T17:11:31.054162Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_11184268552663650528_v1 is DEAD 2025-04-04T17:11:31.058576Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489506804424202275:2550] disconnected; active server actors: 1 2025-04-04T17:11:31.058603Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489506804424202275:2550] client user disconnected session shared/user_3_1_11184268552663650528_v1 2025-04-04T17:11:31.040144Z node 4 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:11:31.040228Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-04T17:11:31.089514Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_11184268552663650528_v1 2025-04-04T17:11:31.089558Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489506804424202278:2553] destroyed 2025-04-04T17:11:31.089604Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_11184268552663650528_v1 2025-04-04T17:11:31.514466Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489506813014137075:2593], TxId: 281474976720698, task: 1, CA Id [3:7489506813014137073:2593]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-04T17:11:31.521784Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:11:31.521817Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:31.554943Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489506813014137075:2593], TxId: 281474976720698, task: 1, CA Id [3:7489506813014137073:2593]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:11:31.612122Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489506813014137075:2593], TxId: 281474976720698, task: 1, CA Id [3:7489506813014137073:2593]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:11:31.651199Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489506813014137075:2593], TxId: 281474976720698, task: 1, CA Id [3:7489506813014137073:2593]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:11:31.727680Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489506813014137075:2593], TxId: 281474976720698, task: 1, CA Id [3:7489506813014137073:2593]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:11:32.523952Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.523992Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.524022Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:32.524402Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:32.524877Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:32.525124Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.525486Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-04T17:11:32.526841Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.526905Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.526933Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:32.527319Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:32.538525Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:32.538760Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.539086Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:32.540098Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:32.547823Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-04T17:11:32.547922Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-04T17:11:32.548315Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:32.548356Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:11:32.548381Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:11:32.548425Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-04T17:11:32.550127Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.550158Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.550275Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:32.550519Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:32.550894Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:32.550996Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.551181Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:32.551861Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.552058Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:32.552194Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:32.552278Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:11:32.552377Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2025-04-04T17:11:32.554006Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.554056Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.554082Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:32.554375Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:32.568639Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:32.568827Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:32.569091Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:32.569811Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:32.570271Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:11:32.570548Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-04T17:11:32.570609Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:32.570685Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-04T17:11:32.570715Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-04T17:11:32.570854Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:11:32.570891Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:11:34.573443Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:34.573479Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:34.573524Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:34.573839Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:34.574255Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:34.574581Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:34.579359Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:34.579560Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:34.579637Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:34.579724Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] Test command err: 2025-04-04T17:11:08.774549Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506711144554932:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:08.774597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001357/r3tmp/tmpgL7Pjd/pdisk_1.dat 2025-04-04T17:11:09.300626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:09.300744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:09.303894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16044, node 1 2025-04-04T17:11:09.350652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:09.422478Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:09.422498Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:09.506701Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:09.506745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:09.506758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:09.506889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62454 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:09.840990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:14.114291Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506736249783269:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:14.136215Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001357/r3tmp/tmpKORQUt/pdisk_1.dat 2025-04-04T17:11:14.454937Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:14.508204Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:14.508304Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:14.518912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26860, node 4 2025-04-04T17:11:14.699270Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:14.699300Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:14.699308Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:14.699477Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25271 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:14.982596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:19.474869Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506761272008625:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:19.642210Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001357/r3tmp/tmp6xWxv7/pdisk_1.dat 2025-04-04T17:11:19.915642Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:19.938926Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:19.939031Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:19.951931Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19957, node 7 2025-04-04T17:11:20.236868Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:20.236894Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:20.236903Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:20.237048Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28407 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:20.559643Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:25.035800Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506783552251105:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:25.035848Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001357/r3tmp/tmpkzVTT4/pdisk_1.dat 2025-04-04T17:11:25.215337Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29904, node 10 2025-04-04T17:11:25.355056Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:25.355082Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:25.355089Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:25.355245Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:11:25.355486Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:25.355556Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:25.361044Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:25.655585Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:30.411854Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506805469773056:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:30.420303Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001357/r3tmp/tmpmEQYH1/pdisk_1.dat 2025-04-04T17:11:30.675429Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:30.725226Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:30.725344Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:30.728008Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24998, node 13 2025-04-04T17:11:30.838678Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:30.838706Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:30.838715Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:30.838858Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61073 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:31.142169Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> TGRpcClientLowTest::ChangeAcl [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> YdbIndexTable::AlterTableAddIndex [GOOD] >> YdbLogStore::AlterLogStore ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2025-04-04T17:11:09.322572Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506716627420283:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:09.322621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001353/r3tmp/tmpjA7iTG/pdisk_1.dat 2025-04-04T17:11:09.851633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:09.851736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:09.869317Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:09.872457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11589, node 1 2025-04-04T17:11:09.958222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:09.958251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:09.958261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:09.958355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8460 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:10.509344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8460 TClient is connected to server localhost:8460 2025-04-04T17:11:11.181813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:13.061884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506733807290573:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:13.061884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506733807290549:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:13.061955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:13.065018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T17:11:13.107863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506733807290578:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:11:13.177943Z node 1 :TX_PROXY ERROR: Actor# [1:7489506733807290661:2720] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:8460 TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786670577 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\204\002\032\004user \003" EffectiveACL: "\n\016\010\001\020\200\204\002\032\004user \003" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743786673132 ParentPathId: 1 PathState: EPathStateCreate Owner: "met... (TRUNCATED) 2025-04-04T17:11:15.493625Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506741894100890:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:15.497755Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001353/r3tmp/tmpvkR4En/pdisk_1.dat 2025-04-04T17:11:15.687076Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:15.716234Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:15.716318Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:15.720433Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2123, node 4 2025-04-04T17:11:15.890783Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:15.890800Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:15.890805Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:15.890899Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24250 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:16.109653Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24250 TClient is connected to server localhost:24250 2025-04-04T17:11:16.649853Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:18.787400Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506754779003806:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:18.787492Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:18.788682Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506754779003833:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:18.794186Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:11:18.822362Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506754779003835:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:11:18.927885Z node 4 :TX_PROXY ERROR: Actor# [4:7489506754779003926:2705] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:24250 TClient::Ls request: Root 2025-04-04T17:11:19.328311Z node 4 :TX_PROXY ERROR: Access denied for user with access DescribeSchema to path Root TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 12 ErrorReason: "Access denied" 2025-04-04T17:11:21.187062Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506769633873675:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:21.187528Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001353/r3tmp/tmpKwRu49/pdisk_1.dat 2025-04-04T17:11:21.366294Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:21.396876Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:21.396961Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:21.400792Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62899, node 7 2025-04-04T17:11:21.454969Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:21.454992Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:21.454999Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:21.455129Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:21.779089Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:21.946132Z node 7 :TICKET_PARSER ERROR: Ticket some****oken (BB86510A): Could not find correct token validator 2025-04-04T17:11:21.946279Z node 7 :GRPC_SERVER ERROR: Received TEvRefreshTokenResponse, Authenticated = 0 2025-04-04T17:11:26.106893Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506791079607870:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:26.106949Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001353/r3tmp/tmpl7MolF/pdisk_1.dat 2025-04-04T17:11:26.351940Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21659, node 10 2025-04-04T17:11:26.424705Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:26.424794Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:26.439674Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:26.439700Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:26.439707Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:26.439845Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:11:26.448660Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:26.767663Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
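The TICKET_PARSER error above ("Could not find correct token validator", Authenticated = 0) means the test server received a token (masked as some****oken) but has no validator configured for its type, so the gRPC layer treats the caller as unauthenticated. On the client side the token is attached through the driver config; a sketch with placeholder values:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>

    // Placeholder endpoint and token. Whether the token is accepted is decided
    // server-side; with no matching validator the request is rejected exactly
    // as in the log above.
    auto driver = NYdb::TDriver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:2135")
        .SetDatabase("/Root")
        .SetAuthToken("some-token"));  // logged in masked form, e.g. "some****oken"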
2025-04-04T17:11:26.778637Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:11:31.307126Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506809084367536:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:31.307171Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001353/r3tmp/tmpxjW9T3/pdisk_1.dat 2025-04-04T17:11:31.487934Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:31.509506Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:31.509592Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:31.516964Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10671, node 13 2025-04-04T17:11:31.806981Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:31.807009Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:31.807021Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:31.807177Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:32.171805Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
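Each "TClient::Ls request/response" pair in these test logs is a describe of a scheme path; TClient is the in-tree test harness speaking to TX_PROXY directly. The closest public-SDK equivalent is the scheme client's DescribePath; a sketch reusing the driver from the first example (names per the public C++ SDK, not from this test):

    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>
    #include <util/stream/output.h>

    NYdb::NScheme::TSchemeClient scheme(driver);
    auto described = scheme.DescribePath("/Root").GetValueSync();
    if (described.IsSuccess()) {
        // Mirrors the PathType/Owner fields in the Ls output above
        // (EPathTypeDir, owner "root@builtin").
        const auto& entry = described.GetEntry();
        Cout << entry.Name << " owned by " << entry.Owner << Endl;
    }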
TClient is connected to server localhost:22942 2025-04-04T17:11:32.637724Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2025-04-04T17:11:05.287094Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.287127Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.287151Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.287590Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.299310Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.299507Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.299881Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.300288Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.300410Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.300509Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.300549Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:11:05.301186Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.301220Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.301239Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.301572Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.302174Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.302296Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.302466Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.302841Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.302940Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.303010Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.303055Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-04-04T17:11:05.303850Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.303871Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.303889Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.304229Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.304993Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.305111Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.305264Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.305886Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.306039Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.306124Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.306177Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:11:05.306968Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.306986Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.307022Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.307265Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.307763Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.307855Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.308041Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.309343Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.309791Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.309880Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.309919Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:11:05.310658Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.310679Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.310697Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.310988Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-04T17:11:05.311693Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.311848Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.312024Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.312381Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.312487Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.312574Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.312613Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T17:11:05.313111Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.313146Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.313164Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.313398Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.313915Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.314012Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.314192Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.314569Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.314683Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.314758Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.314788Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T17:11:05.315503Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.315521Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.315540Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.315785Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.316252Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.316339Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.316542Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.317093Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.317240Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.317316Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-04T17:11:05.317349Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T17:11:05.318039Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.318060Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.318098Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:11:05.318344Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:11:05.318959Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:11:05.319066Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.319223Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:11:05.320473Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:05.320824Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:11:05.320886Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:11:05.320916Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:11:05.326660Z :ReadSession INFO: Random seed for debugging is 1743786665326637 2025-04-04T17:11:05.687102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506700652526792:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:05.687160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:05.836744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506697556408376:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:05.836880Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... 2, Partition: 0, State: StateIdle] read cookie 5 added 0 blobs, size 0 count 0 last offset 2, current partition end offset: 3 2025-04-04T17:11:24.965671Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 5. All data is from uncompacted head. 
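The DEBUG trace surrounding this point (Reconnecting session, "Confirm partition stream create", "Decompression task done", "Take Data", "Commit offsets") is the client-side read-session state machine that ReadSessionImplTest drives through the older persqueue_public interface. A condensed sketch of the same loop against the current topic API; the exact names are assumed from the public C++ SDK, not taken from this test:

    #include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>
    #include <util/stream/output.h>

    void ReadLoop(NYdb::TDriver& driver) {
        NYdb::NTopic::TTopicClient topic(driver);
        auto session = topic.CreateReadSession(
            NYdb::NTopic::TReadSessionSettings()
                .ConsumerName("user")              // "shared/user" in the proxy log
                .AppendTopics("test-topic"));

        while (auto event = session->GetEvent(/*block=*/true)) {
            using namespace NYdb::NTopic;
            if (auto* start = std::get_if<TReadSessionEvent::TStartPartitionSessionEvent>(&*event)) {
                start->Confirm();                  // "Confirm partition stream create"
            } else if (auto* data = std::get_if<TReadSessionEvent::TDataReceivedEvent>(&*event)) {
                for (auto& msg : data->GetMessages()) {
                    Cout << msg.GetData() << Endl; // "Take Data. Partition ..."
                    msg.Commit();                  // emits the offset_ranges commit seen below
                }
            } else if (std::get_if<TSessionClosedEvent>(&*event)) {
                break;                             // e.g. "Close with zero timeout" / "Aborted"
            }
        }
    }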
2025-04-04T17:11:24.965686Z node 2 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:11:24.965751Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-04T17:11:24.966420Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 15 } 2025-04-04T17:11:24.966471Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0] Write session: acknoledged message 1 2025-04-04T17:11:24.966310Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 2 Data: "... 79 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1743786684947 CreateTimestampMS: 1743786684946 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 2 } 2025-04-04T17:11:24.966399Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 4 from offset3 2025-04-04T17:11:24.966430Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 477b2db7-e1d4f2c3-33fc9f6e-8c091072 has messages 1 2025-04-04T17:11:24.966706Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 read done: guid# 477b2db7-e1d4f2c3-33fc9f6e-8c091072, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 200 2025-04-04T17:11:24.966731Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 response to read: guid# 477b2db7-e1d4f2c3-33fc9f6e-8c091072 2025-04-04T17:11:24.966916Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 Process answer. Aval parts: 0 2025-04-04T17:11:24.974836Z :DEBUG: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:24.978303Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-04T17:11:24.978382Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-04T17:11:24.946000Z WriteTime: 2025-04-04T17:11:24.947000Z Ip: "ipv6:[::1]:45230" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:45230" } } } 2025-04-04T17:11:24.982405Z :DEBUG: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] [dc1] Commit offsets [2, 3). 
Partition stream id: 1 2025-04-04T17:11:24.982663Z :DEBUG: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:11:24.982763Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 grpc read done: success# 1, data# { read { } } 2025-04-04T17:11:24.982831Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 got read request: guid# 8747c553-16b709a7-bab1c4a8-6af35a26 2025-04-04T17:11:24.986462Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2025-04-04T17:11:24.986654Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2025-04-04T17:11:24.987142Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:11:24.987175Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:11:24.987268Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_11385134726031808367_v1 2025-04-04T17:11:24.987368Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:11:24.991467Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:11:24.991530Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:11:24.991577Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-04-04T17:11:24.992065Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2025-04-04T17:11:24.992105Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2025-04-04T17:11:24.992153Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2025-04-04T17:11:24.998396Z :DEBUG: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2025-04-04T17:11:25.047046Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0] Write session will now close 2025-04-04T17:11:25.047090Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId 
[test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0] Write session: aborting 2025-04-04T17:11:25.047453Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:11:25.047490Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0] Write session: destroy 2025-04-04T17:11:25.050351Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0 grpc read done: success: 0 data: 2025-04-04T17:11:25.050373Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0 grpc read failed 2025-04-04T17:11:25.055776Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489506782256908380:2621] destroyed 2025-04-04T17:11:25.054465Z node 1 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0 2025-04-04T17:11:25.054500Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|1f0346ae-f7737c4c-d1ee7219-4ddbfdd6_0 is DEAD 2025-04-04T17:11:25.054735Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:11:25.055839Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:11:27.434582Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-04T17:11:34.966427Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-04T17:11:35.066314Z :INFO: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] Closing read session. Close timeout: 0.000000s 2025-04-04T17:11:35.066385Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-04T17:11:35.066441Z :INFO: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16711 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:11:35.066541Z :NOTICE: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:11:35.066586Z :DEBUG: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] [dc1] Abort session to cluster 2025-04-04T17:11:35.067094Z :NOTICE: [/Root] [/Root] [b411f01b-d8e685e2-2f0115cf-1b9d64d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:11:35.074365Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 grpc read done: success# 0, data# { } 2025-04-04T17:11:35.074408Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 grpc read failed 2025-04-04T17:11:35.074442Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 grpc closed 2025-04-04T17:11:35.078445Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11385134726031808367_v1 is DEAD 2025-04-04T17:11:35.084079Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7489506756487104074:2531] disconnected; active server actors: 1 2025-04-04T17:11:35.084109Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7489506756487104074:2531] client user disconnected session shared/user_1_1_11385134726031808367_v1 2025-04-04T17:11:35.090467Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_11385134726031808367_v1 2025-04-04T17:11:35.090542Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489506756487104077:2534] destroyed 2025-04-04T17:11:35.094333Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_11385134726031808367_v1 >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::BasicMethods |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TLocksTest::Range_BrokenLock2 >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] >> BackupRestore::RestoreTablePartitioningSettings |87.1%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.1%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] >> TYqlDecimalTests::NegativeValues [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD] Test command err: 2025-04-04T17:11:05.451060Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506699520758620:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:05.451749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00135d/r3tmp/tmpePwII3/pdisk_1.dat 2025-04-04T17:11:06.110687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:06.110925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:06.120897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:06.133974Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2191, node 1 2025-04-04T17:11:06.206283Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:06.215153Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:06.275362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:06.275389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:06.275399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:06.275518Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:06.629435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:10.803960Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506722773501971:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:10.804008Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00135d/r3tmp/tmppKc6GC/pdisk_1.dat 2025-04-04T17:11:11.014217Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:11.054839Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:11.054950Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:11.059863Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11302, node 4 2025-04-04T17:11:11.361446Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:11.361469Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:11.361473Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:11.361582Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:11.759937Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:16.527738Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506747969881573:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:16.527795Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00135d/r3tmp/tmpz7nEzF/pdisk_1.dat 2025-04-04T17:11:16.798859Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:16.864100Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:16.864182Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:16.875759Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13086, node 7 2025-04-04T17:11:17.134777Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:17.134803Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:17.134812Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:17.134956Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:17.451758Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:22720 2025-04-04T17:11:17.865482Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:17.865863Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:17.865882Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:17.870345Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2025-04-04T17:11:17.893783Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786677941, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:11:17.900607Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-04-04T17:11:17.900665Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-04-04T17:11:17.902984Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-04-04T17:11:17.908627Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:17.909228Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:17.909247Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:17.931267Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... waiting... 2025-04-04T17:11:19.053958Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:19.054040Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:19.063068Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-04T17:11:19.068110Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:19.855058Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786679901, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:11:19.857247Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-04-04T17:11:19.858035Z node 7 :FLAT_TX_SCHEMESHARD ... 
FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-04T17:11:26.840841Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-04T17:11:29.096475Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:11:29.097734Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:29.097770Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:11:29.097887Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976715660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-04-04T17:11:29.098050Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:29.098090Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976715660:2, at schemeshard: 72057594046644480 2025-04-04T17:11:29.098903Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:29.110777Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2025-04-04T17:11:29.305571Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786689330, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:11:29.322484Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-04T17:11:29.322590Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-04-04T17:11:29.322604Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:2 2025-04-04T17:11:29.366868Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-04-04T17:11:29.367427Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:11:29.506354Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489506780004581542:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:29.506484Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:31.754695Z node 13 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506810385284202:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:31.778784Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00135d/r3tmp/tmpGHAVnV/pdisk_1.dat 2025-04-04T17:11:32.085316Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:32.133297Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:32.133400Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:32.143950Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1188, node 13 2025-04-04T17:11:32.314432Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:32.314461Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:32.314472Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:32.314657Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:32.718817Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:20935 2025-04-04T17:11:33.145693Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.146103Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:33.146123Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.161061Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 
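The "CREATE TABLE WITH INDEXES" audit entry earlier in this output (Table-1 with global index MyIndex on Value) expands into three suboperations: the table, the index, and its indexImplTable. Client-side that is a single CreateTable call; a sketch with the index-builder overload assumed from the public C++ SDK:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    // Reuses `session` from the first sketch. One CreateTable with a global
    // secondary index produces the table/index/indexImplTable suboperations
    // logged by the schemeshard above.
    auto desc = NYdb::NTable::TTableBuilder()
        .AddNullableColumn("Key", NYdb::EPrimitiveType::Uint64)
        .AddNullableColumn("Value", NYdb::EPrimitiveType::Utf8)
        .SetPrimaryKeyColumn("Key")
        .AddSecondaryIndex("MyIndex", "Value")     // EIndexTypeGlobal
        .Build();
    auto status = session.CreateTable("/Root/ydb_ut_tenant/Table-1",
                                      std::move(desc)).GetValueSync();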
2025-04-04T17:11:33.181351Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786693222, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:11:33.185035Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-04T17:11:33.185103Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 2, subscribers: 1 2025-04-04T17:11:33.187098Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2025-04-04T17:11:33.194397Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.195113Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:33.195140Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.197477Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-04-04T17:11:33.793232Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7489506819564080548:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:33.898849Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:33.898950Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:33.909109Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-04-04T17:11:33.918725Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:33.931873Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:34.094457Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786694139, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:11:34.097347Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-04-04T17:11:34.097519Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2025-04-04T17:11:34.098443Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2025-04-04T17:11:36.590325Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:11:36.591783Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:36.591806Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:11:36.595518Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/Table-1 2025-04-04T17:11:36.730835Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489506810385284202:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:36.730910Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:36.740311Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786696780, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:11:36.753297Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-04-04T17:11:36.801652Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-04-04T17:11:36.802159Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::NegativeValues [GOOD] Test command err: 2025-04-04T17:11:10.886939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506720077395129:2217];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:10.887030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001340/r3tmp/tmpW8oX86/pdisk_1.dat 2025-04-04T17:11:11.623991Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:11.627625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:11.627707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:11.641894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29175, node 1 2025-04-04T17:11:12.025200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:12.025221Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:12.025227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:12.025306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26023 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:12.412706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:14.665617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.030820s 2025-04-04T17:11:14.906187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506737257265403:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:14.906317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:14.906693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506737257265416:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:14.910334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:14.939075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506737257265418:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:11:15.031007Z node 1 :TX_PROXY ERROR: Actor# [1:7489506741552232791:2813] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:15.670961Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wj2prfazys2trzratrzzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY1ZmUxZmYtNmU3Y2MyYTQtZTcyYTEzYmYtYmQzOGQwNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-04-04T17:11:17.718517Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506751776526656:2178];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:17.787411Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001340/r3tmp/tmpkjs0F7/pdisk_1.dat 2025-04-04T17:11:18.002121Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:18.045996Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:18.046067Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:18.048999Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30973, node 4 2025-04-04T17:11:18.217516Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:18.217536Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:18.217542Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:18.217664Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:18.509183Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:20.872257Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:22.738692Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506773029367155:2101];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:22.739379Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001340/r3tmp/tmpc8IF6B/pdisk_1.dat 2025-04-04T17:11:22.883084Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:22.908732Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:22.908806Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:22.914780Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27888, node 7 2025-04-04T17:11:23.194653Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:23.194676Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:23.194682Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:23.194796Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:23.489428Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:25.693937Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:27.631414Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506792590222695:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:27.631498Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001340/r3tmp/tmpbiuIYQ/pdisk_1.dat 202 ... fig is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:28.282963Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:28.282969Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:28.283098Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:28.507320Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:31.285670Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:31.371944Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506809770093057:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:31.371996Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506809770093066:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:31.372048Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:31.374758Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:31.393329Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489506809770093071:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:11:31.474179Z node 10 :TX_PROXY ERROR: Actor# [10:7489506809770093146:2797] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:31.599072Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0wjjsa95jyctpehqfcfjcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2U2ZWMyYzktNjgxODQ1NTUtNjQxNGQyNjEtNDEzYTJkNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:31.715664Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0wjk0tftg4z340gqpymvpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2U2ZWMyYzktNjgxODQ1NTUtNjQxNGQyNjEtNDEzYTJkNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:31.865623Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0wjk4f0kqqyq6frwj7akx6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2U2ZWMyYzktNjgxODQ1NTUtNjQxNGQyNjEtNDEzYTJkNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:31.978209Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0wjk8z1vsffhkngvj28pns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2U2ZWMyYzktNjgxODQ1NTUtNjQxNGQyNjEtNDEzYTJkNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:32.125091Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wjkcm573s0x3ewkfvk4hp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=M2U2ZWMyYzktNjgxODQ1NTUtNjQxNGQyNjEtNDEzYTJkNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:11:33.900457Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506821276436719:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:33.900488Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001340/r3tmp/tmpu4pQ2h/pdisk_1.dat 2025-04-04T17:11:34.138218Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:34.169525Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:34.169637Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:34.178109Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14154, node 13 2025-04-04T17:11:34.318802Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:34.318825Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:34.318835Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:34.318978Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:34.631952Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:37.473546Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:37.613223Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489506838456307079:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:37.613334Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:37.613637Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489506838456307091:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:37.617748Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:37.645949Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489506838456307093:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:11:37.717543Z node 13 :TX_PROXY ERROR: Actor# [13:7489506838456307164:2790] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:37.823561Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0wjrwb6fmtxnkfs1eq9eqp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjIxYzAyYWQtNWZlNzRiOWUtMWRlYTA5ZWMtOTBhNGZlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:37.948515Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0wjs3j4jtxp7q9ct8ersg6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjIxYzAyYWQtNWZlNzRiOWUtMWRlYTA5ZWMtOTBhNGZlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:38.131711Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0wjs7263btv2xjm62ardg9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjIxYzAyYWQtNWZlNzRiOWUtMWRlYTA5ZWMtOTBhNGZlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:38.256035Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0wjscz1j142ezfqaf2w0r0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjIxYzAyYWQtNWZlNzRiOWUtMWRlYTA5ZWMtOTBhNGZlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:38.413381Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wjsgp1jck2xtc45zgkzb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjIxYzAyYWQtNWZlNzRiOWUtMWRlYTA5ZWMtOTBhNGZlN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2025-04-04T17:11:11.716124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506724193736409:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:11.716165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001337/r3tmp/tmpaMYHdV/pdisk_1.dat 2025-04-04T17:11:12.388146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:12.388260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:12.391741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:12.433529Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18544, node 1 2025-04-04T17:11:12.610487Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:12.610513Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:12.826316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:12.844883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:12.844903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:12.844909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:12.844992Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:13.230429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:13.262449Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:13.357818Z node 1 :TX_PROXY ERROR: Actor# [1:7489506732783671756:2611] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T17:11:17.122579Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506752611260371:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:17.122692Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001337/r3tmp/tmpGgVdbU/pdisk_1.dat 2025-04-04T17:11:17.448753Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:17.476687Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:17.476741Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:17.479552Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14168, node 4 2025-04-04T17:11:17.682889Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:17.682911Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:17.682916Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:17.683032Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:17.983800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:20.615346Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506765496163287:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:20.615410Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506765496163278:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:20.615532Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:20.619115Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:11:20.654300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506765496163292:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:11:20.753501Z node 4 :TX_PROXY ERROR: Actor# [4:7489506765496163378:2683] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:22.534928Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506774238700373:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:22.534988Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001337/r3tmp/tmpJSpohg/pdisk_1.dat 2025-04-04T17:11:22.727448Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63712, node 7 2025-04-04T17:11:22.858550Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:22.858686Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:22.869725Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:22.879368Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:22.879386Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:22.879391Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:22.879496Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:23.099667Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:25.564850Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489506787123603310:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:25.569588Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:25.569968Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489506787123603322:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access p ... /Root - no PublicKeys 2025-04-04T17:11:31.567041Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:11:31.567094Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:11:31.575801Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489506810020026843:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:11:31.660293Z node 10 :TX_PROXY ERROR: Actor# [10:7489506810020026920:2802] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:31.819886Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0wjjxpdpqvpr1fm6r9rzvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MjBjNzliOTMtYjEwZGRkMzQtZTU2NjEzOTYtOTBhNzAzODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:31.866975Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0wjk8tfxda3cwbmj5pyq2t, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59060, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:11:31.867512Z node 10 :READ_TABLE_API NOTICE: [10:7489506810020026961:2353] Finish grpc stream, status: 400010 2025-04-04T17:11:31.873966Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0wjk91drsqda9y1b1dy6wy, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59060, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:11:31.898721Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026962:2354] Adding quota request to queue ShardId: 0, TxId: 281474976710662 2025-04-04T17:11:31.898775Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026962:2354] Assign stream quota to Shard 0, Quota 5, TxId 281474976710662 Reserved: 5 of 25, Queued: 0 2025-04-04T17:11:31.901872Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026962:2354] got stream part, size: 246, RU required: 128 rate limiter absent 2025-04-04T17:11:31.902291Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026962:2354] Starting inactivity timer for 600.000000s with tag 3 2025-04-04T17:11:31.979656Z node 10 :READ_TABLE_API NOTICE: [10:7489506810020026962:2354] Finish grpc stream, status: 400000 2025-04-04T17:11:31.982944Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0wjkceew01hd77bzmzea2y, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59060, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:11:32.022256Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026986:2356] Adding quota request to queue ShardId: 0, TxId: 281474976710664 2025-04-04T17:11:32.022308Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026986:2356] Assign stream quota to Shard 0, Quota 5, TxId 281474976710664 Reserved: 5 of 25, Queued: 0 2025-04-04T17:11:32.023088Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026986:2356] got stream part, size: 84, RU required: 128 rate limiter absent 2025-04-04T17:11:32.023457Z node 10 :READ_TABLE_API DEBUG: [10:7489506810020026986:2356] Starting inactivity timer for 600.000000s with tag 3 2025-04-04T17:11:32.026849Z node 10 :READ_TABLE_API NOTICE: [10:7489506810020026986:2356] Finish grpc stream, status: 400000 2025-04-04T17:11:32.034654Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jr0wjke273jgyrt80nn2ch0q, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59060, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 
2025-04-04T17:11:32.056548Z node 10 :READ_TABLE_API DEBUG: [10:7489506814314994302:2358] Adding quota request to queue ShardId: 0, TxId: 281474976710666 2025-04-04T17:11:32.056588Z node 10 :READ_TABLE_API DEBUG: [10:7489506814314994302:2358] Assign stream quota to Shard 0, Quota 5, TxId 281474976710666 Reserved: 5 of 25, Queued: 0 2025-04-04T17:11:32.058067Z node 10 :READ_TABLE_API DEBUG: [10:7489506814314994302:2358] got stream part, size: 210, RU required: 128 rate limiter absent 2025-04-04T17:11:32.058430Z node 10 :READ_TABLE_API DEBUG: [10:7489506814314994302:2358] Starting inactivity timer for 600.000000s with tag 3 2025-04-04T17:11:32.063224Z node 10 :READ_TABLE_API NOTICE: [10:7489506814314994302:2358] Finish grpc stream, status: 400000 2025-04-04T17:11:32.067471Z node 10 :GRPC_SERVER DEBUG: [0x51a00010bc80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.067677Z node 10 :GRPC_SERVER DEBUG: [0x51a00013b080] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.067834Z node 10 :GRPC_SERVER DEBUG: [0x51a00010c280] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.067971Z node 10 :GRPC_SERVER DEBUG: [0x51a00013aa80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.068115Z node 10 :GRPC_SERVER DEBUG: [0x51a00010b680] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.068266Z node 10 :GRPC_SERVER DEBUG: [0x51a00010b080] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.068413Z node 10 :GRPC_SERVER DEBUG: [0x51a00013a480] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.068555Z node 10 :GRPC_SERVER DEBUG: [0x51a000139e80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.068742Z node 10 :GRPC_SERVER DEBUG: [0x51a000139880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.068889Z node 10 :GRPC_SERVER DEBUG: [0x51a00002ca80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.069035Z node 10 :GRPC_SERVER DEBUG: [0x51a0000fb480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.069173Z node 10 :GRPC_SERVER DEBUG: [0x51a00002c480] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.069311Z node 10 :GRPC_SERVER DEBUG: [0x51b000072380] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.069486Z node 10 :GRPC_SERVER DEBUG: [0x51a00007fe80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.069653Z node 10 :GRPC_SERVER DEBUG: [0x51a00002ac80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.069801Z node 10 :GRPC_SERVER DEBUG: [0x51a00004f880] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.069938Z node 10 :GRPC_SERVER DEBUG: [0x51a00010aa80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.070091Z node 10 :GRPC_SERVER DEBUG: [0x51a000138c80] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.070260Z node 10 :GRPC_SERVER DEBUG: 
[0x51a00012c680] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.070408Z node 10 :GRPC_SERVER DEBUG: [0x51a00012d280] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.070566Z node 10 :GRPC_SERVER DEBUG: [0x51a000081680] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:11:32.070743Z node 10 :GRPC_SERVER DEBUG: [0x51a00012d880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-04-04T17:11:34.766461Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506823397481741:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:34.776234Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001337/r3tmp/tmp1kmm8K/pdisk_1.dat 2025-04-04T17:11:35.028379Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:35.077003Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:35.077108Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:35.084436Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27547, node 13 2025-04-04T17:11:35.243541Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:35.243574Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:35.243584Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:35.243746Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6373 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:35.543801Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:35.699344Z node 13 :TICKET_PARSER DEBUG: Ticket 122150FB09ECC5052784AD5EFA8ABD12AD38C358F3E49680C7378B4E3596B0D8 (ipv6:[::1]:41538) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:11:35.884220Z node 13 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-04-04T17:11:35.991723Z node 13 :TICKET_PARSER DEBUG: Ticket 63961F9A1B1042F82DE818E4A565B3FE7158E508A1ADD838B95F02FF4C2262DF (ipv6:[::1]:41598) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-04T17:11:35.992405Z node 13 :TICKET_PARSER ERROR: Ticket 63961F9A1B1042F82DE818E4A565B3FE7158E508A1ADD838B95F02FF4C2262DF: Cannot create token from certificate. Client certificate failed verification >> TGRpcAuthentication::InvalidPassword [GOOD] >> TGRpcAuthentication::DisableLoginAuthentication >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] Test command err: Trying to start YDB, gRPC: 18333, MsgBus: 26426 2025-04-04T17:09:24.983655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506267135683900:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.983764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001500/r3tmp/tmp97QvPz/pdisk_1.dat 2025-04-04T17:09:25.458615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.458892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.462941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:25.510324Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18333, node 1 2025-04-04T17:09:25.662231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.662281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.662298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.662422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26426 TClient is connected to server localhost:26426 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.336308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.392498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.569761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.752788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.866105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.329403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284315554873:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.329530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.712311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.738437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.807359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.835271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.863216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.930177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.017590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288610522690:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.017684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.017778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288610522695:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.022301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.033145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506288610522697:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.110300Z node 1 :TX_PROXY ERROR: Actor# [1:7489506288610522754:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.986988Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506267135683900:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.987068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.376233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.413539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.441334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.468824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.490517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.587999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.618106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.653602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.677064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.703221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.723215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.741961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:30.764025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.221066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:31.256228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.320909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.344082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.375211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... : { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:31.602737Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:11:31.646995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:11:31.706121Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:11:31.761462Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:11:31.858920Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:11:32.115051Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:11:32.172462Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-04T17:11:32.236182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-04T17:11:32.285939Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 
72057594046644480 2025-04-04T17:11:32.337706Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-04T17:11:32.387744Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-04T17:11:32.444476Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-04T17:11:32.520304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.440957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-04T17:11:33.528063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.573981Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.616949Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.659048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.733039Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.774564Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.825381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.883609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-04T17:11:33.998664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.060958Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.145157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.236806Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.295816Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.358622Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.495118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.555650Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.626015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.701063Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.793552Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.896986Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.955171Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.021292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.105538Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.361377Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-04T17:11:35.414799Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.494841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.559114Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.614493Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.668816Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.719547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.785612Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.845370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-04T17:11:35.990978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-04T17:11:36.079183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-04T17:11:36.134883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-04T17:11:36.201485Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-04T17:11:36.322985Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715721:0, at schemeshard: 72057594046644480 2025-04-04T17:11:36.394419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> BasicStatistics::TwoServerlessDbs [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageTestClean >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TTabletPipeTest::TestConnectReject >> YdbLogStore::AlterLogStore [GOOD] >> YdbLogStore::AlterLogTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2025-04-04T17:08:31.758704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:08:31.759263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:08:31.759467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0008b1/r3tmp/tmpvm7uLJ/pdisk_1.dat 2025-04-04T17:08:32.517871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11534, node 1 2025-04-04T17:08:33.603450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:08:33.603511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:08:33.603541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:08:33.604096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:08:33.644765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:08:33.819347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:33.821917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:33.855236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14687 2025-04-04T17:08:34.674346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:08:38.726999Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:08:38.808389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:38.808499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:38.841068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:08:38.842981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:39.140646Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.145014Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.145610Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.145751Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.145971Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.146053Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.146160Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.146304Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.146424Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:08:39.386992Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:08:39.387114Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:08:39.411196Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:08:39.579998Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:39.660870Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:08:39.661012Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:08:39.706598Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:08:39.706925Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:08:39.707146Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:08:39.707231Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:08:39.707287Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:08:39.707341Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:08:39.707413Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:08:39.707468Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:08:39.708047Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:08:39.748588Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:08:39.748700Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:08:39.760145Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1888:2607] 2025-04-04T17:08:39.776300Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1917:2620] 2025-04-04T17:08:39.776968Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1917:2620], schemeshard id = 72075186224037897 2025-04-04T17:08:39.783788Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:08:39.804603Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:08:39.804676Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:08:39.804757Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:08:39.834617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:08:39.848482Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:08:39.848659Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:08:40.136709Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:08:40.340124Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:08:40.437644Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:08:41.258854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:08:42.091074Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:42.292931Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:08:42.293004Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:08:42.293101Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2588:2947], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:08:42.295296Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2590:2948] 2025-04-04T17:08:42.295699Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2590:2948], schemeshard id = 72075186224037899 2025-04-04T17:08:43.249115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:08:43.891332Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:08:44.253947Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-04T17:08:44.254017Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-04T17:08:44.254109Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3074:3148], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-04-04T17:08:44.255179Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3075:3149] 2025-04-04T17:08:44.256347Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3075:3149], schemeshard id = 72075186224037905 2025-04-04T17:08:45.982258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3201:3407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:45.982464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:46.237157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-04T17:08:46.602748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3351:3442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:46.603039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:08:46.604826Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3356:3446]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:08:46.605005Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:08:46.605203Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-04T17:08:46.605297Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3359:3449] 2025-04-04T17:08:46.606363Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3 ... 25-04-04T17:11:32.897536Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:11:32.897581Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:11:32.898118Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:11:32.938423Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:11:32.948046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8930:6405], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:32.948180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8940:6410], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:32.948307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:32.966536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:11:33.118692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8944:6413], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:11:33.318956Z node 2 :TX_PROXY ERROR: Actor# [2:9042:6461] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:33.384471Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9071:6476]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:11:33.384819Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-04T17:11:33.385086Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-04T17:11:33.385156Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:11:33.385295Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:11:33.385370Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9071:6476], StatRequests.size() = 1 2025-04-04T17:11:33.645240Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTIyZWYyODAtZGRmODFhNjgtNGJjZjIxZmMtOTg0NzE1ODg=, TxId: 2025-04-04T17:11:33.645324Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTIyZWYyODAtZGRmODFhNjgtNGJjZjIxZmMtOTg0NzE1ODg=, TxId: 2025-04-04T17:11:33.646015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:11:33.672049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:11:33.672117Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:11:33.728731Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:11:33.728808Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:11:33.803134Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3716:3398], schemeshard count = 1 2025-04-04T17:11:34.081565Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-04T17:11:34.081637Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 211.000000s, at schemeshard: 72075186224037899 2025-04-04T17:11:34.081909Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-04T17:11:34.112742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:11:34.353363Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9109:6502]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:11:34.353697Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:11:34.353744Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9109:6502], StatRequests.size() = 1 2025-04-04T17:11:35.906025Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9160:6534]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:11:35.906519Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:11:35.906573Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9160:6534], StatRequests.size() = 1 2025-04-04T17:11:36.490465Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-04T17:11:36.490536Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 225.000000s, at schemeshard: 72075186224037905 2025-04-04T17:11:36.490970Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2025-04-04T17:11:36.519966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:11:36.745873Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:11:36.757182Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:11:36.757244Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:11:36.757283Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-04T17:11:36.757318Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:11:36.757805Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-04T17:11:36.760260Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:11:36.787091Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjdiMThiZmUtZWFkZjhiNGUtMzUxMmI3NmYtNjgwYjJmMTI=, TxId: 2025-04-04T17:11:36.787163Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjdiMThiZmUtZWFkZjhiNGUtMzUxMmI3NmYtNjgwYjJmMTI=, TxId: 2025-04-04T17:11:36.787768Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:11:36.811784Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:11:36.811843Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:11:37.548173Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9242:6589]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:11:37.548555Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:11:37.548604Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9242:6589], StatRequests.size() = 1 2025-04-04T17:11:39.244331Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9298:6622]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:11:39.244648Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:11:39.244690Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9298:6622], StatRequests.size() = 1 2025-04-04T17:11:40.031200Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-04T17:11:40.031795Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:11:40.032266Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:11:40.047470Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:11:40.047529Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:11:40.047570Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2025-04-04T17:11:40.047605Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-04T17:11:40.047947Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-04T17:11:40.050733Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:11:40.073884Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjNjNzRiY2UtZDk2Njc0ODAtNDYyMjk0MTMtMzAxNTI0ZDc=, TxId: 2025-04-04T17:11:40.073953Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjNjNzRiY2UtZDk2Njc0ODAtNDYyMjk0MTMtMzAxNTI0ZDc=, TxId: 2025-04-04T17:11:40.074583Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:11:40.095795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-04T17:11:40.095859Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:11:40.848860Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9375:6672]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:11:40.849197Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:11:40.849244Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9375:6672], StatRequests.size() = 1 2025-04-04T17:11:40.850090Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9377:6674]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:11:40.853691Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:11:40.853753Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9377:6674], StatRequests.size() = 1 |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> TFlatTest::ReadOnlyMode >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: 2025-04-04T17:11:44.173808Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2 >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] Test command err: 2025-04-04T17:11:16.993339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506745749777840:2285];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001326/r3tmp/tmpcJbHui/pdisk_1.dat 2025-04-04T17:11:17.317498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:17.575855Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:17.575979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:17.592644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:17.594220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3655, node 1 2025-04-04T17:11:18.134816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:18.134840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:18.134846Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:18.134940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:18.623886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:21.132902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506767224615145:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:21.133025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:21.134375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506767224615157:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:21.138572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:11:21.165463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506767224615159:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:11:21.224547Z node 1 :TX_PROXY ERROR: Actor# [1:7489506767224615230:2681] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:23.006455Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506773798128276:2074];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001326/r3tmp/tmpDjKDm1/pdisk_1.dat 2025-04-04T17:11:23.120683Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:23.245111Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:23.303725Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:23.303802Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:23.312589Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14463, node 4 2025-04-04T17:11:23.523000Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:23.523028Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:23.523035Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:23.523182Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:23.738478Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:23.759202Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:11:26.279264Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506790977998502:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:26.279370Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:26.298617Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:26.515933Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506790977998680:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:26.516020Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:26.516231Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506790977998685:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:26.520446Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:26.544486Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506790977998687:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:11:26.651478Z node 4 :TX_PROXY ERROR: Actor# [4:7489506790977998766:2793] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:28.650794Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506799167435398:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:28.667341Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001326/r3tmp/tmp0Dl7rI/pdisk_1.dat 2025-04-04T17:11:28.848950Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:28.895941Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:28.896038Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:28.903838Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2437, node 7 2025-04-04T17:11:29.150850Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:29.150877Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:29.150885Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:29.151046Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:29.451610Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:29.607052Z node 7 :TX_PROXY ERROR: Actor# [7:7489506803462403617:2600] txid# 281474976715658, Access denied for bad@builtin on path /Root, with access CreateTable 2025-04-04T17:11:29.607212Z node 7 :TX_PROXY ERROR: Actor# [7:7489506803462403617:2600] txid# 281474976715658, issues: { message: "Access denied for bad@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-04T17:11:33.595590Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506820577299750:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:33.595660Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001326/r3tmp/tmp6FPMs3/pdisk_1.dat 2025-04-04T17:11:33.857078Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:33.919844Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:33.919936Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:33.939422Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14508, node 10 2025-04-04T17:11:34.230812Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:34.230837Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:34.230844Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:34.230998Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:34.563708Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:34.686050Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.825911Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.883908Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:11:34.972424Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:11:38.888999Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506839875091664:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:38.889056Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001326/r3tmp/tmpExMfcP/pdisk_1.dat 2025-04-04T17:11:39.191620Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:39.235334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:39.235445Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:39.243932Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28056, node 13 2025-04-04T17:11:39.403963Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:39.403989Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:39.403997Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:39.404157Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:11:39.720419Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:39.825440Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> BasicUsage::BrokenCredentialsProvider [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> KqpQueryPerf::Replace-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2025-04-04T17:10:16.381613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:16.381681Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:16.440608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:17.598959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-04T17:10:17.728915Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.729406Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.730587Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5383953579619033814 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.772770Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.773232Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.773598Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13259770307459819525 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.818067Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.826708Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.826952Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6421467369946391326 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.829799Z node 2 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:10:17.905591Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.906081Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.906351Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpdTYysh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14947444005024713979 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.937996Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 ... n file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:11:35.198331Z node 148 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14465080377260212779 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:11:35.271144Z node 147 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:11:35.271621Z node 147 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:11:35.271809Z node 147 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4072538523197837472 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:11:35.282221Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:11:35.282647Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:11:35.282778Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4044534197114793372 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:11:35.283159Z node 147 :BS_LOCALRECOVERY CRIT: VDISK[80000001:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:11:35.535201Z node 145 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:11:35.535290Z node 145 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:35.648707Z node 145 :STATISTICS WARN: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2025-04-04T17:11:35.785963Z node 153 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:11:35.787365Z node 153 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:11:35.787736Z node 153 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001840/r3tmp/tmpqK8bFS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13109578940631473294 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:11:38.931230Z node 154 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:11:38.931336Z node 154 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:39.006820Z node 154 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:11:42.877143Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:11:42.877241Z node 163 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:42.975081Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046578944 >> TGRpcAuthentication::DisableLoginAuthentication [GOOD] >> TGRpcAuthentication::NoConnectRights >> KqpJoinOrder::TPCDS23+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin+ColumnStore >> KqpQueryPerf::Update+QueryService-UseSink >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-04T17:11:00.467168Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1743786660467142 2025-04-04T17:11:00.848051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506678081006532:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:00.848097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:01.010683Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506682898254347:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:01.010729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:01.181764Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fdf/r3tmp/tmpO4HiGH/pdisk_1.dat 2025-04-04T17:11:01.212025Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:11:01.550954Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:01.554494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:01.554588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:01.558489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:01.558572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:01.562444Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:11:01.562572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:01.563305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27093, node 1 2025-04-04T17:11:01.823426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000fdf/r3tmp/yandexbXAYOM.tmp 2025-04-04T17:11:01.823453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000fdf/r3tmp/yandexbXAYOM.tmp 2025-04-04T17:11:01.823592Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000fdf/r3tmp/yandexbXAYOM.tmp 2025-04-04T17:11:01.823717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2025-04-04T17:11:01.960188Z INFO: TTestServer started on Port 20967 GrpcPort 27093 TClient is connected to server localhost:20967 PQClient connected to localhost:27093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:02.403118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:11:02.481486Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:04.922077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506695260876734:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.922179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.922509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506695260876748:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.926458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:11:04.934391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506695260876787:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.934565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.963252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506695260876750:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:11:05.134436Z node 1 :TX_PROXY ERROR: Actor# [1:7489506699555844120:2676] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:05.169520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.169644Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489506699555844132:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:05.170580Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWE0MWY4ODktNDc0YTQxNTgtOTE5ZTAzYTctMzhlM2Y2ZDg=, ActorId: [1:7489506695260876730:2336], ActorState: ExecuteState, TraceId: 01jr0whrxwejsfr5a69ta6pe9x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:05.172543Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:05.174354Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489506700078123857:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:05.175100Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mjc0MjgzYzQtMjkwZWZhNDEtMWI1NzJkYTMtMzY0OGQ3OWI=, ActorId: [2:7489506695783156529:2308], ActorState: ExecuteState, TraceId: 01jr0whs19atc19fp57gz6ph5e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:05.175496Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:05.328845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.473553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:27093", true, true, 1000); 2025-04-04T17:11:05.794375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0whsmc4xsqrjcw04fwq80t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA5OTFmOGItNDE5MmZjNDUtODdjOTgzYS1kNTQ0NzljMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489506699555844527:2973] 2025-04-04T17:11:05.848506Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506678081006532:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:05.848575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:06.014603Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506682898254347:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:06.017812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:11:12.028651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2814749767 ... created, have version: 1 2025-04-04T17:11:44.616612Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-04T17:11:44.616973Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-04T17:11:44.617012Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:64666 2025-04-04T17:11:44.625381Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T17:11:44.626441Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:11:44.626478Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-04T17:11:44.630815Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T17:11:44.630950Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:48564 2025-04-04T17:11:44.630977Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:48564 proto=v1 topic=test-topic durationSec=0 2025-04-04T17:11:44.630987Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:11:44.633140Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-04T17:11:44.633239Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T17:11:44.633250Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:11:44.633260Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T17:11:44.633278Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:11:44.635689Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:11:44.856589Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T17:11:44.857008Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7489506867475669341:2515] connected; active server actors: 1 2025-04-04T17:11:44.857102Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-04T17:11:44.857117Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T17:11:44.874472Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7489506867475669341:2515] disconnected; active server actors: 1 
2025-04-04T17:11:44.874512Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7489506867475669341:2515] disconnected no session 2025-04-04T17:11:44.998965Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T17:11:44.999011Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T17:11:44.999031Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489506867475669301:2515] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-04T17:11:44.999060Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:11:45.001655Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-04T17:11:45.011255Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0 2025-04-04T17:11:45.008911Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7489506867475669372:2515], now have 1 active actors on pipe 2025-04-04T17:11:45.008992Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:11:45.009018Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:11:45.009113Z node 6 :PERSQUEUE INFO: new Cookie src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T17:11:45.009210Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:11:45.009255Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:11:45.010189Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:11:45.010210Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:11:45.010327Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:11:45.022474Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743786705015 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:11:45.023012Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:11:45.023333Z :INFO: [] MessageGroupId [src] SessionId [src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:11:45.023384Z :INFO: [] MessageGroupId [src] SessionId [src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0] Write session will now close 2025-04-04T17:11:45.023433Z :DEBUG: [] MessageGroupId [src] SessionId [src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0] Write session: aborting 2025-04-04T17:11:45.023942Z :INFO: [] MessageGroupId [src] SessionId [src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:11:45.023993Z :DEBUG: [] MessageGroupId [src] SessionId [src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0] Write session: destroy 2025-04-04T17:11:45.026672Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0 grpc read done: success: 0 data: 2025-04-04T17:11:45.027414Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7489506867475669372:2515] destroyed 2025-04-04T17:11:45.027473Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:11:45.026701Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0 grpc read failed 2025-04-04T17:11:45.026765Z node 5 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0 2025-04-04T17:11:45.026783Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6df11da8-1bd2e826-af5b58b5-d6f8b890_0 is DEAD 2025-04-04T17:11:45.027086Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:11:45.057330Z :INFO: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] Starting read session 2025-04-04T17:11:45.057398Z :DEBUG: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] Starting session to cluster null (localhost:64666) 2025-04-04T17:11:45.059617Z :DEBUG: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:45.059672Z :DEBUG: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:45.059715Z :DEBUG: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] Reconnecting session to cluster null in 0.000000s 2025-04-04T17:11:45.061159Z :ERROR: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-04T17:11:45.061228Z :DEBUG: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:45.061259Z :DEBUG: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:11:45.061369Z :INFO: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-04T17:11:45.061536Z :NOTICE: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:11:45.061575Z :DEBUG: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-04T17:11:45.061649Z :INFO: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] Closing read session. Close timeout: 0.000000s 2025-04-04T17:11:45.061686Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:11:45.061727Z :INFO: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:11:45.061805Z :NOTICE: [/Root] [/Root] [6ec9e91d-730407c8-5403325b-980c4be6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> YdbLogStore::AlterLogTable [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20568, MsgBus: 24982 2025-04-04T17:09:46.658282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506362003376973:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:46.658623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001461/r3tmp/tmpb2xdwH/pdisk_1.dat 2025-04-04T17:09:47.117706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:47.117829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:47.122123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:47.178656Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20568, node 1 2025-04-04T17:09:47.338742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:47.338764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:47.338770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:47.338906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24982 TClient is connected to server localhost:24982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:47.927277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:47.942473Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:47.962922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:48.121273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:48.306473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:48.381510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:50.072143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506379183247821:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:50.072260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:50.296346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.328155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.361196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.391037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.428054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.470370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.554609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506379183248332:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:50.554695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:50.555669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506379183248338:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:50.560450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:50.581206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506379183248340:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:50.646192Z node 1 :TX_PROXY ERROR: Actor# [1:7489506379183248394:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:51.656155Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506362003376973:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:51.656236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:51.681569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.719382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.754732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.789189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.869458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.008093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.048904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.086504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.124544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.177671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.213874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.287684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:52.322573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.895092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:52.939723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:52.998573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:53.033568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:30.985944Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:30.985970Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:30.986025Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:30.986047Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:30.986082Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:30.986106Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:30.987541Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:30.987587Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:30.987767Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:30.987796Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:30.987911Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:30.987941Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:30.988105Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:30.988129Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:30.988223Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:30.988242Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:30.992587Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:11:30.992633Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:11:30.992710Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:11:30.992738Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:11:30.992865Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:11:30.992890Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:30.992962Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:30.992988Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:30.993038Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:30.993062Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:30.993097Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:30.993121Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:30.993586Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:30.993617Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:30.993763Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:30.993789Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:30.993890Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:30.993915Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:30.994046Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:30.994072Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:30.994157Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:30.994177Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:31.061551Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.063151Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.068479Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.069443Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.075136Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.075640Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.081743Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.082058Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.089289Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.089345Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.098788Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.099415Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.106876Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.106932Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.114342Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:31.114433Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::NotNulls >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest 
|87.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes >> TLocksTest::Range_BrokenLock0 >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestore::RestoreTableSplitBoundaries >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TFlatTest::RejectByIncomingReadSetSize [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::IdxLookupJoin+QueryService >> TGRpcAuthentication::NoConnectRights [GOOD] >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] >> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] Test command err: 2025-04-04T17:11:41.547616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506853015068657:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:41.547862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpCm0PNB/pdisk_1.dat 2025-04-04T17:11:42.124395Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:42.168768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:42.168898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:42.178921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19316, node 1 2025-04-04T17:11:42.730589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:42.730610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:42.730635Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:42.730754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25491 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:43.025328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/"Create temporary directory "/Root/~backup_20250404T171143" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/dir"Create directory "/Root/~backup_20250404T171143/dir" in databaseWrite ACL into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/dir/permissions.pb"Remove directory "/Root/~backup_20250404T171143/dir"2025-04-04T17:11:43.462406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710661:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250404T171143" in database2025-04-04T17:11:43.558779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-04T17:11:43.616957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/dir"Restore empty directory "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/dir" to "/Root/dir"Restore ACL "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmp9VxpuM/dir/permissions.pb"2025-04-04T17:11:43.739205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-04T17:11:47.068766Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506879733237856:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:47.068827Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpt9hNHN/pdisk_1.dat 2025-04-04T17:11:47.384635Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:47.461884Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:47.461971Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1408, node 4 2025-04-04T17:11:47.471195Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:47.681558Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:47.681588Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:47.681594Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:47.681719Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:47.951473Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:50.347820Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506892618140796:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.347955Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.511349Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:50.694456Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506892618140972:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.694529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.802659Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7489506892618141165:2359] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T17:11:50.866235Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506892618141263:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.866321Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.940529Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7489506892618141442:2380] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T17:11:50.999519Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506892618141547:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.999594Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.062455Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7489506896913109022:2401] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:8:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } GetChangefeedAndTopicDescriptions: Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/"Create temporary directory "/Root/~backup_20250404T171151" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250404T171151/table" }Backup table "/Root/~backup_20250404T171151/table" to "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table"Describe table "/Root/~backup_20250404T171151/table"Write scheme into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/scheme.pb"Describe table "/Root/table"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/a"Write changefeed into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/a/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/a/topic_description.pb"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/b"Write changefeed into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/b/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/b/topic_description.pb"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/c"Write changefeed into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/c/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/c/topic_description.pb"Write ACL into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/permissions.pb"Read table "/Root/~backup_20250404T171151/table"Write data into "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/data_00.csv"Drop table "/Root/~backup_20250404T171151/table"2025-04-04T17:11:51.694498Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037895 not found Remove temporary directory "/Root/~backup_20250404T171151" in database2025-04-04T17:11:51.732942Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-04T17:11:51.758770Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506896913109560:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.758855Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.768717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710668:2, at schemeshard: 72057594046644480 2025-04-04T17:11:51.806639Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037892 not found 2025-04-04T17:11:51.806675Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-04-04T17:11:51.806688Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037893 not found 2025-04-04T17:11:51.806701Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037894 not found 2025-04-04T17:11:51.806715Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-04T17:11:51.806730Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-04-04T17:11:51.831019Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-04-04T17:11:51.836338Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-04T17:11:51.836394Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-04-04T17:11:51.836431Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-04-04T17:11:51.836458Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-04-04T17:11:51.836493Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found Restore "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/" to "/Root"Resolved db base path: "/Root"2025-04-04T17:11:51.872547Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found Restore folder "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table"Read scheme from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table" to "/Root/table"2025-04-04T17:11:51.909882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/data_00.csv"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/c"Read changefeed from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/c/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/c/topic_description.pb"2025-04-04T17:11:52.074559Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489506879733237856:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:52.074791Z 
node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:52.080957Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7489506901208077406:2480] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:13:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/c"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/a"Read changefeed from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/a/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/a/topic_description.pb"2025-04-04T17:11:52.220852Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7489506901208077671:2499] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:15:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/a"Process "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/b"Read changefeed from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/b/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/b/topic_description.pb"2025-04-04T17:11:52.297034Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7489506901208077922:2511] Failed entry at 'ResolveCdcStream': entry# { Path: TableId: [72057594046644480:16:0] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/b"Restore ACL "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/i5tq/00178d/r3tmp/tmpU39jF8/table/permissions.pb"2025-04-04T17:11:52.331721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480 Restore completed successfully ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-04-04T17:11:44.887256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506865456962949:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:44.887306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e84/r3tmp/tmprURx6D/pdisk_1.dat 2025-04-04T17:11:45.597062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:45.597164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:45.604965Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:45.654793Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3730 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:46.033598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:46.196089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:11:46.196523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:46.196736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:11:46.196801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-04T17:11:46.196890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:46.197177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:11:46.197222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:11:46.206607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-04T17:11:46.206838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-04-04T17:11:46.207083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:11:46.207097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:11:46.207241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:11:46.207327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:11:46.207347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489506869751930858:2384], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-04-04T17:11:46.207366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489506869751930858:2384], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-04-04T17:11:46.207414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:46.207438Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:11:46.207469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 waiting... 2025-04-04T17:11:46.221932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:11:46.223950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-04T17:11:46.224059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-04T17:11:46.224076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-04T17:11:46.224095Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-04-04T17:11:46.224113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T17:11:46.224402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-04T17:11:46.224445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-04T17:11:46.224456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-04T17:11:46.224468Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-04-04T17:11:46.224484Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:11:46.224529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-04-04T17:11:46.224663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:11:46.224680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-04-04T17:11:46.224726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:11:46.235389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2025-04-04T17:11:46.235505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:11:46.235594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-04-04T17:11:46.235619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-04-04T17:11:46.239069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786706284, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:11:46.239236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786706284 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:11:46.239291Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1743786706284, at schemeshard: 72057594046644480 2025-04-04T17:11:46.239429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-04-04T17:11:46.239565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:11:46.239609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:11:46.241110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:11:46.241131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:11:46.241254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:11:46.241315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:11:46.241331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489506869751930858:2384], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-04-04T17:11:46.241344Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489506869751930858:2384], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-04-04T17:11:46.241380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:46.241408Z node 1 :F ... 4480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-04T17:11:46.537686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-04T17:11:46.537695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-04-04T17:11:46.537705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-04-04T17:11:46.537803Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-04-04T17:11:46.537829Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:11:46.537934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-04T17:11:46.537960Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-04-04T17:11:46.537974Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:11:46.538003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-04-04T17:11:46.538010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-04-04T17:11:46.538018Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-04-04T17:11:46.538026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-04T17:11:46.538055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 0 2025-04-04T17:11:46.538172Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-04-04T17:11:46.538196Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 
2025-04-04T17:11:46.539665Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-04T17:11:46.539692Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:11:46.539741Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{14, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:11:46.539775Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:11:46.539847Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7489506869751930833:2358] 2025-04-04T17:11:46.539856Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [1:7489506869751930833:2358] 2025-04-04T17:11:46.540358Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7489506869751930829:2358] EventType# 269156352 2025-04-04T17:11:46.540633Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] connected with status OK role: Leader [1:7489506874046898552:2303] 2025-04-04T17:11:46.540649Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] send queued [1:7489506874046898552:2303] 2025-04-04T17:11:46.540657Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] push event to server [1:7489506874046898552:2303] 2025-04-04T17:11:46.540903Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] HandleSend Sender# [1:7489506874046898551:2303] EventType# 271124996 2025-04-04T17:11:46.541014Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-04-04T17:11:46.541043Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:11:46.541073Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-04-04T17:11:46.541102Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{39, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:11:46.541128Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:11:46.542874Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:11:46.542904Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:122:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:11:46.542939Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:18:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:11:46.542956Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:18:1:24576:107:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-04-04T17:11:46.542976Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:11:46.542991Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:119:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:11:46.543008Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:11:46.543025Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:11:46.543109Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-04-04T17:11:46.543204Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-04-04T17:11:46.543216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-04-04T17:11:46.543237Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-04-04T17:11:46.543246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-04-04T17:11:46.543297Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} commited cookie 1 for step 18 2025-04-04T17:11:46.546408Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received poison pill [1:7489506874046898552:2303] 2025-04-04T17:11:46.546438Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:7489506874046898552:2303] 2025-04-04T17:11:46.549754Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] Got PeerClosed from# [1:7489506874046898552:2303] 2025-04-04T17:11:48.847641Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506886153629508:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e84/r3tmp/tmpekjBZF/pdisk_1.dat 2025-04-04T17:11:48.894413Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:48.946587Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:48.973533Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:48.973617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:48.975068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21024 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:49.198138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.206701Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.226151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:11:49.237696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:52.599648Z node 2 :TX_PROXY ERROR: Actor# [2:7489506903333499638:2614] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000068 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-04-04T17:11:52.599722Z node 2 :TX_PROXY ERROR: Actor# [2:7489506903333499638:2614] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcAuthentication::NoConnectRights [GOOD] Test command err: 2025-04-04T17:11:11.726542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506724455471123:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:11.726581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00132e/r3tmp/tmpzo1WeW/pdisk_1.dat 2025-04-04T17:11:12.495776Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11148, node 1 2025-04-04T17:11:12.596997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:12.597088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:12.670834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:12.698937Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:12.698960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-04T17:11:12.698965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:12.699078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:13.285882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21982 TClient is connected to server localhost:21982 2025-04-04T17:11:13.968290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:15.814968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506741635341424:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:15.814969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506741635341406:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:15.815067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:15.818581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T17:11:15.841609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506741635341427:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:11:15.920197Z node 1 :TX_PROXY ERROR: Actor# [1:7489506741635341519:2725] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:21982 2025-04-04T17:11:16.431123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:16.730710Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506724455471123:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:16.730785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:21.555688Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:453:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:11:21.556084Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:21.556206Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00132e/r3tmp/tmpUoHQxp/pdisk_1.dat 2025-04-04T17:11:22.113355Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:11:22.193837Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:22.194020Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:22.216299Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:22.553940Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [4:1003:2808], Recipient [4:530:2445]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:11:22.554027Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:11:22.554067Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:11:22.554199Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [4:1000:2806], Recipient [4:530:2445]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:11:22.554270Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:11:22.632305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:11:22.632567Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:11:22.632681Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:11:22.632854Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-04-04T17:11:22.633030Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:11:22.633183Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:11:22.633248Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2025-04-04T17:11:22.633338Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:11:22.633408Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:11:22.633471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:11:22.636977Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-04-04T17:11:22.637160Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2025-04-04T17:11:22.637260Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:11:22.637306Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-04-04T17:11:22.637603Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:530:2445], Recipient [4:530:2445]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:11:22.637646Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:11:22.637840Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:11:22.637894Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:11:22.638085Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:11:22.638187Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:11:22.638231Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:749:2598], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-04T17:11:22.638308Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:749:2598], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2025-04-04T17:11:22.638815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657 ... 
xCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 82 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82472 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-04T17:11:34.583767Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:1643:2456], Recipient [4:530:2445]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037890 TableLocalId: 3 Generation: 1 Round: 2 TableStats { DataSize: 75 RowCount: 1 IndexSize: 82 InMemSize: 0 LastAccessTime: 2027 LastUpdateTime: 2027 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 82 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82472 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-04T17:11:34.583834Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:11:34.583899Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 75 rowCount 1 cpuUsage 0 2025-04-04T17:11:34.584045Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 75 RowCount: 1 IndexSize: 82 InMemSize: 0 LastAccessTime: 2027 LastUpdateTime: 2027 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 82 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:11:34.584094Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T17:11:34.715610Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wjnxyfk688dv7ngvqqyxa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzNjZmM2NTMtNWRhNTE3MC1mMjdjZGJkMy03YmVjNTFjYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:11:36.418969Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506832351014664:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:36.419089Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00132e/r3tmp/tmpTdAZOD/pdisk_1.dat 2025-04-04T17:11:36.693780Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:36.725314Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:36.725400Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:36.728776Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11375, node 6 2025-04-04T17:11:36.834440Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:36.834469Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:36.834481Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:36.834614Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:37.267717Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:9794 2025-04-04T17:11:41.814386Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489506852516956383:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:41.814458Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00132e/r3tmp/tmpagtu7V/pdisk_1.dat 2025-04-04T17:11:42.144478Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27215, node 9 2025-04-04T17:11:42.250888Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:42.250996Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:42.277768Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:42.374353Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:42.374394Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:42.374407Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:42.374609Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:42.808122Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:48.085656Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7489506882422899734:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:48.085780Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00132e/r3tmp/tmpsKFfHU/pdisk_1.dat 2025-04-04T17:11:48.410850Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:48.452877Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:48.453000Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:48.463907Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1169, node 12 2025-04-04T17:11:48.719019Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:48.719044Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:48.719055Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:48.719201Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:49.148135Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:62054 >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20190, MsgBus: 8526 2025-04-04T17:11:47.384687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506880097367410:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:47.386537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c75/r3tmp/tmpdO5FND/pdisk_1.dat 2025-04-04T17:11:47.996512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:48.010887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:48.010991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:48.022416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20190, node 1 2025-04-04T17:11:48.358040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:48.358060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:48.358067Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:48.358162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8526 TClient is connected to server localhost:8526 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:49.208520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.282292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:49.541475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.689905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.771549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:51.039306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506897277238212:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.039404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.653678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.704313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.744955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.776353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.811142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.856501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.930616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506897277238724:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.930760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.931035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506897277238729:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.944138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:11:51.987810Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:11:51.988013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506897277238731:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:11:52.057525Z node 1 :TX_PROXY ERROR: Actor# [1:7489506901572206085:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:52.386351Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506880097367410:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:52.386442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15599, MsgBus: 18603 2025-04-04T17:11:47.721571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506879810072918:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c6c/r3tmp/tmpMXevGV/pdisk_1.dat 2025-04-04T17:11:48.010391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:48.319941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:48.336019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:48.336122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:48.337790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15599, node 1 2025-04-04T17:11:48.450795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:48.450814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:48.450821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:48.450920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18603 TClient is connected to server localhost:18603 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:11:49.245168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.263075Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:49.283272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.533798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.705954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.818352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:51.385490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506896989943746:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.385636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.654520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.715459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.747670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.778520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.814134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.888482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.961369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506896989944263:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.961480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.961780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506896989944268:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.965056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:11:51.980070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506896989944270:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:11:52.055438Z node 1 :TX_PROXY ERROR: Actor# [1:7489506901284911621:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:52.706456Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506879810072918:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:52.706520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4995, MsgBus: 22378 2025-04-04T17:11:47.380436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506880877372206:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:47.380485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c7d/r3tmp/tmpCmlCD6/pdisk_1.dat 2025-04-04T17:11:47.972244Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:48.010835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:48.010942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:48.020242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4995, node 1 2025-04-04T17:11:48.350700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:48.350724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:48.350730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:48.350820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22378 TClient is connected to server localhost:22378 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:11:49.208286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.285770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.552259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.721564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.798945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:51.047191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506898057243060:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.047304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.654447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.713256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.790708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.821352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.856218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.902145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.983493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506898057243579:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.983562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.983939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506898057243584:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:51.987314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:11:52.003781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506898057243586:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:11:52.075865Z node 1 :TX_PROXY ERROR: Actor# [1:7489506902352210940:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:52.452945Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506880877372206:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:52.453416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-04-04T17:11:33.518706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506821375220323:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:33.518746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017a0/r3tmp/tmpSE8ac5/pdisk_1.dat 2025-04-04T17:11:34.089414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:34.101185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:34.101323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:34.107270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32752, node 1 2025-04-04T17:11:34.338762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:34.338802Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:34.338809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:34.338914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31163 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:34.712301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:37.133893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506838555090535:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:37.134009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:37.406339Z node 1 :TX_PROXY DEBUG: actor# [1:7489506821375220550:2140] Handle TEvProposeTransaction 2025-04-04T17:11:37.406373Z node 1 :TX_PROXY DEBUG: actor# [1:7489506821375220550:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:11:37.406425Z node 1 :TX_PROXY DEBUG: actor# [1:7489506821375220550:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489506838555090571:2632] 2025-04-04T17:11:37.521478Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Uint32" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } IndexDescription { Name: "value_idx" KeyColumnNames: "Value" Type: EIndexTypeGlobal State: EIndexStateReady } } } } UserToken: "" DatabaseName: "" 2025-04-04T17:11:37.521529Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:11:37.521812Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:11:37.521868Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:11:37.522017Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:11:37.522128Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:11:37.522161Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:11:37.522638Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:11:37.524198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:37.532858Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-04-04T17:11:37.532936Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090571:2632] txid# 281474976710658 SEND to# [1:7489506838555090570:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-04-04T17:11:37.776240Z node 1 :TX_PROXY DEBUG: actor# [1:7489506821375220550:2140] Handle TEvNavigate describe path /Root 2025-04-04T17:11:37.776291Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090803:2814] HANDLE EvNavigateScheme /Root 2025-04-04T17:11:37.776480Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090803:2814] 
HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:11:37.776560Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090803:2814] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root" Options { ShowPrivateTable: false } 2025-04-04T17:11:37.777113Z node 1 :TX_PROXY DEBUG: Actor# [1:7489506838555090803:2814] Handle TEvDescribeSchemeResult Forward to# [1:7489506838555090801:2356] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 65 Record# Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786694783 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/Root:test" Kind: "test" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "ssd" Kind: "ssd" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-04-04T17:11:37.793828Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7489506838555090805:2357] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/table TableId: [72057594046644480:2:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:11:37.793852Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7489506838555090805:2357] [0] Allocate txId 2025-04-04T17:11:37.793910Z node 1 :TX_PROXY DEBUG: actor# [1:7489506821375220550:2140] Handle TEvAllocateTxId 2025-04-04T17:11:37.793952Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7489506838555090805:2357] 
[281474976710659] TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:11:37.793962Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7489506838555090805:2357] [281474976710659] Resolve database: name# /Root 2025-04-04T17:11:37.794188Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7489506838555090805:2357] [281474976710659] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:11:37.794209Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7489506838555090805:2357] [281474976710659] Send request: schemeShardId# 72057594046644480 2025-04-04T17:11:37.797068Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoExecute 2025-04-04T17:11:37.797346Z node 1 :EXPORT DEBUG: TExport::TTxCreate: Reply: status# SUCCESS, error# 2025-04-04T17:11:37.804367Z node 1 :EXPORT DEBUG: TExport::TTxCreate: DoComplete 2025-04-04T17:11:37.804484Z node 1 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-04T17:11:37.804499Z node 1 :EXPORT DEB ... etStatus from node 7, TabletId: 72075186224037896 not found 2025-04-04T17:11:53.241294Z node 7 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-04-04T17:11:53.241324Z node 7 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715762 2025-04-04T17:11:53.242519Z node 7 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-04-04T17:11:53.867820Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7489506905286248510:2424] [0] Resolve database: name# /Root 2025-04-04T17:11:53.868202Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7489506905286248510:2424] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:11:53.868228Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7489506905286248510:2424] [0] Send request: schemeShardId# 72057594046644480 2025-04-04T17:11:53.870581Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7489506905286248510:2424] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:23404" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1743786712 } EndTime { seconds: 1743786713 } } 2025-04-04T17:11:53.887285Z node 7 :TX_PROXY DEBUG: actor# [7:7489506879516441422:2113] Handle TEvNavigate describe path /Root/table 2025-04-04T17:11:53.887350Z node 7 :TX_PROXY DEBUG: Actor# [7:7489506905286248520:4584] HANDLE 
EvNavigateScheme /Root/table 2025-04-04T17:11:53.887626Z node 7 :TX_PROXY DEBUG: Actor# [7:7489506905286248520:4584] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:11:53.887724Z node 7 :TX_PROXY DEBUG: Actor# [7:7489506905286248520:4584] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-04-04T17:11:53.889428Z node 7 :TX_PROXY DEBUG: Actor# [7:7489506905286248520:4584] Handle TEvDescribeSchemeResult Forward to# [7:7489506905286248518:2425] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1743786712710 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 9 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 
41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046644480 >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24404, MsgBus: 8949 2025-04-04T17:09:24.998823Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506263988721690:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.998865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00151c/r3tmp/tmpfMropq/pdisk_1.dat 2025-04-04T17:09:25.469564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.474549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.474628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.476579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24404, node 1 2025-04-04T17:09:25.639181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.639199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.639222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.639328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8949 TClient is connected to server localhost:8949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.448038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.519170Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.532231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:26.681880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:09:26.892635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.004831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:28.394887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281168592647:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.395011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.781398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.811142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.841800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.877707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.912281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.995825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281168593166:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.995910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.996858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281168593171:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.001951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.021374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506281168593173:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.089598Z node 1 :TX_PROXY ERROR: Actor# [1:7489506285463560526:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.983615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.998936Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506263988721690:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.999019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.031574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.101809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.143296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.180175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.331263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.359301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.386607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.416607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.485342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.514514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.542703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:30.572184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.005004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:31.039242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.067250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.092421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-0 ... alizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:44.710974Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:44.711045Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:44.711075Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:44.711119Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:44.711149Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:44.711894Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:44.711961Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:44.712187Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:44.712229Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:44.712377Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:44.712412Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:44.712593Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:44.712626Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:44.712743Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:44.712777Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:44.715819Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:11:44.715884Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:11:44.715979Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:11:44.716011Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:11:44.716184Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:11:44.716213Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:44.716301Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:44.716334Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:44.716401Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:44.716430Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:44.716475Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:44.716508Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:44.717184Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:44.717261Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:44.717447Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:44.717481Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:44.717615Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:44.717646Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:44.717822Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:44.717868Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:44.718006Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:44.718042Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:44.783560Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.784368Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.791854Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.795673Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.800449Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.807893Z node 5 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.808610Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.817079Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.818575Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.830022Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.833082Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.840025Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.840565Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.850014Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.850800Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:44.858746Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> KqpQueryPerf::Upsert-QueryService+UseSink |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> TObjectStorageListingTest::Listing >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> KqpQueryPerf::Delete-QueryService-UseSink |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::ImportDataShouldHandleErrors >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> TFlatTest::RejectByPerShardReadSize >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> HttpRequest::ProbeServerless >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TResourceBrokerInstant::Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] Test command err: 2025-04-04T17:10:16.318338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:16.318401Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:16.395015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:17.567422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-04T17:10:17.680576Z node 9 :BS_PDISK WARN: 
{BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.681071Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.681638Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11410870772057185223 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.727059Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.727505Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.727741Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8517297363256959627 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.788994Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.789510Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.789779Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2785207897130020537 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.834620Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.835156Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.835371Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5773860475701950814 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.881157Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:17.881631Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:17.881796Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/001847/r3tmp/tmp4MyS6B/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 425724509437428572 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:17.915945Z node 4 :BS_ ... ediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.CdcInitialScanReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.CdcInitialScanReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerRequestDataSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardReadSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardIncomingReadSetSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.DefaultTimeoutMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.VolatilePlanLeaseMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control CoordinatorControls.PlanAheadTimeShiftMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control CoordinatorControls.MinPlanResolutionMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control SchemeShardControls.ForceShardSplitDataSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control SchemeShardControls.DisableForceShardSplit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TCMallocControls.ProfileSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TCMallocControls.GuardedSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TCMallocControls.PageCacheTargetSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TCMallocControls.PageCacheReleaseRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.EnableLocalSyncLogDataCutting was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.BurstThresholdNsHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.BurstThresholdNsSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.BurstThresholdNsNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.DiskTimeAvailableScaleHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.DiskTimeAvailableScaleSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.DiskTimeAvailableScaleNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.DefaultHugeGarbagePerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.HugeDefragFreeSpaceBorderPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.MaxChunksToDefragInflight was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingDryRun was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMinLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control VDiskControls.ThrottlingMaxLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TabletControls.MaxCommitRedoMB was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThreshold was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplier was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.LongRequestThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisks was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThresholdHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplierHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.SlowDiskThresholdSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.PredictedDelayMultiplierSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.BucketSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakDurationMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.MaxCommonLogChunksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.MaxCommonLogChunksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.UseNoopSchedulerHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control PDiskControls.UseNoopSchedulerSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control BlobStorageControllerControls.EnableSelfHealWithDegraded was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
WARNING: immediate control TableServiceControls.EnableMergeDatashardReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured.
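Every warning above flags the same ordering hazard: a control registered before TImmediateControlsConfigurator exists runs on its compiled-in default until the configurator applies the configured value. A minimal sketch of that window under a toy registry model; ControlsRegistry and the values shown are illustrative assumptions, not YDB's actual API:

    class ControlsRegistry:
        """Toy model: controls registered before the configurator exists
        keep their compiled-in defaults until it applies the config."""

        def __init__(self):
            self.values = {}          # control name -> current value
            self.configurator = None  # created later in the bootstrap

        def register(self, name, default):
            if self.configurator is None:
                print(f"WARNING: immediate control {name} was registered "
                      "before configurator creation. A default value may "
                      "have been used before it was configured.")
            self.values[name] = default

        def attach_configurator(self, config):
            self.configurator = config
            self.values.update(config)  # configured values win from here on

    reg = ControlsRegistry()
    reg.register("CoordinatorControls.VolatilePlanLeaseMs", 250)  # hypothetical default
    reg.attach_configurator({"CoordinatorControls.VolatilePlanLeaseMs": 0})
    assert reg.values["CoordinatorControls.VolatilePlanLeaseMs"] == 0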
>> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 30575, MsgBus: 6845 2025-04-04T17:09:31.670350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506296106970662:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.671027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014a1/r3tmp/tmpaSFOJe/pdisk_1.dat 2025-04-04T17:09:32.160662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:32.160800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:32.164419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:32.165002Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30575, node 1 2025-04-04T17:09:32.258686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:32.258710Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:32.258716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:32.258817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6845 TClient is connected to server localhost:6845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:32.915560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:32.935874Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:32.947068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:33.104795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:33.290210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:33.371246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:34.816078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506308991874324:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:34.816204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:35.034488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:35.060735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:35.084786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:35.109726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:35.175584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:35.203000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:35.288574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506313286842139:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:35.288654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:35.288694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506313286842144:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:35.291862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:35.300381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506313286842146:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
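The sequence above is a create-if-missing race on the default resource pool: the fetch reports NOT_FOUND, the creation then collides with a concurrent creator, and the "path exist, request accepts it" error is absorbed as success, followed by a doublechecking re-read. A minimal sketch of that idempotent pattern, assuming a dict-like stand-in for the scheme; ensure_default_pool is hypothetical, not a YDB call:

    def ensure_default_pool(scheme,
                            path="/Root/.metadata/workload_manager/pools/default"):
        # Fetch first: a missing pool reports NOT_FOUND.
        if path not in scheme:
            # Create; a racing session may have created it in between.
            # Its 'path exist, request accepts it' outcome is treated as
            # success rather than as a failure.
            scheme.setdefault(path, {"type": "EPathTypeResourcePool"})
        # 'doublechecking': re-read so both racers observe the same pool.
        return scheme[path]

    scheme = {}
    a = ensure_default_pool(scheme)
    b = ensure_default_pool(scheme)  # second caller takes the 'exists' path
    assert a is b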
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:35.374265Z node 1 :TX_PROXY ERROR: Actor# [1:7489506313286842200:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:36.255922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.283018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.319536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.349360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.378920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.515378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.541080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.596562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.621334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.653190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.670858Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506296106970662:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:36.670926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:36.697659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:36.727369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:36.758806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.275636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:37.307109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.334883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:37.366112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-0 ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:49.848718Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:49.848745Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:49.848808Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:49.848855Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:49.848894Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:49.848943Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:49.849460Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:49.849500Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:49.849675Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:49.849703Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:49.849852Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:49.849894Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:49.850076Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:49.850106Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:49.850200Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:49.850224Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:49.850523Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:11:49.850572Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:11:49.850690Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:11:49.850720Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:11:49.850923Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:11:49.850952Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:49.851033Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:49.851061Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:49.851130Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:49.851153Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:49.851187Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:49.851213Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:49.851696Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:49.851735Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:49.851907Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:49.851968Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:49.852099Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:49.852129Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:49.852373Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:49.852425Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:49.852535Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:49.852572Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:49.949558Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.955294Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.960095Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.965984Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.974922Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.980756Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.985352Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.987597Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.993354Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:49.996437Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.001353Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.005133Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.009190Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.012507Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.016254Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.021749Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2025-04-04T17:10:16.004808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506489928416730:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:16.004867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpdW4Vbi/pdisk_1.dat 2025-04-04T17:10:16.514842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:16.517286Z node 1 :HTTP ERROR: (#26,[::1]:7702) connection closed with error: Connection refused 2025-04-04T17:10:16.518321Z node 1 
:CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:16.550020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:16.550877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:16.554491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:19.275180Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506503608799989:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:19.275209Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpeZ62R7/pdisk_1.dat 2025-04-04T17:10:19.476304Z node 2 :HTTP ERROR: (#28,[::1]:12997) connection closed with error: Connection refused 2025-04-04T17:10:19.482452Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:19.496045Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:19.497128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:19.497210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:19.507505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:22.602011Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489506512695993332:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:22.602065Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmp9qv07D/pdisk_1.dat 2025-04-04T17:10:22.714577Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:22.716351Z node 3 :HTTP ERROR: (#26,[::1]:22831) connection closed with error: Connection refused 2025-04-04T17:10:22.716743Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:22.728492Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:22.728574Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:22.730207Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:25.736213Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506527202811274:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:25.803671Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpIjmlON/pdisk_1.dat 2025-04-04T17:10:25.901203Z node 4 :IMPORT WARN: Table profiles were not loaded 
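Each node in this test repeats the same pattern: the NetClassifierUpdater's HTTP fetch is refused (nothing listens on the test port) and the classifier stays unconfigured. A retry-with-backoff sketch of that kind of fetch, assuming a plain HTTP endpoint; the URL, timeout, and retry policy are assumptions, not the updater's actual logic:

    import time
    import urllib.error
    import urllib.request

    def fetch_subnets(url, attempts=3, base_delay=0.5):
        for attempt in range(attempts):
            try:
                with urllib.request.urlopen(url, timeout=5) as resp:
                    return resp.read()
            except (urllib.error.URLError, OSError) as err:
                print(f"failed to get subnets: {err} (attempt {attempt + 1})")
                time.sleep(base_delay * 2 ** attempt)  # exponential backoff
        return None  # classifier stays unconfigured, as in this test

    # With nothing listening, every attempt is refused, mirroring the log:
    fetch_subnets("http://[::1]:7702/subnets")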
2025-04-04T17:10:25.929947Z node 4 :HTTP ERROR: (#28,[::1]:26201) connection closed with error: Connection refused 2025-04-04T17:10:25.930669Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:25.930880Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:25.930926Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:25.932805Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:28.988351Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489506541449912007:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:28.988455Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpRH1yPh/pdisk_1.dat 2025-04-04T17:10:29.125738Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:29.140613Z node 5 :HTTP ERROR: (#30,[::1]:17171) connection closed with error: Connection refused 2025-04-04T17:10:29.141175Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:29.155099Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:29.155189Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:29.157158Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:32.514567Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489506559774354175:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:32.514628Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmprsTkOQ/pdisk_1.dat 2025-04-04T17:10:32.666203Z node 6 :HTTP ERROR: (#32,[::1]:12318) connection closed with error: Connection refused 2025-04-04T17:10:32.667003Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:32.670335Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:32.670421Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:32.673957Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:32.675556Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:36.049969Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506571713271285:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmp5yl0Fi/pdisk_1.dat 2025-04-04T17:10:36.130129Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:10:36.190799Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:36.199546Z node 7 :HTTP ERROR: (#34,[::1]:22237) connection closed with error: Connection refused 2025-04-04T17:10:36.200014Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:36.220591Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:36.220675Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:36.222308Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpJcecEv/pdisk_1.dat 2025-04-04T17:10:39.916579Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:10:39.920467Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:39.936354Z node 8 :HTTP ERROR: (#36,[::1]:61598) connection closed with error: Connection refused 2025-04-04T17:10:39.938534Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:39.939188Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:39.939264Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:39.940708Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:43.866822Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489506603766274962:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:43.866887Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpJS0HZH/pdisk_1.dat 2025-04-04T17:10:44.097011Z node 9 :HTTP ERROR: (#38,[::1]:7821) connection closed with error: Connection refused 2025-04-04T17:10:44.119844Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:44.120147Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:44.121001Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:44.121077Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:44.123484Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:47.961247Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506622094011494:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:47.961348Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpVeZYXS/pdisk_1.dat 2025-04-04T17:10:48.070013Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:48.079747Z node 10 :HTTP ERROR: (#26,[::1]:6666) connection closed with error: Connection refused 2025-04-04T17:10:48.082383Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:10:48.095037Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:48.095110Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:48.096863Z node 10 :HIVE WAR ... ase.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:05.678356Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:05.696982Z node 14 :HTTP ERROR: (#34,[::1]:26165) connection closed with error: Connection refused 2025-04-04T17:11:05.700470Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:05.701948Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:05.702030Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:05.704638Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:10.298690Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7489506721131829293:2146];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmp9SfH2K/pdisk_1.dat 2025-04-04T17:11:10.395037Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:10.503279Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:10.550208Z node 15 :HTTP ERROR: (#36,[::1]:3290) connection closed with error: Connection refused 2025-04-04T17:11:10.551821Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:10.556417Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:10.556525Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:10.557970Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:15.390315Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7489506744292729536:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:15.390386Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpp0XO0k/pdisk_1.dat 2025-04-04T17:11:15.573067Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:15.600331Z node 16 :HTTP ERROR: (#38,[::1]:10998) connection closed with error: Connection refused 2025-04-04T17:11:15.607582Z node 16 
:CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:15.608467Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:15.608556Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:15.611101Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:20.617754Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7489506763789334539:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:20.617810Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpv1reI9/pdisk_1.dat 2025-04-04T17:11:20.885299Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:20.888854Z node 17 :HTTP ERROR: (#26,[::1]:12598) connection closed with error: Connection refused 2025-04-04T17:11:20.895839Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:20.896997Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:20.897078Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:20.904996Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:26.188235Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7489506787956299911:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:26.188987Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpRH3UZL/pdisk_1.dat 2025-04-04T17:11:26.350674Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:26.359183Z node 18 :HTTP ERROR: (#28,[::1]:12705) connection closed with error: Connection refused 2025-04-04T17:11:26.364827Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:26.367976Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:26.368079Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:26.370382Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:31.682278Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7489506811770497051:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:31.682353Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpBe0N5e/pdisk_1.dat 2025-04-04T17:11:31.909047Z node 19 :IMPORT WARN: Table profiles 
were not loaded 2025-04-04T17:11:31.937969Z node 19 :HTTP ERROR: (#30,[::1]:19248) connection closed with error: Connection refused 2025-04-04T17:11:31.939351Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:31.940271Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:31.940352Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:31.941820Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:37.290577Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7489506835880588471:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmplVWzAg/pdisk_1.dat 2025-04-04T17:11:37.380800Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:37.433288Z node 20 :HTTP ERROR: (#32,[::1]:16599) connection closed with error: Connection refused 2025-04-04T17:11:37.440053Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:37.462759Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:37.488369Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:37.488473Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:37.499991Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:42.881563Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7489506857017377262:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:42.971161Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmp9Z2Dyf/pdisk_1.dat 2025-04-04T17:11:43.138481Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:43.143013Z node 21 :HTTP ERROR: (#34,[::1]:8406) connection closed with error: Connection refused 2025-04-04T17:11:43.145779Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:43.154386Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:43.154501Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:43.158337Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:48.918571Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7489506882467175533:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:48.918662Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
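The Hive lines trace each node through VolatileState: Unknown -> Disconnected -> Connecting -> Connected. A sketch of that chain as a checked state machine, assuming only the edges visible in this log (Hive's real state machine certainly has more):

    from enum import Enum

    class VolatileState(Enum):
        Unknown = 0
        Disconnected = 1
        Connecting = 2
        Connected = 3

    # Only the transitions observed in the log lines above.
    ALLOWED = {
        VolatileState.Unknown: {VolatileState.Disconnected},
        VolatileState.Disconnected: {VolatileState.Connecting},
        VolatileState.Connecting: {VolatileState.Connected},
        VolatileState.Connected: set(),
    }

    def step(states, node, new):
        old = states.get(node, VolatileState.Unknown)
        assert new in ALLOWED[old], f"illegal {old.name} -> {new.name}"
        print(f"Node({node}) VolatileState: {old.name} -> {new.name}")
        states[node] = new

    states = {}
    for s in (VolatileState.Disconnected, VolatileState.Connecting,
              VolatileState.Connected):
        step(states, 22, s)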
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpXvvxFW/pdisk_1.dat 2025-04-04T17:11:49.160392Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:49.169431Z node 22 :HTTP ERROR: (#36,[::1]:6089) connection closed with error: Connection refused 2025-04-04T17:11:49.170639Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:49.273824Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:49.273938Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:49.275873Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:54.950527Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7489506911136849383:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:54.950606Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001838/r3tmp/tmpjh30Sm/pdisk_1.dat 2025-04-04T17:11:55.120519Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:55.132261Z node 23 :HTTP ERROR: (#38,[::1]:4392) connection closed with error: Connection refused 2025-04-04T17:11:55.133574Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-04-04T17:11:55.151216Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:55.151300Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:55.155310Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13669, MsgBus: 62468 2025-04-04T17:09:28.721483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506282575828215:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:28.721603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014b4/r3tmp/tmpa0W7A5/pdisk_1.dat 2025-04-04T17:09:29.070022Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13669, node 1 2025-04-04T17:09:29.106497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:29.106599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:29.108016Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:29.161573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:29.161596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:29.161605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:29.161744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62468 TClient is connected to server localhost:62468 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:29.665850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:29.692129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:29.812927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:29.971095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:30.043795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:31.688633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506295460731872:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:31.688729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:31.942224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.973968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.009655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.048970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.088716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.178927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.261487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506299755699692:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:32.261568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:32.261767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506299755699697:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:32.265134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:32.275561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506299755699699:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:32.358455Z node 1 :TX_PROXY ERROR: Actor# [1:7489506299755699754:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:33.274672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.301675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.328322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.367536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.410410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.531987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.566826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.608079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.633199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.707167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.721832Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506282575828215:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:33.721894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:33.738769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:33.804173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:33.872437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.388156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:34.418067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.448148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.474025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:09:34.503083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... LUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:49.967725Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:49.967766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:49.967796Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:49.968409Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:49.968453Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:49.968640Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:49.968672Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:49.968847Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:49.968883Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 
2025-04-04T17:11:49.969087Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:49.969119Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:49.969247Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:49.969276Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:50.006494Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:11:50.006561Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:11:50.006700Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:11:50.006731Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:11:50.006922Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:11:50.006949Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:50.007043Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:50.007078Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:50.007142Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:50.007167Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:50.007205Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:50.007234Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:50.007929Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:50.007972Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:50.008163Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:50.008193Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:50.008369Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:50.008401Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:50.008600Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:50.008636Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:50.008741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:50.008771Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038729;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:50.063363Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.066832Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.072418Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.074885Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.080523Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.081743Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.088109Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.088740Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.095752Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.095783Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.105609Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.105609Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.113869Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.115139Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.123094Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:11:50.123564Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12097, MsgBus: 23269 2025-04-04T17:09:26.393856Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506275499972402:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:26.393893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014c4/r3tmp/tmpKosNOY/pdisk_1.dat 2025-04-04T17:09:27.099166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:27.099277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:27.107482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:27.118336Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12097, node 1 2025-04-04T17:09:27.323786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:27.323811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:27.323819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:27.323941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23269 TClient is connected to server localhost:23269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:27.845295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.869287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.019182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:09:28.162875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.242615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.774526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288384875936:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.774686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.981353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.060709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.094200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.128986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.157437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.194514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.254512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506292679843743:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.254581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.254789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506292679843748:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.261227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:30.273728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:09:30.273951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506292679843750:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:30.360102Z node 1 :TX_PROXY ERROR: Actor# [1:7489506292679843806:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:31.159917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.182536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.209807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.238773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.269125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.380663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.393848Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506275499972402:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.393974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:31.419760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.451522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.526739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.551993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.575905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.604186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:31.632530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.078908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:32.139448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.168402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.198660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:50.338815Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:50.338848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:50.338918Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:50.338963Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:50.339026Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:50.339062Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:50.339650Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:50.339692Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:50.339895Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:50.339907Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:11:50.339936Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:50.339949Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:11:50.340048Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:11:50.340083Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:11:50.340086Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:50.340118Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:50.340249Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:11:50.340283Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:50.340294Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:50.340328Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:50.340402Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:50.340436Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:50.340439Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:50.340461Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:50.340506Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:50.340535Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:50.340577Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:50.340608Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:50.341159Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:50.341216Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:50.341406Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:50.341440Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:50.341581Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:50.341612Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:50.341797Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:50.341826Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:50.341938Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:50.341966Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038731;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:50.459965Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.462124Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.470008Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.470183Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.480214Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.481618Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.489813Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.489820Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.503829Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.506684Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.512347Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.514106Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.520899Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.534521Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.538560Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:50.550677Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> KqpQueryPerf::Insert+QueryService-UseSink >> TObjectStorageListingTest::TestFilter ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6139, MsgBus: 21196 2025-04-04T17:11:57.324662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506924857645251:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:57.325111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c4d/r3tmp/tmpTi753s/pdisk_1.dat 2025-04-04T17:11:57.785045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6139, node 1 2025-04-04T17:11:57.802743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:57.802835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:57.804469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:57.933554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:57.933578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:57.933586Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:57.933695Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21196 TClient is connected to server localhost:21196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:58.520048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:58.537059Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:58.552999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:11:58.693174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.910666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:58.996596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.782804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506937742548771:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:00.782899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:01.025866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:01.068120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:01.101636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:01.143127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:01.178040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:01.253426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:01.316999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506942037516584:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:01.317067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:01.317160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506942037516589:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:01.321418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:01.346653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506942037516591:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:01.401367Z node 1 :TX_PROXY ERROR: Actor# [1:7489506942037516645:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:02.322380Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506924857645251:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:02.322444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2025-04-04T17:12:01.147509Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:12:01.151501Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:12:01.151787Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:12:01.151835Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:12:01.151882Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:12:01.151913Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:12:01.151968Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:01.152019Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:12:01.170031Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2025-04-04T17:12:01.170221Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:12:01.188464Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 
Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:12:01.191525Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:12:01.191646Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:01.193109Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:01.198470Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:01.198559Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:12:01.199096Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:12:01.199500Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-04T17:12:01.200320Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:12:01.200372Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2025-04-04T17:12:01.200420Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:12:01.200894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-04T17:12:01.200980Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-04T17:12:01.201019Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:12:01.201209Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:12:01.201356Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:12:01.201537Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2025-04-04T17:12:01.202132Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-04-04T17:12:01.202165Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2025-04-04T17:12:01.202196Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:12:01.202525Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-04T17:12:01.202607Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-04T17:12:01.202656Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-04T17:12:01.202755Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:01.202897Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:12:01.203869Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:12:01.207859Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:12:01.207985Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:12:01.208317Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2025-04-04T17:12:01.208926Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2209], now have 1 active actors on pipe 2025-04-04T17:12:01.209662Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 4294969487 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-04-04T17:12:01.209714Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-04T17:12:01.209842Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-04T17:12:01.209891Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:12:01.209928Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-04T17:12:01.209962Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:12:01.210000Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-04T17:12:01.210157Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 230 MaxStep: 30230 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 4294969487 } Partitions { } 2025-04-04T17:12:01.211219Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:12:01.219337Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:12:01.219409Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:12:01.219463Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-04T17:12:01.219520Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-04T17:12:01.229829Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 176 RawX2: 4294969487 } } Step: 100 2025-04-04T17:12:01.229921Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-04-04T17:12:01.229956Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-04-04T17:12:01.229998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 
2025-04-04T17:12:01.230052Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-04-04T17:12:01.230296Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 230 MaxStep: 30230 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 4294969487 } Partitions { } 2025-04-04T17:12:01.230414Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PRO ... UEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67890 PLANNED 0 2025-04-04T17:12:03.779613Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:03.779660Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:03.779790Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:12:03.780143Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:12:03.780375Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:303:2289] 2025-04-04T17:12:03.781159Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:03.782470Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-04T17:12:03.782804Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T17:12:03.783490Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T17:12:03.783788Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-04T17:12:03.783831Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T17:12:03.783869Z node 5 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:03.783904Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:12:03.783953Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [5:303:2289] 2025-04-04T17:12:03.784023Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:12:03.784074Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-04T17:12:03.784149Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 5 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:12:03.784361Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-04-04T17:12:03.784402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2025-04-04T17:12:03.784436Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State PLANNED FrontTxId 67890 2025-04-04T17:12:03.784470Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-04T17:12:03.784506Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-04T17:12:03.784560Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-04T17:12:03.784598Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-04-04T17:12:03.784660Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-04T17:12:03.785018Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-04-04T17:12:03.785061Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-04T17:12:03.785093Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-04T17:12:03.785124Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-04T17:12:03.785160Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-04T17:12:03.785198Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-04-04T17:12:03.785230Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-04T17:12:03.785270Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-04T17:12:03.785305Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-04-04T17:12:03.785458Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 21474838671 } Partitions { } 2025-04-04T17:12:03.785540Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:12:03.789299Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:12:03.789358Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-04T17:12:03.789392Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-04T17:12:03.789428Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-04T17:12:03.789468Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-04T17:12:03.789503Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved 
from CALCULATED to WAIT_RS 2025-04-04T17:12:03.789544Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-04T17:12:03.789602Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-04T17:12:03.789749Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:12:03.791004Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-04T17:12:03.791053Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:12:03.793408Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:328:2307], now have 1 active actors on pipe 2025-04-04T17:12:03.793566Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-04T17:12:03.793620Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-04-04T17:12:03.793655Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-04-04T17:12:03.793686Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-04-04T17:12:03.793723Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-04-04T17:12:03.793760Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-04-04T17:12:03.793793Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-04T17:12:03.793842Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-04T17:12:03.793882Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-04T17:12:03.793912Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-04-04T17:12:03.793993Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-04T17:12:03.794046Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-04-04T17:12:03.794211Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-04T17:12:03.794290Z node 5 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:12:03.796563Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:12:03.796650Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-04T17:12:03.796691Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-04T17:12:03.796726Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-04T17:12:03.796760Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-04T17:12:03.796793Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-04T17:12:03.796849Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-04T17:12:03.796885Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-04T17:12:03.796918Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-04T17:12:03.796983Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-04T17:12:03.797018Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-04T17:12:03.797174Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 21474838671 } Partitions { } 2025-04-04T17:12:03.797254Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:12:03.800363Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:12:03.800415Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-04T17:12:03.800451Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-04T17:12:03.800506Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-04T17:12:03.800544Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:12:03.800592Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-04-04T17:12:03.800635Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-04T17:12:03.800667Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:12:03.800713Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-04T17:12:03.800737Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:12:03.800768Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] 
PredicateAcks: 0/1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20773, MsgBus: 65466 2025-04-04T17:11:53.740634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506907472018944:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:53.741608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c55/r3tmp/tmprpRoNm/pdisk_1.dat 2025-04-04T17:11:54.146657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:54.167848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:54.167950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:54.170470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20773, node 1 2025-04-04T17:11:54.230084Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:54.230106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:54.230113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:54.230260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65466 TClient is connected to server localhost:65466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:54.741734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
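The TPQTabletTests::Read_TEvTxCommit_After_Restart trace above walks transaction 67890 through the PQ tablet's full distributed-transaction state machine: UNKNOWN, PREPARING, PREPARED, PLANNING, PLANNED, CALCULATING, CALCULATED, WAIT_RS, EXECUTING, EXECUTED, WAIT_RS_ACKS, with a TEvReadSet/TEvReadSetAck exchange against tablet 22222 between WAIT_RS and EXECUTED. The C++ sketch below merely replays that progression as reconstructed from the "moved from X to Y" log lines; the enum, helper, and transition list are illustrative assumptions, not the NKikimr::NPQ implementation.

// Minimal sketch of the transaction state progression seen in the log above.
// The state names mirror the log output; everything else is an assumption
// for illustration and does not reproduce the real PQ tablet code.
#include <cstdio>
#include <vector>

enum class ETxState {
    Unknown, Preparing, Prepared, Planning, Planned,
    Calculating, Calculated, WaitRs, Executing, Executed, WaitRsAcks,
};

const char* Name(ETxState s) {
    switch (s) {
        case ETxState::Unknown:     return "UNKNOWN";
        case ETxState::Preparing:   return "PREPARING";
        case ETxState::Prepared:    return "PREPARED";
        case ETxState::Planning:    return "PLANNING";
        case ETxState::Planned:     return "PLANNED";
        case ETxState::Calculating: return "CALCULATING";
        case ETxState::Calculated:  return "CALCULATED";
        case ETxState::WaitRs:      return "WAIT_RS";
        case ETxState::Executing:   return "EXECUTING";
        case ETxState::Executed:    return "EXECUTED";
        case ETxState::WaitRsAcks:  return "WAIT_RS_ACKS";
    }
    return "?";
}

int main() {
    // Happy path reconstructed from the "TxId 67890 moved from X to Y" lines.
    std::vector<ETxState> path = {
        ETxState::Unknown, ETxState::Preparing, ETxState::Prepared,
        ETxState::Planning, ETxState::Planned, ETxState::Calculating,
        ETxState::Calculated, ETxState::WaitRs, ETxState::Executing,
        ETxState::Executed, ETxState::WaitRsAcks,
    };
    for (size_t i = 1; i < path.size(); ++i) {
        std::printf("TxId 67890 moved from %s to %s\n",
                    Name(path[i - 1]), Name(path[i]));
    }
    return 0;
}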
2025-04-04T17:11:54.770514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:54.777557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:54.909093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:55.084830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:55.165920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:56.818661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506920356922469:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:56.818810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.077264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.113343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.186913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.262361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.305434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.346104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.396336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506924651890282:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.396409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.396631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506924651890287:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.399790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:11:57.412670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506924651890289:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:11:57.515621Z node 1 :TX_PROXY ERROR: Actor# [1:7489506924651890343:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:58.738700Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506907472018944:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:58.738785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4378, MsgBus: 1131 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c55/r3tmp/tmpWQYRjs/pdisk_1.dat 2025-04-04T17:11:59.654358Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:59.670862Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:59.682019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:59.682087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4378, node 2 2025-04-04T17:11:59.682903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:59.763252Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:59.763272Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:59.763280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:59.763383Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1131 TClient is connected to server localhost:1131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:00.223389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:00.231933Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:12:00.244803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.342235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.529226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.615480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:02.590367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506946076270943:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:02.590459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:02.629839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.669460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.717324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.753803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.785395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.855764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.926148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506946076271461:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:02.926273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:02.926568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506946076271466:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:02.930031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:02.944473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506946076271468:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:12:03.018103Z node 2 :TX_PROXY ERROR: Actor# [2:7489506950371238818:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpJoinOrder::TPCDS88-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88+StreamLookupJoin-ColumnStore |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TLocksFatTest::RangeSetRemove |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] Test command err: 2025-04-04T17:10:16.317136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:16.317218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:16.390701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:19.637048Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:19.637115Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:19.680323Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:22.717322Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:22.717383Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:22.786422Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:23.792815Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-04T17:10:23.905981Z node 24 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:23.906488Z node 24 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:23.907123Z node 24 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15280233616790549302 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:23.954792Z node 26 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:23.955299Z node 26 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:23.955555Z node 26 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2881236662697161974 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:23.986282Z node 25 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:23.986736Z node 25 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:23.986967Z node 25 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2048406166178066070 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:24.042726Z node 23 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:24.043503Z node 23 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:24.043731Z node 23 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5529429348356333199 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:24.075614Z node 22 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:24.076138Z node 22 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:24.076354Z node 22 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185f/r3tmp/tmpWIdTiV/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14077568579667575148 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 ... 
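Each BS_PDISK failure above follows the same shape: the block-device setup (blobstorage_pdisk_blockdevice_async.cpp) reports EIoResult::FileOpenError for pdisk_1.dat, and the PDisk actor bootstraps straight into StateError, echoing its whole TPDiskConfig for diagnostics. As a hedged illustration of the open-and-report step only, assuming a plain POSIX open(), which is not how the real blobstorage_pdisk device layer is built:

// Illustrative sketch of classifying a PDisk data-file open failure.
// The message format loosely mirrors the log; the function and default
// path below are hypothetical.
#include <cerrno>
#include <cstdio>
#include <cstring>
#include <fcntl.h>
#include <unistd.h>

bool TryOpenPDiskFile(const char* path) {
    int fd = ::open(path, O_RDWR);
    if (fd < 0) {
        // The log above prints "unknown reason, errno# 0" when no errno
        // survives; here we report whatever errno we actually observed.
        std::fprintf(stderr, "Can't open file \"%s\": %s, errno# %d\n",
                     path,
                     errno ? std::strerror(errno) : "unknown reason",
                     errno);
        return false; // the caller would transition the actor to StateError
    }
    ::close(fd);
    return true;
}

int main(int argc, char** argv) {
    const char* path = argc > 1 ? argv[1] : "/tmp/pdisk_1.dat"; // hypothetical
    return TryOpenPDiskFile(path) ? 0 : 1;
}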
n is done id#281474976715661:0 progress is 1/1 2025-04-04T17:12:03.687063Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-04T17:12:03.687160Z node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-04-04T17:12:03.687230Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-04T17:12:03.687324Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-04T17:12:03.687397Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2025-04-04T17:12:03.687580Z node 163 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [163:1377:2618], msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72057594046578944 2025-04-04T17:12:03.687653Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-04-04T17:12:03.687735Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-04-04T17:12:03.687809Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2025-04-04T17:12:03.687916Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 12 2025-04-04T17:12:03.687993Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 1, subscribers: 1 2025-04-04T17:12:03.688070Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046578944, LocalPathId: 3], 7 2025-04-04T17:12:03.704007Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3 2025-04-04T17:12:03.704138Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72075186233409546 2025-04-04T17:12:03.704445Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-04T17:12:03.704825Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-04-04T17:12:03.704871Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 281474976715661, path id: [OwnerId: 72057594046578944, LocalPathId: 3] 2025-04-04T17:12:03.705053Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-04-04T17:12:03.705093Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:679:2236], at schemeshard: 72057594046578944, txId: 281474976715661, path id: 3 2025-04-04T17:12:03.705950Z node 163 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-04-04T17:12:03.706053Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 tablet 72057594046578944 removed=1 2025-04-04T17:12:03.706089Z 
node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 acknowledged 2025-04-04T17:12:03.706121Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 acknowledged 2025-04-04T17:12:03.707191Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-04T17:12:03.707287Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-04-04T17:12:03.707355Z node 163 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 281474976715661 2025-04-04T17:12:03.707461Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 281474976715661, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], version: 7 2025-04-04T17:12:03.707548Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-04-04T17:12:03.707724Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 281474976715661, subscribers: 1 2025-04-04T17:12:03.707803Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [163:1931:2388] 2025-04-04T17:12:03.711668Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2025-04-04T17:12:03.711751Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-04-04T17:12:03.711835Z node 163 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[163:1377:2618], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2025-04-04T17:12:03.711945Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:12:03.711980Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:12:03.712136Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 
2025-04-04T17:12:03.712173Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:1581:2766], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-04T17:12:03.714305Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2025-04-04T17:12:03.715412Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2025-04-04T17:12:03.715484Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: 
"/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 >> BackupRestore::ImportDataShouldHandleErrors [GOOD] >> BackupRestore::RestoreViewDependentOnAnotherView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18249, MsgBus: 63852 2025-04-04T17:09:24.991453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506265522705231:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.991500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001557/r3tmp/tmp1TEnlb/pdisk_1.dat 2025-04-04T17:09:25.408212Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.435655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.435797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.463789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18249, node 1 2025-04-04T17:09:25.639494Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.639517Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.639540Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.639663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63852 TClient is connected to server localhost:63852 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:09:26.431911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.459614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.624334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:26.842327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:26.926276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.294856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506282702576187:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.294952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.737264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.768210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.840232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.886389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.955986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.013492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506286997544002:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.013550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.013596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506286997544007:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.017629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.029335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506286997544009:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.093274Z node 1 :TX_PROXY ERROR: Actor# [1:7489506286997544062:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.995261Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506265522705231:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.995330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.218919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.270694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.348722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.391749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.418925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.531736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.574159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.643988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.672803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.703143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.727637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.753753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:30.780390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.282148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:31.313684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.341921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.367931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.396209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... 17:11:45.840116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-04T17:11:45.904290Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-04T17:11:45.964110Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-04T17:11:46.016369Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-04T17:11:46.863251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-04T17:11:46.931676Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.007112Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.065744Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.133801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.187492Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 
2025-04-04T17:11:47.239997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.295458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.371980Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.435295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.524588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.623085Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.687199Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.748825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.809072Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.889237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-04T17:11:47.972250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.030782Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.105808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.184884Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.251087Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.338453Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715705:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.427787Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.489723Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.808007Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-04T17:11:48.903972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.959881Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.037258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.121913Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.184328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.238822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.285010Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.321414Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.442077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.505269Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.571738Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.651305Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.764827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715721:0, at schemeshard: 72057594046644480 2025-04-04T17:11:49.874650Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-04-04T17:11:51.584789Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:11:51.584819Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:02.945254Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr0wk52s9z8qmyny46djyfev", SessionId: ydb://session/3?node_id=5&id=YzdlMGFhMzAtNzYxZWE2MWUtMTBkOGU3NjUtZDM4ZjJiNmU=, Slow query, duration: 12.839367s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n$bla1 = (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from store_sales as store_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11);\n\n$bla2 = ((select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from catalog_sales as catalog_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11)\n union all\n (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from web_sales as web_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where web_sales.ws_sold_date_sk = date_dim.d_date_sk\n and web_sales.ws_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11));\n\n-- start query 1 in stream 0 using template query87.tpl and seed 1819994127\nselect count(*)\nfrom $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)\n;\n\n-- end query 1 in stream 0 using template query87.tpl", parameters: 0b >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2025-04-04T17:11:20.136987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506765535276307:2249];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:20.137065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00131d/r3tmp/tmpwULIci/pdisk_1.dat 2025-04-04T17:11:20.969719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:20.978213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:20.978309Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:20.989550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13201, node 1 2025-04-04T17:11:21.230554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:21.230576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:21.230580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:21.230708Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24750 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:21.509564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:21.530947Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:23.734336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506778420179090:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:23.748934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:23.966698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:24.164311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:11:24.407530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506782715146606:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:24.407609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:24.407977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506782715146611:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:24.412104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T17:11:24.450646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506782715146613:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:11:24.539768Z node 1 :TX_PROXY ERROR: Actor# [1:7489506782715146690:2846] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:24.553180Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7489506782715146711:2856], for# test_user@builtin, access# DescribeSchema 2025-04-04T17:11:24.553206Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7489506782715146711:2856], for# test_user@builtin, access# DescribeSchema 2025-04-04T17:11:24.565197Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489506782715146708:2365], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:24.565958Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjgwYjRmNWEtZmUxZjFhNWMtYTE0Y2NkOTktNjMzNzdiNDU=, ActorId: [1:7489506782715146602:2355], ActorState: ExecuteState, TraceId: 01jr0wjbzpa0kq20ygzsdaya7q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:26.220141Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506788796674737:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:26.220183Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00131d/r3tmp/tmpED2maf/pdisk_1.dat 2025-04-04T17:11:26.502869Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28907, node 4 2025-04-04T17:11:26.630576Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:26.630748Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:26.677221Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:26.711493Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:26.711518Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:26.711526Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:26.711680Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15812 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:27.031559Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:27.068956Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:29.355372Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506801681577684:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:29.355453Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:29.383342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:29.512089Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506801681577853:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:29.512189Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:29.512425Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506801681577858:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:29.516860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:29.547929Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489506801681577860:2353], DatabaseId: /Root, PoolId: default ... 06801681577929:2795] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:29.765018Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0wjgz7f7w2aex45ya30d5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWEzNmE2OTYtMjIyMWMyZjItM2NkYWNmZTAtMmZhNjE2Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:29.968775Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0wjh853yrweztb5yefvn4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2ZlNDY2MDYtOTMwNzNjMzItNmM2ZTZmZDQtYjJkNTNmODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:31.531483Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489506809835436188:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:31.531521Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00131d/r3tmp/tmppcdu1r/pdisk_1.dat 2025-04-04T17:11:31.687357Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:31.725790Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:31.725869Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:31.731250Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27045, node 7 2025-04-04T17:11:31.866818Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:31.866839Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:31.866846Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:31.866973Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1147 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:32.166902Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 2025-04-04T17:11:36.534379Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489506809835436188:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:36.534455Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-04-04T17:11:36.970388Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489506831310273777:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:36.970521Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:36.970654Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489506831310273789:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:36.978314Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:11:37.014614Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489506831310273791:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:11:37.116337Z node 7 :TX_PROXY ERROR: Actor# [7:7489506835605241160:2687] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-04-04T17:11:42.386479Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506857026025453:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:42.386536Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00131d/r3tmp/tmpCyxpJq/pdisk_1.dat 2025-04-04T17:11:42.695382Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:42.732933Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:42.733035Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:42.740002Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17948, node 10 2025-04-04T17:11:42.947024Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:42.947048Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:42.947056Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:42.947208Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:43.310957Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 2025-04-04T17:11:47.390397Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489506857026025453:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:47.390498Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 2025-04-04T17:11:57.674435Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:11:57.674464Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 5027, MsgBus: 12134 2025-04-04T17:11:53.698604Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506903821515301:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:53.698730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c5f/r3tmp/tmp2d1vkW/pdisk_1.dat 2025-04-04T17:11:54.100730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:54.107325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:54.107449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:54.109056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5027, node 1 2025-04-04T17:11:54.218409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:54.218432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:54.218444Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:54.218611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12134 TClient is connected to server localhost:12134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:54.744370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:54.773514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:54.792581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:54.973850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:55.180325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:55.268247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:57.108485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506921001386037:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.108606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.404066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.440723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.514128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.550980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.596380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.638134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.755881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506921001386557:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.756948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.757415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506921001386562:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:57.761884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:11:57.778174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506921001386565:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:11:57.837076Z node 1 :TX_PROXY ERROR: Actor# [1:7489506921001386619:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:58.698663Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506903821515301:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:58.704123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8586, MsgBus: 24044 2025-04-04T17:12:00.460009Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506934024172169:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:00.460056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c5f/r3tmp/tmpiov0c1/pdisk_1.dat 2025-04-04T17:12:00.705481Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:00.708637Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:00.708718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:00.711112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8586, node 2 2025-04-04T17:12:00.858781Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:00.858803Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:00.858811Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:00.858911Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24044 TClient is connected to server localhost:24044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:12:01.375923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:01.383944Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:12:01.402374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:01.509127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:01.685476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:01.757461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:04.057898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506951204043113:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:04.058011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:04.082947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.117085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.193595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.236809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.281760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.327188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.420491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506951204043631:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:04.420583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:04.422078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506951204043636:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:04.425794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:04.438813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506951204043638:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:12:04.526660Z node 2 :TX_PROXY ERROR: Actor# [2:7489506951204043693:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:05.462054Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506934024172169:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:05.462103Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTabletPipeTest::TestOpen >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall >> KqpQueryPerf::RangeRead-QueryService >> TTabletPipeTest::TestOpen [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TCancelTx::CrossShardReadOnly |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] >> Initializer::Simple >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> KqpQueryPerf::Upsert-QueryService-UseSink |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25693, MsgBus: 23059 2025-04-04T17:09:24.991646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506266104142474:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:24.992124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001556/r3tmp/tmpfgvAU7/pdisk_1.dat 2025-04-04T17:09:25.527694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.527901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.531420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:25.559771Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25693, node 1 2025-04-04T17:09:25.668961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.668983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.668993Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.669096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23059 TClient is connected to server localhost:23059 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.400715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.419080Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.440244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:09:26.628365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.782296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:26.860774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.552475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283284013292:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.552590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.805140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.837333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.869373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.893728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.918085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.943251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.027573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506287578981101:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.027676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.027817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506287578981106:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.030927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.041012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506287578981108:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.126490Z node 1 :TX_PROXY ERROR: Actor# [1:7489506287578981166:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.990912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506266104142474:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:29.993097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.088861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.124279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.159234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.242403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.270079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.381937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.408074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.433778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.460058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.489508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.517341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.550187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:30.580373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.058635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:31.085283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.111209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.140326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... tablet_id=72075186224038730;self_id=[5:7489506911808950479:7656];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:11:54.879893Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:54.879921Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;self_id=[5:7489506911808950479:7656];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:11:54.879942Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:54.880068Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;self_id=[5:7489506911808950479:7656];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:11:54.880251Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;self_id=[5:7489506911808950479:7656];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:11:54.880344Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:54.880402Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:54.880458Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;self_id=[5:7489506911808950479:7656];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:11:54.880633Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:54.880646Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;self_id=[5:7489506911808950479:7656];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:11:54.880685Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:54.880803Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;self_id=[5:7489506911808950479:7656];tablet_id=72075186224038730;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:11:54.880815Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:54.880850Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:54.912156Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:11:54.912224Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:11:54.912340Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:11:54.912387Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:11:54.912577Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:11:54.912615Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:11:54.912723Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:11:54.912762Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:11:54.912836Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:11:54.912888Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:11:54.912942Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:11:54.912976Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:11:54.913759Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:11:54.913813Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:11:54.914094Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:11:54.914134Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:11:54.914700Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:11:54.914761Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:11:54.915025Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:11:54.915081Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:11:54.915242Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:11:54.915284Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:11:54.948965Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.954016Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.957968Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.962768Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.967681Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.976806Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.976806Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.987180Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:54.989625Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:55.007345Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:55.009935Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:55.015977Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:55.020042Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:55.025978Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:55.028449Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:11:55.035540Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-04-04T17:11:44.448720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506867587321999:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:44.448807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp3c4i7S/pdisk_1.dat 2025-04-04T17:11:45.169415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:45.218914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:45.219034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:45.221122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8057, node 1 2025-04-04T17:11:45.540666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:45.540692Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:45.540700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:45.541299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:45.814031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:48.426095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506884767192250:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:48.426231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:48.442909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.607817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506884767192430:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:48.607891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:48.608110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506884767192435:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:48.611828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:48.638194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506884767192437:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:11:48.721568Z node 1 :TX_PROXY ERROR: Actor# [1:7489506884767192508:2795] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:48.858089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wk3kzd786d6vv6pfmj5qr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgxN2MwYjEtMWJmYTM0OWEtN2UzYWI1NjUtYjdhNDVhYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:49.104635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wk3wre1a80j2420p6d6ak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgxN2MwYjEtMWJmYTM0OWEtN2UzYWI1NjUtYjdhNDVhYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/"Create temporary directory "/Root/~backup_20250404T171149" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250404T171149/table" }Backup table "/Root/~backup_20250404T171149/table" to "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table"Describe table "/Root/~backup_20250404T171149/table"Write scheme into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table/permissions.pb"Read table "/Root/~backup_20250404T171149/table"2025-04-04T17:11:49.428390Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506867587321999:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:49.428526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Write data into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table/data_00.csv"Drop table "/Root/~backup_20250404T171149/table"2025-04-04T17:11:49.595949Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250404T171149" in database2025-04-04T17:11:49.634299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/" to "/Root"2025-04-04T17:11:49.760051Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table"Read scheme from "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table" to "/Root/table"2025-04-04T17:11:49.806744Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table/data_00.csv"2025-04-04T17:11:50.030892Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jr0wk4xzfke8kgds92yy7mhe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI1MjAzMzMtMzM5NmMxNzUtODhmMmZlYWMtODI3ZTVhMTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmp9RUm5w/table/permissions.pb"2025-04-04T17:11:50.079006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715672:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-04T17:11:50.246531Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr0wk533dm3c6rrzmrj7ncz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzgxN2MwYjEtMWJmYTM0OWEtN2UzYWI1NjUtYjdhNDVhYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:11:51.854734Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506898535337157:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:51.854888Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpeDuRXn/pdisk_1.dat 2025-04-04T17:11:52.017802Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:52.045081Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:52.045157Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:52.051624Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26415, node 4 2025-04-04T17:11:52.287795Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:52.287815Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:52.287822Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:52.287930Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersio ... : , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:01.322134Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jr0wkfq36kp7p44jqe2fd8xc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzU1YzBhMmEtNzRmOTI5Yy04ZDE2Zjc3Ny02NTA1OWVlZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:01.471965Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wkg251pvdg5ehn07yc1k8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzU1YzBhMmEtNzRmOTI5Yy04ZDE2Zjc3Ny02NTA1OWVlZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/"Create temporary directory "/Root/~backup_20250404T171201" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250404T171201/table" }2025-04-04T17:12:01.567853Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715665:1, at schemeshard: 72057594046644480 Backup table "/Root/~backup_20250404T171201/table" to "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table"Describe table "/Root/~backup_20250404T171201/table"Write scheme into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table/permissions.pb"Read table "/Root/~backup_20250404T171201/table"Write data into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table/data_00.csv"Drop table "/Root/~backup_20250404T171201/table"2025-04-04T17:12:01.959426Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715668:1, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250404T171201" in database2025-04-04T17:12:02.009877Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-04-04T17:12:02.022673Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715669:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-04T17:12:02.064558Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715670:1, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/" to "/Root"2025-04-04T17:12:02.122542Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table"Read scheme from "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table" to "/Root/table"2025-04-04T17:12:02.161505Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from 
"/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table/data_00.csv"2025-04-04T17:12:02.312584Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0wkgy0e00yfjc4ayvag7s2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ODhjYmIyNTQtYmE2Mzk5YjktYjY1NzVmMjYtZGIxYmIyODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpWDHJqw/table/permissions.pb"2025-04-04T17:12:02.364637Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-04T17:12:02.515389Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr0wkh330m0tg8jc41t7xh1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzU1YzBhMmEtNzRmOTI5Yy04ZDE2Zjc3Ny02NTA1OWVlZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:04.045596Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506954998929727:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:04.045666Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpYpwzyT/pdisk_1.dat 2025-04-04T17:12:04.231493Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:04.260274Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:04.260358Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:04.263704Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7541, node 10 2025-04-04T17:12:04.410717Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:04.410736Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:04.410743Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:04.410865Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18567 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:04.659326Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:07.558184Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506967883832478:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:07.558307Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:07.562549Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489506967883832490:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:07.566865Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:12:07.591945Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489506967883832492:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:12:07.690892Z node 10 :TX_PROXY ERROR: Actor# [10:7489506967883832563:2681] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:07.952907Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wkpb56z8x7p1dbjaa6v5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YzE0MTA1NzgtZGI0ODY5NzYtYjg2NGQ4NjItZjcwZDI5ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/"Create temporary directory "/Root/~backup_20250404T171207" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view"Write view into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view/permissions.pb"Remove temporary directory "/Root/~backup_20250404T171207" in database2025-04-04T17:12:08.259406Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view"Restore view "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/i5tq/00177e/r3tmp/tmpAa5oB7/view/permissions.pb"2025-04-04T17:12:08.511477Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 Restore completed successfully2025-04-04T17:12:08.670928Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0wkq2q5nbtxysatgkt4c3f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YzE0MTA1NzgtZGI0ODY5NzYtYjg2NGQ4NjItZjcwZDI5ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root
>> YdbTableBulkUpsert::Limits [GOOD]
>> YdbTableBulkUpsert::Overload
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL]
Test command err:
2025-04-04T17:11:22.295309Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506771599123389:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:22.297641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001315/r3tmp/tmpf4gLMB/pdisk_1.dat
2025-04-04T17:11:22.857902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:22.858034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:22.862036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:11:22.888322Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 30173, node 1
2025-04-04T17:11:22.966368Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:11:22.966519Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:11:23.066756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:23.066774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:23.066779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:23.066886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29468
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:11:23.416771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:29468
2025-04-04T17:11:25.784055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:11:26.029273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-04T17:11:26.029402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), at schemeshard: 72057594046644480
2025-04-04T17:11:26.031658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), operation: ALTER TABLE, path: Root/Foo/TimestampIndex/indexImplTable
2025-04-04T17:11:26.031906Z node 1 :TX_PROXY ERROR: Actor# [1:7489506788778994052:2950] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/Foo/TimestampIndex/indexImplTable\', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 }
Error 128: Administrative access denied
TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786685977 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ...
(TRUNCATED)
2025-04-04T17:11:26.054956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710660:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480
2025-04-04T17:11:26.055105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:11:26.055122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710660:1, at schemeshard: 72057594046644480
2025-04-04T17:11:26.055497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:11:26.055511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:1, at schemeshard: 72057594046644480
2025-04-04T17:11:26.059213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable
waiting...
2025-04-04T17:11:26.069815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786686117, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-04T17:11:26.080746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0
2025-04-04T17:11:26.080791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1
TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786685977 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ...
(TRUNCATED)
2025-04-04T17:11:26.087300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710661:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480
2025-04-04T17:11:26.087399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:11:26.087414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710661:1, at schemeshard: 72057594046644480
2025-04-04T17:11:26.087718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480
2025-04-04T17:11:26.087742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:1, at schemeshard: 72057594046644480
2025-04-04T17:11:26.089416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable
waiting...
2025-04-04T17:11:26.100838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786686145, transactions count in step: 1, at schemeshard: 72057594046644480
2025-04-04T17:11:26.113286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:0
2025-04-04T17:11:26.113330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:1
TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786685977 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ...
(TRUNCATED)
2025-04-04T17:11:26.118969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710662:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: ...
be)
2025-04-04T17:11:33.503269Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:33.503275Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:33.503447Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64068
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:11:33.840904Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:33.914040Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:11:34.118967Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480
2025-04-04T17:11:34.316414Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480
2025-04-04T17:11:38.585334Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489506840547209680:2148];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:38.623031Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001315/r3tmp/tmpjQU7Zz/pdisk_1.dat
2025-04-04T17:11:38.850802Z node 10 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 27553, node 10
2025-04-04T17:11:38.975293Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:38.975433Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:38.987889Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:11:39.062889Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:39.062914Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:39.062921Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:39.063086Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28097
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:11:39.466815Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:39.489076Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:11:44.259488Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489506869008798549:2217];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:44.315117Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001315/r3tmp/tmp11UKtW/pdisk_1.dat
2025-04-04T17:11:44.529595Z node 13 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:11:44.561513Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:44.561745Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:44.570922Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3792, node 13
2025-04-04T17:11:44.731195Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:44.731224Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:44.731237Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:44.731389Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27004
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:11:45.048160Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:45.172336Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:50254" , at schemeshard: 72057594046644480
2025-04-04T17:11:45.172930Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-04T17:11:45.172968Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480
2025-04-04T17:11:45.187291Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480
2025-04-04T17:11:45.187485Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore
2025-04-04T17:11:45.187780Z node 13 :TX_PROXY ERROR: Actor# [13:7489506873303766645:2604] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 }
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed:
(PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1BD59E20)
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8617 (0x1B3E5E59)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1B40C758)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1BD8FB26)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1BD60849)
NTestSuiteYdbLogStore::TCurrentTest::Execute()+1094 (0x1B40B966)
NUnitTest::TTestFactory::Execute()+2438 (0x1BD62116)
NUnitTest::RunMain(int, char**)+5213 (0x1BD8A45D)
??+0 (0x7FD787B5BD90)
__libc_start_main+128 (0x7FD787B5BE40)
_start+41 (0x18851029)
>> TObjectStorageListingTest::TestSkipShards [GOOD]
>> TFlatTest::SelectRangeItemsLimit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 15005, MsgBus: 62274
2025-04-04T17:11:59.076343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506931491320447:2059];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:59.083585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c44/r3tmp/tmpZymqBd/pdisk_1.dat
2025-04-04T17:11:59.504257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:59.504351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:59.517316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:11:59.553780Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 15005, node 1
2025-04-04T17:11:59.709911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:59.709936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:59.709949Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:59.710038Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:62274
TClient is connected to server localhost:62274
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:12:00.291865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:00.325553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:00.463863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:00.655111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:00.740490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:02.563906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506944376224110:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:02.564057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:02.842994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:12:02.905511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:12:02.947735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:12:02.981118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:12:03.047746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:12:03.111363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:12:03.182714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506948671191923:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:03.182786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:03.183067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506948671191928:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:03.186833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:12:03.198649Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-04-04T17:12:03.198835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506948671191930:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:12:03.252640Z node 1 :TX_PROXY ERROR: Actor# [1:7489506948671191983:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:12:04.082335Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506931491320447:2059];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:04.082397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 63900, MsgBus: 2093
2025-04-04T17:12:05.216554Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506955209158498:2059];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:05.216615Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c44/r3tmp/tmpM3shLv/pdisk_1.dat
2025-04-04T17:12:05.355219Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:12:05.400488Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:12:05.400552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 63900, node 2
2025-04-04T17:12:05.403042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:12:05.554890Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:12:05.554908Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:12:05.554914Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:12:05.555010Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2093
TClient is connected to server localhost:2093
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:12:05.999429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.011741Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:12:06.024215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.085043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.240847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.311601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:08.582412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506968094062145:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:08.582525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:08.618519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.654685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.689979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.765079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.808358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.884431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.977138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506968094062660:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:08.977242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:08.977510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506968094062665:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:08.981202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:12:08.989492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506968094062667:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:12:09.064396Z node 2 :TX_PROXY ERROR: Actor# [2:7489506972389030018:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:12:10.218357Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506955209158498:2059];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:10.218436Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpQueryPerf::Upsert+QueryService-UseSink
|87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 65519, MsgBus: 62008
2025-04-04T17:12:04.888414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506952912202291:2271];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:04.888462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c3a/r3tmp/tmpoTLnD1/pdisk_1.dat
2025-04-04T17:12:05.468981Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:12:05.487351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:12:05.487446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:12:05.490068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 65519, node 1
2025-04-04T17:12:05.685556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:12:05.685578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:12:05.685590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:12:05.686280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:62008
TClient is connected to server localhost:62008
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:12:06.347213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.359493Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.363607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:12:06.500440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.697805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:06.776942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:08.619215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506970092073044:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:08.619317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:08.916623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.968561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:12:08.999216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:12:09.029169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:12:09.058185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:12:09.100856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:12:09.199616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506974387040859:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:09.199708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:09.200051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506974387040864:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:09.203350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:12:09.221433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506974387040866:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:12:09.279094Z node 1 :TX_PROXY ERROR: Actor# [1:7489506974387040920:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:12:09.890370Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506952912202291:2271];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:09.890456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
|87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> TCancelTx::CrossShardReadOnly [GOOD]
>> TCancelTx::CrossShardReadOnlyWithReadSets
|87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> KqpQueryPerf::Insert-QueryService-UseSink
>> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD]
Test command err:
2025-04-04T17:12:04.172054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506953800909698:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:04.172107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e80/r3tmp/tmppXhF6k/pdisk_1.dat
2025-04-04T17:12:04.675442Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:12:04.691185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:12:04.691342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:12:04.694914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 32692, node 1
2025-04-04T17:12:04.774783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:12:04.774812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:12:04.774819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:12:04.774947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4112
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-04-04T17:12:05.217424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:05.236368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:05.259186Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-04-04T17:12:05.297081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e80/r3tmp/tmpVIA7PZ/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 22696, node 2
TClient is connected to server localhost:21199
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'dc-1' success.
waiting...
waiting...
waiting...
|87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD]
>> KqpQueryPerf::IndexInsert+QueryService+UseSink
>> BackupRestore::RestoreViewDependentOnAnotherView [GOOD]
>> BackupRestore::RestoreKesusResources
>> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket
>> TPartitionTests::Batching
>> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD]
>> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck
>> KqpQueryPerf::AggregateToScalar+QueryService [GOOD]
>> KqpQueryPerf::AggregateToScalar-QueryService
>> TPartitionTests::Batching [GOOD]
>> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD]
>> TPartitionTests::CommitOffsetRanges
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 7128, MsgBus: 20392
2025-04-04T17:11:56.895216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506919724154448:2196];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:11:56.902724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c54/r3tmp/tmp9o885F/pdisk_1.dat
2025-04-04T17:11:57.296291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:11:57.296482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:11:57.313922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:11:57.343216Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7128, node 1
2025-04-04T17:11:57.438739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:11:57.438758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:11:57.438765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:11:57.438851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20392
TClient is connected to server localhost:20392
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:11:58.115977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:58.166979Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:11:58.188740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:58.333358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:58.516528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:11:58.580373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:12:00.407467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506936904025272:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:00.407589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:00.627504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.661232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.692692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.730233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.774761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.834282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.923951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506936904025784:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:00.924056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:00.924282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506936904025790:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:00.934392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:00.949562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506936904025792:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:01.007854Z node 1 :TX_PROXY ERROR: Actor# [1:7489506941198993143:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:01.934660Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506919724154448:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:01.934783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:02.043089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.119482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.204391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26402, MsgBus: 13192 2025-04-04T17:12:05.435740Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506956678986668:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:05.436766Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c54/r3tmp/tmphYD7YL/pdisk_1.dat 2025-04-04T17:12:05.628745Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:05.640616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:05.640706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:05.642703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26402, node 2 2025-04-04T17:12:05.750891Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:05.750914Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:05.750922Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:05.751047Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13192 TClient is connected to server localhost:13192 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:06.243674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:06.255001Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:06.272692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:06.338541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:12:06.546688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:06.617811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:08.999346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506969563890265:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:08.999449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:09.051388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.090823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.171574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.204917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.279567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.355473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.410275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506973858858078:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:09.410362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:09.410424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506973858858083:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:09.414457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:09.426284Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:12:09.426502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506973858858085:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:09.498349Z node 2 :TX_PROXY ERROR: Actor# [2:7489506973858858138:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:10.403966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.438476Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506956678986668:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:10.438651Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:10.484097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.532646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpQueryPerf::Update-QueryService-UseSink |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TLocksTest::BrokenLockUpdate >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TPartitionTests::CommitOffsetRanges [GOOD] >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] >> TPartitionTests::ChangeConfig >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25038, MsgBus: 16910 2025-04-04T17:12:09.223189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506975468824770:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:09.223225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c2b/r3tmp/tmpUeHHwK/pdisk_1.dat 2025-04-04T17:12:09.853796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:09.853891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:09.854767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:09.856907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc 
on GrpcPort 25038, node 1 2025-04-04T17:12:10.093692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:10.093717Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:10.093729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:10.093847Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16910 TClient is connected to server localhost:16910 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:10.789336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:10.814892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:10.836864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.996852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.186760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.284974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.009084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506992648695748:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.009210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.266437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:13.336089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:13.375241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:13.404612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:13.439685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:13.493737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:13.562657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506992648696264:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.562748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.563126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506992648696269:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.567835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:13.578281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506992648696271:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:13.650436Z node 1 :TX_PROXY ERROR: Actor# [1:7489506992648696324:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:14.227585Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506975468824770:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:14.227661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPartitionTests::ChangeConfig [GOOD] >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27754, MsgBus: 6244 2025-04-04T17:11:58.960253Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506927702107372:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c4a/r3tmp/tmpKMX9W3/pdisk_1.dat 2025-04-04T17:11:59.329980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:59.517709Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:59.531243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:59.531345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:59.536115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27754, node 1 2025-04-04T17:11:59.630750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:59.630785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:59.630792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:59.630908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6244 TClient is connected to server localhost:6244 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:00.265905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.290653Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:00.304767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.513190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.697803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.793750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:02.611247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506944881978134:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:02.611368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:02.868255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.906468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.944867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.974451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:03.007126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:03.055864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:03.173276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506949176945946:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:03.173341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:03.173441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506949176945951:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:03.176630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:03.189152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506949176945953:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:03.256305Z node 1 :TX_PROXY ERROR: Actor# [1:7489506949176946007:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:03.962361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506927702107372:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:03.962449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:04.206093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.262931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.332260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64029, MsgBus: 10688 2025-04-04T17:12:07.366412Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506964054243332:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c4a/r3tmp/tmpDOyyR1/pdisk_1.dat 2025-04-04T17:12:07.508549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:07.557183Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:07.577298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:07.577401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64029, node 2 2025-04-04T17:12:07.588233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:07.722954Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:07.722981Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:07.722991Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:07.723125Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10688 TClient is connected to server localhost:10688 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:12:08.302401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.326525Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:12:08.350908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:08.438497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:08.681193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:08.766116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:10.907290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506976939146816:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:10.907376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:10.968367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.007448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.047140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.091077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.169819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.240752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.357210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506981234114631:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:11.357267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506981234114636:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:11.357316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:11.361528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:11.375006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506981234114638:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:12:11.449613Z node 2 :TX_PROXY ERROR: Actor# [2:7489506981234114693:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:12.278318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.328806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.339752Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506964054243332:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:12.339897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:12.417116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TPartitionTests::ConflictingActsInSeveralBatches >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks >> KqpQueryPerf::Delete+QueryService-UseSink |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] Test command err: 2025-04-04T17:12:09.456665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506972324628848:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:09.456978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001640/r3tmp/tmp2QUi4w/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32660, node 1 2025-04-04T17:12:10.348690Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:10.375666Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:10.375701Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:10.395790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:10.396476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:10.420007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:10.698872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:10.698896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:10.698905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:10.699026Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:11.817547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.143049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506989504499092:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.143153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.986633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.354379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506993799466590:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.354461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.354682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506993799466595:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.359941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:12:14.409293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506993799466597:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:12:14.457287Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506972324628848:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:14.457332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:14.477821Z node 1 :TX_PROXY ERROR: Actor# [1:7489506993799466689:2818] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:14.879521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0wkwrg6sz4t9n48b0wnsrm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFkZDg1ZjAtNTA0ZjEzMTItZjgzMGFmZGYtOGY5ZDIyYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:15.035961Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0wkxdt4kxg7339ez1kvv0h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRlZDg5OGQtODYzZDU0MDktZjU2ODU5NDgtYmY4YTg0NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28130, MsgBus: 7533 2025-04-04T17:12:03.998147Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506948350101327:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:03.998579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c3b/r3tmp/tmpzApu8z/pdisk_1.dat 2025-04-04T17:12:04.452338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:04.459736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:04.459845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:04.482899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28130, node 1 2025-04-04T17:12:04.613274Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:04.613307Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:04.613321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:04.613491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7533 TClient is connected to server localhost:7533 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:05.308966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:05.348260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:05.526855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:05.697573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:05.774136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:07.527755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506965529972168:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:07.527875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:07.763663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:07.832957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:07.906594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:07.980651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.054796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.129887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.234676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506969824939989:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:08.234754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:08.234953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506969824939994:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:08.239836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:08.255099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506969824939996:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:08.334234Z node 1 :TX_PROXY ERROR: Actor# [1:7489506969824940052:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:08.996232Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506948350101327:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:08.996501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9745, MsgBus: 13642 2025-04-04T17:12:10.545544Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506978194965795:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:10.549110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c3b/r3tmp/tmpL1bvjk/pdisk_1.dat 2025-04-04T17:12:10.713573Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:10.731666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:10.731765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:10.733422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9745, node 2 2025-04-04T17:12:10.911159Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:10.911187Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:10.911195Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:10.911303Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13642 TClient is connected to server localhost:13642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:12:11.415638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.457265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.618598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.781926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.862283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.964458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506991079869433:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:13.964546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.012267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.055595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.112342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.163838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.205403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.296623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.372432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506995374837248:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.372536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.372832Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506995374837253:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.376974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:14.394081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506995374837255:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:14.457237Z node 2 :TX_PROXY ERROR: Actor# [2:7489506995374837310:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:15.546368Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506978194965795:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:15.546459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12634, MsgBus: 23683 2025-04-04T17:12:10.407656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506978640486455:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:10.408057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c2a/r3tmp/tmpRWfgDL/pdisk_1.dat 2025-04-04T17:12:10.931961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:10.932102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:10.934298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12634, node 1 2025-04-04T17:12:10.965073Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:10.970499Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:11.010397Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:11.131539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:11.131556Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:11.131562Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:11.131668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23683 TClient is connected to server localhost:23683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:11.770840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.800919Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:11.815536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.996196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.179135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.263526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:14.059265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506995820357266:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.059405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.357071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.418483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.453901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.487376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.529064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.606614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.683131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506995820357783:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.683200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.683479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506995820357788:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.686541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:14.696707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506995820357790:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:14.779796Z node 1 :TX_PROXY ERROR: Actor# [1:7489506995820357845:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:15.390364Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506978640486455:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:15.405057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore [GOOD] >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1854, MsgBus: 21187 2025-04-04T17:12:12.426703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506988291916184:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:12.427292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c20/r3tmp/tmpsOXMO1/pdisk_1.dat 2025-04-04T17:12:13.016033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:13.017693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:13.017837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:13.022067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1854, node 1 2025-04-04T17:12:13.104333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:13.104365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:13.104373Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:13.104489Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21187 TClient is connected to server localhost:21187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:13.686646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.721906Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:13.736900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.892630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:14.044283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:14.129800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.019497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507005471787015:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:16.019667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:16.271307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:16.335857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:16.416331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:16.455329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:16.491407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:16.536515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:16.602591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507005471787529:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:16.602726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:16.602909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507005471787534:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:16.607271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:16.620964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507005471787536:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:16.702077Z node 1 :TX_PROXY ERROR: Actor# [1:7489507005471787591:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:17.426653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506988291916184:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:17.426774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> BackupRestore::RestoreKesusResources [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> YdbTableBulkUpsert::Overload [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20113, MsgBus: 2832 2025-04-04T17:09:38.736695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506324353180823:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:38.736805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00147f/r3tmp/tmpO8uVzm/pdisk_1.dat 2025-04-04T17:09:39.165056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:39.180799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:39.181179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:39.183731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20113, node 1 2025-04-04T17:09:39.288712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:39.288739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:39.288746Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:39.288872Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2832 TClient is connected to server localhost:2832 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:39.937871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:39.963621Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:39.985184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:40.129894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:40.307736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:40.375724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:09:42.024285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506341533051591:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:42.024374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:42.238872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:42.319513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:42.345641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:42.378680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:42.407311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:42.481120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:42.569544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506341533052117:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:42.569630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:42.569941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506341533052122:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:42.573670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:42.589065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506341533052124:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:42.663661Z node 1 :TX_PROXY ERROR: Actor# [1:7489506341533052178:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:43.719221Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506324353180823:2254];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:43.719287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:44.004289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.038008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.068604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.116960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.188001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.337797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.371677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.399376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.470023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.502537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.533442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:44.566576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:44.600054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.060964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:45.137450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.170933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:45.198588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-0 ... 281474976715677:0, at schemeshard: 72057594046644480 2025-04-04T17:11:56.352269Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-04T17:11:56.395338Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-04T17:11:56.467896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-04T17:11:56.521263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-04T17:11:56.566049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-04T17:11:56.614822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.509824Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-04T17:11:57.601645Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.659453Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.717990Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.778600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.843358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.891260Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-04T17:11:57.969797Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.094955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.175883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.228222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.281962Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.338618Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.419519Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.468484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.515015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.568113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.642617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.686619Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.745547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.829571Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.884260Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-04T17:11:58.950221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.005824Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.366224Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-04T17:11:59.423977Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.471025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.515984Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.608012Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.661989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.716510Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.779839Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.841731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-04T17:11:59.995136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.126351Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.210873Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 
2025-04-04T17:12:00.271393Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.430533Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715721:0, at schemeshard: 72057594046644480 2025-04-04T17:12:00.528501Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-04-04T17:12:02.150357Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:12:02.150394Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:16.493708Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr0wkfep4p7m1z0w200kvm3h", SessionId: ydb://session/3?node_id=5&id=N2M2ODFkMTUtNWRhZmUyMmMtZGFmNDE2OTAtYjNkMmM0NA==, Slow query, duration: 15.766545s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b >> TPartitionTests::ConflictingCommitFails |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17410, MsgBus: 13001 2025-04-04T17:09:45.291890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506355403387272:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:45.292153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/001471/r3tmp/tmp0Quhem/pdisk_1.dat 2025-04-04T17:09:45.695250Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17410, node 1 2025-04-04T17:09:45.720439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:45.720533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:45.767984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:45.853920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:45.853946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:45.853957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:45.854092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13001 TClient is connected to server localhost:13001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:46.396421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:46.446392Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:46.460912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:46.625579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:46.800009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:46.869669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:48.634290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506368288290717:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:48.634441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:48.876262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:48.953008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:48.987718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.022255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.058478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.113429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:49.202368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506372583258532:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.202461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.202744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506372583258537:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:49.206201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:49.218981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506372583258539:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:49.309479Z node 1 :TX_PROXY ERROR: Actor# [1:7489506372583258594:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:50.287537Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506355403387272:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:50.287614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:50.358497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.427090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.501583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.543885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.576796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.804843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.848626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.924691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:50.993412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.068603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.157506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.193530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:51.231637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.799361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:51.871792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.911138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:51.942273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... ecute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:07.821413Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:07.821443Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:07.821650Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:07.821696Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:07.821830Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:07.821861Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038733;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:07.822076Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:07.822128Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:07.822377Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:07.822414Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:07.822595Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:07.822647Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:07.822836Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:07.822866Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:07.822986Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:07.823012Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:07.855126Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:12:07.855198Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:12:07.855328Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:12:07.855369Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:12:07.855549Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:12:07.855582Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:07.855707Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:07.855748Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:07.855825Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:07.855856Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:07.855910Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:07.855945Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:07.856605Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:07.856655Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:07.856853Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:07.856889Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:07.857048Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:07.857085Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:07.857263Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:07.857297Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:07.857420Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:07.857452Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038727;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:07.894649Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.894785Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.903062Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.903063Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.911870Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.911874Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.920665Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.924807Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.929603Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.941441Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.949133Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.957383Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.964515Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.972123Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:07.979144Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:08.020573Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11705, MsgBus: 61844 2025-04-04T17:09:50.808076Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506378634241285:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:50.838427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/00141b/r3tmp/tmpTl7Oxo/pdisk_1.dat 2025-04-04T17:09:51.451910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:51.457044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:51.457128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:51.459368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11705, node 1 2025-04-04T17:09:51.658125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:51.658145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:51.658151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:51.658255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61844 TClient is connected to server localhost:61844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:52.329438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:52.369447Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:52.373097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:52.533396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:52.700757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:52.797119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:54.572084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506395814112122:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:54.572171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:54.823740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:54.897105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:54.934470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.021319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.053749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.115244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:55.220450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506400109079936:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:55.220521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:55.220739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506400109079941:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:55.226101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:55.238807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506400109079943:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:55.323349Z node 1 :TX_PROXY ERROR: Actor# [1:7489506400109079998:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:55.765225Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506378634241285:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:55.765280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:56.361598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.406820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.444156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.517221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.553745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.723374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.759189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.797616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.832103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.859933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.887584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:56.931207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:56.970866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.487887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:57.529657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.600132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:57.640159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:11.601833Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:11.601879Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:11.601971Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:11.602007Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:11.602049Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:11.602082Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:11.604059Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:11.604129Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:11.604381Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:11.604424Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:11.604591Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:11.604625Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:11.604820Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:11.604850Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:11.604970Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:11.604997Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:11.628430Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:12:11.628494Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:12:11.628586Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:12:11.628620Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:12:11.628807Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:12:11.628842Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:11.628938Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:11.628993Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:11.629067Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:11.629098Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:11.629144Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:11.629178Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:11.629817Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:11.629864Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:11.630126Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:11.630167Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:11.630389Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:11.630427Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:11.630641Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:11.630680Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:11.630827Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:11.630868Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038726;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:11.654572Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.654849Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.664223Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.664251Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.673209Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.673210Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.681392Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.681575Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.698471Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.700158Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.707386Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.707468Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.715599Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.715798Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.723590Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:12:11.727719Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-04-04T17:12:12.312718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506988014460723:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:12.322713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7d/r3tmp/tmp1S5VX0/pdisk_1.dat 2025-04-04T17:12:12.960812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-04T17:12:12.960916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:12.963617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:12.993216Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12945 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:13.290636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.324901Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.342304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.110535Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507005836994539:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:16.110570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7d/r3tmp/tmpCaMkyE/pdisk_1.dat 2025-04-04T17:12:16.284725Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:16.298097Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:16.298181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:16.299925Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10397 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:16.519695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.535017Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.557247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Overload [GOOD] Test command err: 2025-04-04T17:11:10.836062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506720624604731:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:10.836184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001349/r3tmp/tmp9miNKX/pdisk_1.dat 2025-04-04T17:11:11.499378Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:11.501314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:11.501428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:11.514233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21242, node 1 2025-04-04T17:11:11.832334Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:11.832359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:11.832365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:11.832477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5394 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:12.198978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:14.389526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-04T17:11:14.613823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506737804474906:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:14.613903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506737804474898:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:14.614177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:14.617836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:11:14.654498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506737804474912:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:11:14.738578Z node 1 :TX_PROXY ERROR: Actor# [1:7489506737804474998:2812] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:15.400455Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0wj2dk1bhw4f8qwf7mg1j0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQwOTcwNzAtZWU5OTE1YzQtNjI0ODU0YjUtYjMyYzliZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-04T17:11:15.834258Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506720624604731:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:15.834413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:15.838318Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0wj37g6wad65p1v10j8pqf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQwOTcwNzAtZWU5OTE1YzQtNjI0ODU0YjUtYjMyYzliZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-04T17:11:15.919159Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:11:15.947861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-04T17:11:16.443095Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wj3v301vby5tx5bp1fdpm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y1YWU0ODYtY2NhZDU4N2UtNGViMTA3NjUtOTNkODk3OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-04T17:11:16.831105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0wj47a14he2nvt32xp23t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y1YWU0ODYtY2NhZDU4N2UtNGViMTA3NjUtOTNkODk3OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-04T17:11:16.932679Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:11:16.940746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-04T17:11:17.412887Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0wj4rz2j8skrk69fjst8w7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc2NjdjNGUtODdhNjVhNWEtZmUzNDhkNmUtNjU1MTFhMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-04T17:11:17.875025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01jr0wj55zc16cbvpw49z2hnrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc2NjdjNGUtODdhNjVhNWEtZmUzNDhkNmUtNjU1MTFhMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-04T17:11:17.943094Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:11:17.977855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-04T17:11:18.631431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0wj5va2enwz5dkfewenjv3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRmYmJkMzctOTYyOWYzZTUtMzI4ZWIwODEtYzJiODJmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-04T17:11:19.066101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0wj6br60jr2rt2snpb9200, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRmYmJkMzctOTYyOWYzZTUtMzI4ZWIwODEtYzJiODJmZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-04T17:11:19.206086Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T17:11:19.222695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-04T17:11:19.714971Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wj70q1egt5g2pftck14s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY3MGZiMmMtYzQ2YWVmNjEtYmQzYWQ0NTUtYjc5NTQ4Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-04T17:11:20.195305Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr0wj7dk80xq3fy4dw8r7y20, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY3MGZiMmMtYzQ2YWVmNjEtYmQzYWQ0NTUtYjc5NTQ4Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-04T17:11:20.248449Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-04T17:11:20.274056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-04T17:11:20.787356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr0wj8159ahp0bffssernjp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJhMzIwMTItYTgzYmRhZDgtM2M4NTk5ODItOWE5Y2ViMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-04-04T17:11:21.154653Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. 
Ctx: { TraceId: 01jr0wj8f465cv1rgk084cdjyb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODJhMzIwMTItYTgzYmRhZDgtM2M4NTk5ODItOWE5Y2ViMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-04-04T17:11:21.212778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-04T17:11:21.259912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 SUCCESS 2025-04-04T17:11:21.723553Z node 1 :KQP_E ... de 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-04T17:12:18.713094Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.713124Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.713615Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-04T17:12:18.716281Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-04T17:12:18.716324Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:12:18.716514Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-04T17:12:18.716529Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:12:18.716664Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-04T17:12:18.716677Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 .2025-04-04T17:12:18.727948Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.727982Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.728534Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.728562Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.742209Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.742262Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.743578Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.743614Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.744892Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.744913Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 
2025-04-04T17:12:18.746089Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-04T17:12:18.746515Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-04T17:12:18.750131Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-04T17:12:18.750177Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:12:18.750303Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-04T17:12:18.750325Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T17:12:18.759573Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.759606Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.762155Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.762185Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.778288Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.778326Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.780069Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-04T17:12:18.784558Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-04T17:12:18.784598Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 .2025-04-04T17:12:18.794609Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.794657Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.795173Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.795189Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.795568Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.795583Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.805504Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-04T17:12:18.805981Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-04T17:12:18.811897Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-04T17:12:18.811949Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:12:18.814484Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 
2025-04-04T17:12:18.814537Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T17:12:18.815014Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.815030Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.815385Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.815400Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.815776Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.815790Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.816782Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-04T17:12:18.824007Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-04T17:12:18.824042Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:12:18.825279Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.825297Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.825730Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.825746Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.826501Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.826532Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 .2025-04-04T17:12:18.839996Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.840029Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.841960Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-04-04T17:12:18.842431Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-04-04T17:12:18.842752Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-04-04T17:12:18.847481Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-04-04T17:12:18.847536Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:12:18.847675Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-04-04T17:12:18.847696Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T17:12:18.847772Z node 13 
:TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-04-04T17:12:18.847786Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:12:18.849071Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.849093Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.851880Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.851907Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 .2025-04-04T17:12:18.858883Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.858925Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.866503Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.866538Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.866978Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.867004Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-04-04T17:12:18.867698Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:12:18.867725Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 >> TLocksTest::Range_BrokenLock3 [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15544, MsgBus: 19947 2025-04-04T17:12:08.156747Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506969029002637:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c2e/r3tmp/tmpTDCUeH/pdisk_1.dat 2025-04-04T17:12:08.542167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:08.736257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:08.739475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:08.739574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-04T17:12:08.743440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15544, node 1 2025-04-04T17:12:08.820083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:08.820110Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:08.820117Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:08.820229Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19947 TClient is connected to server localhost:19947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:09.434531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:09.453944Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:09.462394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:09.616879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:09.829400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:09.916546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:11.801353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506981913906113:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:11.801468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:12.060946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.092508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.126532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.166813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.203369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.272558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.354634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506986208873928:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:12.354753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:12.355022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506986208873933:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:12.359167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:12.370159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506986208873935:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:12.448591Z node 1 :TX_PROXY ERROR: Actor# [1:7489506986208873990:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:13.151898Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506969029002637:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:13.151990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8361, MsgBus: 14191 2025-04-04T17:12:14.891275Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506996037027016:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c2e/r3tmp/tmpOt1aSb/pdisk_1.dat 2025-04-04T17:12:14.994338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:15.107805Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:15.127210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:15.127311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:15.128459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8361, node 2 2025-04-04T17:12:15.218783Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:15.218807Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:15.218813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:15.218909Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14191 TClient is connected to server localhost:14191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:12:15.719276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:15.746648Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:15.764171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:15.887202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.053603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.131368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:18.483590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507013216897802:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.483677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.521665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.557791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.631843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.712558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.785233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.823379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.869195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507013216898322:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.869297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.869323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507013216898327:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.872700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:18.882384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507013216898329:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:18.938046Z node 2 :TX_PROXY ERROR: Actor# [2:7489507013216898381:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:19.873568Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506996037027016:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:19.873624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20018, MsgBus: 31392 2025-04-04T17:12:15.628047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506999384164793:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:15.628263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bef/r3tmp/tmpNsFvCJ/pdisk_1.dat 2025-04-04T17:12:16.154042Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:16.157927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:16.158050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:16.161163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20018, node 1 2025-04-04T17:12:16.274748Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:16.274771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:16.274777Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:16.274880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31392 TClient is connected to server localhost:31392 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:12:16.903811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.947981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.093777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.265393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.344753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:18.957834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507012269068238:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.958014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:19.201906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.241465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.280756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.323251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.371520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.418014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.506095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507016564036052:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:19.506183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:19.506434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507016564036057:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:19.511153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:19.524114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507016564036059:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:19.615197Z node 1 :TX_PROXY ERROR: Actor# [1:7489507016564036115:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:20.624374Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506999384164793:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:20.624463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTest::RejectByPerRequestSize [GOOD] >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10138, MsgBus: 9919 2025-04-04T17:12:06.310341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506962653062007:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:06.310815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c36/r3tmp/tmpx3uET8/pdisk_1.dat 2025-04-04T17:12:06.862864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:06.865862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:06.865989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:06.869071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10138, node 1 2025-04-04T17:12:06.975295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:06.975318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:06.975329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:06.975456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9919 TClient is connected to server localhost:9919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:07.745426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:07.818902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:07.956268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:08.146210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:08.243473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.724510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506975537965530:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:09.724623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:09.946021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.992159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.049430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.085506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.121347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.162646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.252203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506979832933343:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:10.252281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:10.252562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506979832933348:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:10.256003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:10.266638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506979832933350:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:10.365699Z node 1 :TX_PROXY ERROR: Actor# [1:7489506979832933405:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:11.306682Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506962653062007:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:11.306749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:11.351821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.406214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.454285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61174, MsgBus: 27817 2025-04-04T17:12:14.486692Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506997863320195:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c36/r3tmp/tmpBqiFDt/pdisk_1.dat 2025-04-04T17:12:14.600186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:14.696067Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:14.724085Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:14.724169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:14.726123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61174, node 2 2025-04-04T17:12:14.842849Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:14.842874Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:14.842882Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:14.843015Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27817 TClient is connected to server localhost:27817 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:15.339404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:15.351530Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:15.384655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:15.482996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:15.685400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:12:15.808919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:18.002153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507010748223694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.002286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.048350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.082075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.112110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.186042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.221308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.259939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.312144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507015043191502:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.312223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.312444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507015043191507:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:18.316361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:18.328588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507015043191509:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:18.407973Z node 2 :TX_PROXY ERROR: Actor# [2:7489507015043191562:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:19.271970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.312858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.390548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.474405Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506997863320195:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:19.474529Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-04-04T17:11:38.788392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506841366050543:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:38.790637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e85/r3tmp/tmpGEbnsc/pdisk_1.dat 2025-04-04T17:11:39.337141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:39.337315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:39.338683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:39.354928Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:26635 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:39.589213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:39.618632Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:39.642463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:11:39.653147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:39.882286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:39.956031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:42.634591Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506858822365491:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e85/r3tmp/tmpoufWcC/pdisk_1.dat 2025-04-04T17:11:42.750597Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:42.837402Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:42.853506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:42.853570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:42.854798Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24117 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:43.051777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:11:43.071803Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:11:43.076388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:43.209658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:43.289892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:46.358311Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489506877182241847:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e85/r3tmp/tmpBfeZnL/pdisk_1.dat 2025-04-04T17:11:46.389001Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:46.449443Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:46.467516Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:46.467588Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:46.468456Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20455 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:46.611441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:46.621724Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:46.633172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:46.685967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:46.763419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:50.305028Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506894545398132:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e85/r3tmp/tmpsgsvf6/pdisk_1.dat 2025-04-04T17:11:50.428209Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:50.469984Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:50.501507Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:50.501628Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:50.507763Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20721 WaitRootIsUp 'dc-1'... 
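By this point the record format is stable: an ISO-8601 timestamp with microsecond precision and a Z suffix, `node <id>`, a `:COMPONENT` tag, a severity, and free text running until the next timestamp. A minimal parsing sketch for a saved chunk of this log; the function and field names are mine, and severities other than the DEBUG/WARN/ERROR actually seen here are assumed:

    import re

    # Matches records like:
    #   2025-04-04T17:11:46.358311Z node 3 :METADATA_PROVIDER WARN: fline=...
    RECORD = re.compile(
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
        r"node (?P<node>\d+) "
        r":(?P<component>\w+) "
        r"(?P<severity>DEBUG|INFO|NOTICE|WARN|ERROR|CRIT): "
        r"(?P<message>.*?)(?=\d{4}-\d{2}-\d{2}T\d{2}:|\Z)",
        re.DOTALL,
    )

    def records(text):
        """Yield (timestamp, node, component, severity, message) tuples."""
        for m in RECORD.finditer(text):
            yield m.group("ts", "node", "component", "severity", "message")

    # Example use: count WARN/ERROR records per component in a saved log.
    # from collections import Counter
    # Counter((c, s) for _, _, c, s, _ in records(open("ya_log.txt").read())
    #         if s in ("WARN", "ERROR"))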
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Sche ... PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:03.195644Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:03.202731Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:03.216306Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:03.288677Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:03.343613Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:07.466325Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489506967603896615:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:07.466442Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e85/r3tmp/tmpMsuckK/pdisk_1.dat 2025-04-04T17:12:07.661656Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:07.677236Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:07.677349Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:07.679055Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13336 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:07.987686Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:08.011094Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:12:08.016308Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:08.100018Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:08.180409Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.369251Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489506987702141214:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e85/r3tmp/tmphWZFpD/pdisk_1.dat 2025-04-04T17:12:12.540444Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:12.637127Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:12.641097Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:12.641199Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:12.642944Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9468 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:12.943895Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.950810Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.963998Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:12:13.003948Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.098853Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:13.160000Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:12:17.274307Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489507009510707916:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:17.274443Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e85/r3tmp/tmpGVFrts/pdisk_1.dat 2025-04-04T17:12:17.422890Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:17.422996Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:17.430477Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:17.440594Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19450 WaitRootIsUp 'dc-1'... 
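The HIVE warnings repeat one node lifecycle on every bootstrap: Unknown -> Disconnected -> Connecting -> Connected. Below is a tiny model of exactly the transitions this section shows; the real Hive state machine surely allows more edges (e.g. back to Disconnected on node loss), but none appear in this log:

    # VolatileState transitions as observed in the HIVE#72057594037968897 records.
    OBSERVED = {
        "Unknown": {"Disconnected"},
        "Disconnected": {"Connecting"},
        "Connecting": {"Connected"},
        "Connected": set(),  # no outgoing transition appears in this section
    }

    def check(path):
        """Raise if a state sequence uses a transition not seen in this log."""
        for src, dst in zip(path, path[1:]):
            if dst not in OBSERVED.get(src, set()):
                raise ValueError(f"unobserved transition: {src} -> {dst}")

    check(["Unknown", "Disconnected", "Connecting", "Connected"])  # passes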
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:17.745183Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.751805Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.761997Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:12:17.767426Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.831016Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.893734Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-04-04T17:11:59.458114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506929769331019:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:59.458711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e81/r3tmp/tmpMki8H8/pdisk_1.dat 2025-04-04T17:11:59.937920Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:59.942229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:59.942425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:59.945171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25080 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:00.328193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.347629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:00.368561Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:00.376660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:04.449658Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506929769331019:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:04.449731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:08.572485Z node 1 :TX_DATASHARD ERROR: Transaction read size 85004227 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2025-04-04T17:12:08.572603Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 85004227 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2025-04-04T17:12:08.573876Z node 1 :TX_PROXY ERROR: Actor# [1:7489506968424038214:2942] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2025-04-04T17:12:09.411306Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506973967398998:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:09.444507Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e81/r3tmp/tmp0h52TN/pdisk_1.dat 2025-04-04T17:12:09.552213Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:09.565354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:09.565466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-04T17:12:09.567356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10482 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:09.793092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:09.799580Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:09.834688Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:12:09.850566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:14.378492Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506973967398998:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:14.378550Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:17.738586Z node 2 :TX_DATASHARD ERROR: Transaction read size 85003987 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-04-04T17:12:17.738708Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 85003987 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-04-04T17:12:17.738840Z node 2 :TX_PROXY ERROR: Actor# [2:7489507008327138875:2938] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-04-04T17:12:18.491674Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507011321811306:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:18.511708Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e81/r3tmp/tmpY0KJAr/pdisk_1.dat 2025-04-04T17:12:18.632934Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:18.633017Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:18.636371Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:18.652002Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15906 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:18.893218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:18.916514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:22.114588Z node 3 :TX_PROXY DEBUG: actor# [3:7489507011321811149:2086] Handle TEvProposeTransaction 2025-04-04T17:12:22.114615Z node 3 :TX_PROXY DEBUG: actor# [3:7489507011321811149:2086] TxId# 281474976715700 ProcessProposeTransaction 2025-04-04T17:12:22.114662Z node 3 :TX_PROXY DEBUG: actor# [3:7489507011321811149:2086] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7489507028501681422:2611] DataReq marker# P0 2025-04-04T17:12:22.114705Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507028501681422:2611] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-04-04T17:12:22.115169Z node 3 :TX_PROXY DEBUG: Actor [3:7489507028501681422:2611] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-04-04T17:12:22.115177Z node 3 :TX_PROXY DEBUG: Actor [3:7489507028501681422:2611] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-04-04T17:12:22.115205Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507028501681422:2611] txid# 281474976715700 SEND to# [3:7489507011321811440:2126] TSchemeCache with 2 scheme entries. DataReq marker# P2 2025-04-04T17:12:22.115294Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507028501681422:2611] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-04T17:12:22.116164Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507028501681422:2611] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-04-04T17:12:22.116349Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507028501681422:2611] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-04-04T17:12:22.116572Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:12:22.117448Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-04-04T17:12:22.117702Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:12:22.118348Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-04-04T17:12:22.118763Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:12:22.118838Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507028501681422:2611] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000839 out readset size 0 marker# P6 2025-04-04T17:12:22.119474Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:12:22.119530Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507028501681422:2611] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000447 out readset size 0 marker# P6 2025-04-04T17:12:22.119578Z node 3 :TX_PROXY ERROR: Actor# [3:7489507028501681422:2611] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001286 exceeded limit 10000 Status# ExecError 2025-04-04T17:12:22.119632Z node 3 :TX_PROXY ERROR: Actor# [3:7489507028501681422:2611] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-04-04T17:12:22.119741Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 
281474976715700 2025-04-04T17:12:22.119790Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 2025-04-04T17:12:22.120213Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2025-04-04T17:12:22.120249Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 >> TLocksFatTest::ShardLocks [GOOD] >> TCancelTx::ImmediateReadOnly [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> TObjectStorageListingTest::ManyDeletes [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer >> TTopicApiDescribes::DescribeTopic |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest >> TTopicApiDescribes::GetLocalDescribe >> TTopicApiDescribes::GetPartitionDescribe |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5919, MsgBus: 24710 2025-04-04T17:12:13.684609Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506990655443423:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:13.684812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c19/r3tmp/tmp2KVs8e/pdisk_1.dat 2025-04-04T17:12:14.135417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:14.135523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:14.146011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:14.148422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5919, node 1 2025-04-04T17:12:14.314951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:14.314983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:14.314996Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:14.315123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24710 TClient is connected to server localhost:24710 WaitRootIsUp 'Root'... 
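Looking back at the RejectByPerRequestSize output above, the numbers are internally consistent. Each shard reports its read size at the PREPARED stage (marker P6): 17000839 bytes for 72075186224037888 and 9000447 for 72075186224037889; the proxy's combined check then fails on exactly their sum, 26001286, against the 10000-byte limit this test apparently runs with. The single-shard variants earlier in the block (85004227 and 85003987 bytes) are instead rejected at the datashard itself with READ_SIZE_EXECEEDED. Spelled out:

    # Per-shard read sizes from the marker-P6 records in this log.
    shard_reads = {72075186224037888: 17000839, 72075186224037889: 9000447}
    total = sum(shard_reads.values())
    assert total == 26001286   # matches "Transaction total read size 26001286"
    assert total > 10000       # hence FailProposedRequest -> Status# ExecError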
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:15.017605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:15.032945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:15.043352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:15.181942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:12:15.342066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:15.421853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.315154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507007835314377:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:17.315248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:17.587554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:17.662749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:17.693508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:17.726606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:17.761345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:17.848324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:17.949390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507007835314904:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:17.949439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:17.949704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507007835314909:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:17.953967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:17.969211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507007835314911:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:18.038913Z node 1 :TX_PROXY ERROR: Actor# [1:7489507012130282262:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:18.686349Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506990655443423:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:18.686440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65440, MsgBus: 17250 2025-04-04T17:12:20.121112Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507021388855688:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:20.138835Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c19/r3tmp/tmpQBPKQl/pdisk_1.dat 2025-04-04T17:12:20.230613Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:20.253363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:20.253441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:20.258142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65440, node 2 2025-04-04T17:12:20.338753Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:20.338776Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:20.338782Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:20.338882Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17250 TClient is connected to server localhost:17250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
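For orientation: each KqpQueryPerf run boots a one-node server on fresh ports (gRPC 5919 / MsgBus 24710 for node 1, gRPC 65440 / MsgBus 17250 for node 2) and then attaches the in-tree TClient. An external client would reach the same endpoint through the public ydb Python SDK, roughly as below; the endpoint and database values are copied from this log, and this is a sketch of the standard SDK calls, not what the test harness itself does:

    import ydb  # public YDB Python SDK

    # Endpoint/database taken from the log above (node 2's server).
    driver = ydb.Driver(endpoint="grpc://localhost:65440", database="/Root")
    try:
        driver.wait(timeout=5, fail_fast=True)  # analogous to WaitRootIsUp
        print("connected to /Root")
    finally:
        driver.stop()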
2025-04-04T17:12:20.771750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:20.783642Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:12:20.796818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:20.895618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.070517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:21.165877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.078580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507034273759188:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.078665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.130424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:23.157234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:23.224539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:23.248167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:23.270745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:23.297540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:23.333342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507034273759700:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.333411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.333443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507034273759705:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.336519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:23.344593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507034273759707:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:12:23.425151Z node 2 :TX_PROXY ERROR: Actor# [2:7489507034273759760:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-04-04T17:12:06.593493Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506962961022899:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:06.593869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7f/r3tmp/tmpWcQEEG/pdisk_1.dat 2025-04-04T17:12:07.166321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:07.166466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:07.180255Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:07.184185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28129 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:07.512425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:07.542948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:07.715030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:07.808244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
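The default resource pool records show the same benign race in both runs: TPoolCreatorActor schedules a retry on "Transaction ... completed, doublechecking", and the follow-up create fails with "path exist, request accepts it", which is treated as success. In other words, create-if-absent with "already exists" mapped to OK. A generic sketch of that pattern (the names are mine, not the actor's API):

    class AlreadyExists(Exception):
        pass

    class Transient(Exception):
        pass

    def ensure_created(create, retries=3):
        """Run `create`, treating 'already exists' as success (idempotent)."""
        for _ in range(retries):
            try:
                create()
                return "created"
            except AlreadyExists:
                # The log's "path exist, request accepts it" outcome.
                return "already-existed"
            except Transient:
                continue  # cf. "Scheduled retry for error: ... doublechecking"
        raise RuntimeError("retries exhausted")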
2025-04-04T17:12:11.599332Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506962961022899:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:11.599384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:12.319215Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506986426957178:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:12.319269Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7f/r3tmp/tmplAfo4V/pdisk_1.dat 2025-04-04T17:12:12.557595Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:12.567143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:12.567224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:12.568660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18367 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:12.782189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.786564Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.799487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.875564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:12.949866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:17.168618Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507009450244370:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:17.170787Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7f/r3tmp/tmpIj3Fc2/pdisk_1.dat 2025-04-04T17:12:17.323114Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:17.339209Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:17.339284Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:17.347308Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27916 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:17.598167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.604612Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.612044Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:12:17.615355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.676835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:12:17.726594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:12:20.642642Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507020969616594:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:20.642718Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7f/r3tmp/tmpq97tR8/pdisk_1.dat 2025-04-04T17:12:20.896180Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:20.896266Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:20.897962Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:20.911719Z node 4 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21868 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:21.162857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:21.170804Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:21.185026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:21.249596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:21.308855Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> TSchemeShardViewTest::AsyncDropSameView |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TSchemeShardViewTest::ReadOnlyMode >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardViewTest::EmptyName >> TSchemeShardViewTest::DropView ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] Test command err: 2025-04-04T17:11:40.706922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506848761745648:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:40.706997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpDEaKqO/pdisk_1.dat 2025-04-04T17:11:41.293664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:41.293787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:41.300522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:41.307155Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7267, node 1 2025-04-04T17:11:41.400406Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:11:41.447586Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:41.447634Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:41.447644Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:41.447775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:41.721056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:44.293935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506865941615870:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:44.294024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:44.294267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506865941615882:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:44.298473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:11:44.333530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506865941615884:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:11:44.410539Z node 1 :TX_PROXY ERROR: Actor# [1:7489506865941615958:2690] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/"Create temporary directory "/Root/~backup_20250404T171144" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic"Backup topic "/Root/topic" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic"Write topic into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic/create_topic.pb"Write ACL into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic/permissions.pb"Remove temporary directory "/Root/~backup_20250404T171144" in database2025-04-04T17:11:45.024790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-04T17:11:45.066418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:45.082342Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:11:45.082373Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:11:45.082385Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:11:45.093865Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-04T17:11:45.093943Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-04T17:11:45.093971Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found Restore "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic"Restore topic "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic" to "/Root/topic"Read topic from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic/create_topic.pb"Created "/Root/topic"Restore ACL "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic" to "/Root/topic"Read ACL from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0cBoRq/topic/permissions.pb"2025-04-04T17:11:45.207941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-04T17:11:47.075816Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506881412936396:2076];send_to=[0:7307199536658146131:7762515]; 
2025-04-04T17:11:47.075858Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmp0C0tht/pdisk_1.dat 2025-04-04T17:11:47.323397Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:47.375482Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:47.375563Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:47.383761Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19704, node 4 2025-04-04T17:11:47.563108Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:47.563134Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:47.563142Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:47.563290Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:47.807857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:47.951175Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpkTqPa0/"Create temporary directory "/Root/~backup_20250404T171148" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpkTqPa0/kesus"Backup coordination node "/Root/kesus" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpkTqPa0/kesus"Write coordination node into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpkTqPa0/kesus/create_coordination_node.pb"Write ACL into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpkTqPa0/kesus/permissions.pb"Remove temporary directory "/Root/~backup_20250404T171148" in database2025-04-04T17:11:48.195267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710660:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-04T17:11:48.232638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:11:48.248131Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found Restore "/h ... it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:16.832575Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:12:16.861043Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/"Create temporary directory "/Root/~backup_20250404T171216" in database2025-04-04T17:12:16.929025Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489506981454731328:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:16.929100Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Process "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable"Backup external table "/Root/externalTable" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable"Write external table into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable/create_external_table.sql"Write ACL into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable/permissions.pb"Process "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource/create_external_data_source.sql"Write ACL into 
"/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250404T171216" in database2025-04-04T17:12:17.137843Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource/create_external_data_source.sql"2025-04-04T17:12:17.337017Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalDataSource/permissions.pb"2025-04-04T17:12:17.375189Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable"Restore external table "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable" to "/Root/externalTable"Read external table from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable/create_external_table.sql"2025-04-04T17:12:17.452525Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 Created "/Root/externalTable"Restore ACL "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable" to "/Root/externalTable"Read ACL from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpzJIc7m/externalTable/permissions.pb"2025-04-04T17:12:17.480991Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-04T17:12:19.257981Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489507019302325864:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:19.258054Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmpwZSRHl/pdisk_1.dat 2025-04-04T17:12:19.433362Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:19.469423Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-04T17:12:19.469529Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:19.476196Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25511, node 13 2025-04-04T17:12:19.574887Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:19.574913Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:19.574923Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:19.575097Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:19.896257Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.346992Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489507036482196090:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.346993Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489507036482196096:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.347089Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:23.350579Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:12:23.372265Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489507036482196104:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:12:23.442287Z node 13 :TX_PROXY ERROR: Actor# [13:7489507036482196177:2682] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:23.458910Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715660:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/"Create temporary directory "/Root/~backup_20250404T171223" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250404T171223" in database2025-04-04T17:12:23.552427Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715662:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource/create_external_data_source.sql"2025-04-04T17:12:23.643159Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715664:0, at schemeshard: 72057594046644480 Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/i5tq/001795/r3tmp/tmprWaxVi/externalDataSource/permissions.pb"2025-04-04T17:12:23.663499Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 Restore completed successfully ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-04-04T17:12:09.789580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506975445706246:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:09.789655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/i5tq/001e7e/r3tmp/tmpUyTaI1/pdisk_1.dat 2025-04-04T17:12:10.296242Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:10.301165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:10.301260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:10.307902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5329 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:10.661123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:10.675051Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:10.679772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5329 2025-04-04T17:12:11.148538Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641362:2388] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-04T17:12:11.148611Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641362:2388] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:11.158416Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641375:2398] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-04T17:12:11.158478Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641375:2398] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:11.173158Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641388:2408] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-04T17:12:11.173260Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641388:2408] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:11.218137Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641414:2428] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-04T17:12:11.218198Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641414:2428] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:11.231579Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641427:2438] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-04T17:12:11.231630Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641427:2438] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:11.243286Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641440:2448] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-04T17:12:11.243340Z node 1 :TX_PROXY ERROR: Actor# [1:7489506984035641440:2448] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:13.399432Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506993384770407:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7e/r3tmp/tmpfIQRao/pdisk_1.dat 2025-04-04T17:12:13.487126Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:13.595711Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:13.619024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:13.619096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:13.621845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10208 
WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:13.859536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:13.870416Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:13.884208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:10208 2025-04-04T17:12:17.291733Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507007322420210:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:17.318368Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7e/r3tmp/tmpBrPUXX/pdisk_1.dat 2025-04-04T17:12:17.448872Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:17.478945Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:17.479049Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:17.480639Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22673 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:17.718930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:17.723913Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:17.726746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:22673 2025-04-04T17:12:18.084786Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388064:2382] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-04T17:12:18.084840Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388064:2382] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:18.094374Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388080:2395] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-04T17:12:18.094443Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388080:2395] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:18.109814Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388093:2405] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-04-04T17:12:18.109863Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388093:2405] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:18.144784Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388121:2427] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-04T17:12:18.144883Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388121:2427] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:18.164658Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388136:2439] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-04T17:12:18.164733Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388136:2439] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:18.181202Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388150:2450] 
txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-04-04T17:12:18.181267Z node 3 :TX_PROXY ERROR: Actor# [3:7489507011617388150:2450] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:21.123978Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507024660494338:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7e/r3tmp/tmpxmb0BG/pdisk_1.dat 2025-04-04T17:12:21.248765Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:21.288844Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:21.301509Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:21.301593Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:21.306978Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6970 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:12:21.547271Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.553864Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:12:21.557656Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:6970 2025-04-04T17:12:21.929834Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-04-04T17:12:21.930142Z node 4 :TX_PROXY ERROR: Actor# [4:7489507024660494884:2383] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-04-04T17:12:21.942888Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-04-04T17:12:21.943250Z node 4 :TX_PROXY ERROR: Actor# [4:7489507024660494898:2391] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c >> TSchemeShardViewTest::EmptyQueryText ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-04-04T17:11:57.534720Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506922797158679:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:57.534760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e82/r3tmp/tmpH5IQmr/pdisk_1.dat 2025-04-04T17:11:58.028108Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:58.036012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:58.036135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:58.039257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1067, node 1 2025-04-04T17:11:58.607040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:58.607079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:58.607088Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:58.607242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23929 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:11:59.453744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:11:59.494871Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:11:59.511100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:02.536725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506922797158679:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:02.536853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:05.474906Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506959187592325:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e82/r3tmp/tmp3jFfh2/pdisk_1.dat 2025-04-04T17:12:05.546873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:05.665056Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:05.669642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:05.669735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:05.671315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11769, node 2 2025-04-04T17:12:05.746411Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:05.746434Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:05.746441Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:05.746557Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13243 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:12:05.974423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:05.982068Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:06.003413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... .2025-04-04T17:12:10.466681Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506959187592325:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:10.466779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; . 2025-04-04T17:12:17.085054Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:12:17.088266Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037889 2025-04-04T17:12:17.088668Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T17:12:17.089390Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037890 2025-04-04T17:12:17.089613Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-04-04T17:12:17.090161Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037891 2025-04-04T17:12:17.090355Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-04-04T17:12:17.091021Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037892 2025-04-04T17:12:17.094869Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:12:17.094974Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-04T17:12:17.095020Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-04-04T17:12:17.095080Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-04-04T17:12:17.103092Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1743786737140 at tablet 72075186224037889 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786737140 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T17:12:17.103137Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:12:17.103271Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:12:17.103289Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:12:17.103316Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743786737140:281474976711500] in PlanQueue unit at 72075186224037889 2025-04-04T17:12:17.103350Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1743786737140:281474976711500 2025-04-04T17:12:17.105452Z node 2 :TX_DATASHARD DEBUG: tx 281474976711500 released its data 
2025-04-04T17:12:17.105484Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:12:17.106340Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1743786737140 at tablet 72075186224037891 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786737140 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-04-04T17:12:17.106362Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T17:12:17.106485Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T17:12:17.106498Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:12:17.106516Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743786737140:281474976711500] in PlanQueue unit at 72075186224037891 2025-04-04T17:12:17.106543Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1743786737140:281474976711500 2025-04-04T17:12:17.107371Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:12:17.107710Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1743786737140 at tablet 72075186224037890 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786737140 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-04-04T17:12:17.107721Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:12:17.107788Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:12:17.107799Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:12:17.107811Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743786737140:281474976711500] in PlanQueue unit at 72075186224037890 2025-04-04T17:12:17.107832Z node 2 :TX_DATASHARD ... 
se::Complete at 72075186224037892 2025-04-04T17:12:24.368736Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711911 at step 1743786744413 at tablet 72075186224037889 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786744413 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T17:12:24.368759Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:12:24.368834Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:12:24.368855Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:12:24.368871Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743786744413:281474976711911] in PlanQueue unit at 72075186224037889 2025-04-04T17:12:24.368895Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1743786744413:281474976711911 2025-04-04T17:12:24.371325Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-04T17:12:24.371359Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:12:24.371493Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711911 at step 1743786744413 at tablet 72075186224037892 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786744413 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-04-04T17:12:24.371520Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:12:24.371638Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T17:12:24.371657Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:12:24.371671Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743786744413:281474976711911] in PlanQueue unit at 72075186224037892 2025-04-04T17:12:24.371696Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1743786744413:281474976711911 2025-04-04T17:12:24.372853Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-04T17:12:24.372882Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:12:24.373133Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711911 at step 1743786744413 at tablet 72075186224037890 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786744413 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-04-04T17:12:24.373147Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:12:24.373199Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:12:24.373212Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:12:24.373220Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743786744413:281474976711911] in PlanQueue unit at 72075186224037890 2025-04-04T17:12:24.373236Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 got data tx from cache 1743786744413:281474976711911 2025-04-04T17:12:24.373835Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-04-04T17:12:24.373861Z node 2 :TX_DATASHARD 
DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:12:24.373994Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1743786744413} 2025-04-04T17:12:24.374108Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T17:12:24.374672Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711911 at step 1743786744413 at tablet 72075186224037891 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786744413 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-04-04T17:12:24.374693Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T17:12:24.374710Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037892 restored its data 2025-04-04T17:12:24.374769Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T17:12:24.374787Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:12:24.374804Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743786744413:281474976711911] in PlanQueue unit at 72075186224037891 2025-04-04T17:12:24.374831Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1743786744413:281474976711911 2025-04-04T17:12:24.375345Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:12:24.375467Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1743786744413} 2025-04-04T17:12:24.375518Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1743786744413} 2025-04-04T17:12:24.375532Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:12:24.375975Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1743786744413} 2025-04-04T17:12:24.376007Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T17:12:24.376038Z node 2 :TX_DATASHARD DEBUG: Complete [1743786744413 : 281474976711911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7489507040791984653:4589], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:12:24.376059Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T17:12:24.378650Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:12:24.378927Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-04T17:12:24.378984Z node 2 :TX_DATASHARD DEBUG: Complete [1743786744413 : 281474976711911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7489507040791984653:4589], exec latency: 3 ms, propose latency: 7 ms 2025-04-04T17:12:24.379014Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:12:24.379169Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037890 restored its data 2025-04-04T17:12:24.379642Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:12:24.380617Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
72075186224037890 2025-04-04T17:12:24.380655Z node 2 :TX_DATASHARD DEBUG: Complete [1743786744413 : 281474976711911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7489507040791984653:4589], exec latency: 6 ms, propose latency: 7 ms 2025-04-04T17:12:24.380689Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:12:24.383246Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:12:24.383777Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037889 restored its data 2025-04-04T17:12:24.387236Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:12:24.390452Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:12:24.390497Z node 2 :TX_DATASHARD DEBUG: Complete [1743786744413 : 281474976711911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7489507040791984653:4589], exec latency: 18 ms, propose latency: 21 ms 2025-04-04T17:12:24.390518Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:12:24.404033Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:12:24.404595Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:12:24.404784Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-04-04T17:12:24.405126Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:12:24.405245Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-04-04T17:12:24.405492Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:12:24.405932Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:12:24.406407Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key 
((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-04-04T17:12:24.406713Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-04-04T17:12:24.407177Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-04-04T17:12:24.407262Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 >> TSchemeShardViewTest::DropView [GOOD] >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TPartitionTests::ConflictingCommitFails [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TSchemeShardViewTest::EmptyQueryText [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:26.630924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:26.631059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.631115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:26.631157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:26.632302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:26.632378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:26.632454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.632524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:26.635088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:26.730533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:26.730591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:26.742973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:26.743216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:26.743376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:26.752291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:26.752486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:26.752899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.753403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:26.757374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:26.762897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.762946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:26.763074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.771818Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:26.890387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.890601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.890798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:26.891004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.891054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.893180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.893325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:26.893501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.893551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:26.893592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:26.893622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:26.895399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.895447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:26.895477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:26.896953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.896995Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.897045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.897085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.901530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.903113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:26.903275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:26.904183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.904289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.904352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.905690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:26.905759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.905929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.906023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:26.907768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.907823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.907966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.907997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:26.908262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.908321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:26.908407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.908435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.908487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.908522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.908552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:26.908586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.908616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:26.908643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:26.908705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.908745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:26.908772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:26.910322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.910414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.910451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
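The TX_DATASHARD trace at the start of this excerpt shows the datashard transaction pipeline: a transaction is planned at a coordinator step, each tablet finds it in its PlanQueue ("Found ready operation [step:txId] in PlanQueue"), the serialized body may be released under memory pressure and later restored from cache ("released its data" / "restored its data"), and completion is reported per tablet with exec/propose latencies. Below is a minimal illustrative sketch of that release/restore plan-queue pattern; all type and method names are invented for illustration and are not YDB's actual pipeline code.

// Minimal model of the plan-queue pattern visible in the TX_DATASHARD trace:
// a tx is planned at a (step, txId) coordinate, its body can be released and
// restored before execution. Illustrative only, not YDB source.
#include <cstdint>
#include <iostream>
#include <map>
#include <optional>
#include <string>

struct TxCoord {
    uint64_t step; // coordinator-assigned plan step, e.g. 1743786744413
    uint64_t txId; // distributed tx id, e.g. 281474976711911
    bool operator<(const TxCoord& o) const {
        return step < o.step || (step == o.step && txId < o.txId);
    }
};

class PlanQueue {
public:
    void Plan(TxCoord c, std::string body) { queue_[c] = std::move(body); }

    // "tx ... released its data": drop the body but keep the slot so the
    // operation can be re-hydrated later ("restored its data").
    void ReleaseData(TxCoord c) { queue_[c].clear(); }
    void RestoreData(TxCoord c, std::string body) { queue_[c] = std::move(body); }

    // "Found ready operation [step:txId] in PlanQueue": execute in
    // (step, txId) order, skipping entries whose data is still released.
    std::optional<TxCoord> ExecuteNextReady() {
        for (auto it = queue_.begin(); it != queue_.end(); ++it) {
            if (!it->second.empty()) {
                TxCoord done = it->first;
                std::cout << "Complete [" << done.step << " : " << done.txId << "]\n";
                queue_.erase(it);
                return done;
            }
        }
        return std::nullopt; // nothing ready: planned txs still await restore
    }

private:
    std::map<TxCoord, std::string> queue_;
};

int main() {
    PlanQueue q;
    TxCoord c{1743786744413, 281474976711911};
    q.Plan(c, "serialized tx body");
    q.ReleaseData(c);                        // memory pressure: body evicted
    q.RestoreData(c, "serialized tx body");  // re-read from cache before run
    q.ExecuteNextReady();
}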
dReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-04T17:12:26.958989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.959110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2025-04-04T17:12:26.959241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:12:26.959292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.963209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-04T17:12:26.963369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2025-04-04T17:12:26.963516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.963561Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2025-04-04T17:12:26.963609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:12:26.963740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.965192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:12:26.965321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-04T17:12:26.965556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.965647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.965682Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-04-04T17:12:26.965802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-04T17:12:26.965942Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.965982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:12:26.967761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.967802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.967928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:12:26.968029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.968059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T17:12:26.968097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:12:26.968144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.968195Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:12:26.968291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:12:26.968319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:12:26.968349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:12:26.968369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:12:26.968395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T17:12:26.968420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:12:26.968453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:12:26.968475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:12:26.968515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:26.968548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T17:12:26.968574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:12:26.968602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T17:12:26.969671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.969767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.969810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:12:26.969842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:12:26.969873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.970929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.971036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.971066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:12:26.971090Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:12:26.971122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:26.971194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:12:26.971587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:26.971636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:12:26.971689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.973277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:12:26.974660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:12:26.974738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:12:26.974955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:12:26.974993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:12:26.975373Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:12:26.975446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.975477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [1:319:2310] TestWaitNotification: OK eventTxId 102 2025-04-04T17:12:26.975911Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:12:26.976079Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 171us result status StatusPathDoesNotExist 2025-04-04T17:12:26.976225Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:26.630906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:26.631032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.631092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:26.631137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:26.631925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:26.631982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:26.632057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.632140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:26.633231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:26.724843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:26.724902Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:26.734513Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:26.734835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:26.735016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:26.745407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:26.745748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:26.748506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.749663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:26.757492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.763212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.763294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.763508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:26.763561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.763608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:26.763716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.771017Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:26.921822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.922054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.922283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:26.922512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.922559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.924817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.924994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:26.925206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T17:12:26.925261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:26.925297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:26.925336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:26.928691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.928760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:26.928801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:26.930828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.930881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.930919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.930970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.935136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.937228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:26.937408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:26.938524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.938677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.938738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.939047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:26.939132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.939322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.939420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-04T17:12:26.941549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.941609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.941776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.941836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:26.942170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.942256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:26.942358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.942393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.942454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.942493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.942531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:26.942572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.942607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:26.942652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:26.942724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.942771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:26.942812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:26.944646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.944790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.944829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2025-04-04T17:12:26.970205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2025-04-04T17:12:26.970370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:12:26.970443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:12:26.970497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.971343Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:12:26.974130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-04T17:12:26.974306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-04T17:12:26.974637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.974694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2025-04-04T17:12:26.974747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-04T17:12:26.974888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.975343Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:12:26.976587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:12:26.976708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:12:26.977065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.977169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.977239Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-04T17:12:26.977382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:12:26.977545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.977627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:26.979481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.979536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.979793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:12:26.979948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.979997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:12:26.980043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:12:26.980329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.980391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:12:26.980501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:12:26.980544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:12:26.980595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:12:26.980630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:12:26.980663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:12:26.980704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:12:26.980740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:12:26.980770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:12:26.980835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:26.980873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-04T17:12:26.980955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-04T17:12:26.980994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T17:12:26.981692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 
PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:12:26.981804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:12:26.981843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:12:26.981911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T17:12:26.981953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.982723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:12:26.982797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:12:26.982823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:12:26.982851Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:12:26.982879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:26.982946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-04T17:12:26.991278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:12:26.991437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2025-04-04T17:12:26.991701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:12:26.991748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:12:26.992190Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:12:26.992300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.992341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:295:2286] TestWaitNotification: OK eventTxId 101 2025-04-04T17:12:26.992846Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:12:26.993048Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 232us result status StatusSuccess 2025-04-04T17:12:26.993406Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:26.630908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:26.631035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.631274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:26.631334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:26.632474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:26.632536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:26.632617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.632695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:26.634334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:26.728132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:26.728197Z node 1 :IMPORT WARN: Table profiles were not loaded 
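The CreateView and DropView outputs above illustrate the schemeshard publication protocol: every affected path is re-published with a bumped version (the parent directory goes to version 4 on create and 5 on drop, the view's own path is published at version 2 on create), while a dropped path is published with version 18446744073709551615 (uint64 max), which subscribers treat as a tombstone before TTxCleanDroppedPaths removes the record. The sketch below shows how a subscriber-side cache could apply such updates; the class and method names are hypothetical and are not the scheme board's real API.

// Illustrative sketch (not YDB source): applying path publications where an
// ordinary update carries a growing version and a drop is published with
// version uint64-max acting as a tombstone.
#include <cstdint>
#include <iostream>
#include <limits>
#include <string>
#include <unordered_map>

constexpr uint64_t kTombstoneVersion = std::numeric_limits<uint64_t>::max();

struct PathEntry {
    std::string name;
    uint64_t version = 0;
};

class PathCache {
public:
    // Apply one publication; stale versions are ignored so acks may arrive
    // out of order, and the tombstone removes the entry outright.
    void ApplyUpdate(uint64_t localPathId, const std::string& name, uint64_t version) {
        if (version == kTombstoneVersion) {
            paths_.erase(localPathId);
            return;
        }
        auto& e = paths_[localPathId];
        if (version >= e.version) {
            e = {name, version};
        }
    }

    bool Exists(uint64_t localPathId) const { return paths_.count(localPathId) > 0; }

private:
    std::unordered_map<uint64_t, PathEntry> paths_;
};

int main() {
    PathCache cache;
    cache.ApplyUpdate(2, "MyView", 2);                  // CREATE VIEW published
    cache.ApplyUpdate(1, "MyRoot", 5);                  // parent version bump
    cache.ApplyUpdate(2, "MyView", kTombstoneVersion);  // DROP VIEW tombstone
    std::cout << "MyView exists: " << cache.Exists(2) << "\n"; // prints 0
}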
2025-04-04T17:12:26.738456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:26.738702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:26.738857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:26.750439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:26.750738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:26.751381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.752150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:26.757719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:26.762857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.762974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:26.763077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.769421Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:26.879236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.880732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.881885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:26.884599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.884691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.891103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.891313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:26.891616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.891673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:26.891750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:26.891808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:26.895365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.895425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:26.895462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:26.897293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.897339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.897381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.897430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.907884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.909884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:26.910081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:26.911145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.911280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.911346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.911636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:26.911709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.911872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.911970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:26.913963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.914010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.914170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.914218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:26.914541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.914583Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:26.914692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.914726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.914794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.914834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.914869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:26.914909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.914952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:26.914987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:26.915051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.915092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:26.915126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:26.917000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.917108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.917158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-04T17:12:26.970630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-04T17:12:26.970798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-04-04T17:12:26.970896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-04-04T17:12:26.972293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:12:26.972406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-04T17:12:26.972686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.972828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.972886Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-04-04T17:12:26.973016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-04T17:12:26.973172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.973246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:12:26.975083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.975131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.975300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:12:26.975472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-04-04T17:12:26.975515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T17:12:26.975555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:12:26.975626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.975692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:12:26.975808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:12:26.975848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:12:26.975887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:12:26.975919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:12:26.975953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T17:12:26.975990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:12:26.976050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:12:26.976088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:12:26.976150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:26.976186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T17:12:26.976217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:12:26.976248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-04-04T17:12:26.977039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.977142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.977179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:12:26.977216Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:12:26.977273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.978561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.978669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.978705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:12:26.978738Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:12:26.978765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:26.978842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:12:26.979152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:26.979200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:12:26.979265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.981174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:12:26.982850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:12:26.982954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-04T17:12:26.983292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:12:26.983333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-04-04T17:12:26.983412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:12:26.983431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:12:26.983926Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:12:26.984002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.984051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:325:2316] 2025-04-04T17:12:26.984131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:12:26.984276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.984300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:325:2316] 
TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-04-04T17:12:26.984777Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:12:26.984990Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 187us result status StatusPathDoesNotExist 2025-04-04T17:12:26.985162Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:26.630896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:26.631053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.631110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:26.631161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:26.631860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:26.631904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:26.631968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.632041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:26.633212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:26.727825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:26.727884Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:12:26.739241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:26.739475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:26.739615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:26.749177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:26.749435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:26.749974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.750741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:26.756450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.763765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.763838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.764046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:26.764098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.764144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:26.764256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.770329Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:26.888680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.888941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.889155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:26.889380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.889442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.895521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.895674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:26.895875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.895923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:26.895961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:26.895996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:26.899058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.899122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:26.899153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:26.901013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.901058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.901093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.901134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.909793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.911537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:26.911719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:26.912656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.912768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.912825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.913080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:26.913141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.913287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.913370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:26.915203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.915249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.915406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.915438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:26.915720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.915762Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:26.915847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.915876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.915933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.915970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.916003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:26.916036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.916077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:26.916109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:26.916158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.916190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:26.916221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:26.917777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.917877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.917963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athStateCreate)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-04T17:12:26.954065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-04T17:12:26.955364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:12:26.955557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-04T17:12:26.955957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.956062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.956109Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-04T17:12:26.956203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:12:26.956338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.956386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:26.956541Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:12:26.958156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.958202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.958339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:12:26.958431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.958465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:12:26.958501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:12:26.958884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
2025-04-04T17:12:26.958934Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:12:26.959017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:12:26.959061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:12:26.959149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:12:26.959187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:12:26.959215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:12:26.959243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:12:26.959275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:12:26.959340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:12:26.959398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:26.959433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:12:26.959479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-04T17:12:26.959506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T17:12:26.959956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:26.960052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:26.960106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:12:26.960163Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T17:12:26.960215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.960676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:26.960733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:26.960773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:12:26.960800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:12:26.960841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:26.960896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:12:26.964102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:12:26.964204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-04-04T17:12:26.964509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:12:26.964586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-04T17:12:26.964679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:12:26.964697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-04T17:12:26.964748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:12:26.964792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:12:26.965213Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:12:26.965359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:12:26.965414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.965448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:299:2290] 2025-04-04T17:12:26.965614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.965670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:299:2290] 2025-04-04T17:12:26.965765Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:12:26.965860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.965883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:299:2290] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-04T17:12:26.966371Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:12:26.966533Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 178us result status StatusSuccess 2025-04-04T17:12:26.966861Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:26.630929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:26.631027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.631074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:26.631129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:26.631908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:26.631973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:26.632078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.632162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:26.635877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:26.724651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:26.724726Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:12:26.734549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:26.734770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:26.734955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:26.746324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:26.746611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:26.748484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.749377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:26.759690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:26.762933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.762981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:26.763092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.769256Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:26.899791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.900206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.900598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:26.901006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.901080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.908577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.908858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:26.909057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.909108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:26.909149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:26.909386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:26.913817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.914035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:26.914074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:26.916687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.916744Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.916803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.916847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.920468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.922934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:26.923125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:26.924079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.924204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.924270Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.924543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:26.924617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.924779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.924869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:26.927207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.927257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.927408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.927461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:26.927770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.927809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:26.927911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.927943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.928006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.928047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.928079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:26.928117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.928157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:26.928192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:26.928250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.928293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:26.928324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:26.930078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.930201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.930287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:12:26.930328Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:12:26.930363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.930455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:12:26.934276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1 2025-04-04T17:12:26.934798Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:12:26.935399Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:12:26.951511Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:12:26.953985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.954155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2025-04-04T17:12:26.954263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-04-04T17:12:26.954362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-04-04T17:12:26.955008Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:12:26.958558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.958719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2025-04-04T17:12:26.959117Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:26.631666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:26.631785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.631845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:26.631883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:26.631939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-04-04T17:12:26.631975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:26.632028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.632102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:26.633271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:26.728662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:26.728720Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:26.739122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:26.739373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:26.739526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:26.750631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:26.750908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:26.751548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.752350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:26.757666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:26.762924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.762974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:26.763096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.769404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:26.902916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.903151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.903352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
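
The StatusSchemeError above is pure path validation: Name: "" makes the target path "/MyRoot/", whose final component is empty, so the propose is rejected before any coordinator step is ever requested. A minimal sketch of that check, assuming nothing about schemeshard internals beyond the quoted error text (the function name and the splitting logic are mine, not the real implementation):

#include <cassert>
#include <string>
#include <vector>

// Hypothetical stand-in for the check behind
// "error: path part shouldn't be empty".
static bool HasEmptyPathPart(const std::string& path) {
    std::vector<std::string> parts;
    std::string cur;
    for (char c : path) {
        if (c == '/') { parts.push_back(cur); cur.clear(); }
        else          { cur += c; }
    }
    parts.push_back(cur);
    // parts[0] is the empty head produced by the leading '/' (the root); skip it.
    for (size_t i = 1; i < parts.size(); ++i)
        if (parts[i].empty())
            return true;
    return false;
}

int main() {
    assert(!HasEmptyPathPart("/MyRoot/MyView")); // the shape EmptyQueryText creates successfully below
    assert(HasEmptyPathPart("/MyRoot/"));        // Name: "" -> trailing empty part -> StatusSchemeError
}
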
2025-04-04T17:12:26.903559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.903616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.905638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.905786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:26.905952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.906003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:26.906038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:26.906076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:26.907775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.907824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:26.907858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:26.909299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.909341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.909379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.909424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.913066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.914723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:26.914908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:26.915837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.915953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.916006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.916286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:26.916353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.916512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:26.916597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:26.918413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.918461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.918633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.918682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:26.918993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.919040Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:26.919132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.919167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.919233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.919271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.919319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:26.919378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.919413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:26.919446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:26.919505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.919541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:26.919576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:26.921216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.921330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.921369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4-04T17:12:26.990941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T17:12:26.990981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:12:26.991007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:12:26.991810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:12:26.991853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.991895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:12:26.991930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-04T17:12:26.992814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.992894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.992922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:12:26.992950Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:12:26.992992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:12:26.994292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.994366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:12:26.994392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:12:26.994419Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:12:26.994445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:12:26.994522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:12:26.996415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:12:26.998356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-04-04T17:12:26.998697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:12:26.998741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-04-04T17:12:26.998846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:12:26.998870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-04-04T17:12:26.998913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:12:26.998940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:12:26.999426Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:12:26.999525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.999562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:332:2323] 2025-04-04T17:12:26.999686Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:12:26.999783Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:12:26.999830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.999852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:332:2323] 2025-04-04T17:12:26.999947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:12:26.999967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:332:2323] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-04T17:12:27.000522Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:12:27.000721Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 201us result status StatusSuccess 2025-04-04T17:12:27.002304Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:27.002897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:12:27.003053Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 165us result status StatusSuccess 2025-04-04T17:12:27.003323Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:27.003753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:12:27.003900Z node 1 :SCHEMESHARD_DESCRIBE 
INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 151us result status StatusSuccess 2025-04-04T17:12:27.004132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:26.631179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:26.631303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.631350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:26.631400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:26.631925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:26.631952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:26.632017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:26.632092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:26.633709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
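
Every successful operation in these dumps steps through the same numeric states via the "Change state for txid N:0 A -> B" lines: the subdomain setup goes 2 -> 3 -> 128 -> 240, while the view creations enter directly at 128. A compilable summary of that progression — the numeric values and the quoted class names come from the log, but the enumerator labels are my own:

// States observed in the "Change state for txid ... A -> B" lines.
enum class EOpState {
    CreateParts    = 2,   // "TCreateParts ... ProgressState" (AlterSubDomain: no shards to create)
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ProgressState"
    Propose        = 128, // waiting for the coordinator's plan step; TCreateView starts here
    Done           = 240, // "TDone ... ProgressState": publish paths, then ack and notify
};
static_assert(static_cast<int>(EOpState::Done) == 240, "value quoted from the log");
int main() {}
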
2025-04-04T17:12:26.726747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:26.726794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:26.734641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:26.734887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:26.735019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:26.749090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:26.749369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:26.749940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.750959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:26.756346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.762887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:26.762933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.762979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:26.763108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.769202Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:26.906423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:26.906599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.906784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:26.906982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:26.907034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.909041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.909161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:26.909319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.909371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:26.909420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:26.909461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:26.911055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.911108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:26.911141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:26.912449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.912481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.912515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.912559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.916182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:26.917404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:26.917551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:26.918404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:26.918494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:26.918542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.918785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:26.918841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:26.918962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
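
The "Publication still in progress", "Publication in-flight, count: N" and "AckPublish ... version: V" lines around this point trace a simple barrier: the transaction satisfies its waiters only once every published path has been acknowledged at its new version. A minimal sketch of that bookkeeping, assuming nothing beyond what the log prints (the type and method names are mine):

#include <cassert>
#include <cstdint>
#include <map>

// pathId -> version that must be acknowledged (see "Publication details: tx: ...").
struct TPublicationBarrier {
    std::map<uint64_t, uint64_t> InFlight;

    // Handle one TEvUpdateAck; returns true once the barrier is down,
    // i.e. "Publication complete, notify & remove".
    bool Ack(uint64_t pathId, uint64_t version) {
        auto it = InFlight.find(pathId);
        if (it != InFlight.end() && version >= it->second)
            InFlight.erase(it); // "AckPublish ... version: V"
        return InFlight.empty();
    }
};

int main() {
    // Mirrors txId 102 in the AsyncCreateDifferentViews output above:
    // two paths, LocalPathId 2 at version 5 and LocalPathId 3 at version 2.
    TPublicationBarrier b{{{2, 5}, {3, 2}}};
    assert(!b.Ack(2, 5)); // "Publication in-flight, count: 1"
    assert(b.Ack(3, 2));  // complete -> waiters satisfied
}
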
2025-04-04T17:12:26.919036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:26.920583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:26.920622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:26.920733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:26.920771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:26.921020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:26.921057Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:26.921135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.921171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.921221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:26.921250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.921275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:26.921323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:26.921372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:26.921411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:26.921458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:26.921494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:26.921534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:26.922945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.923034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:26.923064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.236995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.237110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.237160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.237188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.247852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:27.247932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:27.250901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:27.250972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:27.251025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:27.253289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:376:2345] sender: [1:433:2058] recipient: [1:15:2062] 2025-04-04T17:12:27.288439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:27.288700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-04-04T17:12:27.288801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-04-04T17:12:27.288947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
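
Two magic numbers in the propose traffic below decode cleanly: MaxStep in the DoPropose messages is simply the unsigned 64-bit maximum, i.e. no upper bound on the coordinator step the operation may be planned at, and partId 4294967295 in RegisterRelationByTabletId is the unsigned 32-bit maximum — presumably a sentinel for the operation as a whole rather than a single part (that reading is my assumption):

#include <cstdint>
#include <limits>

// "MaxStep: 18446744073709551615" is 2^64 - 1: no planning deadline.
static_assert(std::numeric_limits<uint64_t>::max() == 18446744073709551615ULL,
              "MaxStep in the propose messages is uint64 max");
// "partId: 4294967295" is 2^32 - 1; presumably "whole operation" (assumption).
static_assert(std::numeric_limits<uint32_t>::max() == 4294967295U,
              "partId sentinel is uint32 max");
int main() {}
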
2025-04-04T17:12:27.289016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:12:27.289066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:27.291486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-04T17:12:27.291648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-04-04T17:12:27.291823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.291887Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-04-04T17:12:27.291971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-04T17:12:27.292091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:27.294071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-04T17:12:27.294211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-04-04T17:12:27.294949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:27.295081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:27.295139Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-04-04T17:12:27.295274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-04T17:12:27.295458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:27.295525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-04-04T17:12:27.297109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:27.297141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:27.297261Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:12:27.297338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:27.297374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:424:2382], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-04T17:12:27.297418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:424:2382], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-04T17:12:27.297662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.297696Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:12:27.297769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:12:27.297808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:12:27.297840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:12:27.297862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:12:27.297899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-04T17:12:27.297930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:12:27.297956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:12:27.297983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:12:27.298038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:12:27.298072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-04T17:12:27.298094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:12:27.298121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:12:27.298759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:12:27.298831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:12:27.298864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:12:27.298899Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:12:27.299016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:12:27.299535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:12:27.299605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:12:27.299634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:12:27.299661Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:12:27.299688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:12:27.299759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T17:12:27.302812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:12:27.302920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:27.049070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:27.049180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:27.049229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:27.049272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:27.049316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:27.049360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:27.049418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:27.049488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:27.049822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:27.159506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:27.159562Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:12:27.172460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:27.172714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:27.172871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:27.182141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:27.182479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:27.183096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:27.186824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:27.200007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:27.201391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:27.201467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:27.201677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:27.201723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:27.201767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:27.201887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.211089Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:27.321448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:27.321721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.321936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:27.322124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:27.322158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.325030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:27.325209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:27.325408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.325461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:27.325496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:27.325553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:27.328129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.328184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:27.328215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:27.329946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.329985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.330022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:27.330081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:27.333223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:27.335356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:27.335559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:27.336704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:27.336796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:27.336835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:27.337063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:27.337124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:27.337277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:27.337372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:27.339458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:27.339520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:27.339640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:27.339666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:27.339897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.339929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:27.340004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:27.340051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:27.340093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:27.340121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:27.340145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:27.340171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:27.340206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:27.340232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:27.340297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:27.340332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:27.340361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:27.341868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:27.341974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:27.342005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:12:27.342039Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:12:27.342075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:27.342188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:12:27.346126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1 2025-04-04T17:12:27.346771Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:12:27.347329Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-04-04T17:12:27.364481Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-04-04T17:12:27.366989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:27.367224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-04-04T17:12:27.367297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-04-04T17:12:27.367399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:12:27.367461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:12:27.367504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:27.368296Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:12:27.372965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-04-04T17:12:27.373327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-04-04T17:12:27.373791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.373845Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-04-04T17:12:27.373893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-04T17:12:27.374030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:27.374533Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:12:27.376690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:12:27.376840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at 
step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-04T17:12:27.377174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:27.377288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:27.377338Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-04-04T17:12:27.377467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:12:27.377623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:27.377700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:27.380177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:27.380231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:27.380450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:12:27.380559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:27.380605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:12:27.380646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:12:27.380927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:12:27.380971Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:12:27.381068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:12:27.381099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:12:27.381135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:12:27.381177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:12:27.381214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:12:27.381250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:12:27.381281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:12:27.381309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:12:27.381368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:27.381404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:12:27.381433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-04T17:12:27.381479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T17:12:27.382165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:27.382282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:27.382318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:12:27.382361Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T17:12:27.382399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:27.383085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:27.383155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:12:27.383178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:12:27.383203Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:12:27.383227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:27.383298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:12:27.386098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:12:27.386224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitFails [GOOD] Test command err: 2025-04-04T17:12:14.767752Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:14.767826Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:14.789156Z node 1 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:178:2193] 2025-04-04T17:12:14.790057Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:12:15.538941Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:15.539008Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:15.587660Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:15.589184Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:12:15.000000Z 2025-04-04T17:12:15.589255Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\230\353\247\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\230\353\247\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\230\353\247\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-04T17:12:16.422960Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:16.423005Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:16.432408Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:16.434159Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:12:16.000000Z 2025-04-04T17:12:16.434203Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send change config Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\363\247\216\3402" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\363\247\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed 2025-04-04T17:12:17.293012Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:17.293094Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:12:17.308965Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:175:2190] 
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:17.310864Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:12:17.000000Z 2025-04-04T17:12:17.310932Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:12:17.633102Z node 4 :PERSQUEUE INFO: new Cookie src1|dc0f8f0c-95838dfa-69f8c8f9-48477fd2_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 Got batch complete: 1 2025-04-04T17:12:17.633410Z node 4 :PERSQUEUE INFO: new Cookie src4|378eba05-29dd017c-654301b9-6bc78c3b_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 Got batch complete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSyste ... 
_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Wait batch completion Got batch complete: 1 Wait kv request Wait tx committed for tx 2 Wait for no tx committed Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 4 and act no: 5 Created Tx with id 7 as act# 7 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured 
TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 2 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Wait batch completion Wait kv request Got batch complete: 1 Wait batch completion Wait kv request Create distr tx with id = 8 and act no: 9 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 3 Wait kv request Wait immediate tx complete 10 Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait immediate tx complete 11 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] |87.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |87.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbYqlClient::SimpleColumnFamilies [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2351, MsgBus: 3014 2025-04-04T17:12:17.383924Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507008886701143:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:17.388327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001be9/r3tmp/tmpbDNqji/pdisk_1.dat 2025-04-04T17:12:17.867719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:17.879082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:17.879193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:17.881908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2351, node 1 2025-04-04T17:12:17.974845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:17.974866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:17.974883Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:17.974997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3014 TClient is connected to server localhost:3014 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:18.558174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:18.586872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:18.724867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:12:18.914351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:18.990574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:20.766705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507021771604673:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:20.766875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:21.022914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.095757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.137457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.182033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.225747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.297365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:21.373391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507026066572488:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:21.373533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:21.373752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507026066572493:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:21.378678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:21.394662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507026066572495:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:12:21.495734Z node 1 :TX_PROXY ERROR: Actor# [1:7489507026066572553:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:22.347265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507008886701143:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:22.347370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31732, MsgBus: 8909 2025-04-04T17:12:23.351923Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507034897538593:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:23.351973Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001be9/r3tmp/tmprcj248/pdisk_1.dat 2025-04-04T17:12:23.427729Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31732, node 2 2025-04-04T17:12:23.476463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:23.476569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:23.477991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:23.482182Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:23.482196Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:23.482200Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:23.482304Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8909 TClient is connected to server localhost:8909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-04T17:12:23.816126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.823295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.869500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:24.020041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:24.076109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:26.234367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507047782442265:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:26.234458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:26.269616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:26.300710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:26.349498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:26.386115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:26.417762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:26.490414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:26.576376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507047782442785:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:26.576454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:26.576994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507047782442790:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:26.580842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:12:26.596097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507047782442792:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:12:26.658678Z node 2 :TX_PROXY ERROR: Actor# [2:7489507047782442847:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain >> TSchemeShardAuditSettings::CreateExtSubdomain |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> TSchemeShardAuditSettings::CreateSubdomain >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore [GOOD] >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:30.713173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:30.713352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.713445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:30.713494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:30.714791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:30.714847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:30.714926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.715008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:30.716796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:30.810385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:30.810440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:30.820689Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:30.820886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:30.821024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:30.829280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:30.829573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:30.834284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.835226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:30.844392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:30.862909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:30.862956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:30.863092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.869526Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:31.000884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:31.001125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.001387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:31.001621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:31.001674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.003877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:31.004075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:31.004238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T17:12:31.004296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:31.004332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:31.004364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:31.006188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.006266Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:31.006312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:31.007996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.008040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.008077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.008142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.012148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:31.013771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:31.013950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:31.014936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:31.015094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:31.015137Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.015383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:31.015432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.015589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.015683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-04T17:12:31.017626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:31.017687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.017836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:31.017913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:31.018217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.018285Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:31.018374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.018408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.018471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.018510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.018542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:31.018589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.018646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:31.018676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:31.018743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.018793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:31.018824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:31.020533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.020633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.020668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.257377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.257404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.257428Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-04T17:12:31.257453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.258095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.258153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.258174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.258218Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-04T17:12:31.258266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:12:31.258337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-04T17:12:31.260090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-04T17:12:31.260190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-04-04T17:12:31.260975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:31.261084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:31.261123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-04T17:12:31.261155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.261180Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-04T17:12:31.261297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2025-04-04T17:12:31.261438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.261486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-04T17:12:31.262463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-04T17:12:31.262593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-04-04T17:12:31.263812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:31.263845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.263938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-04T17:12:31.264015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:31.264097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-04T17:12:31.264117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-04-04T17:12:31.264153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.264175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-04-04T17:12:31.264216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-04T17:12:31.264231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-04T17:12:31.264250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-04T17:12:31.264265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-04T17:12:31.264284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-04-04T17:12:31.264300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-04T17:12:31.264333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-04T17:12:31.264350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-04T17:12:31.264383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:12:31.264400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-04-04T17:12:31.264415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
2025-04-04T17:12:31.264431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-04-04T17:12:31.265129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.265196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.265216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.265234Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-04T17:12:31.265250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.266224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.266310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.266336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.266351Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-04T17:12:31.266367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-04T17:12:31.266425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-04-04T17:12:31.266700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:31.266727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-04T17:12:31.266803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-04T17:12:31.267173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:31.267199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-04T17:12:31.267235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.268533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-04T17:12:31.269547Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-04T17:12:31.269613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:12:31.270670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-04T17:12:31.270949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-04T17:12:31.270972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-04T17:12:31.271381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-04T17:12:31.271454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-04T17:12:31.271478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:655:2646] TestWaitNotification: OK eventTxId 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:30.713187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:30.713339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.713402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:30.713454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:30.714791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:30.714852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:30.714943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.715031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:30.718458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:30.811922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:30.811981Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:12:30.822455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:30.822695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:30.822816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:30.831946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:30.832207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:30.832854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.834404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:30.843214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:30.862858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:30.862917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:30.863043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.870900Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:30.980553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:30.981295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.983097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:30.985392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:30.985460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:30.988634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:30.988743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:30.988778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:30.990565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.990607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:30.990644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:30.992085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.992121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.992158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:30.992201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:30.995688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:30.997133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:30.997258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:30.997948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.998041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:30.998120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:30.999867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:30.999914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.000059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.000135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:31.001700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:31.001733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.001881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:31.001925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:31.002153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.002181Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:31.002284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.002315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.002364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.002386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.002413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:31.002450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.002478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:31.002497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:31.002540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.002573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:31.002601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:31.003876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.003944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.003976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.264953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.264983Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-04-04T17:12:31.265009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.265462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.265547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.265576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.265600Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-04-04T17:12:31.265623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:12:31.265695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-04T17:12:31.267607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-04-04T17:12:31.267699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-04-04T17:12:31.268364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:31.268438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:31.268473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-04-04T17:12:31.268526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.268546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-04T17:12:31.268577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2025-04-04T17:12:31.269056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-04T17:12:31.270045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-04T17:12:31.271029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.271066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:31.271165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2025-04-04T17:12:31.271299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.271349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 FAKE_COORDINATOR: Erasing txId 112 2025-04-04T17:12:31.272604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:31.272641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.272748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-04T17:12:31.272854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:31.272884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-04-04T17:12:31.272913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-04-04T17:12:31.273104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.273143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-04-04T17:12:31.273168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2025-04-04T17:12:31.273838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.273893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.273913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.273941Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-04-04T17:12:31.273970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.274537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.274642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:12:31.274668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:12:31.274691Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-04T17:12:31.274716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:12:31.274772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-04T17:12:31.276600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.276627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2025-04-04T17:12:31.276682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-04T17:12:31.276700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-04T17:12:31.276720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-04-04T17:12:31.276735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-04T17:12:31.276751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2025-04-04T17:12:31.276769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-04-04T17:12:31.276786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-04-04T17:12:31.276800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-04-04T17:12:31.276842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-04T17:12:31.277105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:31.277128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-04T17:12:31.277158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-04T17:12:31.277410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:31.277431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-04T17:12:31.277460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.277960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 
2025-04-04T17:12:31.278228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-04T17:12:31.279815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:12:31.279878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-04-04T17:12:31.280149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-04-04T17:12:31.280177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-04-04T17:12:31.280592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-04-04T17:12:31.280666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-04-04T17:12:31.280690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:659:2650] TestWaitNotification: OK eventTxId 112 >> TestKinesisHttpProxy::DifferentContentTypes >> TestKinesisHttpProxy::TestPing >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TestYmqHttpProxy::TestGetQueueUrl >> TestKinesisHttpProxy::MissingAction >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TestYmqHttpProxy::TestCreateQueue >> TestYmqHttpProxy::TestSendMessage >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] >> TLocksTest::Range_BrokenLock1 [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 61525, MsgBus: 3094 2025-04-04T17:09:25.064266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506270868999214:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.064768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00152a/r3tmp/tmpV7m1mJ/pdisk_1.dat 2025-04-04T17:09:25.555044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.565100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.565200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.567601Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61525, node 1 2025-04-04T17:09:25.690714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.690755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.690764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.690871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3094 TClient is connected to server localhost:3094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.375382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.397393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.595914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:26.763179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:26.847077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.422731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283753902886:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.422982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.716320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.744774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.774034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.803451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.845807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.877085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.936316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283753903396:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.936419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.936648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283753903402:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.940415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.948642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506283753903404:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.027880Z node 1 :TX_PROXY ERROR: Actor# [1:7489506288048870753:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:30.067009Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506270868999214:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:30.067066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.175054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.247902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.280200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.350913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.401428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.544799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.572430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.599537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.623785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.652555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.676266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.701983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:30.729228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.135109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:31.173586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.201343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.233346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.264569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... : 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:08.234260Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.286432Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.377008Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.474815Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.556055Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.756566Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.807948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.873913Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.937986Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.985215Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.040314Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.120607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-04T17:12:09.204905Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.077464Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2025-04-04T17:12:10.164742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.238528Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.291720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.373260Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.447563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.494675Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.565870Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.650000Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.732269Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.811693Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.864266Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-04T17:12:10.917398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.006828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.064300Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.160164Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.226590Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.291580Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.340862Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.388027Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.455128Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.541423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.609402Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.665232Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-04-04T17:12:11.961554Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2025-04-04T17:12:12.039543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.098547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.153801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.234155Z node 5 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.275659Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.319015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.377988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.426125Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.574690Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.641070Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.700971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.760276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.887131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715721:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.958270Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-04-04T17:12:14.905458Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:12:14.905488Z node 5 :IMPORT WARN: Table profiles were not loaded >> TGRpcCmsTest::AlterRemoveTest >> TGRpcCmsTest::DisabledTxTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 2025-04-04 17:12:30,476 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-04 17:12:30,623 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
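At this point the test wrapper gives up: ydb-core-tx-columnshard-ut_rw has overrun the 600-second chunk timeout, and run_test begins terminating the process tree dumped below. The traceback later in this section shows the mechanism: yatest.common.process.wait_for() polls until a deadline and raises TimeoutError, which the enclosing wait() call converts into ExecutionTimeoutError. Below is a minimal sketch of that deadline loop; only the names wait_for, TimeoutError, truncate and MAX_MESSAGE_LEN come from the traceback, while the loop body, the polling interval and the constant's value are assumptions, not the real yatest source.

    import time

    MAX_MESSAGE_LEN = 1500  # assumed cap; the real constant lives in yatest.common.process

    def truncate(message, limit):
        # Bound the error text before it goes into the exception message.
        return message if len(message) <= limit else message[:limit] + "...[truncated]"

    class TimeoutError(Exception):
        """Raised when the awaited condition does not hold before the deadline."""

    def wait_for(check, timeout, message, sleep_time=0.1):
        # Poll `check` until it succeeds or `timeout` seconds elapse --
        # the same shape as the wait_for() frame in the traceback below.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if check():
                return
            time.sleep(sleep_time)
        raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))

In this run the awaited condition is "the test binary has exited"; it never becomes true, so after 600 seconds the wrapper logs the two warnings above and kills the process tree.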
Process tree before termination:
    pid     rss    ref    pdirt
 299450  48.4M  47.8M  23.6M  test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/000f1a/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk35/testing_out_stuff/test_tool.args
 300013   2.0G   1.9G   1.7G  └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/i5tq/000f1a/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu
Test command err: 2025-04-04T17:02:32.928697Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:02:33.133556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:02:33.163996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:02:33.164353Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:02:33.176372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:02:33.176632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:02:33.176903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:02:33.177015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:02:33.177160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:02:33.177279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:02:33.177381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:02:33.177498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:02:33.177657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:02:33.177803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:02:33.177905Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:02:33.178077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:02:33.232584Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:02:33.238627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:02:33.238719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:02:33.238967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:33.239211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:02:33.239302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:02:33.239348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:02:33.239453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:02:33.239512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:02:33.239551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:02:33.239590Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:02:33.239791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:02:33.239855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:02:33.239895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:02:33.239925Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:02:33.240010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:02:33.240075Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:02:33.240140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:02:33.240167Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:02:33.240235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:02:33.240279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:02:33.240323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:02:33.240375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:02:33.240409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:02:33.240467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:02:33.240878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-04-04T17:02:33.240976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-04T17:02:33.241065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-04T17:02:33.241164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T17:02:33.241338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:02:33.241394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:02:33.241438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:02:33.241628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:02:33.241670Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:02:33.241699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:02:33.241858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:02:33.241898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:02:33.241935Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:02:33.242144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:02:33.242202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:02:33.257858Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:02:33.258189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:02:33.258266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:02:33.258349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
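The normalizer phase above follows a fixed pattern: TTxInitSchema registers the normalizers (normalizers_count=11), then TTxUpdateSchema walks them in seq_id order, logging normalizer_init, the per-normalizer result ("0 chunks found"), normalizer_finished, and normalizer_switched for the next one, before ending with normalization_finished. Because each finished seq_id is persisted, the tablet restart later in this test resumes with last_saved_id=16 and has nothing left to run (normalizers_count=0). A rough sketch of that resume loop follows; the event names mirror the log, while the classes and the persistence hook are assumed for illustration.

    class Normalizer:
        # Mirrors entries such as CLASS_NAME=Granules (seq_id=1) or RestoreV2Chunks (seq_id=16).
        def __init__(self, name, seq_id):
            self.name = name
            self.seq_id = seq_id

        def run(self):
            # Each normalizer above reported "0 chunks found" on a fresh tablet.
            return 0

    def run_normalizers(normalizers, last_saved_id, persist, log):
        # Skip everything already applied on a previous boot, matching
        # "normalization_start;last_saved_id=16" followed by "normalizers_count=0".
        pending = sorted((n for n in normalizers if n.seq_id > last_saved_id),
                         key=lambda n: n.seq_id)
        log(f"normalization_start;last_saved_id={last_saved_id};normalizers_count={len(pending)}")
        for n in pending:
            log(f"normalizer_init;seq_id={n.seq_id};type={n.name}")
            chunks = n.run()
            log(f"normalizer={n.name};message={chunks} chunks found")
            log(f"normalizer_finished;description=CLASS_NAME={n.name};id={n.seq_id}")
            persist(n.seq_id)  # saved so a restart resumes after this normalizer
        log("normalization_finished")

On the second boot recorded near the end of this test (last_saved_id=16), pending is empty and the loop degenerates to the start/finish pair, which is exactly what the 17:12:29 records show.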
t=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:12:28.083645Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:15637:17611];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;; TEST CASE [numRows-1: FINISHED 2025-04-04T17:12:28.087286Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 13 at tablet 9437184 2025-04-04T17:12:28.091374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:15322:17304];ev=NActors::TEvents::TEvPoison;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:12:29.487936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15649:17619];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:12:29.513854Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15649:17619];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:12:29.514137Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:12:29.517945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:15649:17619];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-04T17:12:29.518075Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:12:29.524957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=0;current_normalizer=NO_DATA; 2025-04-04T17:12:29.525032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; 2025-04-04T17:12:29.525118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Complete;fline=columnshard__init.cpp:159;step=TTxUpdateSchema.Complete; 2025-04-04T17:12:29.527409Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:12:29.527766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-04-04T17:12:29.527957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:special_valuesLoadingTime=99; 2025-04-04T17:12:29.528242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:special_valuesLoadingTime=223; 2025-04-04T17:12:29.528515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tables_managerLoadingTime=211; 2025-04-04T17:12:29.528654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-04T17:12:29.528743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-04T17:12:29.528825Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=tables_manager.cpp:124;event=load_table_version;path_id=1;snapshot={10:10}; 2025-04-04T17:12:29.528875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TableVersionssLoadingTime=91; 2025-04-04T17:12:29.529044Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=tables_manager.cpp:158;event=load_preset;preset_id=1;snapshot={10:10};version=1; 2025-04-04T17:12:29.529078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=tables_manager.cpp:162;event=index_schema;preset_id=1;snapshot={10:10};version=1; 2025-04-04T17:12:29.533906Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4984; 2025-04-04T17:12:29.533965Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine.h:143;event=RegisterTable;path_id=1; 2025-04-04T17:12:29.534556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tables_managerLoadingTime=5992; 2025-04-04T17:12:29.534762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:column_enginesLoadingTime=10; 2025-04-04T17:12:29.534889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:countersLoadingTime=60; 2025-04-04T17:12:29.535004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:countersLoadingTime=64; 2025-04-04T17:12:29.535080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:sharding_infoLoadingTime=30; 2025-04-04T17:12:29.535136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:sharding_infoLoadingTime=20; 2025-04-04T17:12:29.535176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:granulesLoadingTime=6; 2025-04-04T17:12:29.535238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;PRECHARGE:granuleLoadingTime=9; 2025-04-04T17:12:29.535365Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:portionsLoadingTime=64; 2025-04-04T17:12:29.964153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=428700; 2025-04-04T17:12:29.964272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2025-04-04T17:12:29.970885Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=6544; 2025-04-04T17:12:29.970954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=435671; 2025-04-04T17:12:29.970994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=435778; 2025-04-04T17:12:29.971065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2025-04-04T17:12:29.973690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=2549; 2025-04-04T17:12:29.973763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=438949; 2025-04-04T17:12:29.974161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=331; 2025-04-04T17:12:29.974203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=446376; 2025-04-04T17:12:29.977407Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:15690:17653];tablet_id=9437184;parent=[1:15649:17619];fline=manager.cpp:82;event=ask_data;request=request_id=369;1={portions_count=135};; 2025-04-04T17:12:29.980778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15649:17619];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=135;path_id=1; 2025-04-04T17:12:29.995155Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:15649:17619];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished;
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000f1a/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk35/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000f1a/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk35/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
>> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TGRpcCmsTest::AuthTokenTest >> TGRpcCmsTest::RemoveWithAnotherTokenTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:30.713204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:30.713367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.713432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:30.713494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:30.714809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:30.714862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:30.714932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.715031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0,
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:30.716790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:30.811954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:30.812007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:30.822455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:30.822690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:30.822828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:30.831856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:30.832116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:30.832768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.834935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:30.844251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:30.862731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:30.862861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:30.862982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.868902Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:30.983537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:30.983770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.983973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:30.985394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:30.985469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:30.988768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:30.988863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:30.988898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:30.990875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.990931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:30.990989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:30.992670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.992722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.992764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:30.992818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:30.996759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:30.998534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:30.998723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:30.999811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.999937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:30.999983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.000243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:31.000298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.000463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.000561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:31.002513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:31.002556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.002705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:31.002751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:31.003025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.003067Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:31.003152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.003186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.003258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.003293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.003338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:31.003379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.003411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:31.003442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:31.003500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.003533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:31.003568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:31.010603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.010737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.010780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.867080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.867127Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-04T17:12:32.867160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:32.867773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.867847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.867869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.867897Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-04T17:12:32.867921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-04T17:12:32.867976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-04T17:12:32.869777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-04T17:12:32.869885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-04T17:12:32.872169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:32.872266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:32.872315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-04T17:12:32.872380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:32.872406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-04T17:12:32.872436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2025-04-04T17:12:32.873545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:32.873852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:32.879150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-04T17:12:32.879203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:32.879300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-04T17:12:32.879472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:32.879533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2025-04-04T17:12:32.882945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:32.882991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:32.883108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-04T17:12:32.883201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:32.883251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-04T17:12:32.883303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-04T17:12:32.883597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-04T17:12:32.883635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-04T17:12:32.883661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-04T17:12:32.884297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.884375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.884402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.884429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-04T17:12:32.884458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:32.885110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.885180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.885203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.885228Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-04T17:12:32.885254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-04T17:12:32.885309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-04T17:12:32.887783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-04T17:12:32.887829Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-04T17:12:32.887899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-04T17:12:32.887926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:32.887959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-04T17:12:32.887985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:32.888011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-04T17:12:32.888042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:32.888076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-04T17:12:32.888101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-04T17:12:32.888155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-04T17:12:32.888543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:32.888585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-04T17:12:32.888635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-04T17:12:32.888805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:32.888860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-04T17:12:32.888918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:32.889643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:32.890768Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:32.892989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:12:32.893122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-04T17:12:32.894329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-04T17:12:32.894369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-04T17:12:32.895785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-04T17:12:32.895876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-04T17:12:32.895922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2620:4611] TestWaitNotification: OK eventTxId 175 >> TGRpcCmsTest::SimpleTenantsTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:30.713186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:30.713340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.713391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:30.713434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:30.714763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:30.714833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:30.714931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.715019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:30.716781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:30.808136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:30.808203Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-04-04T17:12:30.819419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:30.819627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:30.819765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:30.829633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:30.829914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:30.832896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.834480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:30.842968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:30.862753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:30.862791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:30.862884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.869085Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:31.023678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:31.023912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.024134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:31.024378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:31.024434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.026550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:31.026753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:31.026913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.026979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:31.027013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:31.027043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:31.028782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.028830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:31.028865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:31.030419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.030465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.030503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.030559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.040205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:31.041927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:31.042093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:31.043100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:31.043226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:31.043275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.043544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:31.043606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.043793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.043886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:31.045626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:31.045667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.045800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:31.045842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:31.046149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.046197Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:31.046311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.046346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.046415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.046456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.046505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:31.046565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.046601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:31.046640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:31.046684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.046710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:31.046734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:31.048459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.048566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.048606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.856185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.856209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.856236Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-04-04T17:12:32.856265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:32.857045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.857120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.857190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.857216Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-04-04T17:12:32.857242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-04T17:12:32.857303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-04T17:12:32.860006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-04-04T17:12:32.860126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-04-04T17:12:32.860929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:32.861012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:32.861052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-04-04T17:12:32.861088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:32.861113Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-04T17:12:32.861216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2025-04-04T17:12:32.861368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:32.861418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-04T17:12:32.862283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:32.862664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2025-04-04T17:12:32.864338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:32.864375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:32.864490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-04T17:12:32.864605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:32.864635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-04T17:12:32.864667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-04T17:12:32.865017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-04T17:12:32.865056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-04T17:12:32.865116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-04T17:12:32.865141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:32.865171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-04T17:12:32.865191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:32.865225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2025-04-04T17:12:32.865261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:32.865305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-04T17:12:32.865331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-04T17:12:32.865382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-04T17:12:32.865408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2025-04-04T17:12:32.865432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 
2025-04-04T17:12:32.865453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2025-04-04T17:12:32.866029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.866121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.866157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.866183Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-04T17:12:32.866208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:32.866886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.866956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:32.866979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:32.867021Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-04T17:12:32.867052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-04T17:12:32.867118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2025-04-04T17:12:32.867394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:32.867427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-04T17:12:32.867501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-04T17:12:32.867908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:32.867943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-04T17:12:32.867993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:32.870429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 
2025-04-04T17:12:32.872607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:32.872792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:12:32.872856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-04T17:12:32.874030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-04T17:12:32.874068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-04T17:12:32.875419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-04T17:12:32.875503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-04T17:12:32.875531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2470:4461] TestWaitNotification: OK eventTxId 175 >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-04-04T17:11:52.039377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506899634699921:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:52.039510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e83/r3tmp/tmpACaxmH/pdisk_1.dat 2025-04-04T17:11:52.497382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:52.497472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:52.515996Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:52.517573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32235 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:52.829196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:52.847324Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:52.866956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:53.050056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:53.137894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:55.424582Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506916226826229:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:55.435770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e83/r3tmp/tmp8uH3SL/pdisk_1.dat 2025-04-04T17:11:55.643012Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:55.659480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:55.659554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:55.662505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1961 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:55.863953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:55.874606Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:55.890844Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:11:55.896777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:55.981037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:56.029103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:59.193852Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489506932625156426:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:59.197076Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e83/r3tmp/tmpRjSfhZ/pdisk_1.dat 2025-04-04T17:11:59.355211Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:59.383578Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:59.383808Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:59.388770Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61224 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:11:59.583330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:11:59.609706Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:11:59.613625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:11:59.688058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:59.738138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:02.834820Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506946256133094:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:02.876471Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e83/r3tmp/tmpcyw2s7/pdisk_1.dat 2025-04-04T17:12:02.983680Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:03.019859Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:03.019941Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:03.025625Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14987 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Schem ... 2025-04-04T17:12:15.714482Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:15.717610Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:15.731377Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10898 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:15.993127Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:16.010831Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.077499Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.181952Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:12:19.980588Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489507015375363794:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:19.980635Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e83/r3tmp/tmpMsDaPB/pdisk_1.dat 2025-04-04T17:12:20.214962Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:20.234235Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:20.234369Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:20.236320Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11416 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:20.506468Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:20.526422Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:20.602028Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:20.658003Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:24.387188Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489507040363517016:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:24.387243Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e83/r3tmp/tmpFQFYAa/pdisk_1.dat 2025-04-04T17:12:24.475767Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:24.521022Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:24.521098Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:24.522590Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25484 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:24.752650Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:24.768492Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:24.839934Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:24.910302Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:28.454638Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489507057428203015:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:28.454752Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e83/r3tmp/tmpfYe7Jy/pdisk_1.dat 2025-04-04T17:12:28.570451Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:28.598024Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:28.598121Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:28.599870Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28075 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:28.812285Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:28.818912Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:28.837493Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:28.913050Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:28.972277Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
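The AckPublish lines in the TSchemeShardAuditSettings::AlterSubdomain output above encode the path lifecycle in the published version number: LocalPathId 26 is acknowledged with version 18446744073709551615 (max uint64) once the forced drop in txId 175 is planned, and only then do the TTxCleanDroppedSubDomains/TTxCleanDroppedPaths transactions persist its removal. Below is a minimal sketch of scanning a log like this one for such tombstone versions; it is an illustration, not part of the test output — the regex is derived only from the line format visible above, and the helper names are invented for the example:

    import re
    import sys

    # Matches schemeshard "AckPublish" lines as they appear in this log, e.g.
    #   AckPublish, at schemeshard: 72057594046678944, txId: 175,
    #   pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615
    ACK_RE = re.compile(
        r"AckPublish, at schemeshard: (?P<shard>\d+), txId: (?P<txid>\d+), "
        r"pathId: \[OwnerId: (?P<owner>\d+), LocalPathId: (?P<path>\d+)\], "
        r"version: (?P<version>\d+)"
    )

    TOMBSTONE = 2**64 - 1  # 18446744073709551615: the version published for a dropped path

    def dropped_paths(log_text):
        """Yield (schemeshard, txid, local_path_id) for every publication ack
        whose version is the max-uint64 tombstone, i.e. a path drop."""
        for m in ACK_RE.finditer(log_text):
            if int(m.group("version")) == TOMBSTONE:
                yield m.group("shard"), m.group("txid"), m.group("path")

    if __name__ == "__main__":
        for shard, txid, path in dropped_paths(sys.stdin.read()):
            print(f"schemeshard {shard}: txId {txid} dropped LocalPathId {path}")

Fed the AlterSubdomain block above, this reports txId 175 dropping LocalPathId 26 on schemeshard 72057594046678944, which matches the subsequent "TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26]" messages.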
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29508, MsgBus: 61541 2025-04-04T17:09:35.171959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506311955189245:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:35.172668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00148d/r3tmp/tmpiKrICv/pdisk_1.dat 2025-04-04T17:09:35.484142Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29508, node 1 2025-04-04T17:09:35.544738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:35.544838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:35.546652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:35.558431Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:35.558449Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:35.558457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:35.558571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61541 TClient is connected to server localhost:61541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:36.122671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:36.157205Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:36.168166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:36.280000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:36.439219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:36.516521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:38.190675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506324840092744:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:38.190815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:38.455279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:38.482535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:38.515461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:38.563625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:38.612605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:38.665251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:38.754326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506324840093262:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:38.754405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:38.754459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506324840093267:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:38.758630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:38.770559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506324840093269:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:38.848093Z node 1 :TX_PROXY ERROR: Actor# [1:7489506324840093324:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:39.848045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.878040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.908404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:39.949501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.023701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.170375Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506311955189245:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:40.170450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:40.173970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.206849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.247620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.312181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.345873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.382963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.417929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:40.451947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:40.938757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:40.971232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:41.002194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:41.028458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... nks;id=10; 2025-04-04T17:12:15.353628Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:15.353834Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:15.353881Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:15.354015Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:15.354056Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:15.355054Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:15.355113Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:15.355251Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:15.355297Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:15.355868Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:12:15.355900Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 
2025-04-04T17:12:15.355982Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:12:15.356006Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:12:15.356164Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:12:15.356200Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:15.356298Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:15.356321Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:15.356373Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:15.356396Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:15.356426Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:15.356448Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:15.357006Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:15.357036Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:15.357164Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:15.357196Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:15.357345Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:15.357370Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:15.399367Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:15.399451Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:15.399575Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:15.399608Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:15.457331Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.460046Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.467925Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.468498Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.475564Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.477549Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.485081Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.487275Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.493721Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.494846Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.502742Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.504754Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.511137Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.512817Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.521453Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:15.521453Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:25.841165Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr0wky16e6a126kdkr772bs7", SessionId: ydb://session/3?node_id=5&id=MjM5NTZjY2UtNWVmZGI2ZmEtNjZjZWUxMGEtZGY4NTkwZTk=, Slow query, duration: 10.186307s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/\";\n\n-- TPC-H/TPC-R National Market Share Query (Q8)\n-- TPC TPC-H Parameter Substitution (Version 2.17.2 build 0)\n-- using 1680793381 as a seed to the RNG\n\nselect\n o_year,\n sum(case\n when nation = 'MOZAMBIQUE' then volume\n else 0\n end) / sum(volume) as mkt_share\nfrom\n (\n select\n DateTime::GetYear(o_orderdate) as o_year,\n l_extendedprice * (1 - l_discount) as volume,\n n2.n_name as nation\n from\n part\n cross join supplier\n cross join lineitem\n cross join orders\n cross join customer\n cross join nation n1\n cross join nation n2\n cross join region\n where\n p_partkey = l_partkey\n and s_suppkey = l_suppkey\n and l_orderkey = o_orderkey\n and o_custkey = c_custkey\n and c_nationkey = n1.n_nationkey\n and n1.n_regionkey = r_regionkey\n and r_name = 'AFRICA'\n and s_nationkey = n2.n_nationkey\n and o_orderdate between date('1995-01-01') and date('1996-12-31')\n and p_type = 'ECONOMY PLATED COPPER'\n ) as all_nations\ngroup by\n o_year\norder by\n o_year;", parameters: 0b |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7131, MsgBus: 18203 2025-04-04T17:09:25.629356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506271209219849:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.629416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014d9/r3tmp/tmpejBL4d/pdisk_1.dat 2025-04-04T17:09:26.097223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:26.097321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:26.103582Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:26.135050Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7131, node 1 2025-04-04T17:09:26.246872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:26.246892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:26.246903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:26.247010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18203 TClient is connected to server localhost:18203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.978439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.015143Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:27.036774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.217129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:27.377802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:27.440112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.894429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284094123504:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.894523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.084972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.111643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.134336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.166936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.200174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.239253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.290805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288389091313:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.290887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.290997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288389091318:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.294456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.306443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506288389091320:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.401334Z node 1 :TX_PROXY ERROR: Actor# [1:7489506288389091374:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:30.551549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.597656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.629306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506271209219849:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:30.629417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.635823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.709143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.731730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.875697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.903942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.932937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.966538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.993189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.019456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.044873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
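For readability, the 10.186307s statement from the KQP_SLOW_LOG entry earlier in this output is reproduced here with its escaped newlines and quotes expanded; the text is taken verbatim from the log, nothing is added.

pragma TablePathPrefix = "/Root/";

-- TPC-H/TPC-R National Market Share Query (Q8)
-- TPC TPC-H Parameter Substitution (Version 2.17.2 build 0)
-- using 1680793381 as a seed to the RNG

select
    o_year,
    sum(case
        when nation = 'MOZAMBIQUE' then volume
        else 0
    end) / sum(volume) as mkt_share
from
    (
        select
            DateTime::GetYear(o_orderdate) as o_year,
            l_extendedprice * (1 - l_discount) as volume,
            n2.n_name as nation
        from
            part
            cross join supplier
            cross join lineitem
            cross join orders
            cross join customer
            cross join nation n1
            cross join nation n2
            cross join region
        where
            p_partkey = l_partkey
            and s_suppkey = l_suppkey
            and l_orderkey = o_orderkey
            and o_custkey = c_custkey
            and c_nationkey = n1.n_nationkey
            and n1.n_regionkey = r_regionkey
            and r_name = 'AFRICA'
            and s_nationkey = n2.n_nationkey
            and o_orderdate between date('1995-01-01') and date('1996-12-31')
            and p_type = 'ECONOMY PLATED COPPER'
    ) as all_nations
group by
    o_year
order by
    o_year;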
2025-04-04T17:09:31.067142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.522165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:31.590572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.615952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.652723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025- ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:23.638253Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:23.638302Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:23.638360Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:23.638386Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:23.638408Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:23.638427Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:23.638877Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:23.638908Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:23.639025Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:23.639047Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:23.639137Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:23.639161Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:23.639266Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:23.639285Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:23.639360Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:23.639382Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038720;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:23.639811Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:12:23.639852Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:12:23.639916Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:12:23.639948Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:12:23.640068Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:12:23.640096Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:23.640147Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:23.640169Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:23.640204Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:23.640220Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:23.640241Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:23.640258Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:23.640682Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:23.640719Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:23.640867Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:23.640899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:23.640987Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:23.641008Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:23.641163Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:23.641185Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:23.641262Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:23.641283Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:23.750218Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.750218Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.757174Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.757195Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.763511Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.763511Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.770389Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.770447Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.776505Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.776600Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.784237Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.784240Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.790824Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.790824Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.797966Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:23.797968Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition 2025-04-04 17:12:33,811 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-04 17:12:33,979 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
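The two run_test.py WARNING lines above come from the test wrapper's watchdog: it waits on the child test_tool process and, once the 600-second budget has overrun, logs the timeout, dumps the process tree that follows, and terminates the children (the Python traceback further below shows the resulting TimeoutError being rethrown as ExecutionTimeoutError). A simplified sketch of that wait-with-timeout pattern follows, using hypothetical names rather than yatest's real API.

import subprocess
import time

class WrapperTimeoutError(Exception):
    """Stand-in for yatest.common.process.TimeoutError; hypothetical name."""

def wait_with_timeout(proc: subprocess.Popen, timeout_s: float,
                      poll_s: float = 0.5) -> int:
    """Poll the child until it exits; raise once the budget is overrun."""
    deadline = time.monotonic() + timeout_s
    while proc.poll() is None:
        if time.monotonic() >= deadline:
            proc.kill()   # the real wrapper also dumps the process tree first
            raise WrapperTimeoutError(
                f"{timeout_s:.0f} second(s) wait timeout has expired")
        time.sleep(poll_s)
    return proc.returncode

if __name__ == "__main__":
    child = subprocess.Popen(["sleep", "5"])
    try:
        wait_with_timeout(child, timeout_s=1)
    except WrapperTimeoutError as e:
        print(e)

Polling with a monotonic deadline (rather than a single blocking wait) is what lets the wrapper emit the "Wrapper execution timed out" warning and collect diagnostics before killing the process group.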
Process tree before termination: pid rss ref pdirt 300882 48.0M 47.6M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/0012bb/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/testing_out_stuff/test_tool.args 301184 1.5G 1.5G 1.0G └─ ydb-services-ydb-ut --trace-path-append /home/runner/.ya/build/build_root/i5tq/0012bb/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/ytest.report.tr Test command err: 2025-04-04T17:02:36.158183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489504514431102411:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:36.158399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012bb/r3tmp/tmpQui6Hg/pdisk_1.dat 2025-04-04T17:02:36.554528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:36.596878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:36.597029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:36.605639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8823, node 1 2025-04-04T17:02:36.858529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:36.858552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:36.858566Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:36.858721Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:37.166728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:39.987060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504527316005354:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:39.987211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:40.290930Z node 1 :TX_PROXY ERROR: Actor# [1:7489504531610972676:2637] txid# 281474976710658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-04-04T17:02:40.291093Z node 1 :TX_PROXY ERROR: Actor# [1:7489504531610972676:2637] txid# 281474976710658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-04T17:02:40.447992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489504531610972688:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:40.448093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:40.465172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:02:42.895725Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489504537789022733:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:42.895775Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012bb/r3tmp/tmp7o8czP/pdisk_1.dat 2025-04-04T17:02:43.278198Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:43.350825Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:43.350913Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:43.371799Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21172, node 4 2025-04-04T17:02:43.602096Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:43.602127Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:43.602135Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:43.602306Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:43.840311Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:02:47.090548Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504559263860291:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:47.090628Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:47.104319Z node 4 :TX_PROXY ERROR: Actor# [4:7489504559263860311:2621] txid# 281474976715658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-04-04T17:02:47.104448Z node 4 :TX_PROXY ERROR: Actor# [4:7489504559263860311:2621] txid# 281474976715658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-04-04T17:02:47.250748Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489504559263860323:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:47.250834Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:02:47.268096Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:02:50.196542Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489504572933049939:2179];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:02:50.248746Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012bb/r3tmp/tmpKvUAKG/pdisk_1.dat 2025-04-04T17:02:50.575799Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:02:50.633504Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:02:50.633614Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:02:50.636758Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21411, node 7 2025-04-04T17:02:50.911017Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:02:50.911049Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:02:50.911058Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:02:50.911231Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:02:51.685040Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, b ... 7049, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:25.385604Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714648. Ctx: { TraceId: 01jr0wm7ex4xnx27x6qgenkteg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:25.481933Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714649. 
Ctx: { TraceId: 01jr0wm7hn9204nc6y7hg1b854, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:25.581808Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714650. Ctx: { TraceId: 01jr0wm7mnbwqbyqjx9486x2w3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:25.674416Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714651. Ctx: { TraceId: 01jr0wm7qr90tgeqq2a5nhnv7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:25.776202Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714652. Ctx: { TraceId: 01jr0wm7ttf1erev80pq16xeer, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:25.883120Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714653. Ctx: { TraceId: 01jr0wm7xz2scytgf6webqkv0c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:25.984517Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714654. Ctx: { TraceId: 01jr0wm81be4m8j0dp749gcs0m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.102582Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714655. Ctx: { TraceId: 01jr0wm84rf9r1egwhcm624ajm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.223253Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714656. Ctx: { TraceId: 01jr0wm889dga7mnkc3tdz7f33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.346732Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714657. Ctx: { TraceId: 01jr0wm8cgbmcbteweswe0152y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.447977Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714658. Ctx: { TraceId: 01jr0wm8fy781n5te2ge1dfhag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.547543Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714659. 
Ctx: { TraceId: 01jr0wm8jvcm0ph2bg3cmaycze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.663030Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714660. Ctx: { TraceId: 01jr0wm8p99qj5gasgcqm4qjbb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.768434Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714661. Ctx: { TraceId: 01jr0wm8sz4d4svrrvgya4aa4m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.879895Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714662. Ctx: { TraceId: 01jr0wm8xa9965kbp14xwj5h0v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:26.973365Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714663. Ctx: { TraceId: 01jr0wm90fb24xrmtmrfhveyff, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:27.087071Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714664. Ctx: { TraceId: 01jr0wm93bbve3fnsn20aqmykg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:27.208631Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714665. Ctx: { TraceId: 01jr0wm97a3gc2m288c07fhych, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:27.328470Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714666. Ctx: { TraceId: 01jr0wm9b62rjdxz5sqmtn7jd1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:27.445590Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714667. Ctx: { TraceId: 01jr0wm9f7efk10zaer9tkpaf4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:27.553237Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976714668. Ctx: { TraceId: 01jr0wm9j73j7z4ahf0ymj3b5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2Y0ZjQwNi03NTZhM2UyNy1mYzQ5NzJhMC0yOWUwODdmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:12:27.573489Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-04T17:12:27.574374Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:12:29.586133Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489507058558765013:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:29.586222Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012bb/r3tmp/tmpfdEtn5/pdisk_1.dat 2025-04-04T17:12:29.733684Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:29.759158Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:29.759270Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:29.762189Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31779, node 10 2025-04-04T17:12:29.815176Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:29.815200Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:29.815213Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:29.815383Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:30.081602Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:32.843760Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/0012bb/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/0012bb/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/chunk50/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> TGRpcCmsTest::AlterRemoveTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:30.713189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:30.713319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.713369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:30.713415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:30.714783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:30.714835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:30.714914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:30.715001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 
600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:30.716794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:30.808880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:30.808928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:30.819000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:30.819173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:30.819344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:30.830020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:30.830280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:30.832900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.834405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:30.842992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:30.862737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:30.862786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:30.862834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:30.862964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.868381Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:30.979303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:30.981306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.983104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:30.985385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:30.985453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988393Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:30.988711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.988766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:30.988799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:30.988829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:30.990784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.990838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:30.990868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:30.992556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.992593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:30.992627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:30.992664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:30.995445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:30.996956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:30.997089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:30.997872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:30.997961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:30.997991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:30.999842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:30.999886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:31.000021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:31.000095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:31.001696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:31.001740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:31.001885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:31.001918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:31.002182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:31.002223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:31.002328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.002377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.002443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:31.002472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.002517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:31.002553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:31.002585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:31.002611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:31.002674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:31.002706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:31.002742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:31.004290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.004385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:31.004414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
233409618 TxId: 175 } 2025-04-04T17:12:37.124377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2025-04-04T17:12:37.124471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2025-04-04T17:12:37.124520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2025-04-04T17:12:37.124631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-04-04T17:12:37.124808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:37.124872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-04T17:12:37.126879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-04T17:12:37.127103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:37.127164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:37.127310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-04-04T17:12:37.127429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:37.127467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-04-04T17:12:37.127507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-04-04T17:12:37.127808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-04T17:12:37.127854Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-04-04T17:12:37.127892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-04-04T17:12:37.128892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:37.128979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:37.129010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:37.129042Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-04-04T17:12:37.129076Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:37.129888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:37.129968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-04-04T17:12:37.129995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-04-04T17:12:37.130026Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-04-04T17:12:37.130059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2025-04-04T17:12:37.130126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-04-04T17:12:37.132982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:12:37.133031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:12:37.133058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:12:37.133289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-04-04T17:12:37.133332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-04-04T17:12:37.133418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-04T17:12:37.133448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:37.133483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-04-04T17:12:37.133511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:37.133542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-04-04T17:12:37.133578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-04-04T17:12:37.133613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-04-04T17:12:37.133643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-04-04T17:12:37.133800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-04-04T17:12:37.135288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:37.135838Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 TabletID: 72075186233409619 2025-04-04T17:12:37.136043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2025-04-04T17:12:37.136276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 Forgetting tablet 72075186233409619 2025-04-04T17:12:37.137075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-04-04T17:12:37.137256Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 2025-04-04T17:12:37.138283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:37.140829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2025-04-04T17:12:37.141091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-04-04T17:12:37.141536Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 Forgetting tablet 72075186233409618 Forgetting tablet 72075186233409620 2025-04-04T17:12:37.142957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2025-04-04T17:12:37.143123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-04-04T17:12:37.144108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:37.144143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-04T17:12:37.144226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-04-04T17:12:37.145057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:12:37.145118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-04-04T17:12:37.145188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:37.148722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2025-04-04T17:12:37.148775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2025-04-04T17:12:37.148891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2025-04-04T17:12:37.148920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2025-04-04T17:12:37.149106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:75 2025-04-04T17:12:37.149146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:75 tabletId 72075186233409620 2025-04-04T17:12:37.151044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:12:37.151140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-04-04T17:12:37.152672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-04-04T17:12:37.152712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-04-04T17:12:37.154187Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-04-04T17:12:37.154353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-04-04T17:12:37.154390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6759:7741] TestWaitNotification: OK eventTxId 175 >> TGRpcCmsTest::DisabledTxTest [GOOD] >> TTopicApiDescribes::DescribeConsumer [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret [GOOD] >> BackupRestore::RestoreExternalDataSourceWithoutSecret |87.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TTopicApiDescribes::GetPartitionDescribe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2025-04-04T17:12:26.080022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507049222537042:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.080067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:26.185999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507046648995469:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.186047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ce2/r3tmp/tmpuRVaXI/pdisk_1.dat 2025-04-04T17:12:26.402022Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:12:26.413624Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:12:26.612491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:26.612604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.617952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-04T17:12:26.618063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.621590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:26.622694Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:26.623599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19930, node 1 2025-04-04T17:12:26.655327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:26.671620Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:26.671927Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:26.803093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001ce2/r3tmp/yandexfbdUzq.tmp 2025-04-04T17:12:26.803131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001ce2/r3tmp/yandexfbdUzq.tmp 2025-04-04T17:12:26.803291Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001ce2/r3tmp/yandexfbdUzq.tmp 2025-04-04T17:12:26.803446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:27.018607Z INFO: TTestServer started on Port 63210 GrpcPort 19930 TClient is connected to server localhost:63210 PQClient connected to localhost:19930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:27.309168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:27.386111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:29.366583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507059533897747:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.366677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507059533897755:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.366742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.371774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:12:29.388651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507059533897761:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:12:29.566289Z node 2 :TX_PROXY ERROR: Actor# [2:7489507059533897788:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:29.591071Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507062107440103:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.592391Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzgwNzM2ODgtYWEwNDY4Ny1iOGE0MDUxMC04MDE4ZWUwYQ==, ActorId: [1:7489507062107440062:2337], ActorState: ExecuteState, TraceId: 01jr0wmbfe90d7h2sn1jkpdqfd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.594025Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507059533897802:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.594224Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzBkZDUwODEtYzBiN2JiOS04OWNmNTUxMS1mMDMyOGFhYw==, ActorId: [2:7489507059533897745:2312], ActorState: ExecuteState, TraceId: 01jr0wmbdmevxfbd9x0781qe7n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.594355Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.594613Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.597048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.699766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.790071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:12:30.327902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wmbx6cgpwtxw1tb0ch6f7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFlNjE5MTQtMTVjNDdjYTctNmE5OGIxZC00MjhiYmU2ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489507066402407857:3097] 2025-04-04T17:12:31.080147Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507049222537042:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.080257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:31.186082Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507046648995469:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.186170Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-04T17:12:36.247730Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-04T17:12:36.369092Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037 ... ppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.489915Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7489507092172212678:2508] 2025-04-04T17:12:36.490404Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] bootstrapping 4 [1:7489507092172212680:2509] 2025-04-04T17:12:36.493642Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7489507089598669580:2430] 2025-04-04T17:12:36.495646Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7489507089598669581:2431] 2025-04-04T17:12:36.493423Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7489507092172212678:2508] 2025-04-04T17:12:36.493698Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7489507092172212680:2509] 2025-04-04T17:12:36.495717Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7489507092172212681:2510] 2025-04-04T17:12:36.497139Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7489507092172212681:2510] 2025-04-04T17:12:36.498487Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7489507089598669580:2430] 2025-04-04T17:12:36.498488Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7489507089598669581:2431] 2025-04-04T17:12:36.503836Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7489507092172212731:2520] 2025-04-04T17:12:36.504442Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] 
bootstrapping 5 [1:7489507092172212713:2516] 2025-04-04T17:12:36.505768Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7489507092172212731:2520] 2025-04-04T17:12:36.506206Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7489507092172212713:2516] 2025-04-04T17:12:36.507818Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [1:7489507092172212714:2517] 2025-04-04T17:12:36.509567Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7489507092172212714:2517] 2025-04-04T17:12:36.511412Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7489507092172212718:2519] 2025-04-04T17:12:36.515244Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7489507092172212718:2519] 2025-04-04T17:12:36.520428Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7489507089598669601:2437] 2025-04-04T17:12:36.521751Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7489507089598669601:2437] 2025-04-04T17:12:36.527071Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7489507089598669602:2438] 2025-04-04T17:12:36.533473Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7489507089598669602:2438] 2025-04-04T17:12:36.533703Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7489507089598669599:2435] 2025-04-04T17:12:36.535410Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7489507089598669599:2435] 2025-04-04T17:12:36.539675Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7489507089598669600:2436] ===Query complete 2025-04-04T17:12:36.542006Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7489507089598669600:2436] 2025-04-04T17:12:36.544429Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7489507089598669610:2442] 2025-04-04T17:12:36.545899Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7489507089598669611:2443] 2025-04-04T17:12:36.547661Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7489507089598669611:2443] 2025-04-04T17:12:36.552844Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7489507089598669610:2442] 2025-04-04T17:12:36.613461Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.636841Z node 2 :PERSQUEUE 
NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.635444Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.635657Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.637501Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.638052Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.638572Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.645275Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig Create topic result: 1 2025-04-04T17:12:36.668292Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507092172212954:4008]: Request location 2025-04-04T17:12:36.668708Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507092172212967:4015] connected; active server actors: 1 2025-04-04T17:12:36.670336Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-04-04T17:12:36.670357Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-04T17:12:36.670368Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2025-04-04T17:12:36.670397Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-04T17:12:36.670408Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2025-04-04T17:12:36.670418Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-04T17:12:36.670426Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2025-04-04T17:12:36.670438Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2025-04-04T17:12:36.670456Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2025-04-04T17:12:36.670467Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2025-04-04T17:12:36.670478Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2025-04-04T17:12:36.670488Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2025-04-04T17:12:36.670496Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2025-04-04T17:12:36.670505Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2025-04-04T17:12:36.670516Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2025-04-04T17:12:36.670770Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507092172212954:4008]: Got location 2025-04-04T17:12:36.675674Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507092172212967:4015] disconnected; active server actors: 1 2025-04-04T17:12:36.675704Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507092172212967:4015] disconnected no session 2025-04-04T17:12:36.678878Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507092172212972:4019] connected; active server actors: 1 2025-04-04T17:12:36.679341Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-04-04T17:12:36.679362Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-04-04T17:12:36.679374Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-04-04T17:12:36.678489Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507092172212970:4017]: Request location 2025-04-04T17:12:36.681909Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507092172212970:4017]: Got location 2025-04-04T17:12:36.682287Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507092172212972:4019] disconnected; active server actors: 1 2025-04-04T17:12:36.682310Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507092172212972:4019] disconnected no session 2025-04-04T17:12:36.682603Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507092172212973:4020]: Request location 2025-04-04T17:12:36.684270Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507092172212975:4022] connected; active server actors: 1 >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] >> OlapEstimationRowsCorrectness::TPCH9 >> TestKinesisHttpProxy::TestPing [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2025-04-04T17:12:34.260518Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507081154274116:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:34.269186Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f1e/r3tmp/tmpjPsBnL/pdisk_1.dat 2025-04-04T17:12:34.900953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:34.905098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:34.905394Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:34.913897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18949, node 1 2025-04-04T17:12:35.143127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:35.143153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:35.143162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:35.143298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:35.759583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:35.863258Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7489507085449242149:2314], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-04T17:12:35.863298Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-04T17:12:35.863315Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.863336Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.863451Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-04T17:12:35.863571Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743786755863303) 2025-04-04T17:12:35.864478Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743786755863303 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-04T17:12:35.864648Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-04T17:12:35.868151Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-04T17:12:35.868879Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755863303&action=1" } } } 2025-04-04T17:12:35.869002Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.869055Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-04T17:12:35.869149Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-04T17:12:35.869493Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-04T17:12:35.869612Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T17:12:35.882554Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-04T17:12:35.882610Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.882686Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7489507085449242154:2206], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.882711Z node 1 :CMS_TENANTS TRACE: StateWork, 
processing event TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.882723Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.882731Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.882782Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-04T17:12:35.882808Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-04T17:12:35.882860Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-04T17:12:35.886692Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507085449242159:2315], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755863303&action=1" } UserToken: "" } 2025-04-04T17:12:35.886715Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:35.886911Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755863303&action=1" } } 2025-04-04T17:12:35.887089Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:35.887103Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.887134Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.887142Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.887219Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-04T17:12:35.887236Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743786755863303 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:35.906595Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:35.906747Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.906782Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-04T17:12:35.906803Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-04T17:12:35.926632Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-04T17:12:35.934927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:12:35.950923Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-04T17:12:35.951016Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: 
NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-04T17:12:35.972877Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-04T17:12:35.976499Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507085449242226:2318], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755863303&action=1" } UserToken: "" } 2025-04-04T17:12:35.976538Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:35.976714Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755863303&action=1" } } 2025-04-04T17:12:35.982043Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-04T17:12:35.982646Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786756012 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey ... 
25-04-04T17:12:36.061412Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786756012 ParentPathId: 2 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 0 TimeCastBucketsPerMediator: 0 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:12:36.061424Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-04-04T17:12:36.061552Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-04T17:12:36.069309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-04T17:12:36.082683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.083935Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:36.083949Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.094935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-04T17:12:36.095309Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-04T17:12:36.095324Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-04T17:12:36.095356Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-04T17:12:36.095431Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-04T17:12:36.095477Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2025-04-04T17:12:36.095541Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7489507085449242260:2206], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-04T17:12:36.095557Z node 1 
:CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-04-04T17:12:36.095567Z node 1 :CMS_TENANTS DEBUG: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2025-04-04T17:12:36.099114Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507089744209660:2325], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756055388&action=2" } UserToken: "" } 2025-04-04T17:12:36.099131Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:36.099438Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756055388&action=2" } } 2025-04-04T17:12:36.109140Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-04-04T17:12:36.124965Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-04-04T17:12:36.125006Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-04T17:12:36.125044Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:36.125122Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7489507089744209642:2206], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:36.125138Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:36.125159Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.125167Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.125215Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-04T17:12:36.125237Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743786756055388 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.125293Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786756055388 issue= 2025-04-04T17:12:36.126672Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-04T17:12:36.126769Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.126803Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.126950Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507081154274417:2191], Recipient [1:7489507081154274529:2206]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.126980Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.126994Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.127002Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.127022Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-04T17:12:36.127045Z node 1 :CMS_TENANTS TRACE: Update tenant state in database 
for /Root/users/user-1 state=REMOVING_POOLS txid=1743786756055388 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.133357Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:36.133418Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.133453Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-04T17:12:36.133577Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-04T17:12:36.134087Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-04T17:12:36.134164Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-04T17:12:36.143299Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-04T17:12:36.143398Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7489507089744209733:2206], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-04T17:12:36.144391Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-04T17:12:36.144423Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.144434Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.144484Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-04T17:12:36.144511Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-04T17:12:36.155738Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:36.155780Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.155788Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.155793Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.155845Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743786756055388 2025-04-04T17:12:36.155856Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786756055388 issue= 2025-04-04T17:12:36.155868Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743786756055388 issue= 2025-04-04T17:12:36.155875Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-04T17:12:36.155936Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743786756055388 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.180243Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507089744209746:2327], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756055388&action=2" } UserToken: "" } 2025-04-04T17:12:36.180266Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:36.180428Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756055388&action=2" } } 2025-04-04T17:12:36.187451Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-04T17:12:36.187541Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.234887Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507089744209755:2329], Recipient [1:7489507081154274529:2206]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756055388&action=2" } UserToken: "" } 2025-04-04T17:12:36.234917Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:36.235094Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756055388&action=2" ready: true status: SUCCESS } } >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2025-04-04T17:12:34.303859Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507083348892520:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:34.303957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f07/r3tmp/tmpJGbwbc/pdisk_1.dat 2025-04-04T17:12:34.934157Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20232, node 1 2025-04-04T17:12:35.109983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:35.110069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:35.112496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:35.146763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:35.146784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:35.146789Z node 
1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:35.146902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:35.759760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.894148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:12:35.959698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicApiDescribes::DescribeTopic [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TGRpcCmsTest::AuthTokenTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2025-04-04T17:12:26.014452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507048760921528:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.014540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:26.061433Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507047043263756:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.062155Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:26.277935Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:12:26.308920Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cf3/r3tmp/tmpC5R1pc/pdisk_1.dat 2025-04-04T17:12:26.571986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:26.572847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.573895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:26.573938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.577717Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:26.577825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:26.586810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:26.631298Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21741, node 1 2025-04-04T17:12:26.663020Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:26.663079Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:26.794337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001cf3/r3tmp/yandexWC0m7S.tmp 2025-04-04T17:12:26.794368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001cf3/r3tmp/yandexWC0m7S.tmp 2025-04-04T17:12:26.794578Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001cf3/r3tmp/yandexWC0m7S.tmp 2025-04-04T17:12:26.794947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:27.023936Z INFO: TTestServer started on Port 25183 GrpcPort 21741 TClient is connected to server localhost:25183 PQClient connected to localhost:21741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:12:27.308539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:27.389145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:29.081232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507061645824519:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.081350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507061645824511:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.081502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.086821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:12:29.099810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507061645824525:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:12:29.195185Z node 1 :TX_PROXY ERROR: Actor# [1:7489507061645824613:2764] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:29.509206Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507059928166055:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.509206Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507061645824623:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.509482Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWIyY2M4MzUtODllMjBjZmItODhkN2EwNmUtYjMxYmJiYWY=, ActorId: [2:7489507059928166031:2312], ActorState: ExecuteState, TraceId: 01jr0wmbcb8q5xp7wswqbjv1bt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.513088Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.515999Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjZlMWIzZDktZTU1Yjg2MWUtMTVjMzFkMDgtN2IyNjQ5YWY=, ActorId: [1:7489507061645824509:2337], ActorState: ExecuteState, TraceId: 01jr0wmb4q308zy1x7cw9vrk63, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.516369Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.597452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.683378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.789173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:12:30.327913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0wmbxfdx1tgs1ewsw9m00d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ1NzhmY2ItOTMzZmFhOTktMmZiYTUyOTktYjllNzc5NDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489507065940792375:3110] 2025-04-04T17:12:31.013653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507048760921528:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.013758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:31.061221Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507047043263756:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.061276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-04T17:12:36.162483Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-04T17:12:36.294922Z node 2 :PERSQUEUE_READ_BALANCER INFO: [720751 ... partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1743786756 nanos: 840000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 11 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743786756 nanos: 835000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1743786756 nanos: 866000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 12 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743786756 nanos: 887000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1743786756 nanos: 905000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743786756 nanos: 889000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1743786756 nanos: 914000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743786756 nanos: 829000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1743786756 nanos: 849000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } } } } 2025-04-04T17:12:37.423551Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-04T17:12:37.423621Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2025-04-04T17:12:37.424181Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507096005565833:2630]: Request location 2025-04-04T17:12:37.424542Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507096005565835:2631] connected; 
active server actors: 1 2025-04-04T17:12:37.424867Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-04-04T17:12:37.424893Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-04-04T17:12:37.424911Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-04-04T17:12:37.424924Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-04-04T17:12:37.424936Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-04-04T17:12:37.424963Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-04T17:12:37.424974Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-04-04T17:12:37.424987Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-04-04T17:12:37.424997Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-04T17:12:37.425007Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-04-04T17:12:37.425017Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-04T17:12:37.425027Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-04-04T17:12:37.425043Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-04T17:12:37.425176Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507096005565833:2630]: Got location 2025-04-04T17:12:37.425056Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-04T17:12:37.425066Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-04T17:12:37.425386Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507096005565835:2631] disconnected; active server actors: 1 2025-04-04T17:12:37.425404Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507096005565835:2631] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: 
"rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743786756397 tx_id: 281474976710679 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-04-04T17:12:37.428061Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-04T17:12:37.428126Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743786756397 tx_id: 281474976710679 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2025-04-04T17:12:37.431299Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-04-04T17:12:37.431385Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TTopicReaderTests::TestRun_ReadOneMessage >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> 
TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TestKinesisHttpProxy::TestRequestBadJson >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestKinesisHttpProxy::TestRequestWithWrongRegion |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TGRpcCmsTest::SimpleTenantsTest [GOOD] >> TestYmqHttpProxy::TestReceiveMessage |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords >> TGRpcCmsTest::DescribeOptionsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-04-04T17:12:26.004758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507047265225012:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.005726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:26.090608Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507048328760394:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.090688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:26.323954Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:12:26.341829Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cbc/r3tmp/tmpOzpdDH/pdisk_1.dat 2025-04-04T17:12:26.614841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:26.619587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:26.619643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.625105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:26.625169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.628985Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:26.630281Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:26.632422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16913, node 1 2025-04-04T17:12:26.797765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001cbc/r3tmp/yandexdk7OFa.tmp 2025-04-04T17:12:26.797800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001cbc/r3tmp/yandexdk7OFa.tmp 2025-04-04T17:12:26.797965Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001cbc/r3tmp/yandexdk7OFa.tmp 2025-04-04T17:12:26.798150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:27.030749Z INFO: TTestServer started on Port 21640 GrpcPort 16913 TClient is connected to server localhost:21640 PQClient connected to localhost:16913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:27.353657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:27.407227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:29.219925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507061213662663:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.220000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507061213662653:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.220057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.225129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:12:29.247745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507061213662670:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:12:29.310772Z node 2 :TX_PROXY ERROR: Actor# [2:7489507061213662697:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:29.553304Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507061213662711:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.553617Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjMwMTg2ZGYtZDkzZjQzZDItODhiYzNkYmEtOWYzMDNhYzY=, ActorId: [2:7489507061213662639:2312], ActorState: ExecuteState, TraceId: 01jr0wmb912awwjjs1kgkhdzqt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.555718Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.557177Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507060150128077:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.558580Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTkyYmIzZGItYTgzNjljNDEtMmEzMmM1ODMtN2VlY2I3OTM=, ActorId: [1:7489507060150128028:2337], ActorState: ExecuteState, TraceId: 01jr0wmbcjd6jvwkn518zzspfk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.558935Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.596952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.677426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.788793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:12:30.327900Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wmbxg8xmn36zqy51azzrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk3NTgzZTctYWRiOTZjMmMtODViY2JmZTctYzJhZThiMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489507064445095827:3109] 2025-04-04T17:12:31.005087Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507047265225012:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.005210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:31.091016Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507048328760394:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.091091Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-04T17:12:36.177397Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-04T17:12:36.492383Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.492426Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx wri ... QUEUE INFO: [PQ: 72075186224037895] has a tx writes info 2025-04-04T17:12:37.012713Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7489507095573402243:2476] 2025-04-04T17:12:37.014773Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7489507095573402255:2478] 2025-04-04T17:12:37.016614Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [2:7489507095573402244:2477] 2025-04-04T17:12:37.041336Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [2:7489507095573402258:2479] 2025-04-04T17:12:37.045008Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:4:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.045042Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 2 [1:7489507090214901341:2558] 2025-04-04T17:12:37.062545Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 2 2025-04-04T17:12:37.080208Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:5:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.080564Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7489507090214901352:2560] 2025-04-04T17:12:37.081188Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.081213Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7489507090214901376:2563] 2025-04-04T17:12:37.081253Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:10:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.081274Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7489507090214901364:2561] 2025-04-04T17:12:37.081399Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-04T17:12:37.081474Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7489507090214901373:2562] 2025-04-04T17:12:37.089563Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:37.089593Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] has a tx writes info 2025-04-04T17:12:37.089792Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2025-04-04T17:12:37.089819Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2025-04-04T17:12:37.090235Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.090288Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7489507095573402243:2476] 2025-04-04T17:12:37.090483Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.090508Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7489507095573402255:2478] 2025-04-04T17:12:37.091286Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:37.091329Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-04-04T17:12:37.091464Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.091500Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 2 [2:7489507095573402244:2477] 2025-04-04T17:12:37.093075Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7489507095573402348:2488] 2025-04-04T17:12:37.093166Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-04-04T17:12:37.093387Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7489507095573402345:2486] 2025-04-04T17:12:37.095388Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7489507095573402352:2489] 2025-04-04T17:12:37.095793Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7489507095573402347:2487] 2025-04-04T17:12:37.096743Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:6:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-04T17:12:37.096760Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [2:7489507095573402258:2479] 2025-04-04T17:12:37.097912Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 2, Generation 2 2025-04-04T17:12:37.105546Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.105549Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.105574Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7489507095573402345:2486] 2025-04-04T17:12:37.105583Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7489507095573402347:2487] 2025-04-04T17:12:37.105793Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.105801Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:12:37.105813Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7489507095573402352:2489] 2025-04-04T17:12:37.105818Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7489507095573402348:2488] 2025-04-04T17:12:37.110367Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-04-04T17:12:37.110397Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2025-04-04T17:12:37.399995Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-04T17:12:37.400093Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2025-04-04T17:12:37.400136Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7489507094509868807:2580]: Bootstrap 2025-04-04T17:12:37.400581Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507094509868807:2580]: Request location 2025-04-04T17:12:37.400979Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507094509868809:2581] connected; active server actors: 1 2025-04-04T17:12:37.401258Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 2, Generation 2 2025-04-04T17:12:37.401592Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507094509868807:2580]: Got location 2025-04-04T17:12:37.401954Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507094509868809:2581] disconnected; active server actors: 1 2025-04-04T17:12:37.401985Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507094509868809:2581] disconnected no session Got 
response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 2 generation: 2 } } } } } 2025-04-04T17:12:37.403654Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-04T17:12:37.403718Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-04-04T17:12:37.403752Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7489507094509868810:2582]: Bootstrap 2025-04-04T17:12:37.404199Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507094509868810:2582]: Request location 2025-04-04T17:12:37.404657Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507094509868813:2584] connected; active server actors: 1 2025-04-04T17:12:37.404955Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 2 2025-04-04T17:12:37.405170Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507094509868810:2582]: Got location 2025-04-04T17:12:37.405458Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507094509868813:2584] disconnected; active server actors: 1 2025-04-04T17:12:37.405478Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507094509868813:2584] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743786756 nanos: 943000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_location { node_id: 1 generation: 2 } } } } } 2025-04-04T17:12:37.407319Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-04T17:12:37.407380Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-04T17:12:37.407410Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7489507094509868817:2586]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2025-04-04T17:12:34.372238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507082512593406:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:34.374655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000efa/r3tmp/tmpRZedbJ/pdisk_1.dat 2025-04-04T17:12:34.943099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:34.964269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:34.964381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:34.970481Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24818, node 1 2025-04-04T17:12:35.146821Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:35.146841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:35.146852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:35.146957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:35.759598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
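The CMS_TENANTS records that follow walk /Root/users/user-1 through a sequence of persisted tenant states: CREATING_POOLS when the tenant row is first written, CREATING_SUBDOMAIN once the storage pool is allocated, and, during teardown further on, REMOVING_UNITS and REMOVING_POOLS. A compact sketch of that progression is given here; the enum and its ordering are assumptions drawn only from the state names visible in this log, not from YDB's console sources.

#include <iostream>
#include <vector>

// Sketch only: states as they appear in the CMS_TENANTS records; the real
// tenants manager may define more states and different transitions.
enum class ETenantState {
    CreatingPools,      // tenant row written, storage pools being defined
    CreatingSubdomain,  // pools allocated, ESchemeOpCreateExtSubDomain in flight
    RemovingUnits,      // teardown: computational units being dropped
    RemovingPools,      // teardown: storage pools being deleted
};

const char* Name(ETenantState s) {
    switch (s) {
        case ETenantState::CreatingPools:     return "CREATING_POOLS";
        case ETenantState::CreatingSubdomain: return "CREATING_SUBDOMAIN";
        case ETenantState::RemovingUnits:     return "REMOVING_UNITS";
        case ETenantState::RemovingPools:     return "REMOVING_POOLS";
    }
    return "?";
}

int main() {
    // The order observed for /Root/users/user-1 in this run (the steady
    // running state between creation and removal is not shown in the log):
    std::vector<ETenantState> trace = {
        ETenantState::CreatingPools,
        ETenantState::CreatingSubdomain,
        ETenantState::RemovingUnits,
        ETenantState::RemovingPools,
    };
    for (ETenantState s : trace) {
        std::cout << Name(s) << "\n";
    }
}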
2025-04-04T17:12:35.865472Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7489507086807561343:2314], Recipient [1:7489507082512593761:2207]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" }
2025-04-04T17:12:35.865514Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest
2025-04-04T17:12:35.865528Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-04-04T17:12:35.865539Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-04-04T17:12:35.865714Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: ""
2025-04-04T17:12:35.865867Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743786755865655)
2025-04-04T17:12:35.866235Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743786755865655 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId=
2025-04-04T17:12:35.866417Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED
2025-04-04T17:12:35.871762Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete
2025-04-04T17:12:35.872494Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755865655&action=1" } } }
2025-04-04T17:12:35.872667Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-04-04T17:12:35.872865Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap
2025-04-04T17:12:35.873003Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } }
2025-04-04T17:12:35.873205Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7489507086807561343:2314], Recipient [1:7489507082512593761:2207]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755865655&action=1" } UserToken: "" }
2025-04-04T17:12:35.873233Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest
2025-04-04T17:12:35.873549Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7489507086807561343:2314]
2025-04-04T17:12:35.873626Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755865655&action=1" } }
2025-04-04T17:12:35.873700Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5
2025-04-04T17:12:35.873801Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T17:12:35.881264Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-04T17:12:35.881328Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.881437Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7489507086807561348:2207], Recipient [1:7489507082512593761:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.881456Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.881466Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.881474Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.881519Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-04T17:12:35.881542Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-04T17:12:35.881601Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-04T17:12:35.888568Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:35.888601Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.888608Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.888618Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.888661Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-04T17:12:35.888686Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743786755865655 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:35.892829Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:35.893000Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.893044Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-04T17:12:35.893077Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-04T17:12:35.897042Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-04T17:12:35.898075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:12:35.902598Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 
53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-04T17:12:35.902696Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-04T17:12:35.908612Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-04T17:12:35.917244Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-04T17:12:35.917689Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786755956 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:12:35.917703Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-04T17:12:35.917740Z node 1 :CMS_TENANTS DEBUG: TSubdomainMa ... 
1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2025-04-04T17:12:36.562805Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7489507091102529158:2371] 2025-04-04T17:12:36.562838Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756557891&action=2" } } 2025-04-04T17:12:36.563451Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786755956 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186224037895 Coordinators: 72075186224037893 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037892 Mediators: 72075186224037890 Mediators: 72075186224037889 SchemeShard: 72075186224037897 Hive: 72075186224037888 SysViewProcessor: 72075186224037891 StatisticsAggregator: 72075186224037894 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } StoragePools { Name: "/Root/users/user-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:12:36.563463Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-04-04T17:12:36.563548Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-04T17:12:36.565150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-04T17:12:36.565382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.567461Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-04T17:12:36.567511Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2025-04-04T17:12:36.567779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660 2025-04-04T17:12:36.570604Z node 1 :CMS_TENANTS DEBUG: 
TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-04-04T17:12:36.586115Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-04-04T17:12:36.586135Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-04T17:12:36.586166Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:36.586213Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7489507091102529191:2207], Recipient [1:7489507082512593761:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:36.586225Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:36.586235Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.586261Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.586297Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-04T17:12:36.586322Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743786756557891 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.586395Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786756557891 issue= 2025-04-04T17:12:36.587245Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-04T17:12:36.588915Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-04T17:12:36.588984Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.589004Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.589224Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507082512593632:2205], Recipient [1:7489507082512593761:2207]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.589269Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.589285Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.589292Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.589335Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-04T17:12:36.589359Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743786756557891 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.591432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-04T17:12:36.597948Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-04T17:12:36.598789Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:36.598869Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.598900Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-04T17:12:36.599025Z node 1 :CMS_TENANTS DEBUG: 
TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-04T17:12:36.601126Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-04-04T17:12:36.601216Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-04-04T17:12:36.604355Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-04T17:12:36.607145Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-04-04T17:12:36.607238Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7489507091102529277:2207], Recipient [1:7489507082512593761:2207]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-04T17:12:36.607274Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-04T17:12:36.607286Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.607305Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.607337Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-04T17:12:36.607355Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-04T17:12:36.636112Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:36.636148Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.636156Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.636164Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.636260Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743786756557891 2025-04-04T17:12:36.636273Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786756557891 issue= 2025-04-04T17:12:36.636293Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743786756557891 issue= 2025-04-04T17:12:36.636303Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-04-04T17:12:36.636372Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743786756557891 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.639641Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-04T17:12:36.639872Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7489507091102529158:2371]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756557891&action=2" ready: true status: SUCCESS } } 2025-04-04T17:12:36.640019Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.655805Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender 
[1:7489507091102529326:2376], Recipient [1:7489507082512593761:2207]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-04-04T17:12:36.655838Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-04T17:12:36.655990Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-04-04T17:12:36.671307Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7489507091102529329:2377], Recipient [1:7489507082512593761:2207]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-04-04T17:12:36.671338Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-04-04T17:12:36.671570Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-04-04T17:12:36.677903Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-04T17:12:36.679920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> TIcNodeCache::GetNodesInfoTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2025-04-04T17:12:26.012502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507046704014670:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.012606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:26.134353Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507046095933871:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:26.367769Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:12:26.391567Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ccf/r3tmp/tmpv5uC53/pdisk_1.dat 2025-04-04T17:12:26.446552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:26.670832Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:26.680096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:26.680224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.681895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:26.681975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:26.684667Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:26.685548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:26.687437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10743, node 1 2025-04-04T17:12:26.798816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001ccf/r3tmp/yandex0cDNYt.tmp 2025-04-04T17:12:26.798840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001ccf/r3tmp/yandex0cDNYt.tmp 2025-04-04T17:12:26.798990Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001ccf/r3tmp/yandex0cDNYt.tmp 2025-04-04T17:12:26.799129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:27.019172Z INFO: TTestServer started on Port 22358 GrpcPort 10743 TClient is connected to server localhost:22358 PQClient connected to localhost:10743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:27.325418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:27.383088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:29.359675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507058980836124:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.359775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.359846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507058980836132:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:29.364158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:12:29.377371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507058980836139:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:12:29.433294Z node 2 :TX_PROXY ERROR: Actor# [2:7489507058980836166:2177] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:29.657686Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507058980836180:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.657900Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmNhNjc5NjEtMWU3NTdjZmMtNzAzZmZiMi1iMjM2NDkxZA==, ActorId: [2:7489507058980836108:2311], ActorState: ExecuteState, TraceId: 01jr0wmbdd0begb7jkt5p4e830, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.656551Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507059588917728:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:29.658169Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjc2ZmFiMTctMTEwM2ZhMjgtNjIzYjNiMTYtYzEyMzA2YmE=, ActorId: [1:7489507059588917677:2335], ActorState: ExecuteState, TraceId: 01jr0wmbgh0ck76zqar85520wr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:12:29.659502Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.659636Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:12:29.672021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.749673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:29.843137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:12:30.374514Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wmbyt96wdvnfgth3t67e7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQ4ZWFmMzAtZTM0MDNiNzAtYTkzMTUyZWQtNTJhMDM5ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489507063883885474:3089] 2025-04-04T17:12:31.010312Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507046704014670:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.010435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:31.134741Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507046095933871:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:31.134824Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-04-04T17:12:36.349638Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-04T17:12:36.516262Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:12:36.516332Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx wri ... retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1743786756 nanos: 953000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } topic_stats { min_last_write_time { seconds: 1743786757 nanos: 13000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-04-04T17:12:37.589701Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-04T17:12:37.589804Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2025-04-04T17:12:37.589919Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-04-04T17:12:37.590448Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507093948658576:2610]: Request location 2025-04-04T17:12:37.590774Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507093948658578:2611] connected; active server actors: 1 2025-04-04T17:12:37.590991Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 2 2025-04-04T17:12:37.591020Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 2, Generation 2 2025-04-04T17:12:37.591033Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 
2025-04-04T17:12:37.591066Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 2 2025-04-04T17:12:37.591078Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-04-04T17:12:37.591096Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-04-04T17:12:37.591108Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 2, Generation 2 2025-04-04T17:12:37.591120Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-04-04T17:12:37.591130Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-04-04T17:12:37.591139Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-04-04T17:12:37.591148Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-04-04T17:12:37.591161Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-04-04T17:12:37.591188Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-04-04T17:12:37.591352Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7489507093948658576:2610]: Got location 2025-04-04T17:12:37.591202Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-04-04T17:12:37.591212Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-04-04T17:12:37.591593Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507093948658578:2611] disconnected; active server actors: 1 2025-04-04T17:12:37.591612Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7489507093948658578:2611] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743786756635 tx_id: 281474976710679 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 
generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location 2025-04-04T17:12:37.595504Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-04T17:12:37.595601Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" 2025-04-04T17:12:37.595677Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743786756635 tx_id: 281474976710679 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2025-04-04T17:12:37.600027Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic 
request 2025-04-04T17:12:37.600123Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-04-04T17:12:37.600246Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-04-04T17:12:34.340698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507081703028373:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:34.340771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ef0/r3tmp/tmpe5Bxd4/pdisk_1.dat 2025-04-04T17:12:34.940976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:34.941104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:34.941946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:34.957889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3115, node 1 2025-04-04T17:12:35.142950Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:35.142975Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:35.143011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:35.143136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:35.759426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
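Both CMS tests in this section poll long-running work through operation ids of the form ydb://cmsrequest/5?tenant=...&cmstid=...&txid=...&action=..., as seen in the TEvCreateTenantResponse and TEvNotifyOperationCompletionResponse records; the query string carries everything needed to route the poll. The short sketch below pulls the parameters out of such an id with plain string handling; it is an illustration, not the decoder the console service actually uses.

#include <iostream>
#include <map>
#include <string>

// Sketch: splitting a cmsrequest operation id into its query parameters.
std::map<std::string, std::string> ParseQuery(const std::string& id) {
    std::map<std::string, std::string> params;
    auto q = id.find('?');
    if (q == std::string::npos) return params;
    std::string rest = id.substr(q + 1);
    size_t pos = 0;
    while (pos < rest.size()) {
        size_t amp = rest.find('&', pos);
        if (amp == std::string::npos) amp = rest.size();
        std::string pair = rest.substr(pos, amp - pos);
        auto eq = pair.find('=');
        if (eq != std::string::npos)
            params[pair.substr(0, eq)] = pair.substr(eq + 1);
        pos = amp + 1;
    }
    return params;
}

int main() {
    // One of the ids reported in this run:
    const std::string id =
        "ydb://cmsrequest/5?tenant=/Root/users/user-1"
        "&cmstid=72057594037936131&txid=1743786756071790&action=1";
    for (const auto& [k, v] : ParseQuery(id))
        std::cout << k << " = " << v << "\n";
}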
TClient is connected to server localhost:1434 2025-04-04T17:12:35.994494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.071756Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7489507090292963576:2314], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-04T17:12:36.071800Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-04T17:12:36.071815Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.071842Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.071956Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2025-04-04T17:12:36.072173Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743786756071790) 2025-04-04T17:12:36.072869Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743786756071790 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-04T17:12:36.073050Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-04T17:12:36.079679Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-04T17:12:36.080358Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756071790&action=1" } } } 2025-04-04T17:12:36.080475Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.080531Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-04T17:12:36.080693Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-04T17:12:36.080989Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-04T17:12:36.081143Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T17:12:36.085960Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true 
} Success: true ConfigTxSeqNo: 6 2025-04-04T17:12:36.086044Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:36.086113Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7489507090292963581:2197], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:36.086136Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:36.086153Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.086163Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.086206Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-04T17:12:36.086263Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-04T17:12:36.086338Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-04T17:12:36.090990Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:36.091020Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.091031Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.091042Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.091098Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-04T17:12:36.091125Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743786756071790 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.098665Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507090292963589:2315], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756071790&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-04-04T17:12:36.098704Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:36.098952Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756071790&action=1" } } 2025-04-04T17:12:36.099086Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:36.099255Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.099315Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-04T17:12:36.099328Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-04T17:12:36.107791Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 
UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" DatabaseName: "Root" 2025-04-04T17:12:36.109695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-04T17:12:36.116934Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-04T17:12:36.117014Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710659 2025-04-04T17:12:36.131074Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710659 2025-04-04T17:12:36.144312Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-04T17:12:36.144897Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786756180 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user-1@builtin" ACL: "" EffectiveACL: "\n\032\010\001\020\377\377\003\032\016user-1@builtin \003(\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 7205759404664 ... 
n 2025-04-04T17:12:37.067518Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-2@builtin\022\030\022\026\n\024all-users@well-known\032\016user-2@builtin\"\007Builtin*\017**** (FA717EBF)" DatabaseName: "Root" 2025-04-04T17:12:37.068533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710663 2025-04-04T17:12:37.068877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:12:37.073161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710663 2025-04-04T17:12:37.073898Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710663 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-04-04T17:12:37.073941Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710663 2025-04-04T17:12:37.074970Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507094587931517:2385], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786757059220&action=2" } UserToken: "" } 2025-04-04T17:12:37.075005Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:37.075144Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786757059220&action=2" } } 2025-04-04T17:12:37.081326Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710663 2025-04-04T17:12:37.098340Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-04-04T17:12:37.099792Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710663 2025-04-04T17:12:37.099811Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-04T17:12:37.099877Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:37.100013Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7489507094587931507:2197], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:37.100039Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-04-04T17:12:37.100053Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:37.100061Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:37.100093Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-04-04T17:12:37.100131Z node 1 :CMS_TENANTS TRACE: Update 
tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743786757059220 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-04T17:12:37.100192Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786757059220 issue=AccessDenied: Access denied for request 2025-04-04T17:12:37.102288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-04-04T17:12:37.104539Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-04-04T17:12:37.104650Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-04T17:12:37.106887Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-04-04T17:12:37.106930Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-04-04T17:12:37.106954Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-04-04T17:12:37.106982Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-04-04T17:12:37.109920Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-04-04T17:12:37.109959Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-04T17:12:37.109976Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-04T17:12:37.118557Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-04-04T17:12:37.118661Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-04-04T17:12:37.118695Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:37.125124Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-04T17:12:37.129516Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507081703028540:2200], Recipient [1:7489507081703028631:2197]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:37.129546Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:37.129564Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:37.129574Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:37.129606Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-04-04T17:12:37.129634Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743786757059220 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-04T17:12:37.131820Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507094587931600:2387], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786757059220&action=2" } UserToken: "" } 2025-04-04T17:12:37.131849Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:37.131998Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786757059220&action=2" } } 2025-04-04T17:12:37.137844Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:37.137920Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:37.137951Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-04T17:12:37.138055Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-04T17:12:37.138764Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-04-04T17:12:37.138850Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-04-04T17:12:37.143139Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-04-04T17:12:37.143240Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7489507094587931622:2197], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-04-04T17:12:37.143275Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-04-04T17:12:37.143289Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:37.143302Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:37.143347Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-04-04T17:12:37.143364Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-04-04T17:12:37.145586Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:37.145610Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:37.145617Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:37.145624Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:37.145679Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743786757059220 2025-04-04T17:12:37.145693Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786757059220 issue=AccessDenied: Access denied for request 2025-04-04T17:12:37.145711Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743786757059220 issue=AccessDenied: Access denied for request 2025-04-04T17:12:37.145753Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 
2025-04-04T17:12:37.145848Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743786757059220 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-04-04T17:12:37.151036Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-04-04T17:12:37.151084Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:37.186650Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507094587931638:2390], Recipient [1:7489507081703028631:2197]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786757059220&action=2" } UserToken: "" } 2025-04-04T17:12:37.186692Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:37.186884Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786757059220&action=2" ready: true status: SUCCESS } } 2025-04-04T17:12:37.189606Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-04T17:12:37.189816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD]
Test command err: 2025-04-04T17:12:34.359661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507083735464000:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:34.359707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f10/r3tmp/tmpIYbVNv/pdisk_1.dat 2025-04-04T17:12:34.929455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:34.938140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:34.938291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:34.942980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7426, node 1 2025-04-04T17:12:35.146820Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:35.146843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:35.146848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:35.146959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5837 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:35.759775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.861916Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7489507088030432098:2314], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:35.861975Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-04T17:12:35.861991Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.862005Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.862127Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2025-04-04T17:12:35.862555Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743786755862254) 2025-04-04T17:12:35.863140Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743786755862254 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-04T17:12:35.863364Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-04T17:12:35.866858Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-04T17:12:35.867616Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755862254&action=1" } } } 2025-04-04T17:12:35.867777Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.867860Z node 1 
:CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-04T17:12:35.868015Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-04-04T17:12:35.869104Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-04T17:12:35.869263Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T17:12:35.872874Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507088030432107:2315], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755862254&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:35.872916Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:35.873184Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755862254&action=1" } } 2025-04-04T17:12:35.874286Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-04T17:12:35.874358Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.874425Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7489507088030432103:2194], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.874445Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.874454Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.874462Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.874507Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-04T17:12:35.875453Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-04T17:12:35.875537Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-04T17:12:35.883692Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:35.883743Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.883762Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.883771Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.883847Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-04T17:12:35.883866Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743786755862254 errorcode=STATUS_CODE_UNSPECIFIED 
issue= 2025-04-04T17:12:35.887998Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:35.888162Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.888214Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-04T17:12:35.888234Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-04T17:12:35.908430Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "Root" 2025-04-04T17:12:35.915033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:12:35.920734Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-04T17:12:35.920820Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-04T17:12:35.924491Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-04T17:12:35.934114Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-04T17:12:35.934696Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786755970 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecuritySta ... 
.382671Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7489507092325399925:2372], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:36.382710Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-04T17:12:36.382753Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.382854Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507083735464360:2193], Recipient [1:7489507083735464463:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.382866Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.383932Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-04T17:12:36.392253Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7489507092325399930:2373], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:36.392279Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-04T17:12:36.393539Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.397145Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507083735464360:2193], Recipient [1:7489507083735464463:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.397162Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.399798Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-04T17:12:36.427247Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7489507092325399949:2374], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:36.427283Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-04T17:12:36.427339Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.427449Z node 1 :CMS_TENANTS 
TRACE: StateWork, received event# 273154052, Sender [1:7489507083735464360:2193], Recipient [1:7489507083735464463:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.427462Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.430041Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-04T17:12:36.439180Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7489507092325399958:2375], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:36.439216Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-04T17:12:36.439252Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.439331Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507083735464360:2193], Recipient [1:7489507083735464463:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.439356Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.439860Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-04T17:12:36.442172Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7489507092325399963:2376], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:36.442193Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-04T17:12:36.442221Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.442345Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507083735464360:2193], Recipient [1:7489507083735464463:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.442357Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.443087Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { 
unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-04-04T17:12:36.443691Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-04-04T17:12:36.443713Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-04-04T17:12:36.443761Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-04T17:12:36.443826Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7489507088030432202:2194], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-04-04T17:12:36.443841Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-04-04T17:12:36.443853Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:36.443861Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:36.443893Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-04-04T17:12:36.443915Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1743786755862254 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:36.443956Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 2025-04-04T17:12:36.448072Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-04-04T17:12:36.448090Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:36.448505Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7489507092325399969:2377], Recipient [1:7489507083735464463:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-04-04T17:12:36.448520Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-04-04T17:12:36.448547Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.448618Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507083735464360:2193], Recipient [1:7489507083735464463:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-04-04T17:12:36.448627Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-04-04T17:12:36.448993Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected to server localhost:5837 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037897 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037897 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { ... (TRUNCATED) 2025-04-04T17:12:36.779272Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-04-04T17:12:36.779657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:12:37.011886Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:38.012424Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD]
Test command err: 2025-04-04T17:12:34.411617Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507083238743774:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:34.411665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eef/r3tmp/tmpDpFKXI/pdisk_1.dat 2025-04-04T17:12:34.996896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:34.997045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:35.004134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:35.055059Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19455, node 1 2025-04-04T17:12:35.078967Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:35.079294Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:35.146570Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:35.146592Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:35.146598Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:35.146737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62014 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:35.759689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.847748Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7489507087533711878:2314], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-04-04T17:12:35.847825Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-04-04T17:12:35.847842Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.847856Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.848069Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-04-04T17:12:35.850082Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743786755848175) 2025-04-04T17:12:35.853538Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743786755848175 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-04-04T17:12:35.853810Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-04-04T17:12:35.865034Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-04-04T17:12:35.865983Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755848175&action=1" } } } 2025-04-04T17:12:35.866184Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.866284Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-04-04T17:12:35.866435Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: 
"/Root/users/user-1:hdd" } } } 2025-04-04T17:12:35.869070Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-04-04T17:12:35.869187Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T17:12:35.873655Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507087533711886:2315], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755848175&action=1" } UserToken: "" } 2025-04-04T17:12:35.873687Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:35.873893Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755848175&action=1" } } 2025-04-04T17:12:35.878897Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-04-04T17:12:35.878978Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.879050Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7489507087533711883:2196], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.879074Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-04-04T17:12:35.879088Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.879096Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.879143Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-04-04T17:12:35.879188Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-04-04T17:12:35.879254Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-04-04T17:12:35.886091Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-04-04T17:12:35.886168Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-04-04T17:12:35.886176Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.886183Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-04-04T17:12:35.886270Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-04-04T17:12:35.886305Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743786755848175 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-04-04T17:12:35.890216Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-04-04T17:12:35.891264Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-04-04T17:12:35.891310Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-04-04T17:12:35.891332Z node 1 
:CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-04-04T17:12:35.927585Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-04-04T17:12:35.929433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:12:35.935535Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507087533711947:2318], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755848175&action=1" } UserToken: "" } 2025-04-04T17:12:35.935561Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-04-04T17:12:35.935681Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786755848175&action=1" } } 2025-04-04T17:12:35.939308Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-04-04T17:12:35.939432Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-04-04T17:12:35.943646Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-04-04T17:12:35.957605Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-04-04T17:12:35.958156Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786755991 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 ... 
tx
2025-04-04T17:12:36.820004Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1
2025-04-04T17:12:36.820027Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743786756764444 errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-04-04T17:12:36.820100Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786756764444 issue=
2025-04-04T17:12:36.821920Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1
2025-04-04T17:12:36.822008Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1"
2025-04-04T17:12:36.822022Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-04-04T17:12:36.822201Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7489507083238744147:2200], Recipient [1:7489507083238744262:2196]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1"
2025-04-04T17:12:36.822214Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState
2025-04-04T17:12:36.822228Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-04-04T17:12:36.822235Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-04-04T17:12:36.822276Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS
2025-04-04T17:12:36.822299Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743786756764444 errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-04-04T17:12:36.824451Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1
2025-04-04T17:12:36.824563Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888
2025-04-04T17:12:36.824504Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-04-04T17:12:36.824532Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap
2025-04-04T17:12:36.824654Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } }
2025-04-04T17:12:36.825219Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13
2025-04-04T17:12:36.825313Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } }
2025-04-04T17:12:36.833552Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14
2025-04-04T17:12:36.833715Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7489507091828679817:2196], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted
2025-04-04T17:12:36.833760Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted
2025-04-04T17:12:36.833776Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-04-04T17:12:36.833803Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-04-04T17:12:36.833861Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED
2025-04-04T17:12:36.833888Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0
2025-04-04T17:12:36.836752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1
2025-04-04T17:12:36.866181Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507091828679840:2378], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756764444&action=2" } UserToken: "" }
2025-04-04T17:12:36.866220Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-04-04T17:12:36.866441Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756764444&action=2" } }
2025-04-04T17:12:36.890441Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found
2025-04-04T17:12:36.890475Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-04-04T17:12:36.890488Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found
2025-04-04T17:12:36.890509Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found
2025-04-04T17:12:36.890530Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found
2025-04-04T17:12:36.890542Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found
2025-04-04T17:12:36.890553Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found
2025-04-04T17:12:36.890579Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found
2025-04-04T17:12:36.890599Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found
2025-04-04T17:12:36.891594Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037889 because of ERROR
2025-04-04T17:12:36.891691Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037894 because of ERROR
2025-04-04T17:12:36.891732Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037891 because of ERROR
2025-04-04T17:12:36.891761Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037892 because of ERROR
2025-04-04T17:12:36.891814Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037897 because of ERROR
2025-04-04T17:12:36.891865Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037895 because of ERROR
2025-04-04T17:12:36.891899Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037890 because of ERROR
2025-04-04T17:12:36.891923Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037893 because of ERROR
2025-04-04T17:12:36.891951Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037896 because of ERROR
2025-04-04T17:12:36.934707Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found
2025-04-04T17:12:36.944420Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888
2025-04-04T17:12:36.955836Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd
2025-04-04T17:12:36.956209Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-04-04T17:12:36.956225Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-04-04T17:12:36.956232Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-04-04T17:12:36.957247Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743786756764444
2025-04-04T17:12:36.957270Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743786756764444 issue=
2025-04-04T17:12:36.957285Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743786756764444 issue=
2025-04-04T17:12:36.957317Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database
2025-04-04T17:12:36.957387Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743786756764444 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-04-04T17:12:36.962555Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507091828679867:2380], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756764444&action=2" } UserToken: "" }
2025-04-04T17:12:36.962583Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-04-04T17:12:36.962791Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756764444&action=2" } }
2025-04-04T17:12:36.994599Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete
2025-04-04T17:12:36.994701Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-04-04T17:12:37.067016Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7489507096123647185:2386], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756764444&action=2" } UserToken: "" }
2025-04-04T17:12:37.067069Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-04-04T17:12:37.067250Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743786756764444&action=2" ready: true status: SUCCESS } }
2025-04-04T17:12:37.078645Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7489507096123647188:2388], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" }
2025-04-04T17:12:37.078676Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest
2025-04-04T17:12:37.078851Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } }
2025-04-04T17:12:37.091811Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7489507096123647191:2389], Recipient [1:7489507083238744262:2196]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" }
2025-04-04T17:12:37.091856Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest
2025-04-04T17:12:37.092070Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } }
2025-04-04T17:12:37.095084Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3
2025-04-04T17:12:37.095240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected
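The removal trace above also shows the client side of the Console operation protocol: the caller keeps re-sending TEvConsole::TEvGetOperationRequest with the same operation id until a poll finally returns ready: true status: SUCCESS, and only then issues GetTenantStatus and ListTenants to confirm the tenant is gone. A minimal sketch of that polling loop, assuming the operation is surfaced as a plain dict; get_operation here is a hypothetical stand-in callable, not a real SDK function:

    # Hypothetical polling loop matching the repeated TEvGetOperationRequest /
    # TEvGetOperationResponse pairs in the trace above.
    import time

    def wait_for_operation(get_operation, op_id, poll_delay_s=0.05, timeout_s=30.0):
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            op = get_operation(op_id)  # expected shape: {"ready": bool, "status": str}
            if op.get("ready"):
                return op              # e.g. {"ready": True, "status": "SUCCESS"}
            time.sleep(poll_delay_s)   # the trace shows polls a few tens of ms apart
        raise TimeoutError(f"operation {op_id} still not ready after {timeout_s}s")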
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD]
Test command err: 2025-04-04T17:12:36.646390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507088948480928:2073];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:36.646485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ee6/r3tmp/tmpb1rdxX/pdisk_1.dat
2025-04-04T17:12:37.066170Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:12:37.075814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:12:37.075908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:12:37.085510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 8513, node 1
2025-04-04T17:12:37.191091Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:12:37.191119Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:12:37.191126Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:12:37.191249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26567
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:12:37.447427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:26567
2025-04-04T17:12:37.651284Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking
2025-04-04T17:12:37.651348Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent
2025-04-04T17:12:37.655894Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active
2025-04-04T17:12:37.689222Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7489507093243448989:2315], Recipient [1:7489507088948481369:2197]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" }
2025-04-04T17:12:37.689259Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest
2025-04-04T17:12:37.691920Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } }
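The TEvDescribeTenantOptionsResponse above enumerates the building blocks a database can be created from: storage_units, availability_zones and computational_units, each carrying free-form key/value labels. A small sketch, assuming the result is available as a plain dict, that collapses such a structure into name-to-labels maps; the helper itself is hypothetical and the input literal is abridged from the response above:

    # Hypothetical helper summarizing a DescribeDatabaseOptionsResult-like dict.
    def summarize_options(result: dict) -> dict:
        def collapse(items: list, key: str) -> dict:
            return {item[key]: {l["key"]: l["value"] for l in item.get("labels", [])}
                    for item in items}
        return {
            "storage_units": collapse(result.get("storage_units", []), "kind"),
            "availability_zones": collapse(result.get("availability_zones", []), "name"),
            "computational_units": collapse(result.get("computational_units", []), "kind"),
        }

    # Abridged from the response above:
    print(summarize_options({
        "storage_units": [{"kind": "hdd", "labels": [{"key": "disk_type", "value": "ROT"},
                                                     {"key": "erasure", "value": "none"}]}],
        "availability_zones": [{"name": "any", "labels": [{"key": "any_data_center", "value": "true"}]}],
        "computational_units": [{"kind": "slot", "labels": [{"key": "type", "value": "dynamic_slot"}]}],
    }))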
|87.9%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|87.9%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> AutoConfig::GetASPoolsWith3CPUs [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD]
Test command err: 2025-04-04T17:12:26.026776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507047225668503:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:26.026821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T17:12:26.114497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507048522108254:2157];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:26.294827Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-04-04T17:12:26.311774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T17:12:26.312103Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cab/r3tmp/tmpZM0oT7/pdisk_1.dat
2025-04-04T17:12:26.587848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:12:26.588224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:12:26.589311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:12:26.589383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:12:26.593931Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:12:26.598035Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-04T17:12:26.598183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:12:26.599151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1832, node 1
2025-04-04T17:12:26.798381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001cab/r3tmp/yandexiCBl8f.tmp
2025-04-04T17:12:26.798411Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001cab/r3tmp/yandexiCBl8f.tmp
2025-04-04T17:12:26.798725Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001cab/r3tmp/yandexiCBl8f.tmp
2025-04-04T17:12:26.798882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:12:27.018807Z INFO: TTestServer started on Port 18410 GrpcPort 1832
TClient is connected to server localhost:18410
PQClient connected to localhost:1832
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:12:27.344783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:12:27.411507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-04-04T17:12:27.618573Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480
2025-04-04T17:12:29.198032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507061407010448:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:29.198105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507061407010438:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:29.198174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:29.202536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-04-04T17:12:29.220342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507061407010455:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>: Error: Transaction 281474976715657 completed, doublechecking }
2025-04-04T17:12:29.417583Z node 2 :TX_PROXY ERROR: Actor# [2:7489507061407010484:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:12:29.509214Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507061407010498:2321], status: SCHEME_ERROR, issues: <main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-04T17:12:29.509763Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTI2MTdlYTctYzM5OTFkOTQtMzQ0NzYwNTYtYTE5MzczYjQ=, ActorId: [2:7489507061407010424:2311], ActorState: ExecuteState, TraceId: 01jr0wmb8b3gfv0q4exxjrvdhd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-04-04T17:12:29.509229Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507060110571553:2344], status: SCHEME_ERROR, issues: <main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-04T17:12:29.513778Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-04-04T17:12:29.515963Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzM3MzRkMmQtYTU5ODc4OWUtNzMzYTZjZTUtNmE2ZmU5NWI=, ActorId: [1:7489507060110571527:2337], ActorState: ExecuteState, TraceId: 01jr0wmb9qe0n1552svrqz9ng0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-04-04T17:12:29.516297Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
2025-04-04T17:12:29.626074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:12:29.704898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:12:29.796196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
=== Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
2025-04-04T17:12:30.327902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wmbxef1f5gmcedwy7jgex, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YwMzcxNmEtYTRkOWFhNGMtYmRkOWQ3NWMtYzI5Y2U0MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
=== CheckClustersList.
Subcribe to ClusterTracker from [1:7489507064405539311:3095]
2025-04-04T17:12:31.027086Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507047225668503:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:31.027152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:12:31.112065Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507048522108254:2157];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:12:31.112143Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
=== CheckClustersList. Ok
|87.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD]
>> AutoConfig::GetServicePoolsWith2CPUs [GOOD]
>> AutoConfig::GetASPoolsWith2CPUs [GOOD]
>> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD]
|87.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest
|87.9%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log}
|87.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD]
>> AutoConfig::GetServicePoolsWith3CPUs [GOOD]
|87.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD]
|87.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log}
|88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD]
|88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD]
|88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD]
>> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore [GOOD]
|88.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD]
|88.0%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|88.0%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCC
|88.0%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSubDomainTest::CreateTablet
>> TModifyUserTest::ModifyUser
>> PartitionStats::Collector
>> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD]
>> TSubDomainTest::FailIfAffectedSetNotInterior
>> PartitionStats::CollectorOverload [GOOD]
>> TStorageTenantTest::CreateSolomonInsideSubDomain
>> TStorageTenantTest::CreateTableInsideSubDomain2
>> TStorageTenantTest::DeclareAndDefine
>> TStorageTenantTest::CreateDummyTabletsInDifferentDomains
>> TStorageTenantTest::LsLs
>> TStorageTenantTest::Boot
>> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore
>> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD]
>> TExportToS3Tests::ShouldSucceedOnSingleShardTable
>> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed
>> TStorageTenantTest::GenericCases
>> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD]
>> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD]
>> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet
>> TestKinesisHttpProxy::TestRequestBadJson [GOOD]
>> PartitionStats::Collector [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD]
>> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD]
|88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
|88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
|88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
|88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
|88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 62374, MsgBus: 11443
2025-04-04T17:09:53.287285Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506391990536677:2202];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:53.287798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013ab/r3tmp/tmpEXbVwT/pdisk_1.dat
2025-04-04T17:09:53.807932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:53.808034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:53.827871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:09:53.887305Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 62374, node 1
2025-04-04T17:09:54.142746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:54.142764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:54.142770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:54.142864Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11443
TClient is connected to server localhost:11443
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:54.922231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:54.936268Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:09:54.945854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:55.117152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:55.327158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:55.406299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:57.527117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506409170407502:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:57.527256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:57.570178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:57.615323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:57.657747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:57.725540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:57.798496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:57.851197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:57.943623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506409170408023:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:57.943718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:57.943978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506409170408028:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:57.948253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:57.964647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506409170408030:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:58.066980Z node 1 :TX_PROXY ERROR: Actor# [1:7489506413465375383:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:58.272223Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506391990536677:2202];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:58.272301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:59.172476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.209088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.252348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.296562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.332357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.488103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.561536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.601407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.629494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.704469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.748283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.781118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-04-04T17:09:59.822021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-04-04T17:10:00.297586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480
2025-04-04T17:10:00.334372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-04-04T17:10:00.367400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-04-04T17:10:00.395517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
202 ... ute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-04T17:12:29.298985Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-04T17:12:29.299008Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-04T17:12:29.299031Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-04T17:12:29.299036Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-04T17:12:29.299208Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-04T17:12:29.299210Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-04T17:12:29.299232Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-04T17:12:29.299253Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-04T17:12:29.299316Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-04T17:12:29.299352Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-04T17:12:29.299360Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-04T17:12:29.299378Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038734;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-04T17:12:29.299477Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-04T17:12:29.299506Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-04T17:12:29.299575Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-04T17:12:29.299595Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-04T17:12:29.300210Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-04T17:12:29.300259Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-04T17:12:29.300349Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-04T17:12:29.300391Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-04T17:12:29.300562Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-04T17:12:29.300622Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-04T17:12:29.300720Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-04T17:12:29.300766Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-04T17:12:29.300841Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-04T17:12:29.300877Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-04T17:12:29.300942Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-04T17:12:29.300996Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-04T17:12:29.301524Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-04T17:12:29.301584Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-04T17:12:29.301757Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-04T17:12:29.301804Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-04T17:12:29.301930Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-04T17:12:29.301973Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-04T17:12:29.302124Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-04T17:12:29.302161Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-04T17:12:29.302280Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-04T17:12:29.302318Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-04T17:12:29.369499Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.370110Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.376120Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.377146Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.382925Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.385325Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.389509Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.393363Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.396389Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.401588Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.403615Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.409810Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.410646Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.418449Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.418651Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
2025-04-04T17:12:29.427317Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
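The TX_COLUMNSHARD trace that closes the test above is the column-shard start-up normalizer chain: each normalizer is announced with normalizer_switched, runs, and reports normalizer_finished before the next one starts, and the chain ends with normalization_finished. A sketch of that strictly sequential pipeline; the stage names are copied from the log, the runner itself is hypothetical:

    # Hypothetical runner for the sequential normalizer chain seen in the
    # TX_COLUMNSHARD trace above; only the stage names come from the log.
    NORMALIZERS = [
        "Granules", "Chunks", "TablesCleaner", "CleanGranuleId", "CleanInsertionDedup",
        "GCCountersNormalizer", "RestorePortionFromChunks", "SyncPortionFromChunks",
        "SyncMinSnapshotFromChunks", "RestoreV1Chunks_V2", "RestoreV2Chunks",
    ]

    def run_normalizers(tablet_id: int) -> None:
        for name in NORMALIZERS:
            print(f"tablet_id={tablet_id};event=normalizer_switched;CLASS_NAME={name}")
            # per-stage normalization work would run here
            print(f"tablet_id={tablet_id};event=normalizer_finished;CLASS_NAME={name}")
        print(f"tablet_id={tablet_id};event=normalization_finished")

    run_normalizers(72075186224038722)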
>> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions
>> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD]
|88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 7945, MsgBus: 11565
2025-04-04T17:09:33.010010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506301918979537:2071];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:33.014462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00149d/r3tmp/tmpKO2sjK/pdisk_1.dat
2025-04-04T17:09:33.406513Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:09:33.428332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:09:33.428451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:09:33.455283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7945, node 1
2025-04-04T17:09:33.507417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:09:33.507439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:09:33.507456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:09:33.507568Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11565
TClient is connected to server localhost:11565
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:09:33.976776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:34.003246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:09:34.008807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:34.141181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:34.300083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:34.359116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:09:36.069330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506319098850486:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:36.069670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:36.316789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:09:36.344767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:09:36.375639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:09:36.404938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:09:36.438793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:09:36.469379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:09:36.516457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506319098850996:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:36.516529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:36.516906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506319098851001:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:09:36.521409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:09:36.533286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506319098851003:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:09:36.633882Z node 1 :TX_PROXY ERROR: Actor# [1:7489506319098851057:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:09:37.633809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:09:37.673188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-04T17:09:37.700325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-04T17:09:37.730809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-04-04T17:09:37.770279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-04-04T17:09:37.907842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-04-04T17:09:37.984140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-04-04T17:09:37.994232Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506301918979537:2071];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:09:37.994335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:09:38.015888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.093984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.134324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.172920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.262778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.300146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.822802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480
2025-04-04T17:09:38.905853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.937994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-04-04T17:09:38.963697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
2025- ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-04T17:12:22.365177Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-04T17:12:22.365208Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-04T17:12:22.365270Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-04T17:12:22.365298Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-04T17:12:22.365338Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-04-04T17:12:22.365369Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-04-04T17:12:22.365885Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-04-04T17:12:22.365928Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-04-04T17:12:22.366143Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-04-04T17:12:22.366176Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-04-04T17:12:22.366709Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-04-04T17:12:22.366759Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-04T17:12:22.366959Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-04T17:12:22.366994Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-04T17:12:22.367092Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-04T17:12:22.367117Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-04T17:12:22.370138Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-04T17:12:22.370181Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-04T17:12:22.370474Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-04T17:12:22.370516Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-04T17:12:22.370697Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-04-04T17:12:22.370733Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-04-04T17:12:22.370821Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-04-04T17:12:22.370871Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-04-04T17:12:22.370975Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-04-04T17:12:22.371009Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-04-04T17:12:22.371052Z node 5 :TX_COLUMNSHARD WARN:
tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:22.371083Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:22.371708Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:22.371764Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:22.371928Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:22.371972Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:22.372148Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:22.372194Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:22.372343Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:22.372382Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:22.372477Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:22.372504Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:22.435914Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.437852Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.443230Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.446855Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.450201Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.453777Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.456422Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.460846Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.463655Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.468395Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.471364Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.474654Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.478854Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.481648Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.486591Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:22.488968Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestKinesisHttpProxy::TestRequestWithIAM >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |88.0%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-04T17:11:03.038103Z :TestReorderedExecutor INFO: Random seed for debugging is 1743786663038081 2025-04-04T17:11:03.464778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506691893404682:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:03.471047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:03.947631Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:11:03.947737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506689719548301:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:03.947768Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:03.938255Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fca/r3tmp/tmpvn4ZHy/pdisk_1.dat 2025-04-04T17:11:04.537387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:04.541734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:04.541835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:04.542060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:04.543192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:04.543253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:04.546432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:04.553599Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:11:04.554033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5757, node 1 2025-04-04T17:11:04.735830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000fca/r3tmp/yandexrFPAeV.tmp 2025-04-04T17:11:04.735853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000fca/r3tmp/yandexrFPAeV.tmp 2025-04-04T17:11:04.735980Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000fca/r3tmp/yandexrFPAeV.tmp 2025-04-04T17:11:04.736079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:11:04.796028Z INFO: TTestServer started on Port 27082 GrpcPort 5757 TClient is 
connected to server localhost:27082 PQClient connected to localhost:5757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:05.075407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:11:07.507537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506706899417580:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:07.507607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506706899417605:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:07.507667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:07.513499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-04T17:11:07.535117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506706899417609:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-04T17:11:07.607514Z node 2 :TX_PROXY ERROR: Actor# [2:7489506706899417637:2129] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:08.004208Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489506706899417644:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:08.005916Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzNkODk5YWQtODkxMGNhNGQtNTNjODY0ZGItYjVjODM0NDk=, ActorId: [2:7489506706899417578:2308], ActorState: ExecuteState, TraceId: 01jr0whvfb245tjwdjsk6rejeq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:08.008205Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:08.009383Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489506709073274883:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:08.010877Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTI5NjFlMjAtNzgwNjBmNmUtYjE4OTI0ZS0zMWVmY2RlNg==, ActorId: [1:7489506709073274855:2337], ActorState: ExecuteState, TraceId: 01jr0whvkef4drdrtszgqbdztn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:08.011237Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:08.035032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-04-04T17:11:08.189772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-04-04T17:11:08.323337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:5757", true, true, 1000); 2025-04-04T17:11:08.463410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506691893404682:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:08.463472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:11:08.601268Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jr0whwd03r5y0pjsqptfh2qe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZjNzNkYWItODYzOTM4MWItZDFiZjNkNDUtYTU2MmQyYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489506713368242570:2964] 2025-04-04T17:11:08.690398Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489506689719548301:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:08.690463Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:11:15.076695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:5757 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T17:11:15.179658Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:575 ... 8Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T17:12:45.161595Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:51312 2025-04-04T17:12:45.161616Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51312 proto=v1 topic=test-topic durationSec=0 2025-04-04T17:12:45.161627Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:12:45.163273Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-04T17:12:45.163396Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T17:12:45.163414Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:12:45.163433Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T17:12:45.163456Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:12:45.166231Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:12:45.341094Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T17:12:45.341399Z node 14 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7489507129597387639:2509] connected; active server actors: 1 2025-04-04T17:12:45.341683Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-04T17:12:45.341710Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T17:12:45.342032Z node 14 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe 
[13:7489507129597387639:2509] disconnected; active server actors: 1 2025-04-04T17:12:45.342064Z node 14 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7489507129597387639:2509] disconnected no session 2025-04-04T17:12:45.482554Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T17:12:45.482585Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T17:12:45.482601Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507129597387581:2509] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-04T17:12:45.482637Z node 13 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:12:45.483056Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 13, Generation: 1 2025-04-04T17:12:45.483075Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [13:7489507129597387656:2509], now have 1 active actors on pipe 2025-04-04T17:12:45.483108Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:12:45.483137Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:12:45.483236Z node 13 :PERSQUEUE INFO: new Cookie src|381804a3-d410f48a-a4731280-165cbd6f_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T17:12:45.483330Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:12:45.483394Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:12:45.483526Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:12:45.483544Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:12:45.483611Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:12:45.483717Z node 13 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|381804a3-d410f48a-a4731280-165cbd6f_0 2025-04-04T17:12:45.484341Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743786765484 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:12:45.484461Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|381804a3-d410f48a-a4731280-165cbd6f_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:12:45.484593Z :INFO: [] MessageGroupId [src] SessionId [src|381804a3-d410f48a-a4731280-165cbd6f_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:12:45.484629Z :INFO: [] MessageGroupId [src] SessionId [src|381804a3-d410f48a-a4731280-165cbd6f_0] Write session will now close 2025-04-04T17:12:45.484665Z :DEBUG: [] MessageGroupId [src] SessionId [src|381804a3-d410f48a-a4731280-165cbd6f_0] Write session: aborting 2025-04-04T17:12:45.485062Z :INFO: [] MessageGroupId [src] SessionId [src|381804a3-d410f48a-a4731280-165cbd6f_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:12:45.485096Z :DEBUG: [] MessageGroupId [src] SessionId [src|381804a3-d410f48a-a4731280-165cbd6f_0] Write session: destroy 2025-04-04T17:12:45.486454Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|381804a3-d410f48a-a4731280-165cbd6f_0 grpc read done: success: 0 data: 2025-04-04T17:12:45.486480Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|381804a3-d410f48a-a4731280-165cbd6f_0 grpc read failed 2025-04-04T17:12:45.486642Z node 13 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|381804a3-d410f48a-a4731280-165cbd6f_0 2025-04-04T17:12:45.486666Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|381804a3-d410f48a-a4731280-165cbd6f_0 is DEAD 2025-04-04T17:12:45.486986Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:12:45.487142Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7489507129597387656:2509] destroyed 2025-04-04T17:12:45.487197Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:12:45.522508Z :INFO: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Starting read session 2025-04-04T17:12:45.522555Z :DEBUG: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Starting cluster discovery 2025-04-04T17:12:45.522812Z :INFO: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:20681: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:20681
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20681. " 2025-04-04T17:12:45.522846Z :DEBUG: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Restart cluster discovery in 0.007204s 2025-04-04T17:12:45.530443Z :DEBUG: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Starting cluster discovery 2025-04-04T17:12:45.530726Z :INFO: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:20681: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:20681
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20681. " 2025-04-04T17:12:45.530769Z :DEBUG: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Restart cluster discovery in 0.016102s 2025-04-04T17:12:45.547430Z :DEBUG: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Starting cluster discovery 2025-04-04T17:12:45.547645Z :INFO: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:20681: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:20681
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20681. " 2025-04-04T17:12:45.547677Z :DEBUG: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Restart cluster discovery in 0.021744s 2025-04-04T17:12:45.570389Z :DEBUG: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Starting cluster discovery 2025-04-04T17:12:45.570709Z :NOTICE: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:20681: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:20681
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20681. " } 2025-04-04T17:12:45.570875Z :NOTICE: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:20681: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:20681
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:20681. " } 2025-04-04T17:12:45.571023Z :INFO: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Closing read session. Close timeout: 0.000000s 2025-04-04T17:12:45.571137Z :NOTICE: [/Root] [/Root] [bc54eb6d-2fbb1220-f7620b2f-8c491cba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TModifyUserTest::ModifyUser [FAIL] >> TModifyUserTest::ModifyLdapUser >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |88.0%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDatabaseResolverTests::DataStreams_Serverless >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> TStorageTenantTest::LsLs [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-04-04T17:12:50.880022Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-04-04T17:12:50.874402Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-04-04T17:12:50.960219Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-04-04T17:12:50.971901Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-04-04T17:12:50.966207Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> TStorageTenantTest::DeclareAndDefine [GOOD] >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TDatabaseResolverTests::Ydb_Dedicated >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> TStorageTenantTest::GenericCases [GOOD] >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2025-04-04T17:12:45.999570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507127837284036:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.999915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:46.101173Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507132264110956:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:46.118396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f1f/r3tmp/tmpg1Ez55/pdisk_1.dat 2025-04-04T17:12:46.883743Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.929063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.929196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.939753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.946339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-04T17:12:46.958542Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:46.958677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:46.960311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22158 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:47.263220Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132132251561:2141] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:47.263276Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136427219326:2455] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:47.263388Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507132132251627:2166], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:47.263484Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136427219257:2410][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7489507132132251627:2166], cookie# 1 2025-04-04T17:12:47.265278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136427219261:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136427219258:2410], cookie# 1 2025-04-04T17:12:47.265316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136427219262:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136427219259:2410], cookie# 1 2025-04-04T17:12:47.265330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136427219263:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136427219260:2410], cookie# 1 2025-04-04T17:12:47.265379Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127837283904:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136427219261:2410], cookie# 1 2025-04-04T17:12:47.265404Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127837283907:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136427219262:2410], cookie# 1 2025-04-04T17:12:47.265419Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127837283910:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136427219263:2410], cookie# 1 2025-04-04T17:12:47.265467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136427219261:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127837283904:2052], cookie# 1 2025-04-04T17:12:47.265483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136427219262:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127837283907:2055], cookie# 1 2025-04-04T17:12:47.265495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136427219263:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127837283910:2058], cookie# 1 2025-04-04T17:12:47.265556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136427219257:2410][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507136427219258:2410], cookie# 1 2025-04-04T17:12:47.265627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136427219257:2410][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:47.265645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136427219257:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507136427219259:2410], cookie# 1 2025-04-04T17:12:47.265665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136427219257:2410][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:47.265688Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136427219257:2410][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507136427219260:2410], cookie# 1 2025-04-04T17:12:47.265708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136427219257:2410][/dc-1] Unexpected sync response: sender# [1:7489507136427219260:2410], cookie# 1 2025-04-04T17:12:47.265761Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507132132251627:2166], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:12:47.293609Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507132132251627:2166], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489507136427219257:2410] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:47.293737Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507132132251627:2166], cacheItem# { Subscriber: { Subscriber: [1:7489507136427219257:2410] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-04T17:12:47.300832Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507136427219328:2457], recipient# [1:7489507136427219326:2455], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:12:47.301010Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136427219326:2455] HANDLE 
EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:47.357635Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136427219326:2455] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-04T17:12:47.364872Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136427219326:2455] Handle TEvDescribeSchemeResult Forward to# [1:7489507136427219324:2453] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:47.402107Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132132251561:2141] Handle TEvProposeTransaction 2025-04-04T17:12:47.402142Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132132251561:2141] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:47.402234Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132132251561:2141] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507136427219333:2461] 2025-04-04T17:12:47.557191Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136427219333:2461] txid# 281474976710657 Bootstrap EvSchemeRequest recor ... 
known Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.167265Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507132264111118:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.167442Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132264111118:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507145149013063:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.167489Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132264111118:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507145149013064:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.167613Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507149443980377:2122], recipient# [2:7489507145149013062:2307], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.168000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7489507145149013062:2307], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:50.518582Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507132264111118:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.518753Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132264111118:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507145149013063:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.518809Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132264111118:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507145149013064:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.518935Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507149443980378:2123], recipient# [2:7489507145149013062:2307], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.519333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7489507145149013062:2307], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:50.882645Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507132264111118:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.882777Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132264111118:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507145149013044:2115] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.882883Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507149443980380:2124], recipient# [2:7489507149443980379:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.883158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:12:50.987283Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507132264111118:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.987422Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132264111118:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507145149013063:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 
0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.987494Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132264111118:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507145149013064:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.987686Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507149443980381:2125], recipient# [2:7489507145149013062:2307], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.987912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7489507145149013062:2307], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
>> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD]
>> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD]
>> KqpScan::ScanDuringSplit10
>> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD]
>> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD]
>> KqpScan::ScanRetryRead
>> TestKinesisHttpProxy::TestRequestWithIAM [GOOD]
>> KqpScan::RemoteShardScan
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD]
Test command err:
2025-04-04T17:12:45.900794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507129550172142:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.900845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f41/r3tmp/tmpKg9YCw/pdisk_1.dat 2025-04-04T17:12:46.424333Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.434063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.434187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.471483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9230 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:46.749108Z node 1 :TX_PROXY DEBUG: actor# [1:7489507129550172408:2140] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.749167Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507133845140133:2433] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.750062Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507133845139750:2159], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.750115Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507133845139750:2159], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.750341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.752200Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172056:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133845140140:2434] 2025-04-04T17:12:46.752252Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507129550172056:2057] Subscribe: subscriber# [1:7489507133845140140:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.752311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140140:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507129550172056:2057] 2025-04-04T17:12:46.752347Z node 1
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133845140137:2434] 2025-04-04T17:12:46.752379Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172056:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133845140140:2434] 2025-04-04T17:12:46.752407Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172050:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133845140138:2434] 2025-04-04T17:12:46.752425Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507129550172050:2051] Subscribe: subscriber# [1:7489507133845140138:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.752445Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172053:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133845140139:2434] 2025-04-04T17:12:46.752458Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507129550172053:2054] Subscribe: subscriber# [1:7489507133845140139:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.752536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140138:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507129550172050:2051] 2025-04-04T17:12:46.752551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140139:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507129550172053:2054] 2025-04-04T17:12:46.752576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133845140135:2434] 2025-04-04T17:12:46.752649Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507133845140134:2434][/dc-1] Set up state: owner# [1:7489507133845139750:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.753245Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172050:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133845140138:2434] 2025-04-04T17:12:46.753273Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172053:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133845140139:2434] 2025-04-04T17:12:46.753888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133845140136:2434] 2025-04-04T17:12:46.753943Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507133845140134:2434][/dc-1] Path was already updated: owner# [1:7489507133845139750:2159], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 
72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.753980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140138:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133845140135:2434], cookie# 1 2025-04-04T17:12:46.753994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140139:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133845140136:2434], cookie# 1 2025-04-04T17:12:46.754007Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140140:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133845140137:2434], cookie# 1 2025-04-04T17:12:46.758318Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172056:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133845140140:2434], cookie# 1 2025-04-04T17:12:46.758418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140140:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129550172056:2057], cookie# 1 2025-04-04T17:12:46.758457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133845140137:2434], cookie# 1 2025-04-04T17:12:46.758486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.758503Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172050:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133845140138:2434], cookie# 1 2025-04-04T17:12:46.758523Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129550172053:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133845140139:2434], cookie# 1 2025-04-04T17:12:46.758556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140138:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129550172050:2051], cookie# 1 2025-04-04T17:12:46.758587Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133845140139:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129550172053:2054], cookie# 1 2025-04-04T17:12:46.758607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133845140135:2434], cookie# 1 2025-04-04T17:12:46.758636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.758656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133845140136:2434], cookie# 1 2025-04-04T17:12:46.758668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133845140134:2434][/dc-1] Unexpected sync response: sender# [1:7489507133845140136:2434], cookie# 1 2025-04-04T17:12:46.835211Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507133845139750:2159], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.835587Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507133845139750:2159], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... 
_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-04-04T17:12:48.337947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:12:48.345298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-04T17:12:48.345472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:12:48.350694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:12:48.350727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:12:48.358233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:12:48.362739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:12:48.363422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:12:48.374538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:12:48.377227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:12:48.377254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:12:48.377298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-04T17:12:48.377312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-04T17:12:48.377340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T17:12:48.377347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T17:12:48.377363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-04T17:12:48.377370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-04T17:12:48.377386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-04T17:12:48.377392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-04T17:12:48.377407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:12:48.377413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:12:48.377430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-04-04T17:12:48.377436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-04-04T17:12:48.385741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted 
shardIdx 72057594046644480:5 2025-04-04T17:12:48.385778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-04T17:12:48.385844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2025-04-04T17:12:48.385907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:12:48.385945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:12:48.385966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:12:48.386027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:12:48.409636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:12:48.640653Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507138989348728:2122], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:48.640841Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507138989348728:2122], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:48.640865Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7489507138989348728:2122], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-04-04T17:12:48.641078Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507143284316304:2299][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:48.641638Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507143284316304:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7489507143284316305:2299] 2025-04-04T17:12:48.641674Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507143284316304:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7489507143284316306:2299] 2025-04-04T17:12:48.641709Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489507143284316304:2299][/dc-1/USER_0] Set up state: owner# [3:7489507138989348728:2122], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:48.641728Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507143284316304:2299][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7489507143284316307:2299] 
2025-04-04T17:12:48.641775Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489507143284316304:2299][/dc-1/USER_0] Ignore empty state: owner# [3:7489507138989348728:2122], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:48.641825Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489507138989348728:2122], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-04T17:12:48.641905Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489507138989348728:2122], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7489507143284316304:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:48.641989Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507138989348728:2122], cacheItem# { Subscriber: { Subscriber: [3:7489507143284316304:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:48.642093Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507143284316311:2300], recipient# [3:7489507143284316303:2298], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:48.642159Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507138989348728:2122], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:48.642219Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507143284316312:2301], recipient# [3:7489507143284316302:2318], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:48.643030Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:49.646720Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507138989348728:2122], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.646867Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507147579283610:2302], recipient# [3:7489507147579283609:2319], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.647812Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError;
|88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD]
>> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD]
>> KqpJoinOrder::TPCDS88+StreamLookupJoin-ColumnStore [GOOD]
>> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:46.351450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:46.351546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:46.351589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:46.351645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:46.358995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:46.359056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:46.359142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:46.359233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:46.363391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Execute 2025-04-04T17:12:46.454963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:46.455027Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.471678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:46.471942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:46.472086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:46.481034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:46.481289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:46.484290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:46.485211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:46.492719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:46.503036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:46.503131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:46.503327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:46.503382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:46.503436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:46.503577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.522748Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:46.683900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:46.684208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.684518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:46.684859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:46.684957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.688450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:46.688665Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:46.688935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.689022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:46.689060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:46.689105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:46.691612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.691684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:46.691733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:46.697315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.697373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.697426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:46.697469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.723358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:46.729034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:46.729253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:46.730459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:46.730705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:46.730773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:46.731078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:46.731129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:46.731298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-04-04T17:12:46.731381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:46.733431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:46.733480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:46.733676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:46.733774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:46.734165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.734223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:46.734350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:46.734386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.734445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:46.734485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.734530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:46.734565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.734598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:46.734639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:46.734701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:46.734736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:46.734787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:46.736640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:46.736738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:46.736773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 2025-04-04T17:12:51.234336Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-04T17:12:51.234381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:51.234493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:51.234958Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.235048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.235078Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:51.235101Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-04T17:12:51.235124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:12:51.235776Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.235848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.235873Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:51.235903Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-04T17:12:51.235985Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:12:51.236068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-04T17:12:51.238101Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-04T17:12:51.238392Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-04T17:12:51.238461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-04T17:12:51.238501Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-04T17:12:51.238575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-04T17:12:51.238694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-04-04T17:12:51.239469Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:51.239552Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 12884904044 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:51.239596Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-04-04T17:12:51.239704Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-04T17:12:51.239773Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-04T17:12:51.239804Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:51.239843Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-04T17:12:51.239888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:51.239945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:12:51.240008Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:12:51.240038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-04T17:12:51.240094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:51.240126Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-04T17:12:51.240158Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-04T17:12:51.240217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:12:51.240256Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-04T17:12:51.240289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-04-04T17:12:51.240325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-04T17:12:51.240993Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.241103Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.242499Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:51.242540Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:51.242710Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:12:51.242831Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:51.242867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-04T17:12:51.242901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-04T17:12:51.243696Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.243772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.243802Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:51.243847Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-04T17:12:51.243889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:12:51.244677Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.244762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.244796Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:51.244835Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:12:51.244864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:12:51.244935Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-04T17:12:51.244990Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:123:2149] 2025-04-04T17:12:51.247571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.247816Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:51.247872Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-04T17:12:51.247907Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-04T17:12:51.247935Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-04T17:12:51.247954Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-04T17:12:51.247976Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-04T17:12:51.249191Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-04T17:12:51.249252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:12:51.249290Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:842:2773] TestWaitNotification: OK eventTxId 103
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD]
Test command err:
2025-04-04T17:12:45.911425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507128890619165:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.911491Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f5e/r3tmp/tmpQ0F0P2/pdisk_1.dat 2025-04-04T17:12:46.504506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.536309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.536392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.544572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10714 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-04T17:12:46.832853Z node 1 :TX_PROXY DEBUG: actor# [1:7489507128890619421:2138] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.832922Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507133185587159:2443] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.833047Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507133185586774:2162], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.833099Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507133185586774:2162], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.833327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.835176Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619064:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133185587164:2444] 2025-04-04T17:12:46.835232Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128890619064:2050] Subscribe: subscriber# [1:7489507133185587164:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.835292Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619067:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133185587165:2444] 2025-04-04T17:12:46.835309Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128890619067:2053] Subscribe: subscriber# [1:7489507133185587165:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.835353Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619070:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133185587166:2444] 2025-04-04T17:12:46.835370Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128890619070:2056] Subscribe: subscriber# [1:7489507133185587166:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.835409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587164:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128890619064:2050] 2025-04-04T17:12:46.835430Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587165:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128890619067:2053] 2025-04-04T17:12:46.835473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587166:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128890619070:2056] 2025-04-04T17:12:46.835523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133185587161:2444] 2025-04-04T17:12:46.835545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7489507133185587160:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133185587162:2444] 2025-04-04T17:12:46.835591Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507133185587160:2444][/dc-1] Set up state: owner# [1:7489507133185586774:2162], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.835696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133185587163:2444] 2025-04-04T17:12:46.835741Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507133185587160:2444][/dc-1] Path was already updated: owner# [1:7489507133185586774:2162], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.835777Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587164:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133185587161:2444], cookie# 1 2025-04-04T17:12:46.835792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587165:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133185587162:2444], cookie# 1 2025-04-04T17:12:46.835826Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587166:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133185587163:2444], cookie# 1 2025-04-04T17:12:46.835855Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619064:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133185587164:2444] 2025-04-04T17:12:46.835877Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619064:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133185587164:2444], cookie# 1 2025-04-04T17:12:46.835894Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619067:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133185587165:2444] 2025-04-04T17:12:46.835922Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619067:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133185587165:2444], cookie# 1 2025-04-04T17:12:46.835940Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619070:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133185587166:2444] 2025-04-04T17:12:46.835951Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619070:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133185587166:2444], cookie# 1 2025-04-04T17:12:46.862071Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587164:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128890619064:2050], cookie# 1 2025-04-04T17:12:46.862451Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587165:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128890619067:2053], cookie# 1 2025-04-04T17:12:46.862472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133185587166:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128890619070:2056], cookie# 1 2025-04-04T17:12:46.862506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133185587161:2444], cookie# 1 2025-04-04T17:12:46.862527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.862542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133185587162:2444], cookie# 1 2025-04-04T17:12:46.862564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.862586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133185587163:2444], cookie# 1 2025-04-04T17:12:46.862599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133185587160:2444][/dc-1] Unexpected sync response: sender# [1:7489507133185587163:2444], cookie# 1 2025-04-04T17:12:46.912808Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507133185586774:2162], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.913176Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507133185586774:2162], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 49.833627Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507146070489864:3045], recipient# [1:7489507146070489838:2332], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.833668Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619070:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7489507146070489863:3044] 2025-04-04T17:12:49.833677Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128890619070:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-04T17:12:49.833703Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128890619070:2056] Subscribe: subscriber# [1:7489507146070489863:3044], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:49.833734Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507146070489861:3044][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507128890619064:2050] 2025-04-04T17:12:49.833754Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507146070489863:3044][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507128890619070:2056] 2025-04-04T17:12:49.833780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507146070489857:3044][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507146070489858:3044] 2025-04-04T17:12:49.833829Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507146070489857:3044][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7489507133185586774:2162], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:49.833851Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507146070489857:3044][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507146070489860:3044] 
2025-04-04T17:12:49.833872Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507146070489857:3044][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7489507133185586774:2162], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:49.833925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619064:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146070489861:3044] 2025-04-04T17:12:49.833939Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128890619070:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146070489863:3044] 2025-04-04T17:12:49.833967Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507133185586774:2162], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-04T17:12:49.834021Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507133185586774:2162], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7489507146070489857:3044] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:49.834094Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507133185586774:2162], cacheItem# { Subscriber: { Subscriber: [1:7489507146070489857:3044] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:49.834144Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507146070489865:3046], recipient# [1:7489507146070489856:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.920523Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507133185586774:2162], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.920626Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507133185586774:2162], cacheItem# { Subscriber: { Subscriber: 
[1:7489507133185587171:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:49.920709Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507146070489867:3047], recipient# [1:7489507146070489866:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.838938Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507133185586774:2162], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.839081Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507133185586774:2162], cacheItem# { Subscriber: { Subscriber: [1:7489507146070489857:3044] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.839162Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507150365457183:3051], recipient# [1:7489507150365457182:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.911517Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507128890619165:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:50.911605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:50.922020Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507133185586774:2162], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.922163Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507133185586774:2162], cacheItem# { Subscriber: { Subscriber: [1:7489507133185587171:2448] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.922275Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507150365457187:3054], recipient# [1:7489507150365457186:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
|88.1%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TestKinesisHttpProxy::DoubleCreateStream [GOOD]
>> TestYmqHttpProxy::TestGetQueueAttributes
|88.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD]
Test command err: 2025-04-04T17:12:45.966109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507128871272653:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.966165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f6d/r3tmp/tmp6L5VST/pdisk_1.dat 2025-04-04T17:12:46.539993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.540086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.561814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:46.614778Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:3002 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-04T17:12:46.827322Z node 1 :TX_PROXY DEBUG: actor# [1:7489507133166240180:2140] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.827380Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507133166240626:2450] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.827498Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507133166240264:2179], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.827540Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507133166240264:2179], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.827724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.829655Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272525:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133166240631:2451] 2025-04-04T17:12:46.829713Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128871272525:2051] Subscribe: subscriber# [1:7489507133166240631:2451], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.830033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240631:2451][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128871272525:2051] 2025-04-04T17:12:46.830083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133166240628:2451] 2025-04-04T17:12:46.830124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272525:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133166240631:2451] 2025-04-04T17:12:46.830147Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272528:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133166240632:2451] 2025-04-04T17:12:46.830173Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128871272528:2054] Subscribe: subscriber# [1:7489507133166240632:2451], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.830327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240632:2451][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128871272528:2054] 2025-04-04T17:12:46.830351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133166240629:2451] 2025-04-04T17:12:46.830404Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507133166240627:2451][/dc-1] Set up state: owner# [1:7489507133166240264:2179], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 
2025-04-04T17:12:46.830505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240631:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133166240628:2451], cookie# 1 2025-04-04T17:12:46.830540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240632:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133166240629:2451], cookie# 1 2025-04-04T17:12:46.830558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240633:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133166240630:2451], cookie# 1 2025-04-04T17:12:46.833796Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272531:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507133166240633:2451] 2025-04-04T17:12:46.833834Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128871272531:2057] Subscribe: subscriber# [1:7489507133166240633:2451], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.833893Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272531:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133166240633:2451], cookie# 1 2025-04-04T17:12:46.833962Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272528:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133166240632:2451] 2025-04-04T17:12:46.833981Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272528:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133166240632:2451], cookie# 1 2025-04-04T17:12:46.833997Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272525:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133166240631:2451], cookie# 1 2025-04-04T17:12:46.834036Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240633:2451][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128871272531:2057] 2025-04-04T17:12:46.834079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240633:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128871272531:2057], cookie# 1 2025-04-04T17:12:46.834093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240632:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128871272528:2054], cookie# 1 2025-04-04T17:12:46.834119Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133166240631:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128871272525:2051], cookie# 1 2025-04-04T17:12:46.834156Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507133166240630:2451] 2025-04-04T17:12:46.834209Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507133166240627:2451][/dc-1] Path was already updated: owner# [1:7489507133166240264:2179], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# 
{ Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.834255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133166240630:2451], cookie# 1 2025-04-04T17:12:46.834279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.834293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133166240629:2451], cookie# 1 2025-04-04T17:12:46.834310Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.834332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133166240628:2451], cookie# 1 2025-04-04T17:12:46.834346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133166240627:2451][/dc-1] Unexpected sync response: sender# [1:7489507133166240628:2451], cookie# 1 2025-04-04T17:12:46.834360Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272531:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507133166240633:2451] 2025-04-04T17:12:46.888038Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.891873Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... er/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507146051143142:2899] 2025-04-04T17:12:49.736092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507146051143128:2898][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7489507146051143136:2898] 2025-04-04T17:12:49.736124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507146051143129:2899][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507146051143143:2899] 2025-04-04T17:12:49.736137Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507146051143128:2898][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7489507133166240264:2179], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:49.736159Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507146051143129:2899][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7489507133166240264:2179], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:49.736179Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507146051143129:2899][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507146051143144:2899] 2025-04-04T17:12:49.736195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272525:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143133:2897] 2025-04-04T17:12:49.736204Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507146051143129:2899][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7489507133166240264:2179], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:49.736212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272525:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143139:2898] 2025-04-04T17:12:49.736229Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272525:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143145:2899] 2025-04-04T17:12:49.736247Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272531:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143138:2897] 2025-04-04T17:12:49.736248Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272528:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143137:2897] 
2025-04-04T17:12:49.736259Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272531:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143141:2898] 2025-04-04T17:12:49.736272Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272531:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143147:2899] 2025-04-04T17:12:49.736276Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272528:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143140:2898] 2025-04-04T17:12:49.736289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128871272528:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507146051143146:2899] 2025-04-04T17:12:49.736334Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-04T17:12:49.736415Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7489507146051143127:2897] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:49.737316Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507133166240264:2179], cacheItem# { Subscriber: { Subscriber: [1:7489507146051143127:2897] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:49.737387Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-04T17:12:49.737485Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7489507146051143128:2898] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:49.737585Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507133166240264:2179], cacheItem# { Subscriber: { Subscriber: [1:7489507146051143128:2898] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:49.737694Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-04T17:12:49.737762Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507146051143148:2900], recipient# [1:7489507146051143125:2319], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.737778Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507133166240264:2179], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7489507146051143129:2899] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:49.737833Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507133166240264:2179], cacheItem# { Subscriber: { Subscriber: [1:7489507146051143129:2899] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:49.737901Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507146051143149:2901], recipient# [1:7489507146051143126:2320], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.477190Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507141210036235:2122], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown 
Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.477303Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507141210036235:2122], cacheItem# { Subscriber: { Subscriber: [3:7489507145505003720:2242] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.477405Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507149799971217:2366], recipient# [3:7489507149799971216:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD]
Test command err: 2025-04-04T17:12:45.871551Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507127324519694:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.871605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f4a/r3tmp/tmp6X3DRP/pdisk_1.dat 2025-04-04T17:12:46.416057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.416209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.430967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:46.437039Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7878 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-04T17:12:46.781593Z node 1 :TX_PROXY DEBUG: actor# [1:7489507127324519918:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.781682Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507131619487684:2435] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.781850Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507131619487255:2138], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.781890Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507131619487255:2138], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.782048Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.783440Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519595:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131619487689:2436] 2025-04-04T17:12:46.783458Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519598:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131619487690:2436] 2025-04-04T17:12:46.783511Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127324519598:2053] Subscribe: subscriber# [1:7489507131619487690:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.783511Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127324519595:2050] Subscribe: subscriber# [1:7489507131619487689:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.783560Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519601:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131619487691:2436] 2025-04-04T17:12:46.783586Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127324519601:2056] Subscribe: subscriber# [1:7489507131619487691:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.783635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487689:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507127324519595:2050] 2025-04-04T17:12:46.783661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487690:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507127324519598:2053] 2025-04-04T17:12:46.783684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487691:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507127324519601:2056] 2025-04-04T17:12:46.783719Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131619487686:2436] 2025-04-04T17:12:46.783749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7489507131619487685:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131619487687:2436] 2025-04-04T17:12:46.783855Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507131619487685:2436][/dc-1] Set up state: owner# [1:7489507131619487255:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.783980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131619487688:2436] 2025-04-04T17:12:46.784027Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507131619487685:2436][/dc-1] Path was already updated: owner# [1:7489507131619487255:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.784065Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519595:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131619487689:2436] 2025-04-04T17:12:46.784092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519598:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131619487690:2436] 2025-04-04T17:12:46.784096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487689:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131619487686:2436], cookie# 1 2025-04-04T17:12:46.784109Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519601:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131619487691:2436] 2025-04-04T17:12:46.784113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487690:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131619487687:2436], cookie# 1 2025-04-04T17:12:46.784125Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487691:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131619487688:2436], cookie# 1 2025-04-04T17:12:46.784159Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519595:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131619487689:2436], cookie# 1 2025-04-04T17:12:46.784185Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519598:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131619487690:2436], cookie# 1 2025-04-04T17:12:46.784199Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519601:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131619487691:2436], cookie# 1 2025-04-04T17:12:46.784238Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487689:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127324519595:2050], cookie# 1 2025-04-04T17:12:46.784256Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487690:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127324519598:2053], cookie# 1 2025-04-04T17:12:46.784269Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131619487691:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127324519601:2056], cookie# 1 2025-04-04T17:12:46.784297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131619487686:2436], cookie# 1 2025-04-04T17:12:46.784318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.784332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131619487687:2436], cookie# 1 2025-04-04T17:12:46.784349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.784370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131619487688:2436], cookie# 1 2025-04-04T17:12:46.784383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131619487685:2436][/dc-1] Unexpected sync response: sender# [1:7489507131619487688:2436], cookie# 1 2025-04-04T17:12:46.855446Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507131619487255:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.855812Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507131619487255:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... 056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7489507144504390531:3138] 2025-04-04T17:12:49.800463Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127324519601:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-04T17:12:49.800475Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127324519601:2056] Subscribe: subscriber# [1:7489507144504390531:3138], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:49.800502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507144504390528:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507127324519595:2050] 2025-04-04T17:12:49.800545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507144504390530:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507127324519598:2053] 2025-04-04T17:12:49.800559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507144504390531:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507127324519601:2056] 2025-04-04T17:12:49.800584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507144504390512:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507144504390525:3138] 2025-04-04T17:12:49.800609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507144504390512:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507144504390526:3138] 2025-04-04T17:12:49.800628Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507144504390512:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7489507131619487255:2138], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:49.800662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507144504390512:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489507144504390527:3138] 2025-04-04T17:12:49.800678Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: 
[main][1:7489507144504390512:3138][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7489507131619487255:2138], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:49.800697Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519595:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507144504390528:3138] 2025-04-04T17:12:49.800706Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519598:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507144504390530:3138] 2025-04-04T17:12:49.800720Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127324519601:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489507144504390531:3138] 2025-04-04T17:12:49.800760Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507131619487255:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-04T17:12:49.800812Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507131619487255:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7489507144504390512:3138] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:49.800880Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507131619487255:2138], cacheItem# { Subscriber: { Subscriber: [1:7489507144504390512:3138] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:49.800943Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507144504390532:3140], recipient# [1:7489507144504390507:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.882975Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507131619487255:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:49.883090Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507131619487255:2138], cacheItem# { Subscriber: { Subscriber: [1:7489507131619487697:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:49.883167Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507144504390534:3141], recipient# [1:7489507144504390533:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.818434Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507131619487255:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.818555Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507131619487255:2138], cacheItem# { Subscriber: { Subscriber: [1:7489507144504390512:3138] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.818663Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507148799357850:3145], recipient# [1:7489507148799357849:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.871627Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507127324519694:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:50.871695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:50.885986Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507131619487255:2138], request# { ErrorCount: 0 
DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:50.886111Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507131619487255:2138], cacheItem# { Subscriber: { Subscriber: [1:7489507131619487697:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:50.886184Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507148799357854:3148], recipient# [1:7489507148799357853:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestYmqHttpProxy::TestSetQueueAttributes >> TestKinesisHttpProxy::TestListStreamConsumers >> TestKinesisHttpProxy::CreateDeleteStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2025-04-04T17:12:46.012656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507132797528896:2280];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:46.012702Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f53/r3tmp/tmpE9fOrz/pdisk_1.dat 2025-04-04T17:12:46.544639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.544756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.550510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.552237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17087 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:12:46.854423Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132797528914:2134] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.854471Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507132797529360:2440] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.854588Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507132797528937:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.854640Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507132797528937:2147], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.854817Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.863988Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561267:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507132797529365:2441] 2025-04-04T17:12:46.864047Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128502561267:2051] Subscribe: subscriber# [1:7489507132797529365:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.864124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561273:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507132797529367:2441] 2025-04-04T17:12:46.864154Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128502561273:2057] Subscribe: subscriber# [1:7489507132797529367:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.864209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529365:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128502561267:2051] 2025-04-04T17:12:46.864236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529367:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128502561273:2057] 2025-04-04T17:12:46.864277Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507132797529362:2441] 2025-04-04T17:12:46.864308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507132797529364:2441] 2025-04-04T17:12:46.864368Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507132797529361:2441][/dc-1] Set up state: owner# [1:7489507132797528937:2147], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.864470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529365:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7489507132797529362:2441], cookie# 1 2025-04-04T17:12:46.864483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529366:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132797529363:2441], cookie# 1 2025-04-04T17:12:46.864494Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529367:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132797529364:2441], cookie# 1 2025-04-04T17:12:46.864521Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561267:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507132797529365:2441] 2025-04-04T17:12:46.864543Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561267:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132797529365:2441], cookie# 1 2025-04-04T17:12:46.864559Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561273:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507132797529367:2441] 2025-04-04T17:12:46.864569Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561273:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132797529367:2441], cookie# 1 2025-04-04T17:12:46.867039Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561270:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507132797529366:2441] 2025-04-04T17:12:46.867080Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128502561270:2054] Subscribe: subscriber# [1:7489507132797529366:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.867121Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561270:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132797529366:2441], cookie# 1 2025-04-04T17:12:46.867189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529365:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128502561267:2051], cookie# 1 2025-04-04T17:12:46.867204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529367:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128502561273:2057], cookie# 1 2025-04-04T17:12:46.867231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529366:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128502561270:2054] 2025-04-04T17:12:46.867248Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132797529366:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128502561270:2054], cookie# 1 2025-04-04T17:12:46.867288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507132797529362:2441], cookie# 1 2025-04-04T17:12:46.867312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.867327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507132797529364:2441], cookie# 1 
2025-04-04T17:12:46.867344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.867374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507132797529363:2441] 2025-04-04T17:12:46.867423Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507132797529361:2441][/dc-1] Path was already updated: owner# [1:7489507132797528937:2147], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.867446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507132797529363:2441], cookie# 1 2025-04-04T17:12:46.867458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132797529361:2441][/dc-1] Unexpected sync response: sender# [1:7489507132797529363:2441], cookie# 1 2025-04-04T17:12:46.867476Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128502561270:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507132797529366:2441] 2025-04-04T17:12:46.920765Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507132797528937:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.921370Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507132797528937:2147], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... e Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.230166Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136751242932:2225], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.230205Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7489507136751242932:2225], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-04-04T17:12:51.230309Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136751242932:2225], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.230357Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136751242932:2225], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.230419Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507153931112134:2231][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:51.230877Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507153931112134:2231][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7489507153931112135:2231] 2025-04-04T17:12:51.230959Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507153931112134:2231][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7489507153931112136:2231] 2025-04-04T17:12:51.231001Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489507153931112134:2231][/dc-1/USER_0] Set up state: owner# [3:7489507136751242932:2225], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:51.231061Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507153931112134:2231][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [3:7489507153931112137:2231] 2025-04-04T17:12:51.231091Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489507153931112134:2231][/dc-1/USER_0] Ignore empty state: owner# [3:7489507136751242932:2225], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:51.231120Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489507136751242932:2225], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-04T17:12:51.231248Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489507136751242932:2225], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [3:7489507153931112134:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:51.231355Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507136751242932:2225], cacheItem# { Subscriber: { Subscriber: [3:7489507153931112134:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:51.231422Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507136751242932:2225], cacheItem# { Subscriber: { Subscriber: [3:7489507153931112134:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:51.231517Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507136751242932:2225], cacheItem# { Subscriber: { Subscriber: [3:7489507153931112134:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:51.231619Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507153931112143:2233], recipient# [3:7489507153931112131:2228], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.231636Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507153931112142:2232], recipient# [3:7489507153931112132:2229], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown 
DomainInfo }] } 2025-04-04T17:12:51.231714Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507153931112144:2234], recipient# [3:7489507153931112133:2230], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.231715Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136751242932:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.231829Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136751242932:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.231867Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507153931112145:2235], recipient# [3:7489507153931112126:2540], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.231902Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136751242932:2225], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.231948Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507153931112146:2236], recipient# [3:7489507153931112128:2542], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true 
SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.231997Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507153931112147:2237], recipient# [3:7489507153931112130:2543], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.255452Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7489507153931112128:2542], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:51.255686Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:51.256121Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> TestKinesisHttpProxy::GoodRequestGetRecords >> TestKinesisHttpProxy::ListShards >> TLocksTest::BrokenNullLock [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2025-04-04T17:12:45.809396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507129986590327:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.810013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f36/r3tmp/tmp9xK94x/pdisk_1.dat 2025-04-04T17:12:46.431094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.431179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.436215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:46.458491Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13952 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:12:46.756821Z node 1 :TX_PROXY DEBUG: actor# [1:7489507129986590576:2111] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.756858Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507134281558359:2438] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.756948Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507134281557910:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.756983Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507134281557910:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.757144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.758954Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590269:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507134281558364:2439] 2025-04-04T17:12:46.759005Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507129986590269:2049] Subscribe: subscriber# [1:7489507134281558364:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.759055Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590272:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507134281558365:2439] 2025-04-04T17:12:46.759070Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507129986590272:2052] Subscribe: subscriber# [1:7489507134281558365:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.759097Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590275:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507134281558366:2439] 2025-04-04T17:12:46.759118Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507129986590275:2055] Subscribe: subscriber# [1:7489507134281558366:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.759159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558364:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507129986590269:2049] 2025-04-04T17:12:46.759195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558365:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507129986590272:2052] 2025-04-04T17:12:46.759219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558366:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507129986590275:2055] 2025-04-04T17:12:46.759250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507134281558361:2439] 2025-04-04T17:12:46.759272Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7489507134281558360:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507134281558362:2439] 2025-04-04T17:12:46.759324Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507134281558360:2439][/dc-1] Set up state: owner# [1:7489507134281557910:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.759416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507134281558363:2439] 2025-04-04T17:12:46.759461Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507134281558360:2439][/dc-1] Path was already updated: owner# [1:7489507134281557910:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.759492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558364:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507134281558361:2439], cookie# 1 2025-04-04T17:12:46.759504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558365:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507134281558362:2439], cookie# 1 2025-04-04T17:12:46.759516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558366:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507134281558363:2439], cookie# 1 2025-04-04T17:12:46.759547Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590269:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507134281558364:2439] 2025-04-04T17:12:46.759580Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590269:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507134281558364:2439], cookie# 1 2025-04-04T17:12:46.759601Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590272:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507134281558365:2439] 2025-04-04T17:12:46.759613Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590272:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507134281558365:2439], cookie# 1 2025-04-04T17:12:46.759624Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590275:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507134281558366:2439] 2025-04-04T17:12:46.759639Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129986590275:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507134281558366:2439], cookie# 1 2025-04-04T17:12:46.762321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558364:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129986590269:2049], cookie# 1 2025-04-04T17:12:46.762343Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558365:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129986590272:2052], cookie# 1 2025-04-04T17:12:46.762357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507134281558366:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129986590275:2055], cookie# 1 2025-04-04T17:12:46.762401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507134281558361:2439], cookie# 1 2025-04-04T17:12:46.762420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.762434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507134281558362:2439], cookie# 1 2025-04-04T17:12:46.762450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.762482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507134281558363:2439], cookie# 1 2025-04-04T17:12:46.762495Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507134281558360:2439][/dc-1] Unexpected sync response: sender# [1:7489507134281558363:2439], cookie# 1 2025-04-04T17:12:46.816901Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507134281557910:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.818203Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507134281557910:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... -1/USER_0 Version: 0 }: sender# [4:7489507153834006608:2758] 2025-04-04T17:12:51.501775Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7489507153834006607:2758][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [4:7489507153834006609:2758] 2025-04-04T17:12:51.501806Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7489507153834006607:2758][/dc-1/USER_0] Set up state: owner# [4:7489507149539038291:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:51.501826Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7489507153834006607:2758][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 Version: 0 }: sender# [4:7489507153834006610:2758] 2025-04-04T17:12:51.501859Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7489507153834006607:2758][/dc-1/USER_0] Ignore empty state: owner# [4:7489507149539038291:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:51.501907Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7489507149539038291:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 } 2025-04-04T17:12:51.502000Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7489507149539038291:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7489507153834006607:2758] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:51.502079Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489507149539038291:2107], cacheItem# { Subscriber: { Subscriber: [4:7489507153834006607:2758] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:51.502227Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507153834006614:2759], recipient# [4:7489507153834006606:2757], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.502391Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507149539038291:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 
Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.502469Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507153834006615:2760], recipient# [4:7489507153834006605:2360], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:51.502583Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:52.510459Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507149539038291:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:52.510612Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507158128973914:2762], recipient# [4:7489507158128973913:2361], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:52.514280Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:53.514405Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507149539038291:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.514572Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507162423941212:2763], recipient# [4:7489507162423941211:2362], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.515144Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:53.743883Z 
node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507149539038291:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.744024Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507149539038291:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.744122Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507162423941226:2764], recipient# [4:7489507162423941224:2372], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.744199Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507162423941227:2765], recipient# [4:7489507162423941225:2373], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.744918Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7489507162423941224:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:53.745080Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:12:53.802690Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507149539038291:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.802861Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507162423941228:2766], recipient# [4:7489507162423941224:2372], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:53.803058Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7489507162423941224:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD]
Test command err: 2025-04-04T17:12:15.695116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506999383520352:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:15.695157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7c/r3tmp/tmpfy80Lv/pdisk_1.dat 2025-04-04T17:12:16.317296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:16.317391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:16.332522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:16.332911Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25976 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:16.620399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.638988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.650754Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:16.658959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.802535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:16.858150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting...
2025-04-04T17:12:19.412707Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507015905824610:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:19.412798Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7c/r3tmp/tmpG2VIoN/pdisk_1.dat 2025-04-04T17:12:19.512134Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:19.548106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:19.548204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:19.549366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20645 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:19.683412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:19.688818Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:19.696573Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:12:19.700661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:19.782518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:12:19.860999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:12:22.801255Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507030690372780:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:22.801319Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7c/r3tmp/tmpqSFzZS/pdisk_1.dat 2025-04-04T17:12:22.909248Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:22.955194Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:22.955281Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:22.956898Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23137 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:23.108011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.112910Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.140090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.184954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:23.221443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:25.679358Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507041267448830:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:25.679443Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7c/r3tmp/tmpy9tBhn/pdisk_1.dat 2025-04-04T17:12:25.860252Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:25.895900Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:25.895986Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:25.898203Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5606 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... onnecting -> Connected TClient is connected to server localhost:3597 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:36.983839Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.990946Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.010115Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:37.098013Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.161849Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:40.625450Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489507109497183682:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:40.634258Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7c/r3tmp/tmpNDqbng/pdisk_1.dat 2025-04-04T17:12:40.788734Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:40.830444Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:40.830545Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:40.834969Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6974 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:41.093067Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:41.119483Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:41.236061Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.293210Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:44.456565Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489507123814267673:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:44.456625Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7c/r3tmp/tmpoL94Uc/pdisk_1.dat 2025-04-04T17:12:44.550981Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:44.588217Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:44.588312Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:44.589886Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2867 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:44.825666Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:44.844988Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:44.901168Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:44.977655Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:49.015547Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489507147709815954:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:49.015594Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7c/r3tmp/tmpvuxvpy/pdisk_1.dat 2025-04-04T17:12:49.275363Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:49.309318Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:49.309425Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:49.312400Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9823 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:49.580640Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:49.592934Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:49.608868Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:49.615082Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:49.706962Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:49.764888Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD]
>> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD]
>> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed
>> TModifyUserTest::ModifyUserIsEnabled [GOOD]
>> TSchemeShardTopicSplitMergeTest::Boot
>> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions
>> TSchemeShardTopicSplitMergeTest::MargePartitions
>> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition
>> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition
>> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions
>> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition
>> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition
>> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary
>> BackupRestore::RestoreExternalDataSourceWithoutSecret [GOOD]
>> TSubDomainTest::CreateTabletForUnknownDomain [GOOD]
>> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped
>> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD]
>> TSchemeShardTopicSplitMergeTest::Boot [GOOD]
>> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition
>> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD]
>> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull
>> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD]
>> TSchemeShardTopicSplitMergeTest::DisableSplitMerge
>> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitInactivePartition
>> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargePartitions2
>> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD]
>> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD]
>> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions
>> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD]
>> TSubDomainTest::GenericCases [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::RestoreExternalDataSourceWithoutSecret [GOOD]
Test command err: 2025-04-04T17:11:39.386770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506844657533480:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:39.386831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpO0b9nU/pdisk_1.dat 2025-04-04T17:11:39.945722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:39.945836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:39.950534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:39.952303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15173, node 1 2025-04-04T17:11:40.225046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:40.225069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:40.225075Z node 1
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:40.225189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:40.645475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:40.678832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:11:42.825049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506857542436420:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:42.825153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:43.007986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/"Create temporary directory "/Root/~backup_20250404T171143" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250404T171143/table" }Backup table "/Root/~backup_20250404T171143/table" to "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table"Describe table "/Root/~backup_20250404T171143/table"Write scheme into "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table/permissions.pb"Read table "/Root/~backup_20250404T171143/table"Write data into "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table/data_00.csv"Drop table "/Root/~backup_20250404T171143/table"2025-04-04T17:11:43.812326Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250404T171143" in database2025-04-04T17:11:43.840611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-04-04T17:11:43.876797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506861837404307:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:43.876867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:43.933927Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table"Read scheme from "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table" to "/Root/table"2025-04-04T17:11:43.983460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table/data_00.csv"Restore ACL "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpOxazCd/table/permissions.pb"2025-04-04T17:11:44.061091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-04-04T17:11:46.225194Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489506874496923381:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:46.225413Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpE9oI8a/pdisk_1.dat 2025-04-04T17:11:46.428843Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:46.480064Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:46.480143Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:46.487609Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1558, node 4 2025-04-04T17:11:46.673633Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:11:46.673656Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:11:46.673670Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:11:46.673806Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20477 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:46.939963Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:11:49.882131Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506887381826161:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:49.882216Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:49.895214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:11:50.043441Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489506891676793699:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.043532Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:50.069462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmplIsa2p/"Create temporary directory "/Root/~backup_20250404T171150" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmplIsa2p/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250404T171150/table" }Backup table "/Root/~backup_20250 ... undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710756:0, at schemeshard: 72057594046644480 2025-04-04T17:12:35.711426Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:12:35.711452Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:35.903468Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710761:0, at schemeshard: 72057594046644480 2025-04-04T17:12:37.276500Z node 19 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=19&id=NzdhY2Y1MTEtY2MxZDgxODEtNTViNjkxMGUtYWNkNGU2MzQ=, ActorId: [19:7489507077631661384:2999], ActorState: ExecuteState, TraceId: 01jr0wmfz3ec0d42m392xpkrvb, Create QueryResponse for error on request, msg: 2025-04-04T17:12:37.277080Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976710772. Ctx: { TraceId: 01jr0wmfz3ec0d42m392xpkrvb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=NzdhY2Y1MTEtY2MxZDgxODEtNTViNjkxMGUtYWNkNGU2MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore failed: [ {
: Info: path: /home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpkUc90R/replication } {
: Error: Secret "secret" does not exist or you do not have access permissions } ]Cleanup 2025-04-04T17:12:39.112094Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7489507105194872055:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:39.112267Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpkDyJIF/pdisk_1.dat 2025-04-04T17:12:39.265869Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:39.300229Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:39.300338Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:39.304262Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1736, node 22 2025-04-04T17:12:39.359536Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:39.359563Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:39.359574Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:39.359741Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:39.678209Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:43.613777Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7489507122374742316:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:43.613816Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7489507122374742321:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:43.613862Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:43.617632Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:12:43.634677Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [22:7489507122374742330:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:12:43.730427Z node 22 :TX_PROXY ERROR: Actor# [22:7489507122374742406:2691] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:43.748261Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-04T17:12:44.112491Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7489507105194872055:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:44.112605Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:44.287512Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:44.809822Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-04-04T17:12:45.440513Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:12:45.997462Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-04T17:12:46.598041Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-04-04T17:12:47.270304Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:12:47.382513Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:12:49.943253Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715705:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/"Create temporary directory "/Root/~backup_20250404T171249" in databaseProcess "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource/permissions.pb"Remove 
temporary directory "/Root/~backup_20250404T171249" in database2025-04-04T17:12:50.140551Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715711:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/" to "/Root"Process "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource/create_external_data_source.sql"Check existence of the secret "secret"2025-04-04T17:12:52.235451Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-04-04T17:12:52.886565Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715733:0, at schemeshard: 72057594046644480 2025-04-04T17:12:53.526332Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715740:0, at schemeshard: 72057594046644480 2025-04-04T17:12:54.135859Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715743:0, at schemeshard: 72057594046644480 2025-04-04T17:12:54.232164Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:12:54.232198Z node 22 :IMPORT WARN: Table profiles were not loaded Restore failed: [ {
: Info: path: /home/runner/.ya/build/build_root/i5tq/001796/r3tmp/tmpSJn6Sj/externalDataSource } {
: Error: Secret "secret" does not exist or you do not have access permissions } ]Cleanup 2025-04-04T17:12:55.444152Z node 22 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=22&id=MjMyZjY0OGEtYzAzMWJhMTQtNjkwMzZjMjctZWU5MGY3MmI=, ActorId: [22:7489507161029450754:2845], ActorState: ExecuteState, TraceId: 01jr0wn1qk4k5q8z0vm8bd6dc2, Create QueryResponse for error on request, msg: 2025-04-04T17:12:55.444596Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jr0wn1qk4k5q8z0vm8bd6dc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=MjMyZjY0OGEtYzAzMWJhMTQtNjkwMzZjMjctZWU5MGY3MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
>> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.909370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.909444Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:56.920649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Complete 2025-04-04T17:12:56.920911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.921042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.931861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.932130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.933498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.938350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.946314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.954856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.954900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.962175Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.099091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.099349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.099594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.099835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.099969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.102117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.102308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.102517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:12:57.102587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.102647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.102702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.104684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.104761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.104808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.106529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.106581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.106632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.106686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.110567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.112405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.112623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.113664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.113787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.113859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.114124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.114179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.114355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.114462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-04T17:12:57.116590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.116634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.116789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.116832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.117144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.117192Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.117305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.117343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.117409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.117442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.117479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.117518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.117551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.117587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.117647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.117686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.117722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.119630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.119733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.119773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
57594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-04T17:12:57.357033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-04T17:12:57.357073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:12:57.358994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:12:57.359236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:12:57.359274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:12:57.359651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:12:57.359694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T17:12:57.359728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:12:57.393502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.393658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.393739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-04T17:12:57.393829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:12:57.419543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-04T17:12:57.419708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-04T17:12:57.419836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-04T17:12:57.419890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.419930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-04T17:12:57.420075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-04T17:12:57.420251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:12:57.420313Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:57.422630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.423099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.423152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:12:57.423284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:12:57.423428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.423464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-04T17:12:57.423525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-04T17:12:57.423597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.423638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T17:12:57.423732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:12:57.423827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:12:57.423880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:12:57.423913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:12:57.423946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T17:12:57.423981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:12:57.424032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:12:57.424065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:12:57.424186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:12:57.424228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-04T17:12:57.424258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:12:57.424286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:12:57.425743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:12:57.425844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
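The tail of tx 104 above shows completion gated on publication: the operation reports "Publication still in progress, tx: 104, publications: 2, subscribers: 1" and, as the records that follow show, only notifies its subscriber after an ack arrives for each published path version. A sketch of that bookkeeping under assumed types; only the counting rule is taken from the log:

#include <cstdint>
#include <iostream>
#include <set>
#include <utility>

// Tracks outstanding (pathId, version) publications for one transaction
// and reports completion once every ack has arrived, mirroring the
// "Publication in-flight / Publication complete" records above.
class TPublicationTracker {
public:
    void Expect(std::uint64_t pathId, std::uint64_t version) {
        InFlight.insert({pathId, version});
    }
    // Returns true when the last outstanding publication is acked.
    bool Ack(std::uint64_t pathId, std::uint64_t version) {
        InFlight.erase({pathId, version});
        return InFlight.empty();
    }
private:
    std::set<std::pair<std::uint64_t, std::uint64_t>> InFlight;
};

int main() {
    TPublicationTracker tracker;
    tracker.Expect(2, 5);  // tx 104 publishes LocalPathId 2 at version 5
    tracker.Expect(3, 2);  // and LocalPathId 3 at version 2
    std::cout << tracker.Ack(2, 5)          // (3,2) still in flight -> 0
              << tracker.Ack(3, 2) << "\n"; // empty -> notify subscriber
}

Run standalone it prints 01: the first ack leaves one publication outstanding, the second empties the set, which is the point where TEvNotifyTxCompletionResult is sent in the log.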
2025-04-04T17:12:57.425879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:12:57.425912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:12:57.425949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:12:57.427064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:12:57.427154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:12:57.427184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:12:57.427208Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:12:57.427233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:57.427308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-04T17:12:57.427345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:408:2375] 2025-04-04T17:12:57.430183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:12:57.432881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:12:57.432959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:12:57.432997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:542:2478] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-04-04T17:12:57.440732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.440923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.441097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-04-04T17:12:57.443009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.443149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:12:57.443386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:12:57.443423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:12:57.443845Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:57.443925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:12:57.443980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:636:2561] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.920319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.920372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:56.931020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.931236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-04-04T17:12:56.931378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.940586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.940816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.941392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.942160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.946801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.954849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.954892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.961343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.069385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.069616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.069837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.070076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.070123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.072259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.072363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.072538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.072606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.072644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.072678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.074340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.074388Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.074428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.075968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.076014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.076050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.076099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.088048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.089537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.089666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.090413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.090526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.090569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.090784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.090832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.090970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.091060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.092662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-04T17:12:57.092694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.092779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.092803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.093013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.093044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.093111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.093150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.093199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.093227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.093248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.093272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.093303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.093327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.093363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.093413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.093441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.094824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.094900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.094936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.421826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:12:57.422057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:12:57.422096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:12:57.422465Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:57.422539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:12:57.422585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:636:2561] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-04-04T17:12:57.424620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.424786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.425971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-04T17:12:57.427797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.427930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:12:57.428146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T17:12:57.428187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T17:12:57.428529Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T17:12:57.428604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:12:57.428634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:643:2568] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-04-04T17:12:57.431295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.431456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.431665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-04-04T17:12:57.433249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.433344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-04T17:12:57.433535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-04T17:12:57.433561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-04T17:12:57.433819Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-04T17:12:57.433868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-04T17:12:57.433892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:650:2575] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-04-04T17:12:57.436493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.436677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.436847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-04T17:12:57.438684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.438813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-04T17:12:57.439097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-04T17:12:57.439137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-04T17:12:57.439498Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-04T17:12:57.439610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-04T17:12:57.439654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:657:2582] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-04-04T17:12:57.442088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.442230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.442365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-04-04T17:12:57.443942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.444057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-04-04T17:12:57.444281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-04-04T17:12:57.444306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-04-04T17:12:57.444627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-04-04T17:12:57.444692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-04T17:12:57.444724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:664:2589] TestWaitNotification: OK eventTxId 109 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2025-04-04T17:12:46.038684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507131186795669:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:46.038746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f80/r3tmp/tmpD4MOxn/pdisk_1.dat 2025-04-04T17:12:46.674519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.690418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.690545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.699075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8803 WaitRootIsUp 'dc-1'... 
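Transactions 105 through 109 above are all rejected at propose time, pinning down the boundary preconditions: the split boundary must be non-empty, strictly greater than the partition's FromBound, and strictly less than its ToBound (an earlier rejection, "Splitting partition does not exists: 7", shows the target partition is validated first). A sketch of those comparisons, assuming byte-wise key comparison as the hex dumps suggest; the error strings are quoted from the log, the function itself is hypothetical:

#include <iostream>
#include <optional>
#include <string>

// Hypothetical re-statement of the split-boundary checks that produced
// the StatusInvalidParameter rejections for txIds 105-109 above.
// Bounds are compared as raw byte strings; an empty bound means the
// partition is unbounded on that side.
std::optional<std::string> ValidateSplitBoundary(
        const std::string& boundary,
        const std::string& fromBound,
        const std::string& toBound) {
    if (boundary.empty())
        return "Split boundary is empty";
    if (!fromBound.empty() && boundary <= fromBound)
        return "Split boundary less or equals FromBound of partition";
    if (!toBound.empty() && boundary >= toBound)
        return "Split boundary greate or equals ToBound of partition";
    return std::nullopt;  // boundary falls strictly inside the partition
}

int main() {
    // Partition 1's bounds as dumped in the log: fifteen 0x55 bytes then
    // 0x54, and fifteen 0xAA bytes then 0xA9.
    std::string from(15, '\x55'); from += '\x54';
    std::string to(15, '\xAA');   to   += '\xA9';
    std::cout << ValidateSplitBoundary("", from, to).has_value()                      // tx 105: empty
              << ValidateSplitBoundary(std::string(1, '\x01'), from, to).has_value()  // tx 106: <= FromBound
              << ValidateSplitBoundary(std::string(1, '\xAD'), from, to).has_value()  // tx 108: >= ToBound
              << ValidateSplitBoundary(std::string(1, '\x80'), from, to).has_value()  // strictly inside
              << "\n";  // prints 1110
}

The three rejected inputs reproduce the logged failures; the fourth shows a boundary that would pass these checks.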
TClient::Ls request: dc-1 2025-04-04T17:12:46.954145Z node 1 :TX_PROXY DEBUG: actor# [1:7489507131186795909:2140] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.954206Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507131186796355:2447] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.954351Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507131186795932:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.954399Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507131186795932:2153], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.954585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.964758Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828257:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131186796361:2448] 2025-04-04T17:12:46.964827Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507126891828257:2054] Subscribe: subscriber# [1:7489507131186796361:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.964885Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828260:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131186796362:2448] 2025-04-04T17:12:46.964915Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507126891828260:2057] Subscribe: subscriber# [1:7489507131186796362:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.965255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796361:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507126891828257:2054] 2025-04-04T17:12:46.965279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796362:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507126891828260:2057] 2025-04-04T17:12:46.965315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131186796358:2448] 2025-04-04T17:12:46.965382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131186796359:2448] 2025-04-04T17:12:46.965452Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507131186796356:2448][/dc-1] Set up state: owner# [1:7489507131186795932:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.965568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796360:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7489507131186796357:2448], cookie# 1 2025-04-04T17:12:46.965580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796361:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131186796358:2448], cookie# 1 2025-04-04T17:12:46.965593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796362:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131186796359:2448], cookie# 1 2025-04-04T17:12:46.965704Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828257:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131186796361:2448] 2025-04-04T17:12:46.965726Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828257:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131186796361:2448], cookie# 1 2025-04-04T17:12:46.965746Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828260:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131186796362:2448] 2025-04-04T17:12:46.965777Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828260:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131186796362:2448], cookie# 1 2025-04-04T17:12:46.966295Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828254:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131186796360:2448] 2025-04-04T17:12:46.966329Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507126891828254:2051] Subscribe: subscriber# [1:7489507131186796360:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.966375Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828254:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131186796360:2448], cookie# 1 2025-04-04T17:12:46.966525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796361:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507126891828257:2054], cookie# 1 2025-04-04T17:12:46.966552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796362:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507126891828260:2057], cookie# 1 2025-04-04T17:12:46.966579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796360:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507126891828254:2051] 2025-04-04T17:12:46.966597Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131186796360:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507126891828254:2051], cookie# 1 2025-04-04T17:12:46.966643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131186796358:2448], cookie# 1 2025-04-04T17:12:46.966662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.966677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131186796359:2448], cookie# 1 
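The subscriber above fans one TEvSyncVersionRequest out to three replicas and, as the records just below show, declares "Sync is done" at "size# 3, half# 1, successes# 2", logging the third, late reply as an unexpected sync response. That is a simple majority rule; a sketch with assumed names, keeping only the size/half/successes arithmetic from the log:

#include <cstddef>
#include <iostream>

// Majority tracker mirroring the "Sync is in progress: size# 3, half# 1,
// successes# N" records: the sync completes once successes > size / 2.
class TSyncQuorum {
public:
    explicit TSyncQuorum(std::size_t replicas) : Half(replicas / 2) {}
    // Returns true once a majority of replicas has answered.
    bool OnSuccess() {
        ++Successes;
        return Successes > Half;
    }
private:
    std::size_t Half;
    std::size_t Successes = 0;
};

int main() {
    TSyncQuorum quorum(3);                // size# 3, half# 1
    std::cout << quorum.OnSuccess()       // successes# 1 -> in progress (0)
              << quorum.OnSuccess()       // successes# 2 -> Sync is done (1)
              << "\n";
}

OnSuccess() flips to true on the second reply, matching the successes# 2 threshold at which the log reports the sync done.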
2025-04-04T17:12:46.966698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.966724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131186796357:2448] 2025-04-04T17:12:46.966768Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507131186796356:2448][/dc-1] Path was already updated: owner# [1:7489507131186795932:2153], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.966795Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131186796357:2448], cookie# 1 2025-04-04T17:12:46.966808Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131186796356:2448][/dc-1] Unexpected sync response: sender# [1:7489507131186796357:2448], cookie# 1 2025-04-04T17:12:46.966975Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507126891828254:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131186796360:2448] 2025-04-04T17:12:47.042751Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507131186795932:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:47.043090Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507131186795932:2153], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... data/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:55.807757Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507171766768088:2334], recipient# [2:7489507171766768087:2571], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.085696Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136518961558:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.085834Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507136518961558:2231], cacheItem# { Subscriber: { Subscriber: [3:7489507153698830865:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.085905Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136518961558:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.085946Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507175173679033:6210], recipient# [3:7489507175173679031:5149], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: 
false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.085975Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507136518961558:2231], cacheItem# { Subscriber: { Subscriber: [3:7489507153698830865:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.086017Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507175173679034:6211], recipient# [3:7489507175173679032:5150], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.110839Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507136518961558:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.110989Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507136518961558:2231], cacheItem# { Subscriber: { Subscriber: [3:7489507153698830913:2314] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.111096Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507175173679036:6212], recipient# [3:7489507175173679035:5151], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.548595Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507158881866013:2255], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.548752Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507158881866013:2255], cacheItem# { Subscriber: { Subscriber: [2:7489507171766767979:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.548862Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507176061735387:2335], recipient# [2:7489507176061735386:2572], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.576673Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507158881866013:2255], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.576783Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507158881866013:2255], cacheItem# { Subscriber: { Subscriber: [2:7489507171766767979:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.576864Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507176061735389:2336], recipient# [2:7489507176061735388:2573], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.578113Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507158881866013:2255], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: 
false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.578176Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507158881866013:2255], cacheItem# { Subscriber: { Subscriber: [2:7489507171766767992:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.578219Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507176061735391:2337], recipient# [2:7489507176061735390:2574], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.911409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.911459Z node 1 :IMPORT WARN: Table
profiles were not loaded 2025-04-04T17:12:56.922447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.922698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.922860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.932275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.932507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.933497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.938302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.947062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.955044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.955090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.955130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.961254Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.095433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.095700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.095936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.096188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.096253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.098412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.098584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.098796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.098868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.098917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.098959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.100874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.100931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.100969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.102509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.102556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.102600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.102669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.111742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.113665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.113840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.114642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.114764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.114812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.115107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.115181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.115346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.115439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.117075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.117117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.117238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.117272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.117524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.117577Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.117671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.117711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.117777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.117827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.117865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.117903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.117938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.117970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.118028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.118064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.118097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.119984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.120091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.120128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
} } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.513483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:738:2058] recipient: [1:101:2136] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:741:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:742:2058] recipient: [1:740:2656] Leader for TabletID 72057594046678944 is [1:743:2657] sender: [1:744:2058] recipient: [1:740:2656] 2025-04-04T17:12:57.557063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:57.557158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:57.557198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:57.557236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:57.557284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:57.557318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:57.557377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:57.557471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:57.557785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:57.575190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:57.576719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:57.576876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:57.577013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:57.577045Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:57.577213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:57.577944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:12:57.578026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:12:57.578068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:12:57.578135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at 
schemeshard: 72057594046678944 2025-04-04T17:12:57.578202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.578477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:57.578777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.578866Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:12:57.579108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.579212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.579347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-04T17:12:57.579389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:57.579426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:12:57.579449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:12:57.579471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:12:57.579568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.579668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.579910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-04T17:12:57.580080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:12:57.580435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.580590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.580978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.581066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.581318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.581430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.581549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.581742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.581831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2025-04-04T17:12:57.582074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.582330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.582512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.582575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.582659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.589349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.589423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.589624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:57.589674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.589711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:57.590057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:743:2657] sender: [1:802:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.655674Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:12:57.655981Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 328us result status StatusSuccess 2025-04-04T17:12:57.656577Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.917093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.917143Z node 1 :IMPORT WARN: Table profiles were
not loaded 2025-04-04T17:12:56.929853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.930103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.930266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.939545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.939779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.940385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.941128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.946343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.958071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.958164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.958378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.958442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.958497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.958655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.964769Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.080678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.080912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.081104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.081346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.081409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.083460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.083595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.083752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.083813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.083849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.083878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.085588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.085637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.085674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.087224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.087271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.087307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.087356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.091001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.092538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.092689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.093574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.093694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.093738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.093973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.094028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.094169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.094268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.096129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.096168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.096292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.096329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.096611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.096666Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.096747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.096790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.096853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.096885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.096918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.096952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.096982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.097009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.097060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.097094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.097139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.098889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.098980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.099010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
y Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.519957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:754:2058] recipient: [1:101:2136] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:757:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:758:2058] recipient: [1:756:2666] Leader for TabletID 72057594046678944 is [1:759:2667] sender: [1:760:2058] recipient: [1:756:2666] 2025-04-04T17:12:57.551506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:57.551567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:57.551608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:57.551641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:57.551671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:57.551697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:57.551745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:57.551798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:57.552060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:57.566207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:57.567412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:57.567571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:57.567641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:57.567663Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:57.567768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:57.568218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:12:57.568295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:12:57.568337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:12:57.568390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.568445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 
72057594046678944 2025-04-04T17:12:57.568590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:57.568776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.568832Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:12:57.568979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.569065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.569146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-04T17:12:57.569168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:57.569187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:12:57.569200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:12:57.569210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:12:57.569256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.569319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.569447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-04T17:12:57.569563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:12:57.569794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.569898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.570896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
2025-04-04T17:12:57.571095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.571195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.571224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.571265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.576856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.576911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.577072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:57.577118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.577162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:57.577223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
|88.2%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD]
|88.2%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s,
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.923525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.923579Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:56.933915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.934098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.934223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.941974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.942199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.942780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.943483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.948300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.954843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.954893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.961681Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.059440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.062174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.064608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.067365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.067544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.070385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.070538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.070741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.070787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.070882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.070930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.072865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.072917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.072948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.074535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.074578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.074612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.074704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.078989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.080784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.080950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.081937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.082078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.082122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.083694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.083755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.083930Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.083997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.085904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.085951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.086105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.086143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.086444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.086484Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.086570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.086632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.086682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.086716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.086759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.086800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.086830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.086857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.086910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.086942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.086972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.088754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.088851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.088883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
678944 is [2:131:2154] sender: [2:644:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:131:2154] sender: [2:645:2058] recipient: [2:643:2565] Leader for TabletID 72057594046678944 is [2:646:2566] sender: [2:647:2058] recipient: [2:643:2565] 2025-04-04T17:12:57.901365Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:57.901476Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:57.901515Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:57.901545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:57.901579Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:57.901607Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:57.901656Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:57.901722Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:57.901969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:57.918029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:57.919244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:57.919393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:57.919561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:57.919591Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:57.919740Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:57.920483Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:12:57.920562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:12:57.920592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:12:57.920654Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.920722Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.920952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:12:57.921202Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.921270Z node 2 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:12:57.921437Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.921523Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.921625Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-04-04T17:12:57.921660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:12:57.921687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:12:57.921706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:12:57.921723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:12:57.921798Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.921870Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.922071Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-04-04T17:12:57.922290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:12:57.922596Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.922715Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923094Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923156Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923422Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923501Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923650Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923739Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.923901Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.924088Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
2025-04-04T17:12:57.924221Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.924262Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.924303Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.930859Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.930922Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.931067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:57.931109Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.931145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:57.932109Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:646:2566] sender: [2:705:2058] recipient: [2:15:2062] 2025-04-04T17:12:57.975513Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:12:57.975740Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 273us result status StatusSuccess 2025-04-04T17:12:57.976200Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 
GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD] >> KqpJoinOrder::CanonizedJoinOrderLookupBug >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.910253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.910298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:56.919830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.920054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.920309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.930155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.930422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.933481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.938572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.947197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.954837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.954983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.961662Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.061281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.062170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.064624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.067370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.067513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.070379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.070551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.070765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.070816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.070857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.070902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.072708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.072771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.072809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.074563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.074605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.074657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.074734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.078992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.080717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.080923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.081876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.081987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.082027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.083655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.083718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.083912Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.083999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.086033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.086072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.086214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.086270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.086500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.086538Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.086640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.086696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.086748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.086794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.086824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.086858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.086890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.086919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.086994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.087028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.087056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.088724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.088826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.088877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2057594046678944 2025-04-04T17:12:58.243377Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.243462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-04T17:12:58.243582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2025-04-04T17:12:58.244685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2025-04-04T17:12:58.244748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2025-04-04T17:12:58.244820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2025-04-04T17:12:58.244969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-04T17:12:58.245074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-04T17:12:58.245138Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2025-04-04T17:12:58.245171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-04T17:12:58.245210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:12:58.246587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:12:58.246790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:12:58.246867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:12:58.247222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:58.247262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-04T17:12:58.247297Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:58.284451Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:58.284584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-04-04T17:12:58.284639Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-04T17:12:58.284683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:12:58.322994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-04T17:12:58.323153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-04T17:12:58.323220Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-04T17:12:58.323264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.323303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-04T17:12:58.323455Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-04T17:12:58.323614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:58.325652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.325927Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:58.325962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:12:58.326149Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:58.326177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-04T17:12:58.326746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.326796Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-04T17:12:58.326892Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:12:58.326936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:58.326991Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:12:58.327023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:58.327056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-04T17:12:58.327093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:58.327129Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:12:58.327159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T17:12:58.327270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:12:58.327306Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-04T17:12:58.327338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:12:58.327880Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:12:58.327967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:12:58.327999Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:12:58.328037Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:12:58.328071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:58.328141Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-04T17:12:58.328180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-04T17:12:58.332209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:12:58.332351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:12:58.332392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:672:2595] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-04-04T17:12:58.341169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:58.341329Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.341481Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-04-04T17:12:58.343355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:58.343521Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:12:58.343812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T17:12:58.343863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T17:12:58.344263Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T17:12:58.344351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:12:58.344387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:763:2674] TestWaitNotification: OK eventTxId 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2025-04-04T17:12:45.621550Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507127245748341:2165];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.621829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e3/r3tmp/tmpilDjT9/pdisk_1.dat 2025-04-04T17:12:46.058469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.058591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.059181Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.065479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11803 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:12:46.332389Z node 1 :TX_PROXY DEBUG: actor# [1:7489507127245748475:2139] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.332454Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507131540716205:2433] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.332575Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507127245748550:2171], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.332617Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507127245748550:2171], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.332859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.334380Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748118:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131540716210:2434] 2025-04-04T17:12:46.334436Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127245748118:2051] Subscribe: subscriber# [1:7489507131540716210:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.334521Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748124:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131540716212:2434] 2025-04-04T17:12:46.334545Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127245748124:2057] Subscribe: subscriber# [1:7489507131540716212:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.334591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716210:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507127245748118:2051] 2025-04-04T17:12:46.334630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716212:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507127245748124:2057] 2025-04-04T17:12:46.334674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131540716207:2434] 2025-04-04T17:12:46.334709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131540716209:2434] 2025-04-04T17:12:46.334773Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507131540716206:2434][/dc-1] Set up state: owner# [1:7489507127245748550:2171], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.334935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716210:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7489507131540716207:2434], cookie# 1 2025-04-04T17:12:46.334965Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716211:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131540716208:2434], cookie# 1 2025-04-04T17:12:46.335111Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748121:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507131540716211:2434] 2025-04-04T17:12:46.335152Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507127245748121:2054] Subscribe: subscriber# [1:7489507131540716211:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.335189Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748121:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131540716211:2434], cookie# 1 2025-04-04T17:12:46.335229Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748118:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131540716210:2434] 2025-04-04T17:12:46.335271Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748118:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131540716210:2434], cookie# 1 2025-04-04T17:12:46.335293Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748124:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131540716212:2434] 2025-04-04T17:12:46.341377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716212:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131540716209:2434], cookie# 1 2025-04-04T17:12:46.341449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716211:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507127245748121:2054] 2025-04-04T17:12:46.341499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716211:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127245748121:2054], cookie# 1 2025-04-04T17:12:46.341523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716210:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127245748118:2051], cookie# 1 2025-04-04T17:12:46.341564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507131540716208:2434] 2025-04-04T17:12:46.341619Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507131540716206:2434][/dc-1] Path was already updated: owner# [1:7489507127245748550:2171], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.341667Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131540716208:2434], cookie# 1 
2025-04-04T17:12:46.341693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.341711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131540716207:2434], cookie# 1 2025-04-04T17:12:46.341738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.341826Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748124:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507131540716212:2434], cookie# 1 2025-04-04T17:12:46.341864Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127245748121:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507131540716211:2434] 2025-04-04T17:12:46.341887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507131540716212:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127245748124:2057], cookie# 1 2025-04-04T17:12:46.341905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507131540716209:2434], cookie# 1 2025-04-04T17:12:46.341925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507131540716206:2434][/dc-1] Unexpected sync response: sender# [1:7489507131540716209:2434], cookie# 1 2025-04-04T17:12:46.417398Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507127245748550:2171], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.417792Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507127245748550:2171], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... equests DomainOwnerId: 72057594046644480 }: sender# [4:7489507172157332054:3035] 2025-04-04T17:12:55.740497Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7489507159272428933:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7489507172157332042:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:55.740503Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7489507159272428598:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-04T17:12:55.740527Z node 4 :SCHEME_BOARD_REPLICA INFO: [4:7489507159272428598:2053] Subscribe: subscriber# [4:7489507172157332054:3035], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:55.740560Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7489507172157332053:3034][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7489507159272428598:2053] 2025-04-04T17:12:55.740571Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489507159272428933:2126], cacheItem# { Subscriber: { Subscriber: [4:7489507172157332042:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:55.740588Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7489507172157332042:3034][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7489507172157332046:3034] 2025-04-04T17:12:55.740603Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7489507159272428933:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-04T17:12:55.740613Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7489507172157332042:3034][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7489507159272428933:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:55.740640Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7489507172157332054:3035][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7489507159272428598:2053] 2025-04-04T17:12:55.740651Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7489507159272428933:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7489507172157332043:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:55.740672Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7489507172157332043:3035][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7489507172157332049:3035] 2025-04-04T17:12:55.740693Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7489507172157332043:3035][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7489507159272428933:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:55.740704Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489507159272428933:2126], cacheItem# { Subscriber: { Subscriber: [4:7489507172157332043:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:55.740717Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7489507159272428598:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7489507172157332053:3034] 2025-04-04T17:12:55.740732Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7489507159272428598:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7489507172157332054:3035] 2025-04-04T17:12:55.740799Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507172157332057:3037], recipient# [4:7489507172157332041:2331], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:55.757177Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507159272428933:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:55.757302Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489507159272428933:2126], cacheItem# { Subscriber: { Subscriber: [4:7489507163567396865:2574] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:55.757385Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507172157332059:3038], recipient# [4:7489507172157332058:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.742592Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507159272428933:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.742715Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489507159272428933:2126], cacheItem# { Subscriber: { Subscriber: [4:7489507172157332034:3033] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.742842Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507176452299366:3042], recipient# [4:7489507176452299365:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.758184Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489507159272428933:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:56.758301Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489507159272428933:2126], cacheItem# { Subscriber: { Subscriber: [4:7489507163567396865:2574] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:12:56.758382Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489507176452299368:3043], recipient# [4:7489507176452299367:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835448Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.917415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.917473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:56.928820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.929094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.929246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.939392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.939689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.940403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.941283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.946986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.954862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.954911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.961208Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.086384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.086685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.086905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.087149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.087207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.089401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.089542Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.089729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.089789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.089828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.089871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.091644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.091695Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.091725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.093255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.093313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.093350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.093429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.101873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.103519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.103717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.104691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.104808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.104847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.105140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.105191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.105350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.105429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.107387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.107433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.107580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.107616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.107980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.108025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.108130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.108175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.108224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.108263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.108295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.108329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.108359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.108386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.108443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.108485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.108515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.110352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.110464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.110499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
iber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:12:58.391864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:12:58.392251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:58.392298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-04T17:12:58.392340Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:58.432705Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:58.432884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:58.432958Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-04-04T17:12:58.433014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:12:58.468742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-04-04T17:12:58.468907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-04T17:12:58.468982Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-04-04T17:12:58.469035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.469073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-04T17:12:58.469235Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-04-04T17:12:58.469410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:58.471489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.471995Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:58.472043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:12:58.472334Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:58.472376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 105, path id: 3 
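The records just above and below this point trace the schemeshard's publish-and-ack cycle for txId 105: TTxPublishToSchemeBoard sends the updated path description to the populator, each TEvUpdateAck then decrements an in-flight publication counter, and once it reaches zero the log shows "Publication complete, notify & remove" and the completion waiter is satisfied. A minimal illustrative sketch of that counting pattern follows; TPublicationTracker and its members are hypothetical names, not YDB's actual classes.

    // Hedged sketch: a per-transaction publication counter in the spirit of the
    // "Publication still in progress" / "Publication complete, notify & remove"
    // records in this log. All names here are illustrative.
    #include <cstddef>
    #include <cstdint>
    #include <functional>
    #include <map>

    using TTxId = uint64_t;

    class TPublicationTracker {
    public:
        // Called when N path descriptions are sent to the populator for txId.
        void StartPublication(TTxId txId, size_t paths, std::function<void()> onDone) {
            Inflight[txId] = TState{paths, std::move(onDone)};
        }
        // Called once per TEvUpdateAck; fires the completion waiter at zero.
        void AckPublish(TTxId txId) {
            auto it = Inflight.find(txId);
            if (it == Inflight.end())
                return;                      // unknown or already completed tx
            if (--it->second.Remaining == 0) {
                it->second.OnDone();         // "notify & remove"
                Inflight.erase(it);
            }
        }
    private:
        struct TState {
            size_t Remaining = 0;
            std::function<void()> OnDone;    // e.g. send TEvNotifyTxCompletionResult
        };
        std::map<TTxId, TState> Inflight;
    };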
2025-04-04T17:12:58.472451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.472498Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-04T17:12:58.472600Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:12:58.472631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:58.472671Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:12:58.472702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:58.472739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-04T17:12:58.472782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:58.472820Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:12:58.472869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T17:12:58.472992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:12:58.473035Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-04-04T17:12:58.473072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:12:58.473845Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:12:58.473944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:12:58.473981Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:12:58.474016Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:12:58.474053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:58.474130Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-04-04T17:12:58.474165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-04T17:12:58.478326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:12:58.478429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:12:58.478466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:678:2599] TestWaitNotification: OK eventTxId 105 2025-04-04T17:12:58.486989Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:12:58.487239Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 286us result status StatusSuccess 2025-04-04T17:12:58.487923Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 
OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::ListShards [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.834101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.834188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.917260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
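The subscriber sync exchange near the top of this excerpt ("Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1" followed by "Sync is done: ... successes# 2, faulires# 0, partial# 0") shows the scheme board waiting for a strict majority of its replica ring before declaring a sync complete; a later reply past quorum is logged as "Unexpected sync response". A hedged sketch of that quorum arithmetic, with an illustrative counter type that is not YDB's actual code:

    // Hedged sketch of majority-quorum counting over a replica ring, matching
    // the size# 3 / half# 1 / successes# 2 arithmetic in the sync records above.
    #include <cstddef>

    struct TSyncQuorum {
        size_t Size;           // size# : number of ring replicas queried
        size_t Successes = 0;
        size_t Failures  = 0;

        size_t Half() const { return Size / 2; }   // half# 1 for size# 3
        void OnReply(bool ok) {
            if (ok) ++Successes; else ++Failures;
        }
        // Done once a strict majority answered: successes# 2 > half# 1.
        bool Done() const { return Successes > Half(); }
        // Quorum is unreachable => the sync would have to be reported partial.
        bool Partial() const { return Failures > Half(); }
    };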
2025-04-04T17:12:56.917309Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:56.928083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.928330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.928494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.936956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.937150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.937610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.939325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.945706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.954849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.954891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.965502Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.083072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.083329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.083491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.083667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.083718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.085508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.085608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
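The ProgressState records below walk the ESchemeOpAlterSubDomain suboperation through its numbered phases: "Change state for txid 1:0 2 -> 3" inside TCreateParts, "3 -> 128" inside TConfigureParts, and "128 -> 240" when TPropose handles the coordinator's plan step before TDone runs. A minimal sketch of that progression; the numeric values are taken from the log, while the enum and function names are inferred from the handlers that emit these records and are not the schemeshard's real identifiers.

    // Hedged sketch: suboperation phases as they appear in the
    // "Change state for txid ... 2 -> 3 / 3 -> 128 / 128 -> 240" records.
    #include <cstdint>

    enum class ETxState : uint8_t {
        CreateParts    = 2,    // create shards (none needed for AlterSubDomain)
        ConfigureParts = 3,    // push configuration to the shards
        Propose        = 128,  // wait for the coordinator's plan step
        Done           = 240,  // publish paths, notify completion subscribers
    };

    // Each ProgressState handler finishes its phase and advances the state.
    ETxState Advance(ETxState s) {
        switch (s) {
            case ETxState::CreateParts:    return ETxState::ConfigureParts; // 2 -> 3
            case ETxState::ConfigureParts: return ETxState::Propose;        // 3 -> 128
            case ETxState::Propose:        return ETxState::Done;           // 128 -> 240
            case ETxState::Done:           return ETxState::Done;           // terminal
        }
        return s;
    }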
2025-04-04T17:12:57.085735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.085776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.085805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.085859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.087318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.087355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.087384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.088840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.088885Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.088914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.089002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.091869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.093140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.093267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.094065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.094150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.094181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.094379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.094431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.094557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.094678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.096351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.096382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.096492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.096519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.096734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.096767Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.096831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.096864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.096919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.096942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.096966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.096991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.097014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.097035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.097086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.097113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.097136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.098402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.098473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.098495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
essage# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-04-04T17:12:58.467007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-04-04T17:12:58.467050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:12:58.468567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:12:58.468806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:12:58.468851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:12:58.469263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:12:58.469315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T17:12:58.469362Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:12:58.507012Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:58.507186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:58.507257Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-04-04T17:12:58.507366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:12:58.538638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-04-04T17:12:58.538829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-04T17:12:58.538909Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-04-04T17:12:58.538970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.539013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-04T17:12:58.539206Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-04T17:12:58.539392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:12:58.539456Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:58.542523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.543067Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:58.543125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:12:58.543342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:12:58.543552Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:58.543599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-04T17:12:58.543648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-04-04T17:12:58.544233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.544293Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T17:12:58.544414Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:12:58.544456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:12:58.544500Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:12:58.544558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:12:58.544606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T17:12:58.544655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:12:58.544697Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:12:58.544735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:12:58.544888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:12:58.544941Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-04-04T17:12:58.544977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:12:58.545012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:12:58.545740Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:12:58.545841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
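Shortly below, the Merge { Partition: 0 AdjacentPartition: 2 } proposal is rejected with StatusInvalidParameter: "You cannot merge non-contiguous partitions" — in the topology printed earlier, partition 1 sits between 0 and 2, so their key ranges do not touch. A hedged sketch of such an adjacency test over key ranges; TKeyRange and AreContiguous are illustrative names, not the schemeshard's API.

    // Hedged sketch: two partitions are mergeable only when their key ranges
    // are contiguous, i.e. one range's ToBound equals the other's FromBound.
    #include <optional>
    #include <string>

    struct TKeyRange {
        std::optional<std::string> FromBound;  // unset = -infinity
        std::optional<std::string> ToBound;    // unset = +infinity
    };

    bool AreContiguous(const TKeyRange& a, const TKeyRange& b) {
        return (a.ToBound && b.FromBound && *a.ToBound == *b.FromBound) ||
               (b.ToBound && a.FromBound && *b.ToBound == *a.FromBound);
    }
    // Partition 0 ends at "?\xFF...\xFE" while partition 2 starts at
    // "\x7F\xFF...\xFD"; the bounds differ, so the merge is refused.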
2025-04-04T17:12:58.545881Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:12:58.545921Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:12:58.545965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:12:58.546783Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:12:58.546863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:12:58.546892Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:12:58.546923Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:12:58.546952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:58.547025Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-04T17:12:58.547072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:411:2377] 2025-04-04T17:12:58.556086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:12:58.556507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:12:58.556622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:12:58.556668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:547:2482] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-04-04T17:12:58.569931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:58.570174Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:12:58.570436Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-04-04T17:12:58.572667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous 
partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:58.572855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:12:58.573149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:12:58.573198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:12:58.573660Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:58.573763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:12:58.573804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:643:2567] TestWaitNotification: OK eventTxId 105 >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> TestYmqHttpProxy::TestCreateQueueWithTags >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> TestKinesisHttpProxy::ListShardsEmptyFields >> TestYmqHttpProxy::BillingRecordsForJsonApi >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> TestYmqHttpProxy::TestDeleteQueue >> TestYmqHttpProxy::TestTagQueue >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:12:56.833287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:56.833406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.833478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:56.833517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:56.834169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:56.834211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:56.834290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:56.834364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:56.835423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:56.912022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:56.912069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:56.921596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:56.921908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:56.922096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:56.933030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:56.933322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:56.934028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:56.938387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:56.947488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:56.954888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:56.954944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:56.954992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:56.955113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:56.961610Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:12:57.076445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:57.076652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.076879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:57.077121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:57.077188Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.079462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.079610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:57.079811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.079870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:57.079941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:57.079996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:57.081720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.081773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:57.081814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:57.083337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.083371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.083423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.083484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.086521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:57.088088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:57.088288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:57.089257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:57.089372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:57.089419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:12:57.089692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:57.089744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:57.089944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:57.090021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:57.091880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:57.091920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:57.092042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:57.092086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:57.092420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:57.092468Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:57.092565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.092620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.092685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:57.092722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.092760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:57.092801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:57.092840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:57.092874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:57.092930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:57.092967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:57.093003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:57.094885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.094996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:57.095052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
arts: 1/1 2025-04-04T17:12:59.042902Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:12:59.042968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:59.043016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-04T17:12:59.043060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:12:59.043101Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:12:59.043135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T17:12:59.043300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:12:59.043350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-04-04T17:12:59.043384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:12:59.044016Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:12:59.044097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:12:59.044128Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:12:59.044181Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:12:59.044220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:12:59.044304Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-04T17:12:59.049507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:12:59.057453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:12:59.057512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:12:59.057965Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:12:59.058062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:12:59.058098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:758:2671] TestWaitNotification: OK eventTxId 105 2025-04-04T17:12:59.566598Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:59.566994Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
pathId 3 took 461us result status StatusSuccess 2025-04-04T17:12:59.567731Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:59.630924Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:12:59.631217Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 346us result status StatusSuccess 2025-04-04T17:12:59.631723Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-04-04T17:12:59.634602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:59.634833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:12:59.634993Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
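The two StatusInvalidParameter rejections in this test block exercise the alter-topic validation paths: txId 105 above failed with "You cannot merge non-contiguous partitions", and txId 106 here is rejected with "Can`t disable auto partitioning" when the alter tries to set PartitionStrategyType back to DISABLED on a topic created with CAN_SPLIT_AND_MERGE. For orientation, below is a minimal sketch of an alter request that would pass the contiguity check, written in the same protobuf text format as the schemeshard traces above. The Merge message and its field names (Merge, Partition, AdjacentPartition) are assumptions inferred from the partition graph in the describe output above (ChildPartitionIds, ParentPartitionIds, KeyRange), not confirmed by this log:

Transaction {
  WorkingDir: "/MyRoot/USER_1"
  OperationType: ESchemeOpAlterPersQueueGroup
  AlterPersQueueGroup {
    Name: "Topic1"
    # Partition 0 was split at key "\010", leaving it Inactive with two
    # Active children: 1 covers (-inf, "\010") and 2 covers ["\010", +inf).
    # Merging 1 and 2 is contiguous, since their KeyRanges share the
    # "\010" bound; naming any non-touching pair would reproduce the
    # txId 105 error above.
    Merge { Partition: 1 AdjacentPartition: 2 }  # field names assumed
  }
}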
2025-04-04T17:12:59.639547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:59.639720Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:12:59.639990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T17:12:59.640025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T17:12:59.640427Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T17:12:59.640539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:12:59.640568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:773:2685] TestWaitNotification: OK eventTxId 106 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-04-04T17:11:00.488354Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1743786660488331 2025-04-04T17:11:01.083041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506682974006012:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:11:01.095567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:11:01.418718Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001054/r3tmp/tmpcqw6At/pdisk_1.dat 2025-04-04T17:11:01.467992Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:11:01.523122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:11:01.953738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:11:01.972983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:01.973088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:01.975527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:11:01.975611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:11:01.975818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:11:01.988850Z node 1 
:HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:11:01.994992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2981, node 1 2025-04-04T17:11:02.210939Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001054/r3tmp/yandexdi3cM6.tmp 2025-04-04T17:11:02.210995Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001054/r3tmp/yandexdi3cM6.tmp 2025-04-04T17:11:02.211320Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001054/r3tmp/yandexdi3cM6.tmp 2025-04-04T17:11:02.211484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:11:02.263953Z INFO: TTestServer started on Port 25191 GrpcPort 2981 TClient is connected to server localhost:25191 PQClient connected to localhost:2981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:11:02.589570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:11:04.946941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506695858908900:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.947105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.946819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506696447569654:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.946941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506696447569631:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.947020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.948589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506695858908920:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:11:04.951823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:11:04.957586Z node 2 :TX_PROXY ERROR: Actor# [2:7489506696447569661:2124] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:11:04.970174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506695858908922:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:11:04.970545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489506696447569660:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:11:05.033672Z node 2 :TX_PROXY ERROR: Actor# [2:7489506700742536984:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:05.053263Z node 1 :TX_PROXY ERROR: Actor# [1:7489506700153876317:2685] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:11:05.187689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.188987Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489506700153876327:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:05.190531Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzg0ZjVlNmEtODAyYjg3YWYtYzJiYzNkYTEtYzg1Y2U2YzU=, ActorId: [1:7489506695858908880:2334], ActorState: ExecuteState, TraceId: 01jr0whrz02jjnm4t15mrrnkvk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:05.192630Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:05.191498Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489506700742536991:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:11:05.192883Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mjc0Njc5MWMtZDUzNWE2NjYtMmM3NjUxM2EtOTY5MmY0ZDY=, ActorId: [2:7489506696447569629:2308], ActorState: ExecuteState, TraceId: 01jr0whryx1jbsv22x4htp16e0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:11:05.193216Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:11:05.317042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:11:05.475767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2981", true, true, 1000); 2025-04-04T17:11:05.743444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0whsjv078t6ax8wty1kzgd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzllMzRmMGMtMTI1ZTA1MTYtYjJmYTNmZWItYjAyOGRlYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489506700153876789:3033] 2025-04-04T17:11:06.008081Z node 1 ... 
[test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-04T17:12:56.781260Z node 13 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:12:56.781297Z node 13 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-04T17:12:56.781607Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-04-04T17:12:56.781782Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:53076 2025-04-04T17:12:56.781809Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:53076 proto=v1 topic=test-topic durationSec=0 2025-04-04T17:12:56.781822Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:12:56.783267Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-04-04T17:12:56.783414Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T17:12:56.783428Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:12:56.783443Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T17:12:56.783468Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507177381005191:2565] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:12:56.786467Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7489507177381005191:2565] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:12:56.956248Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715697. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T17:12:56.956409Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7489507177381005204:2567] TxId: 281474976715697. Ctx: { TraceId: 01jr0wn66j9c8se9x966vpr9px, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YjA4ZWRjMDMtMWM3ZTQ2YzUtYzg3Yzg2MDMtMWJjZjhhZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T17:12:56.956633Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YjA4ZWRjMDMtMWM3ZTQ2YzUtYzg3Yzg2MDMtMWJjZjhhZGU=, ActorId: [13:7489507177381005192:2567], ActorState: ExecuteState, TraceId: 01jr0wn66j9c8se9x966vpr9px, Create QueryResponse for error on request, msg: 2025-04-04T17:12:56.957754Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7489507177381005191:2565] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YjA4ZWRjMDMtMWM3ZTQ2YzUtYzg3Yzg2MDMtMWJjZjhhZGU=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0wn66k3zakqz5s4zsrfb6r" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-04-04T17:12:56.957859Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YjA4ZWRjMDMtMWM3ZTQ2YzUtYzg3Yzg2MDMtMWJjZjhhZGU=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0wn66k3zakqz5s4zsrfb6r" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-04-04T17:12:56.958216Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-04-04T17:12:56.958769Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YjA4ZWRjMDMtMWM3ZTQ2YzUtYzg3Yzg2MDMtMWJjZjhhZGU=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0wn66k3zakqz5s4zsrfb6r" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-04-04T17:12:56.958803Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session will restart in 2.000000s 2025-04-04T17:12:56.958898Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session: Do CDS request 2025-04-04T17:12:56.958921Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Do schedule cds request after 2000 ms 2025-04-04T17:12:57.327524Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720682. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:12:57.327656Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7489507179725023183:2502] TxId: 281474976720682. Ctx: { TraceId: 01jr0wn6j52q1t43xcrks150x9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=NDIyODZjODctYmJjMDY3ZGQtMWJlN2VkYS0yOGQ3YjA4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:12:57.327852Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=NDIyODZjODctYmJjMDY3ZGQtMWJlN2VkYS0yOGQ3YjA4OA==, ActorId: [14:7489507179725023180:2502], ActorState: ExecuteState, TraceId: 01jr0wn6j52q1t43xcrks150x9, Create QueryResponse for error on request, msg: 2025-04-04T17:12:57.328633Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0wn6j52q1t43xcrqeaegq3" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-04T17:12:57.461972Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715699. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:12:57.462140Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7489507181675972554:2570] TxId: 281474976715699. Ctx: { TraceId: 01jr0wn6pbcj48etyf04g0tp97, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZDFmNTJiM2YtZjdmMGRjNTctY2RiM2FlYjYtY2RlMmM0NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:12:57.462406Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZDFmNTJiM2YtZjdmMGRjNTctY2RiM2FlYjYtY2RlMmM0NjI=, ActorId: [13:7489507181675972551:2570], ActorState: ExecuteState, TraceId: 01jr0wn6pbcj48etyf04g0tp97, Create QueryResponse for error on request, msg: 2025-04-04T17:12:57.463305Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0wn6pbcj48etyf07y68yfr" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-04-04T17:12:57.510364Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720684. Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-04T17:12:57.510503Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7489507179725023226:2496] TxId: 281474976720684. Ctx: { TraceId: 01jr0wn6a96wdrc36csbn9n9f7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=OGRhNTJmOWMtYWY0YTcyYzctYmJjZWY5NDQtYjU0NmVlYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-04-04T17:12:57.510725Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=OGRhNTJmOWMtYWY0YTcyYzctYmJjZWY5NDQtYjU0NmVlYjQ=, ActorId: [14:7489507175430055871:2496], ActorState: ExecuteState, TraceId: 01jr0wn6a96wdrc36csbn9n9f7, Create QueryResponse for error on request, msg: 2025-04-04T17:12:57.511387Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jr0wn6qw8rdhfg71hzbsfzb6" } } YdbStatus: UNAVAILABLE ConsumedRu: 285 } 2025-04-04T17:12:57.778879Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session: close. Timeout = 0 ms 2025-04-04T17:12:57.778943Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session will now close 2025-04-04T17:12:57.778989Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session: aborting 2025-04-04T17:12:57.779625Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-04-04T17:12:57.779673Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fe4f958b-f51d01f6-84320b28-97866f6b_0] Write session: destroy 2025-04-04T17:12:57.868372Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715701. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:12:57.868507Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7489507181675972623:2571] TxId: 281474976715701. Ctx: { TraceId: 01jr0wn6rt9hk9cf9mdqabhz7d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ODllZmRmMy00OGU0NmYyNS1jZjg4MzAyZS1mNTJlMTA0Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:12:57.868766Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ODllZmRmMy00OGU0NmYyNS1jZjg4MzAyZS1mNTJlMTA0Zg==, ActorId: [13:7489507181675972575:2571], ActorState: ExecuteState, TraceId: 01jr0wn6rt9hk9cf9mdqabhz7d, Create QueryResponse for error on request, msg: 2025-04-04T17:12:57.869615Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0wn733365m5rg0epznh479" } } YdbStatus: UNAVAILABLE ConsumedRu: 216 } |88.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-04-04T17:12:46.350985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:12:46.351154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:46.351201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:12:46.351238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:12:46.358345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:12:46.358432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:12:46.358561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:12:46.358692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:12:46.360714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:46.455331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:12:46.455392Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.468867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:46.469146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:12:46.469317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:12:46.480969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:12:46.481259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:12:46.484284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:46.485202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:12:46.492837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:46.501842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:46.501952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:46.502175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:12:46.502233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:46.502382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:12:46.502549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.512110Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-04T17:12:46.658411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:12:46.659972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.661154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:12:46.665885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:12:46.666054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.679749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:46.680041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:12:46.680465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.680559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:12:46.680648Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:12:46.680716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:12:46.687071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.687187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:12:46.687246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:12:46.697311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.697433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.697493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:46.697542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.706343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:46.715107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:12:46.715384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:12:46.716585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:46.716780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:46.716840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:46.722612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:12:46.722716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:12:46.722961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:12:46.723051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:12:46.727669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:46.727725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:46.727912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:46.727967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:12:46.728367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:12:46.728418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:12:46.728515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:46.728554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.728594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:12:46.728638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.728679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:12:46.728718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:12:46.728749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:12:46.728776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:12:46.728846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:12:46.728888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:12:46.728933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:12:46.737140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:46.737253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:12:46.737293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 2025-04-04T17:12:59.791544Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-04T17:12:59.791605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:59.791728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:59.792213Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.792285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.792312Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:59.792339Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-04-04T17:12:59.792371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:12:59.792990Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.793058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.793115Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:59.793159Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-04-04T17:12:59.793186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:12:59.793245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-04T17:12:59.796735Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-04T17:12:59.796872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-04T17:12:59.796903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-04-04T17:12:59.796937Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-04-04T17:12:59.797014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-04-04T17:12:59.797118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-04-04T17:12:59.797545Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:12:59.797654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 12884904043 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:12:59.797697Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-04-04T17:12:59.797796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-04-04T17:12:59.797873Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-04T17:12:59.797905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:59.797945Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-04-04T17:12:59.797978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:59.798023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:12:59.798078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:12:59.798115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-04-04T17:12:59.798164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-04-04T17:12:59.798199Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-04-04T17:12:59.798230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-04-04T17:12:59.798302Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:12:59.798336Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-04-04T17:12:59.798400Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-04-04T17:12:59.798432Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-04T17:12:59.799026Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.799108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.800809Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:12:59.800861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:12:59.801022Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:12:59.801118Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:12:59.801162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-04-04T17:12:59.801221Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-04-04T17:12:59.801943Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.802018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.802047Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:59.802094Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-04-04T17:12:59.802145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:12:59.802675Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.802757Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.802785Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-04-04T17:12:59.802826Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:12:59.802869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:12:59.802931Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-04-04T17:12:59.802976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:124:2150] 2025-04-04T17:12:59.807712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.807990Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-04-04T17:12:59.808067Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-04-04T17:12:59.808123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-04-04T17:12:59.808163Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-04-04T17:12:59.808190Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-04-04T17:12:59.808232Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-04-04T17:12:59.809787Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-04-04T17:12:59.809865Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:12:59.809915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:772:2707] TestWaitNotification: OK eventTxId 103 |88.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestYmqHttpProxy::TestListQueues |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> OperationMapping::IndexBuildRejected >> OperationMapping::IndexBuildSuccess >> OperationMapping::IndexBuildRejected [GOOD] >> OperationMapping::IndexBuildSuccess [GOOD] >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> OperationMapping::IndexBuildCanceled [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] |88.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 |88.3%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed >> BSCRestartPDisk::RestartOneByOneWithReconnects >> BSCRestartPDisk::RestartOneByOne |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2025-04-04T17:12:46.030196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507132212113515:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:46.042414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:46.095993Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507132079450921:2177];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f2a/r3tmp/tmpdAanWu/pdisk_1.dat 2025-04-04T17:12:46.564501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:46.855810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.855935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.857239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-04T17:12:46.857289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.865271Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:46.865397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:46.866705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.867469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16211 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:47.148960Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132212113529:2141] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:47.148997Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136507081297:2456] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:47.149098Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507132212113552:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:47.149185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136507081279:2450][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7489507132212113552:2154], cookie# 1 2025-04-04T17:12:47.158281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136507081283:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136507081280:2450], cookie# 1 2025-04-04T17:12:47.158327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136507081284:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136507081281:2450], cookie# 1 2025-04-04T17:12:47.158345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136507081285:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136507081282:2450], cookie# 1 2025-04-04T17:12:47.158347Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127917145873:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136507081283:2450], cookie# 1 2025-04-04T17:12:47.158383Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127917145876:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136507081284:2450], cookie# 1 2025-04-04T17:12:47.158402Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507127917145879:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507136507081285:2450], cookie# 1 2025-04-04T17:12:47.158448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136507081283:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127917145873:2052], cookie# 1 2025-04-04T17:12:47.158483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136507081284:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507127917145876:2055], cookie# 1 2025-04-04T17:12:47.158500Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507136507081285:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 
}: sender# [1:7489507127917145879:2058], cookie# 1 2025-04-04T17:12:47.158532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136507081279:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507136507081280:2450], cookie# 1 2025-04-04T17:12:47.158572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136507081279:2450][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:47.158607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136507081279:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507136507081281:2450], cookie# 1 2025-04-04T17:12:47.158645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136507081279:2450][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:47.158686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136507081279:2450][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507136507081282:2450], cookie# 1 2025-04-04T17:12:47.158698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507136507081279:2450][/dc-1] Unexpected sync response: sender# [1:7489507136507081282:2450], cookie# 1 2025-04-04T17:12:47.158727Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507132212113552:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:12:47.170531Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507132212113552:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489507136507081279:2450] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:47.170670Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507132212113552:2154], cacheItem# { Subscriber: { Subscriber: [1:7489507136507081279:2450] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-04T17:12:47.174365Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507136507081298:2457], recipient# [1:7489507136507081297:2456], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:12:47.174437Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136507081297:2456] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:47.220551Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136507081297:2456] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-04T17:12:47.223825Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136507081297:2456] Handle TEvDescribeSchemeResult Forward to# [1:7489507136507081296:2455] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:47.278788Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132212113529:2141] Handle TEvProposeTransaction 2025-04-04T17:12:47.278818Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132212113529:2141] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:47.278916Z node 1 :TX_PROXY DEBUG: actor# [1:7489507132212113529:2141] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507136507081305:2463] 2025-04-04T17:12:47.427039Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507136507081305:2463] txid# 281474976710657 Bootstrap EvSchemeRequest recor ... 
ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.094602Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132079451044:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507136374418366:2111] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:13:01.094728Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507196503960620:2150], recipient# [2:7489507196503960619:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.358126Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507179141493213:2195], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.358303Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507179141493213:2195], cacheItem# { Subscriber: { Subscriber: [3:7489507192026395199:2237] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:13:01.358352Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507179141493213:2195], cacheItem# { Subscriber: { Subscriber: [3:7489507192026395200:2238] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable 
RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:13:01.358516Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507196321362517:2245], recipient# [3:7489507192026395195:2573], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.358714Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7489507192026395195:2573], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:01.609129Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507132079451044:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.609280Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132079451044:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507136374418366:2111] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:13:01.609379Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507196503960622:2151], recipient# [2:7489507196503960621:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.701615Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507179141493213:2195], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.701745Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507179141493213:2195], cacheItem# { Subscriber: { Subscriber: [3:7489507179141493233:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:13:01.701844Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507196321362519:2246], recipient# [3:7489507196321362518:2578], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 
2025-04-04T17:13:01.830811Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507132079451044:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:01.830971Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132079451044:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507149259320286:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:13:01.831075Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507196503960624:2152], recipient# [2:7489507196503960623:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:02.095449Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7489507132079451044:2106], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:02.095589Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7489507132079451044:2106], cacheItem# { Subscriber: { Subscriber: [2:7489507136374418366:2111] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:13:02.095676Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7489507200798927922:2153], recipient# [2:7489507200798927921:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> BSCRestartPDisk::RestartNotAllowed >> 
KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 |88.3%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 12088426752918162034 2025-04-04T17:13:04.629351Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.629458Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.629508Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.629546Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.629588Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.629621Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.629667Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.629711Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.630341Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630419Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630494Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630544Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630601Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630650Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630693Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630745Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.630810Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.630901Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.630947Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.630978Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631010Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631044Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631073Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631105Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.632593Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632645Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632676Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632703Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632730Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632786Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632840Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632870Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 14095692949784955770 2025-04-04T17:13:04.631430Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631566Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631646Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631714Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631780Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631837Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.631900Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.632790Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632866Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632914Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.632962Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.633008Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.633054Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.633102Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.633173Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.633241Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.633270Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.633344Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.633381Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.633425Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.633462Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:13:04.635158Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.635224Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.635294Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.635366Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.635408Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.635450Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:13:04.635491Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull >> 
KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore [GOOD] >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TPersQueueTest::DirectReadPreCached >> TPersQueueTest::SetupLockSession2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> TPersQueueTest::ReadFromSeveralPartitions >> TopicService::OneConsumer_TheRangesDoNotOverlap >> TPersQueueTest::BadTopic >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> TPersQueueTest::UpdatePartitionLocation >> TPartitionWriterCacheActorTests::WriteReplyOrder >> TestKinesisHttpProxy::TestWrongStream >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> TestYmqHttpProxy::TestTagQueue [GOOD] >> TPersQueueTest::WriteExisting >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> TestYmqHttpProxy::TestDeleteMessage >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TestYmqHttpProxy::TestUntagQueue >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> TPersQueueTest::ReadFromSeveralPartitionsMigrated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 19881, MsgBus: 13536 2025-04-04T17:10:06.012193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506443450300303:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:06.012541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001379/r3tmp/tmpwRx4No/pdisk_1.dat 2025-04-04T17:10:06.496414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:06.509804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:06.509918Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:06.537202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19881, node 1 2025-04-04T17:10:06.748606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:06.748626Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:06.748637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:06.748749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13536 TClient is connected to server localhost:13536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:07.356248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.380481Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:07.396411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.573611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.757332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.862008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:09.569706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506460630171127:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.569803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.803434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.834432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.865606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.912730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.948013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.996471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.077801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506464925138935:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.077882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.078231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506464925138940:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.081715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:10.092028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506464925138942:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:10.169927Z node 1 :TX_PROXY ERROR: Actor# [1:7489506464925138997:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:10.998396Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506443450300303:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:10.998467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:11.577749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.613331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.651592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.684970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.710544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.846880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.880418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.908536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.939426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.971066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.011403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.081729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:10:12.115203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.644260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:10:12.673565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.704697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.736077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:50.911811Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:50.911881Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:50.912023Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:50.912092Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:50.912230Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:50.912290Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:50.913113Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:50.913222Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:50.913463Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:50.913501Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:50.913699Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:50.913740Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:50.914067Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:50.914109Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:50.914313Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:50.914351Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:50.914919Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:12:50.914994Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:12:50.915150Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:12:50.915194Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:12:50.915404Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:12:50.915447Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:12:50.915587Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:12:50.915641Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:12:50.915734Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:12:50.915780Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:12:50.915835Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:12:50.915874Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:12:50.916563Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:12:50.916651Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:12:50.916850Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:12:50.916892Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:12:50.917053Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:12:50.917099Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:12:50.917267Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:12:50.917304Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:12:50.917419Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:12:50.917452Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038735;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:12:50.977007Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:50.984513Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:50.988460Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:50.992555Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:50.997824Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.000817Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.007171Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.007944Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.014864Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.020141Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.024854Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.029851Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.034757Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.039086Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.044135Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:12:51.048198Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> TestYmqHttpProxy::TestListQueues [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2025-04-04T17:12:45.485503Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507128604422521:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.486444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015da/r3tmp/tmpu4Pdt9/pdisk_1.dat 2025-04-04T17:12:45.859218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:45.893258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:45.897484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:45.900465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8196 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:46.151742Z node 1 :TX_PROXY DEBUG: actor# [1:7489507128604422630:2102] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:46.151897Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507132899390205:2257] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:46.153150Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507128604422677:2125], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:46.153220Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489507128604422677:2125], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:12:46.153431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:12:46.155571Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422327:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507132899390210:2258] 2025-04-04T17:12:46.155578Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422330:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507132899390211:2258] 2025-04-04T17:12:46.155643Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128604422327:2049] Subscribe: subscriber# [1:7489507132899390210:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.155658Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128604422330:2052] Subscribe: subscriber# [1:7489507132899390211:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.155693Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422333:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489507132899390212:2258] 2025-04-04T17:12:46.155753Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489507128604422333:2055] Subscribe: subscriber# [1:7489507132899390212:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:12:46.155756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390210:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128604422327:2049] 2025-04-04T17:12:46.155793Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422327:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507132899390210:2258] 2025-04-04T17:12:46.155807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390211:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128604422330:2052] 2025-04-04T17:12:46.155827Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422330:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507132899390211:2258] 2025-04-04T17:12:46.155860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390212:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507128604422333:2055] 2025-04-04T17:12:46.155914Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507132899390207:2258] 2025-04-04T17:12:46.155940Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422333:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489507132899390212:2258] 2025-04-04T17:12:46.155968Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507132899390208:2258] 2025-04-04T17:12:46.156018Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489507132899390206:2258][/dc-1] Set up state: owner# [1:7489507128604422677:2125], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.157423Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489507132899390209:2258] 2025-04-04T17:12:46.157521Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489507132899390206:2258][/dc-1] Path was already updated: owner# [1:7489507128604422677:2125], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:12:46.157613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390210:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132899390207:2258], cookie# 1 2025-04-04T17:12:46.157665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390211:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132899390208:2258], cookie# 1 2025-04-04T17:12:46.157707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390212:2258][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132899390209:2258], cookie# 1 2025-04-04T17:12:46.157755Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422327:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132899390210:2258], cookie# 1 2025-04-04T17:12:46.157806Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422330:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132899390211:2258], cookie# 1 2025-04-04T17:12:46.157853Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507128604422333:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507132899390212:2258], cookie# 1 2025-04-04T17:12:46.157887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390210:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128604422327:2049], cookie# 1 2025-04-04T17:12:46.157901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390211:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128604422330:2052], cookie# 1 2025-04-04T17:12:46.157923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507132899390212:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507128604422333:2055], cookie# 1 2025-04-04T17:12:46.157956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507132899390207:2258], cookie# 1 2025-04-04T17:12:46.157983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:46.158007Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507132899390208:2258], cookie# 1 2025-04-04T17:12:46.158026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:46.158045Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507132899390209:2258], cookie# 1 2025-04-04T17:12:46.158074Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507132899390206:2258][/dc-1] Unexpected sync response: sender# [1:7489507132899390209:2258], cookie# 1 2025-04-04T17:12:46.212472Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507128604422677:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { 
Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:12:46.213706Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507128604422677:2125], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:12:52.958787Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489507159320925091:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7489507159320925359:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743786772826 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:52.958885Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507159320925091:2113], cacheItem# { Subscriber: { Subscriber: [3:7489507159320925359:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743786772826 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 9 IsSync: true Partial: 0 } 2025-04-04T17:12:52.959045Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507159320925444:2327], recipient# [3:7489507159320925443:2326], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-04-04T17:12:52.959096Z node 3 :TX_PROXY DEBUG: 
Actor# [3:7489507159320925443:2326] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:52.959139Z node 3 :TX_PROXY ERROR: Actor# [3:7489507159320925443:2326] txid# 281474976715662, Access denied for user2 on path /dc-1, with access AlterSchema 2025-04-04T17:12:52.959231Z node 3 :TX_PROXY ERROR: Actor# [3:7489507159320925443:2326] txid# 281474976715662, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-04T17:12:52.959264Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507159320925443:2326] txid# 281474976715662 SEND to# [3:7489507159320925442:2325] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:12:52.961326Z node 3 :TX_PROXY DEBUG: actor# [3:7489507159320925042:2086] Handle TEvProposeTransaction 2025-04-04T17:12:52.961349Z node 3 :TX_PROXY DEBUG: actor# [3:7489507159320925042:2086] TxId# 281474976715663 ProcessProposeTransaction 2025-04-04T17:12:52.961384Z node 3 :TX_PROXY DEBUG: actor# [3:7489507159320925042:2086] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [3:7489507159320925446:2329] 2025-04-04T17:12:52.963148Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507159320925446:2329] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user2" Password: "password" CanLogin: false } } } } UserToken: "\n\005user2\022\030\022\026\n\024all-users@well-known\032\322\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgyOTk3MiwiaWF0IjoxNzQzNzg2NzcyLCJzdWIiOiJ1c2VyMiJ9.PVhTBIRzcvXeFTqcOYjhHn1fP9QAyDORKQITgbR6Nlv42DqJZrPW4Sf4F3oUbhSLDMhU3ZdCSOtkApgwksWIZkgi0NOW8jmss-12Rhyoh_P5lMGMu29inNsQdQpSzE58o5o_lRwbrVgqu1-mq6ZAej2xw0R4Zy2MoCS0vxAzxlTeE08YJ0NYcLEqa0CO1wXjY6b9uC5qhF8vpnThv9D5L6ALPah9SIvyjdCUYKT6eF4XPkiOvophgKTVjAlS-ba_bPtaAlHskEjusv1KP6vgWxC6cZitwK9AX0d_gS8-4LU1ZW25J2Xi7QrLFewjtIaV8FSQLyJlZP5Tyy164xm9sA\"\005Login*~eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgyOTk3MiwiaWF0IjoxNzQzNzg2NzcyLCJzdWIiOiJ1c2VyMiJ9.**" PeerName: "" 2025-04-04T17:12:52.963207Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507159320925446:2329] txid# 281474976715663 Bootstrap, UserSID: user2 CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:52.963223Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507159320925446:2329] txid# 281474976715663 Bootstrap, UserSID: user2 IsClusterAdministrator: 1 2025-04-04T17:12:52.963268Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507159320925446:2329] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:52.963340Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489507159320925091:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:52.963411Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507159320925359:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7489507159320925091:2113], cookie# 10 2025-04-04T17:12:52.963477Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489507159320925363:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7489507159320925360:2258], cookie# 10 
2025-04-04T17:12:52.963500Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489507159320925364:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7489507159320925361:2258], cookie# 10 2025-04-04T17:12:52.963510Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489507159320925365:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7489507159320925362:2258], cookie# 10 2025-04-04T17:12:52.963530Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489507159320924785:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7489507159320925364:2258], cookie# 10 2025-04-04T17:12:52.963550Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489507159320924782:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7489507159320925363:2258], cookie# 10 2025-04-04T17:12:52.963553Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489507159320924788:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7489507159320925365:2258], cookie# 10 2025-04-04T17:12:52.963584Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489507159320925364:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7489507159320924785:2052], cookie# 10 2025-04-04T17:12:52.963597Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489507159320925365:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7489507159320924788:2055], cookie# 10 2025-04-04T17:12:52.963605Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489507159320925363:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7489507159320924782:2049], cookie# 10 2025-04-04T17:12:52.963635Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507159320925359:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7489507159320925361:2258], cookie# 10 2025-04-04T17:12:52.963668Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507159320925359:2258][/dc-1] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:52.963691Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507159320925359:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7489507159320925362:2258], cookie# 10 2025-04-04T17:12:52.963709Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507159320925359:2258][/dc-1] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:52.963741Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507159320925359:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7489507159320925360:2258], cookie# 10 2025-04-04T17:12:52.963757Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489507159320925359:2258][/dc-1] Unexpected sync response: sender# [3:7489507159320925360:2258], cookie# 10 2025-04-04T17:12:52.963788Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489507159320925091:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:12:52.963888Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489507159320925091:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7489507159320925359:2258] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743786772826 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:52.963979Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489507159320925091:2113], cacheItem# { Subscriber: { Subscriber: [3:7489507159320925359:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743786772826 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-04-04T17:12:52.964170Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489507159320925447:2330], recipient# [3:7489507159320925446:2329], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-04-04T17:12:52.964213Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507159320925446:2329] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:52.964254Z node 3 :TX_PROXY ERROR: Actor# [3:7489507159320925446:2329] txid# 281474976715663, Access denied for user2 on path /dc-1, with access AlterSchema 2025-04-04T17:12:52.964330Z node 3 :TX_PROXY ERROR: Actor# [3:7489507159320925446:2329] txid# 281474976715663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-04T17:12:52.964361Z node 3 :TX_PROXY DEBUG: Actor# [3:7489507159320925446:2329] txid# 281474976715663 SEND to# [3:7489507159320925445:2328] Source {TEvProposeTransactionStatus Status# 5} >> TestYmqHttpProxy::TestPurgeQueue >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> TestYmqHttpProxy::TestChangeMessageVisibility >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> BSCRestartPDisk::RestartNotAllowed [GOOD] >> TestKinesisHttpProxy::TestWrongStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 9437085871372521939 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] >> OlapEstimationRowsCorrectness::TPCDS96 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TestKinesisHttpProxy::ListShardsTimestamp >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestKinesisHttpProxy::TestCounters >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> KqpScan::ScanRetryReadRanges [GOOD] >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 13695, MsgBus: 2419 2025-04-04T17:09:56.703763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506403841353055:2177];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:56.704587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00138b/r3tmp/tmpufnn0n/pdisk_1.dat 2025-04-04T17:09:57.134293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:57.134380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:57.136140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:57.190595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13695, node 1 2025-04-04T17:09:57.441832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:57.441856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:57.441874Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:57.442010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2419 TClient is connected to server localhost:2419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:58.126991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:58.139803Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:58.144614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:58.316728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:09:58.470181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:58.566477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:00.328501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506421021223890:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.328619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.636373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.674504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.752788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.780000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.809990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.847650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:00.895264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506421021224408:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.895380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.895811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506421021224413:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:00.900082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:00.912295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506421021224415:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:00.999182Z node 1 :TX_PROXY ERROR: Actor# [1:7489506421021224469:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:01.703506Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506403841353055:2177];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:01.703589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:02.022850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.052005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.084142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:02.161747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4396, MsgBus: 25258 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00138b/r3tmp/tmp5Q5ccg/pdisk_1.dat 2025-04-04T17:10:03.975418Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:10:03.976607Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4396, node 2 2025-04-04T17:10:04.010189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:04.010300Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:04.013259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:04.070786Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:04.070813Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:04.070820Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:04.070925Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25258 TClient is connected to server localhost:25258 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:04.551641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:04.572999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... 474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:02.210980Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:02.245889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:02.280679Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:02.316878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:02.361125Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507202789096499:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:02.361249Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:02.361370Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507202789096504:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:02.365561Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:02.376696Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489507202789096506:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:13:02.442156Z node 5 :TX_PROXY ERROR: Actor# [5:7489507202789096559:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:02.787441Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489507181314257742:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:02.787514Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:03.750171Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:03.877855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2622, MsgBus: 18273 2025-04-04T17:13:05.556818Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489507213052512068:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:05.556883Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00138b/r3tmp/tmphu91Ev/pdisk_1.dat 2025-04-04T17:13:05.662517Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:05.691555Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:05.691676Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:05.693018Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2622, node 6 2025-04-04T17:13:05.733917Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:05.733939Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:05.733947Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:05.734075Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18273 TClient is connected to server localhost:18273 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:06.337119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:06.351832Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:06.427684Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:06.714158Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:06.822717Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:10.269345Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489507234527350344:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.269446Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.343402Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.384217Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.426564Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.502445Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.557657Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489507213052512068:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:10.557770Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:10.586898Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.639666Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.771693Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489507234527350873:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.771762Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.772152Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489507234527350878:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.778850Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:10.796202Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489507234527350880:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:13:10.860979Z node 6 :TX_PROXY ERROR: Actor# [6:7489507234527350934:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:12.170719Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:12.301502Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2025-04-04T17:12:57.837619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:57.838132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:57.838386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:12:57.839957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:57.840163Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:57.840417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00172a/r3tmp/tmpiTJrmW/pdisk_1.dat 2025-04-04T17:12:58.280494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:58.481074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:12:58.587595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:58.588128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:58.591925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:58.592009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:58.607669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:58.608316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:58.608715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:58.902009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.654429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1401:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:59.654631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1412:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:59.655302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:59.664754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:13:00.108500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1415:2845], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:13:00.256301Z node 1 :TX_PROXY ERROR: Actor# [1:1543:2914] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:01.646289Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wn8zycva3zyw9gyp9wqmq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMwMTA0ODEtYjk4ZGNkMjctYjg5NzNiYmEtMzExMTVkMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2025-04-04T17:13:02.362447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wnazzaht37wf1y7k0mc97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U0YTZmMGUtN2E4ZDY2Zi04MGE5YjE1NC0zOGI1MzBhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1618:2967] -> [2:1574:2437] -- EvScanData from [2:1622:2444]: pass 2025-04-04T17:13:03.098005Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wnazzaht37wf1y7k0mc97, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U0YTZmMGUtN2E4ZDY2Zi04MGE5YjE1NC0zOGI1MzBhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-04-04T17:13:03.101320Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-04T17:13:10.418093Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:10.418633Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:10.418940Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:13:10.420010Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:10.420092Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:10.420446Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00172a/r3tmp/tmpLIJrwL/pdisk_1.dat 2025-04-04T17:13:10.764259Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:10.925434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:11.022769Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:11.022908Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:11.027114Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:11.027229Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:11.042170Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T17:13:11.042829Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:11.043219Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:11.325749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:11.938975Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1398:2834], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.939121Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1409:2839], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.939239Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.943651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:13:12.378343Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1412:2842], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:13:12.459072Z node 3 :TX_PROXY ERROR: Actor# [3:1540:2911] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:12.993910Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wnn00dfbmrn759h6b9ydh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjkzOGRlMDEtNmY5NGMwYjgtNjAyNGRlNTgtODBjYTM2MGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 4 2025-04-04T17:13:13.661074Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wnp2x3amhm5mdjy6jcv4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWMyODRlMGYtNThmMjU5YjEtYWY2Mjg3NWItNmRkMDJjODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [3:1616:2965] -> [4:1571:2437] -- EvScanData from [4:1620:2444]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2} 2025-04-04T17:13:13.675453Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> TestYmqHttpProxy::TestDeleteQueue [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] Test command err: Trying to start YDB, gRPC: 23752, MsgBus: 21294 2025-04-04T17:09:25.147186Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506271069062393:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.147668Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014da/r3tmp/tmppqTXLt/pdisk_1.dat 2025-04-04T17:09:25.699937Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.715654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.715750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.718355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23752, node 1 2025-04-04T17:09:25.801361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.801387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.801398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.801525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21294 TClient is connected to server localhost:21294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.338867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.383090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:28.256535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283953964936:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.256668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.858578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283953965044:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.858675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.870946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.966558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506283953965127:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.966678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.976678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.015153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288248932500:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.015214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.020062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288248932506:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.024060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.034998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506288248932508:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:09:29.136201Z node 1 :TX_PROXY ERROR: Actor# [1:7489506288248932561:2499] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:30.148308Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506271069062393:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:30.148374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13386, MsgBus: 1328 2025-04-04T17:09:31.334570Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489506293942506443:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.334621Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014da/r3tmp/tmprgK558/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13386, node 2 2025-04-04T17:09:31.448076Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:31.458843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:31.458917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:31.460581Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:31.463842Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:31.463857Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:31.463864Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:31.463966Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1328 TClient is connected to server localhost:1328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:09:31.888362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:31.896496Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:09:31.910034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.019999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.208816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:32.268296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:34.296729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489506306827410094:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool defaul ... : { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:05.962505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:05.999681Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.048621Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.134534Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.204139Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.366943Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.406204Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.500318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.541951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.581764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.641436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.695801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:13:06.753175Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.780805Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:13:07.836396Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.888743Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.958950Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.038928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.093260Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.164234Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.212304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.284609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.371645Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.433432Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.531540Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.595372Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.656982Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.707989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.785165Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.835459Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.888183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.928584Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.061710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.103531Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.157791Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.237423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.290664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.544202Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:1, at schemeshard: 72057594046644480 2025-04-04T17:13:09.602773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.667837Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.715506Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.804311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.853673Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.903415Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.990084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.070548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 
2025-04-04T17:13:10.203808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480
2025-04-04T17:13:10.300304Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480
2025-04-04T17:13:10.373627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480
2025-04-04T17:13:10.432147Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480
2025-04-04T17:13:10.543268Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710721:0, at schemeshard: 72057594046644480
2025-04-04T17:13:10.594785Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestYmqHttpProxy::TestListDeadLetterSourceQueues
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47
>> TestYmqHttpProxy::TestPurgeQueue [GOOD]
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TopicService::OneConsumer_TheRangesOverlap
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestYmqHttpProxy::TestSendMessageBatch
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TPersQueueTest::BadTopic [GOOD]
>> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD]
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30
>> TPersQueueTest::WriteExisting [GOOD]
>> TPersQueueTest::WriteExistingBigValue
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9
>> TPersQueueTest::UpdatePartitionLocation [GOOD]
>> TPersQueueTest::TopicServiceCommitOffset
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
|88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TPersQueueTest::DirectReadPreCached [GOOD]
>> TPersQueueTest::DirectReadNotCached
>> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD]
>> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest
>> TestKinesisHttpProxy::TestWrongStream2 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30
>> TestYmqHttpProxy::TestChangeMessageVisibilityBatch
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD]
>> TPersQueueTest::SetupLockSession2 [GOOD]
>> TPersQueueTest::SetupLockSession
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD]
>> TestKinesisHttpProxy::ListShardsTimestamp [GOOD]
>> TestKinesisHttpProxy::TestWrongRequest
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD]
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30
>> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD]
>> TestKinesisHttpProxy::TestCounters [GOOD]
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestKinesisHttpProxy::GoodRequestCreateStream
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48
>> ExternalIndex::Simple
>> TestKinesisHttpProxy::ListShardsToken
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> TestKinesisHttpProxy::TestEmptyHttpBody
|88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36
------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD]
Test command err: 2025-04-04T17:12:32.423594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507072167307448:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.423758Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fb0/r3tmp/tmpZHzDPJ/pdisk_1.dat 2025-04-04T17:12:32.807205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:32.822839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.822976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.827540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15310, node 1 2025-04-04T17:12:32.991744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.991776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.991790Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.991935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.577956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:22053 2025-04-04T17:12:33.765321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.769686Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.771734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:33.783375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:12:33.795086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.929731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:33.973138Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-04T17:12:33.977697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.060901Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-04T17:12:34.065487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.101055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.141355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.179972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.215413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.285518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.320700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.517498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085052210776:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.517618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.519196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085052210784:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.523340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.544986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507085052210790:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.598001Z node 1 :TX_PROXY ERROR: Actor# [1:7489507085052210841:2919] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.683105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhdt8xz5v7aj0eg4tzc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYzOTc4Y2UtODU5NDcxZDItNGIwYmM4LTQxYjk1OTZl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.702917Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhdt8xz5v7aj0eg4tzc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYzOTc4Y2UtODU5NDcxZDItNGIwYmM4LTQxYjk1OTZl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.708513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhdt8xz5v7aj0eg4tzc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYzOTc4Y2UtODU5NDcxZDItNGIwYmM4LTQxYjk1OTZl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.758502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.797636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.823791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.895353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.972788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.017294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.056129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.091123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:12:37.118426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:12:37.177210Z node 1 :HTTP INFO: Listening on http://127.0.0.1:1382 2025-04-04T17:12:37.422969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507072167307448:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.423043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.178743Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.178873Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true ... Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:20.276616Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 15ms 2025-04-04T17:13:20.277060Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: 
"MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:20.277093Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-04T17:13:20.277198Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 16ms 2025-04-04T17:13:20.277739Z node 7 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:20.279042Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member 
{ Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:20.279068Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 17ms 2025-04-04T17:13:20.279287Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:20.279317Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-04T17:13:20.279378Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 18ms 2025-04-04T17:13:20.279629Z node 7 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:20.428426Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7489507278844478295:2450]: Pool not found 2025-04-04T17:13:20.429047Z node 7 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-04T17:13:20.924432Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7489507278844478292:2448]: Pool not found 2025-04-04T17:13:20.925313Z node 7 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-04T17:13:20.929052Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489507278844478447:2472], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:20.929132Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7489507278844478448:2473], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-04T17:13:20.929185Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:21.211293Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7489507278844478445:2471]: Pool not found 2025-04-04T17:13:21.211565Z node 7 :SQS DEBUG: [cleanup removed queues] there are no queues to delete Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-04-04T17:13:21.241316Z node 7 :HTTP DEBUG: (#37,[::1]:56970) incoming connection opened 2025-04-04T17:13:21.241387Z node 7 :HTTP DEBUG: (#37,[::1]:56970) -> (POST /Root) 2025-04-04T17:13:21.241552Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b81f:3801:6050:0:a01f:3801:6050:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 93d4a367-dfe9770-6166d2af-e9f99e83 2025-04-04T17:13:21.241806Z node 7 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [93d4a367-dfe9770-6166d2af-e9f99e83] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-04-04T17:13:21.241984Z node 7 :HTTP DEBUG: (#37,[::1]:56970) <- (400 InvalidAction) 2025-04-04T17:13:21.242030Z node 7 :HTTP DEBUG: (#37,[::1]:56970) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2025-04-04T17:13:21.242065Z node 7 :HTTP DEBUG: (#37,[::1]:56970) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 93d4a367-dfe9770-6166d2af-e9f99e83 x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-04-04T17:13:21.242169Z node 7 :HTTP DEBUG: (#37,[::1]:56970) connection closed |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-04-04T17:12:40.190534Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507108013432136:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:40.191014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:40.264061Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507105711753746:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:40.265208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:40.498833Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018ba/r3tmp/tmpXlcFBC/pdisk_1.dat 2025-04-04T17:12:40.510513Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:12:40.811495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:40.811604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:40.812389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:40.812444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:40.817176Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:40.817310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:40.818320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:40.853475Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22499, node 1 2025-04-04T17:12:40.976474Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0018ba/r3tmp/yandexdW6bHo.tmp 2025-04-04T17:12:40.976501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0018ba/r3tmp/yandexdW6bHo.tmp 2025-04-04T17:12:40.976642Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0018ba/r3tmp/yandexdW6bHo.tmp 2025-04-04T17:12:40.976799Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:41.263792Z INFO: TTestServer started on Port 28050 GrpcPort 22499 TClient is connected to server localhost:28050 PQClient connected to localhost:22499 === TenantModeEnabled() = 0 === Init PQ - start server on port 22499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:12:41.583637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:12:41.584776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.587032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:12:41.588432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:12:41.588492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.595183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:12:41.595326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:12:41.595478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.595526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:12:41.595561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:12:41.595571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-04T17:12:41.597497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:41.597668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:12:41.597688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:12:41.598200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:41.598227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:12:41.598271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:12:41.599386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.599416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.599433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:12:41.599459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:12:41.604633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:41.606325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:12:41.607365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:12:41.608926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786761654, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:12:41.609063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786761654 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:12:41.609094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:12:41.610518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:12:41.610562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:12:41.610773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:12:41.610834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:12:41.613804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046644480 2025-04-04T17:12:41.613846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:12:41.614024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:12:41.614040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489507108013432793:2403], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:12:41.614080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.614126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:12:41.614279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:12:41.614308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:12:41.614360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:12:41.614374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:12:41.614389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:12:41.614413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:12:41.614436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:12:41.614444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-04T17:12:41.614486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pat ... ingSession "shared/cli_5_1_14830514577128078018_v1" (Sender=[5:7489507278270871752:2652], Pipe=[5:7489507278270871755:2652], Partitions=[], ActiveFamilyCount=0) 2025-04-04T17:13:20.197103Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli rebalancing was scheduled 2025-04-04T17:13:20.197183Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-04T17:13:20.197273Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_14830514577128078018_v1" (Sender=[5:7489507278270871752:2652], Pipe=[5:7489507278270871755:2652], Partitions=[], ActiveFamilyCount=0) 2025-04-04T17:13:20.197464Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_14830514577128078018_v1" sender [5:7489507278270871752:2652] lock partition 0 for ReadingSession "shared/cli_5_1_14830514577128078018_v1" (Sender=[5:7489507278270871752:2652], Pipe=[5:7489507278270871755:2652], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-04T17:13:20.197574Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-04T17:13:20.197604Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000393s 2025-04-04T17:13:20.199065Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_14830514577128078018_v1" ClientId: "cli" PipeClient { RawX1: 7489507278270871755 RawX2: 4503621102209628 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-04-04T17:13:20.199213Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-04T17:13:20.199735Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7489507278270871759:2656] 2025-04-04T17:13:20.199969Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_14830514577128078018_v1:1 with generation 1 2025-04-04T17:13:20.208133Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1743786800184 } Cookie: 18446744073709551615 } 2025-04-04T17:13:20.208179Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-04T17:13:20.208229Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 sending to client partition status 2025-04-04T17:13:20.209597Z :INFO: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-04-04T17:13:20.210696Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-04T17:13:20.210840Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-04T17:13:20.210876Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-04T17:13:20.210931Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-04T17:13:20.210988Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-04T17:13:20.211002Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1TEvPartitionReady. Aval parts: 1 2025-04-04T17:13:20.211038Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 performing read request: guid# e7c0d51f-eb8d0699-ba760342-35639aa1, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-04T17:13:20.211137Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid e7c0d51f-eb8d0699-ba760342-35639aa1 2025-04-04T17:13:20.213275Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1743786800081 CreateTimestampMS: 1743786800080 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1743786800086 CreateTimestampMS: 1743786800081 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1743786800087 CreateTimestampMS: 1743786800081 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-04T17:13:20.213451Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-04T17:13:20.213485Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid e7c0d51f-eb8d0699-ba760342-35639aa1 has messages 1 2025-04-04T17:13:20.213555Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 read done: guid# e7c0d51f-eb8d0699-ba760342-35639aa1, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 484 2025-04-04T17:13:20.213579Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 response to read: guid# e7c0d51f-eb8d0699-ba760342-35639aa1 2025-04-04T17:13:20.213783Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 Process answer. Aval parts: 0 2025-04-04T17:13:20.214186Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] Got ReadResponse, serverBytesSize = 484, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428316 2025-04-04T17:13:20.214319Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428316 2025-04-04T17:13:20.214589Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-04T17:13:20.214676Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] Returning serverBytesSize = 484 to budget 2025-04-04T17:13:20.214714Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] In ContinueReadingDataImpl, ReadSizeBudget = 484, ReadSizeServerDelta = 52428316 2025-04-04T17:13:20.214990Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-04T17:13:20.215145Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-04T17:13:20.215212Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-04T17:13:20.215244Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-04T17:13:20.215295Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-04T17:13:20.215349Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] Returning serverBytesSize = 0 to budget 2025-04-04T17:13:20.215540Z :INFO: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] Closing read session. 
Close timeout: 0.000000s 2025-04-04T17:13:20.215584Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-04T17:13:20.215633Z :INFO: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 32 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:13:20.215744Z :NOTICE: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:13:20.215793Z :DEBUG: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] [] Abort session to cluster 2025-04-04T17:13:20.216285Z :NOTICE: [] [] [4bda7a4b-c3bd9aa5-b23cf014-d92847a3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:13:20.217196Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 grpc read done: success# 1, data# { read_request { bytes_size: 484 } } 2025-04-04T17:13:20.217231Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 grpc closed 2025-04-04T17:13:20.217257Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_14830514577128078018_v1 is DEAD 2025-04-04T17:13:20.217604Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_14830514577128078018_v1 2025-04-04T17:13:20.219239Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7489507278270871755:2652] disconnected; active server actors: 1 2025-04-04T17:13:20.219277Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7489507278270871755:2652] client cli disconnected session shared/cli_5_1_14830514577128078018_v1 |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-04-04T17:12:32.424116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507073219180738:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.424185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fc6/r3tmp/tmpoQ0WNx/pdisk_1.dat 2025-04-04T17:12:32.833716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:32.836258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.836353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.839306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27727, node 1 2025-04-04T17:12:32.992069Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.992095Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.992102Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.992235Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11931 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.669862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.687090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:11931 2025-04-04T17:12:33.894576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.899790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.901546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.918911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:12:33.929711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.079116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.116788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-04T17:12:34.120802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:34.165158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:12:34.204131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:34.247381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.326427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.373135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.410323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.460596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.648030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507086104084067:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.648134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.649085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507086104084079:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.656761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.671819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507086104084081:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.761451Z node 1 :TX_PROXY ERROR: Actor# [1:7489507086104084132:2917] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.682348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhhxfghmb2nmyf26ef68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU2Mzg0YmYtNjVkNzJjZjgtNGExOWEyYi05NTJlY2Q0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.706154Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhhxfghmb2nmyf26ef68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU2Mzg0YmYtNjVkNzJjZjgtNGExOWEyYi05NTJlY2Q0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.727444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhhxfghmb2nmyf26ef68, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU2Mzg0YmYtNjVkNzJjZjgtNGExOWEyYi05NTJlY2Q0ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.764093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.796774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.833714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.864272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.901889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.928568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.970858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... waiting... 
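A note on the three KQP_EXECUTER "Database not set, use /Root" messages above: the query context reached the executer with an empty Database field, so it fell back to the session default of /Root. A minimal sketch of how a client pins the database explicitly, assuming the YDB Python SDK; the endpoint value is a hypothetical placeholder, not taken from this run:

import ydb

# Sketch only: the endpoint below is a hypothetical placeholder for this cluster.
driver = ydb.Driver(
    endpoint="grpc://localhost:2136",
    database="/Root",  # an explicit database avoids the "Database not set" fallback
)
driver.wait(timeout=5)  # block until discovery succeeds or the timeout expires
driver.stop()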
2025-04-04T17:12:37.001540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:12:37.034061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.100310Z node 1 :HTTP INFO: Listening on http://127.0.0.1:23946 2025-04-04T17:12:37.422993Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507073219180738:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.423093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.101621Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.102662Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.102757Z node 1 :SQS DEBUG: ... TRACE: HandleSqsResponse DeleteMessageBatch { RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-04T17:13:21.904217Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7489507285059668207:2553]: DeleteMessageBatch { RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-04T17:13:21.904272Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7489507285059668208:3817]. Found: 1 2025-04-04T17:13:21.904385Z node 7 :SQS TRACE: Request [156b1f3-e897e4fa-d4f80ebb-da375bae] HandleResponse: { DeleteMessageBatch { RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-04-04T17:13:21.904452Z node 7 :SQS DEBUG: Request [156b1f3-e897e4fa-d4f80ebb-da375bae] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "156b1f3-e897e4fa-d4f80ebb-da375bae" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-04T17:13:21.904773Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [156b1f3-e897e4fa-d4f80ebb-da375bae] Got succesfult GRPC response. 2025-04-04T17:13:21.904887Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [156b1f3-e897e4fa-d4f80ebb-da375bae] reply ok 2025-04-04T17:13:21.904998Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [156b1f3-e897e4fa-d4f80ebb-da375bae] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 221 SourceAddress: 7800:ef00:6050:0:6000:ef00:6050:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2025-04-04T17:13:21.905576Z node 7 :HTTP DEBUG: (#37,[::1]:50044) <- (200 ) 2025-04-04T17:13:21.905669Z node 7 :HTTP DEBUG: (#37,[::1]:50044) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2025-04-04T17:13:21.905920Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976715716 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-04-04T17:13:21.905946Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 3ms 2025-04-04T17:13:21.906097Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976715716 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-04-04T17:13:21.906125Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2025-04-04T17:13:21.906189Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 3ms 2025-04-04T17:13:21.906330Z node 7 :SQS DEBUG: Request [] Sending executed reply 2025-04-04T17:13:21.906429Z node 7 :SQS DEBUG: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/1] 2025-04-04T17:13:21.906676Z node 7 :HTTP DEBUG: (#40,[::1]:50052) incoming connection opened 2025-04-04T17:13:21.906725Z node 7 :HTTP DEBUG: (#40,[::1]:50052) -> (POST /Root) 2025-04-04T17:13:21.906823Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [381c:7700:6050:0:201c:7700:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 3beba922-7ba1b584-613172a3-375cef5e 2025-04-04T17:13:21.907162Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [3beba922-7ba1b584-613172a3-375cef5e] got new request from [381c:7700:6050:0:201c:7700:6050:0] 2025-04-04T17:13:21.907459Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [3beba922-7ba1b584-613172a3-375cef5e] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-04T17:13:21.907475Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [3beba922-7ba1b584-613172a3-375cef5e] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-04T17:13:21.907809Z node 7 :SQS DEBUG: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 3beba922-7ba1b584-613172a3-375cef5e 2025-04-04T17:13:21.907904Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2025-04-04T17:13:21.907912Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Request proxy started 2025-04-04T17:13:21.908210Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2025-04-04T17:13:21.908434Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Get configuration duration: 0ms 2025-04-04T17:13:21.908641Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Send get leader node request to sqs service for cloud4/000000000000000101v0 2025-04-04T17:13:21.908660Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Leader node for queue [cloud4/000000000000000101v0] is 7 2025-04-04T17:13:21.908679Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Got leader node for queue response. Node id: 7. Status: 0 2025-04-04T17:13:21.908764Z node 7 :SQS TRACE: Request [3beba922-7ba1b584-613172a3-375cef5e] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "3beba922-7ba1b584-613172a3-375cef5e" 2025-04-04T17:13:21.908826Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "3beba922-7ba1b584-613172a3-375cef5e" 2025-04-04T17:13:21.908862Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Request started. Actor: [7:7489507285059668241:3845] 2025-04-04T17:13:21.908890Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7489507285059668241:3845] 2025-04-04T17:13:21.908903Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2025-04-04T17:13:21.909117Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Get configuration duration: 0ms 2025-04-04T17:13:21.909135Z node 7 :SQS TRACE: Request [3beba922-7ba1b584-613172a3-375cef5e] Got configuration. Root url: http://ghrun-mup5cka3jq.auto.internal:8771, Shards: 4, Fail: 0 2025-04-04T17:13:21.909157Z node 7 :SQS TRACE: Request [3beba922-7ba1b584-613172a3-375cef5e] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2025-04-04T17:13:21.909168Z node 7 :SQS TRACE: Request [3beba922-7ba1b584-613172a3-375cef5e] DoRoutine 2025-04-04T17:13:21.909223Z node 7 :SQS TRACE: Increment active message requests for [cloud4/000000000000000101v0/2]. ActiveMessageRequests: 1 2025-04-04T17:13:21.909239Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Received empty result from shard 2 infly. 
Infly capacity: 0. Messages count: 0 2025-04-04T17:13:21.909249Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] No known messages in this shard. Skip attempt to add messages to infly 2025-04-04T17:13:21.909257Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Already tried to add messages to infly 2025-04-04T17:13:21.909287Z node 7 :SQS TRACE: Decrement active message requests for [[cloud4/000000000000000101v0/2]. ActiveMessageRequests: 0 2025-04-04T17:13:21.909347Z node 7 :SQS TRACE: Request [3beba922-7ba1b584-613172a3-375cef5e] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "3beba922-7ba1b584-613172a3-375cef5e" } } 2025-04-04T17:13:21.909414Z node 7 :SQS TRACE: Request [3beba922-7ba1b584-613172a3-375cef5e] Sending sqs response: { ReceiveMessage { RequestId: "3beba922-7ba1b584-613172a3-375cef5e" } RequestId: "3beba922-7ba1b584-613172a3-375cef5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-04T17:13:21.909756Z node 7 :SQS DEBUG: Request ReceiveMessage working duration: 0ms 2025-04-04T17:13:21.909857Z node 7 :SQS TRACE: HandleSqsResponse ReceiveMessage { RequestId: "3beba922-7ba1b584-613172a3-375cef5e" } RequestId: "3beba922-7ba1b584-613172a3-375cef5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-04T17:13:21.909901Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7489507285059668237:2557]: ReceiveMessage { RequestId: "3beba922-7ba1b584-613172a3-375cef5e" } RequestId: "3beba922-7ba1b584-613172a3-375cef5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-04-04T17:13:21.909941Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7489507285059668241:3845]. Found: 1 2025-04-04T17:13:21.910023Z node 7 :SQS TRACE: Request [3beba922-7ba1b584-613172a3-375cef5e] HandleResponse: { ReceiveMessage { RequestId: "3beba922-7ba1b584-613172a3-375cef5e" } RequestId: "3beba922-7ba1b584-613172a3-375cef5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-04-04T17:13:21.910079Z node 7 :SQS DEBUG: Request [3beba922-7ba1b584-613172a3-375cef5e] Sending reply from proxy actor: { ReceiveMessage { RequestId: "3beba922-7ba1b584-613172a3-375cef5e" } RequestId: "3beba922-7ba1b584-613172a3-375cef5e" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-04-04T17:13:21.910392Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [3beba922-7ba1b584-613172a3-375cef5e] Got succesfult GRPC response. 2025-04-04T17:13:21.910435Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [3beba922-7ba1b584-613172a3-375cef5e] reply ok 2025-04-04T17:13:21.910499Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [3beba922-7ba1b584-613172a3-375cef5e] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 179 SourceAddress: 381c:7700:6050:0:201c:7700:6050:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage Http output full {} 2025-04-04T17:13:21.910572Z node 7 :HTTP DEBUG: (#40,[::1]:50052) <- (200 ) 2025-04-04T17:13:21.910654Z node 7 :HTTP DEBUG: (#40,[::1]:50052) connection closed >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> Initializer::Simple [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestYmqHttpProxy::TestListQueueTags |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::Init ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-04-04T17:12:13.742599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:13.743260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:13.743323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001b59/r3tmp/tmpZUVqb9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21878, node 1 TClient is connected to server localhost:28023 2025-04-04T17:12:15.119005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:12:15.182452Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:15.191287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:15.191359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:15.191398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:15.191843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:15.231751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:15.232786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:15.246905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:25.423553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:677:2568], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:25.423678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:687:2573], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:25.423764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:25.435386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:12:25.528895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:691:2576], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:12:25.628384Z node 1 :TX_PROXY ERROR: Actor# [1:763:2617] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:26.079060Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:773:2626], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:12:26.088509Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGE0ZDMxOGYtNTk3MDMzNmItMjc5YmVlYjgtYzc3MTEzMzk=, ActorId: [1:673:2565], ActorState: ExecuteState, TraceId: 01jr0wm7jafypc5v70zf0rd6fh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-04-04T17:12:26.208530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-04T17:12:27.685178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:12:28.108482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:12:28.913774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480 Initialization finished 2025-04-04T17:12:39.879429Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr0wmnkj4ayf03m192yh7fr9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDZjZTlhMDMtOTY5YzNjMDYtZjFjYjBhZGQtMjkyYTdjYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-04T17:12:51.341595Z node 1 :TX_PROXY ERROR: Actor# [1:1347:3061] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-04-04T17:12:51.341793Z node 1 :TX_PROXY ERROR: Actor# [1:1347:3061] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-04-04T17:13:01.946033Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0wnb4ge7k7dpta6733a4qv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWJhMTY3OTUtZDdmNjNlNTktOTFkNzllZTItOTlkMWExZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:977 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-04-04T17:13:23.157985Z node 1 :TX_PROXY ERROR: Actor# [1:1547:3211] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-04-04T17:13:23.158160Z node 1 :TX_PROXY ERROR: Actor# [1:1547:3211] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-04-04T17:12:39.814664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507102666852440:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:39.814719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:39.859441Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507101184748461:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:39.859489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:40.030283Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:12:40.097696Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018fa/r3tmp/tmpDrag6i/pdisk_1.dat 2025-04-04T17:12:40.574871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:40.575717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:40.577566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:40.577625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:40.581955Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:40.582104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:40.584021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:40.619309Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13328, node 1 2025-04-04T17:12:40.671556Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:40.672629Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:12:40.957961Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0018fa/r3tmp/yandexXawYqC.tmp 2025-04-04T17:12:40.957988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/i5tq/0018fa/r3tmp/yandexXawYqC.tmp 2025-04-04T17:12:40.958165Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0018fa/r3tmp/yandexXawYqC.tmp 2025-04-04T17:12:40.958333Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:41.264145Z INFO: TTestServer started on Port 64891 GrpcPort 13328 TClient is connected to server localhost:64891 PQClient connected to localhost:13328 === TenantModeEnabled() = 0 === Init PQ - start server on port 13328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:41.588040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:12:41.588240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.588410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:12:41.588623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:12:41.588664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.590995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 waiting... 
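The read session traced below (consumer "cli", three messages at offsets 0-2, payloads base64-decoded by the reader) is driven by the CLI binary under test; a rough equivalent using the synchronous Python SDK reader is sketched here as an assumption, not the test's actual code, with a placeholder endpoint:

import base64
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholder endpoint
driver.wait(timeout=5)

# Consumer "cli" and the topic path are taken from the trace below.
reader = driver.topic_client.reader("/Root/PQ/rt3.dc1--topic1", consumer="cli")
for _ in range(3):  # the traced session reads exactly three messages (offsets 0-2)
    message = reader.receive_message()
    print(base64.b64encode(message.data).decode())  # base64 output, per the test name
reader.close()
driver.stop()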
2025-04-04T17:12:41.592195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:12:41.592379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.592410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:12:41.592462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-04T17:12:41.592472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2025-04-04T17:12:41.594155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:12:41.594187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-04T17:12:41.594211Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:12:41.599068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.599127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:12:41.599150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-04T17:12:41.600970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.601026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.601044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:12:41.601075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:12:41.604353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:12:41.606129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-04T17:12:41.607318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:12:41.608551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786761654, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:12:41.608713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786761654 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:12:41.608743Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:12:41.609874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-04T17:12:41.609916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:12:41.610102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:12:41.610179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:12:41.613803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:12:41.613858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:12:41.614011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:12:41.614030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489507106961820353:2434], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-04T17:12:41.614114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:41.614149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-04T17:12:41.614221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-04T17:12:41.614257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:12:41.614281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-04T17:12:41.614290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:12:41.614312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-04T17:12:41.614334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:12:41.614361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-04T17:12:41.614372Z node 1 :FLAT_TX_SCHEMESHA ... cli family 1 status Active partitions [0] session "shared/cli_5_1_15785852714778387709_v1" sender [5:7489507286150969821:2645] lock partition 0 for ReadingSession "shared/cli_5_1_15785852714778387709_v1" (Sender=[5:7489507286150969821:2645], Pipe=[5:7489507286150969824:2645], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-04T17:13:22.494832Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-04T17:13:22.494859Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000181s 2025-04-04T17:13:22.495933Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_15785852714778387709_v1" ClientId: "cli" PipeClient { RawX1: 7489507286150969824 RawX2: 4503621102209621 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-04-04T17:13:22.496035Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-04T17:13:22.496347Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7489507286150969826:2648] 2025-04-04T17:13:22.496499Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_15785852714778387709_v1:1 with generation 1 2025-04-04T17:13:22.498524Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-04-04T17:13:22.498625Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 got read request: guid# f8a7fb86-8171dec2-63920bd0-383f8e06 2025-04-04T17:13:22.508393Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1743786802474 } Cookie: 18446744073709551615 } 2025-04-04T17:13:22.508449Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-04-04T17:13:22.508512Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 sending to client partition status 2025-04-04T17:13:22.510358Z :INFO: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-04-04T17:13:22.510988Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-04-04T17:13:22.511123Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-04-04T17:13:22.511177Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-04T17:13:22.511208Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-04-04T17:13:22.511266Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-04-04T17:13:22.511282Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1TEvPartitionReady. Aval parts: 1 2025-04-04T17:13:22.511332Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 performing read request: guid# cf3a17c0-13a43b7c-11f6285f-885864cf, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-04T17:13:22.511458Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid cf3a17c0-13a43b7c-11f6285f-885864cf 2025-04-04T17:13:22.512509Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1743786802315 CreateTimestampMS: 1743786802313 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1743786802371 CreateTimestampMS: 1743786802313 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1743786802371 CreateTimestampMS: 1743786802313 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-04T17:13:22.512701Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-04T17:13:22.512747Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid cf3a17c0-13a43b7c-11f6285f-885864cf has messages 1 2025-04-04T17:13:22.512826Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 read done: guid# cf3a17c0-13a43b7c-11f6285f-885864cf, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 371 2025-04-04T17:13:22.512853Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 response to read: guid# cf3a17c0-13a43b7c-11f6285f-885864cf 2025-04-04T17:13:22.513035Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 Process answer. Aval parts: 0 2025-04-04T17:13:22.513695Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] Got ReadResponse, serverBytesSize = 371, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-04-04T17:13:22.513835Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428429 2025-04-04T17:13:22.515176Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-04-04T17:13:22.515259Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] Returning serverBytesSize = 371 to budget 2025-04-04T17:13:22.515303Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] In ContinueReadingDataImpl, ReadSizeBudget = 371, ReadSizeServerDelta = 52428429 2025-04-04T17:13:22.515637Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-04T17:13:22.518348Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-04T17:13:22.518411Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-04T17:13:22.518442Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-04-04T17:13:22.518486Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-04-04T17:13:22.518534Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] Returning serverBytesSize = 0 to budget 2025-04-04T17:13:22.518484Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 grpc read done: success# 1, data# { read_request { bytes_size: 371 } } 2025-04-04T17:13:22.518732Z :INFO: [] [] [3bf60197-ff47cc13-489387c2-6439de44] Closing read session. 
Close timeout: 0.000000s 2025-04-04T17:13:22.518635Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 got read request: guid# 58f8578b-cf40ea5e-141fd13c-cf7a5b2b 2025-04-04T17:13:22.518772Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-04-04T17:13:22.518819Z :INFO: [] [] [3bf60197-ff47cc13-489387c2-6439de44] Counters: { Errors: 0 CurrentSessionLifetimeMs: 96 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:13:22.518923Z :NOTICE: [] [] [3bf60197-ff47cc13-489387c2-6439de44] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:13:22.518964Z :DEBUG: [] [] [3bf60197-ff47cc13-489387c2-6439de44] [] Abort session to cluster 2025-04-04T17:13:22.519455Z :NOTICE: [] [] [3bf60197-ff47cc13-489387c2-6439de44] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:13:22.520192Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 grpc read done: success# 0, data# { } 2025-04-04T17:13:22.520218Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 grpc read failed 2025-04-04T17:13:22.520241Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 grpc closed 2025-04-04T17:13:22.520282Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_15785852714778387709_v1 is DEAD 2025-04-04T17:13:22.522044Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_15785852714778387709_v1 2025-04-04T17:13:22.522401Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7489507286150969824:2645] disconnected; active server actors: 1 2025-04-04T17:13:22.522441Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7489507286150969824:2645] client cli disconnected session shared/cli_5_1_15785852714778387709_v1 |88.6%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] |88.6%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2025-04-04T17:12:32.424324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507074822429756:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.424394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f76/r3tmp/tmpeeaqV8/pdisk_1.dat 2025-04-04T17:12:32.805800Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:32.848592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.848705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.850564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23730, node 1 2025-04-04T17:12:32.991484Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.991512Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.991520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.991624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.543832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:26320 2025-04-04T17:12:33.775205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
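The "Send metering event." entries earlier in this log serialize billing fields as a flat run of "Key: Value" pairs (HttpStatusCode: 200 ... Action: DeleteMessageBatch). A small stdlib-only sketch for splitting such a record when post-processing these logs; the sample string is copied from the DeleteMessageBatch metering event above:

import re

METERING_RE = re.compile(r"(\w+): (\S+)")

def parse_metering(line: str) -> dict:
    # Each record is a flat run of "Key: Value" pairs whose values contain no spaces.
    return dict(METERING_RE.findall(line))

record = parse_metering(
    "HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 "
    "RequestSizeInBytes: 716 ResponseSizeInBytes: 221 "
    "ResourceId: 000000000000000101v0 Action: DeleteMessageBatch"
)
assert record["Action"] == "DeleteMessageBatch"
assert int(record["RequestSizeInBytes"]) == 716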
2025-04-04T17:12:33.794674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.798952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.813013Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:12:33.820036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.962316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:34.007630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.048744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-04T17:12:34.053424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.101684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.148730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.193282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.248823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.286772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.328835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.585410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507087707333084:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.585507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.585684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507087707333096:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.592130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.604347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507087707333098:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.662530Z node 1 :TX_PROXY ERROR: Actor# [1:7489507087707333149:2917] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680156Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhfyd1bxjff7gn7bwmqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ3OGYwMTItZjE4Y2ZiN2MtOGViMmYwNDUtNmVlMTk5MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.703302Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhfyd1bxjff7gn7bwmqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ3OGYwMTItZjE4Y2ZiN2MtOGViMmYwNDUtNmVlMTk5MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.707520Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhfyd1bxjff7gn7bwmqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ3OGYwMTItZjE4Y2ZiN2MtOGViMmYwNDUtNmVlMTk5MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.765904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.824211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.859001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.898748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:36.932549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.967361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:37.007200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-04T17:12:37.034135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:37.070147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.137019Z node 1 :HTTP INFO: Listening on http://127.0.0.1:64090 2025-04-04T17:12:37.423072Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507074822429756:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.423141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.138962Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.138964Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.139075Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: false EnableDeadLetterQueues: true } 2025-04-04T17:12:38.139698Z node 1 :HTTP INFO: Listening on http:// ... E_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976715909 Step: 1743786804221 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k1\":\"v\"}" } } } } } 2025-04-04T17:13:24.215666Z node 7 :SQS DEBUG: Request [b788b3d3-a5d557df-c9d48396-c9abcffb] 
Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 56ms 2025-04-04T17:13:24.216003Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [a1f24274-6435c998-a221529b-c4a4df87] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-04-04T17:13:24.216025Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [a1f24274-6435c998-a221529b-c4a4df87] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-04T17:13:24.216187Z node 7 :SQS TRACE: Request [b788b3d3-a5d557df-c9d48396-c9abcffb] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715909 Step: 1743786804221 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k1\":\"v\"}" } } } } } 2025-04-04T17:13:24.216290Z node 7 :SQS TRACE: Request [b788b3d3-a5d557df-c9d48396-c9abcffb] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": 
"{\"k1\":\"v\"}"} 2025-04-04T17:13:24.216460Z node 7 :SQS DEBUG: Request [b788b3d3-a5d557df-c9d48396-c9abcffb] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 58ms 2025-04-04T17:13:24.216544Z node 7 :SQS DEBUG: Request [b788b3d3-a5d557df-c9d48396-c9abcffb] Sending executed reply 2025-04-04T17:13:24.218503Z node 7 :SQS DEBUG: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: a1f24274-6435c998-a221529b-c4a4df87 2025-04-04T17:13:24.218658Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2025-04-04T17:13:24.218676Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Request proxy started 2025-04-04T17:13:24.218866Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2025-04-04T17:13:24.219107Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Get configuration duration: 0ms 2025-04-04T17:13:24.219270Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Send get leader node request to sqs service for cloud4/000000000000000301v0 2025-04-04T17:13:24.219301Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Leader node for queue [cloud4/000000000000000301v0] is 7 2025-04-04T17:13:24.219319Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Got leader node for queue response. Node id: 7. Status: 0 2025-04-04T17:13:24.219422Z node 7 :SQS TRACE: Request [a1f24274-6435c998-a221529b-c4a4df87] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "a1f24274-6435c998-a221529b-c4a4df87" 2025-04-04T17:13:24.219495Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "a1f24274-6435c998-a221529b-c4a4df87" 2025-04-04T17:13:24.219561Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Request started. Actor: [7:7489507298482893096:5408] 2025-04-04T17:13:24.219597Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7489507298482893096:5408] 2025-04-04T17:13:24.219614Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2025-04-04T17:13:24.219647Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Get configuration duration: 0ms 2025-04-04T17:13:24.219677Z node 7 :SQS TRACE: Request [a1f24274-6435c998-a221529b-c4a4df87] Got configuration. 
Root url: http://ghrun-mup5cka3jq.auto.internal:8771, Shards: 1, Fail: 0 2025-04-04T17:13:24.219690Z node 7 :SQS TRACE: Request [a1f24274-6435c998-a221529b-c4a4df87] DoRoutine 2025-04-04T17:13:24.219787Z node 7 :SQS TRACE: Request [a1f24274-6435c998-a221529b-c4a4df87] SendReplyAndDie from action actor { ListQueueTags { RequestId: "a1f24274-6435c998-a221529b-c4a4df87" Tags { Key: "k1" Value: "v" } } } 2025-04-04T17:13:24.219892Z node 7 :SQS TRACE: Request [a1f24274-6435c998-a221529b-c4a4df87] Sending sqs response: { ListQueueTags { RequestId: "a1f24274-6435c998-a221529b-c4a4df87" Tags { Key: "k1" Value: "v" } } RequestId: "a1f24274-6435c998-a221529b-c4a4df87" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k1" Value: "v" } } 2025-04-04T17:13:24.219986Z node 7 :SQS TRACE: HandleSqsResponse ListQueueTags { RequestId: "a1f24274-6435c998-a221529b-c4a4df87" Tags { Key: "k1" Value: "v" } } RequestId: "a1f24274-6435c998-a221529b-c4a4df87" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k1" Value: "v" } 2025-04-04T17:13:24.220034Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7489507298482893094:2765]: ListQueueTags { RequestId: "a1f24274-6435c998-a221529b-c4a4df87" Tags { Key: "k1" Value: "v" } } RequestId: "a1f24274-6435c998-a221529b-c4a4df87" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k1" Value: "v" } 2025-04-04T17:13:24.220064Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7489507298482893096:5408]. Found: 1 2025-04-04T17:13:24.220641Z node 7 :SQS TRACE: Request [a1f24274-6435c998-a221529b-c4a4df87] HandleResponse: { ListQueueTags { RequestId: "a1f24274-6435c998-a221529b-c4a4df87" Tags { Key: "k1" Value: "v" } } RequestId: "a1f24274-6435c998-a221529b-c4a4df87" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k1" Value: "v" } }, status: OK 2025-04-04T17:13:24.220748Z node 7 :SQS DEBUG: Request [a1f24274-6435c998-a221529b-c4a4df87] Sending reply from proxy actor: { ListQueueTags { RequestId: "a1f24274-6435c998-a221529b-c4a4df87" Tags { Key: "k1" Value: "v" } } RequestId: "a1f24274-6435c998-a221529b-c4a4df87" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k1" Value: "v" } } 2025-04-04T17:13:24.220888Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [a1f24274-6435c998-a221529b-c4a4df87] Got successful GRPC response. 2025-04-04T17:13:24.220967Z node 7 :HTTP_PROXY INFO: http request [ListQueueTags] requestId [a1f24274-6435c998-a221529b-c4a4df87] reply ok 2025-04-04T17:13:24.221036Z node 7 :HTTP_PROXY DEBUG: http request [ListQueueTags] requestId [a1f24274-6435c998-a221529b-c4a4df87] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 196 SourceAddress: b870:d200:6050:0:a070:d200:6050:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-04-04T17:13:24.221133Z node 7 :HTTP DEBUG: (#37,[::1]:38836) <- (200 ) 2025-04-04T17:13:24.221239Z node 7 :HTTP DEBUG: (#37,[::1]:38836) connection closed Http output full {"Tags":{"k1":"v"}} |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpScan::ScanPg [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD] >> TPersQueueTest::SchemeshardRestart >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2025-04-04T17:12:58.061484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:58.062000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:58.062214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:12:58.063722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:58.063930Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:58.064160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00173c/r3tmp/tmph0SO8e/pdisk_1.dat 2025-04-04T17:12:58.438624Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:58.619588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:12:58.722395Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:12:58.724520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:58.724640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:58.727857Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:12:58.728404Z node 2 :TX_PROXY DEBUG: actor# [2:236:2128] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:12:58.729874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:58.729953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:58.735432Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-04T17:12:58.748239Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:58.748645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:58.749003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:59.024786Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] Handle TEvProposeTransaction 2025-04-04T17:12:59.024851Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:12:59.025014Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1232:2746] 2025-04-04T17:12:59.151675Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:12:59.151773Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.152474Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1232:2746] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:12:59.152572Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.153062Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.153254Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.153378Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:12:59.155085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.155655Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:12:59.158025Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:12:59.158143Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2746] txid# 281474976715657 SEND to# [1:1141:2688] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:12:59.217140Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1283:2389] 2025-04-04T17:12:59.217334Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.256800Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:59.256996Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:12:59.258598Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:12:59.258702Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:12:59.258761Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:12:59.259096Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:12:59.259364Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:12:59.259437Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1307:2389] in generation 1 2025-04-04T17:12:59.283720Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:12:59.310463Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:12:59.310706Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:12:59.310837Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2406] 2025-04-04T17:12:59.310871Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:12:59.310906Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:12:59.310943Z node 
2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:12:59.311315Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:12:59.311437Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:12:59.311533Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:12:59.311567Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:12:59.311659Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:12:59.311704Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:12:59.366795Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1267:2776], serverId# [2:1314:2407], sessionId# [0:0:0] 2025-04-04T17:12:59.367222Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:12:59.367453Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:12:59.367568Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:12:59.369700Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:12:59.382681Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:12:59.382784Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:12:59.657359Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-04T17:12:59.657556Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-04T17:12:59.657662Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T17:12:59.722365Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-04T17:12:59.722667Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-04-04T17:12:59.722851Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-04-04T17:12:59.722989Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-04-04T17:12:59.738900Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1344:2797], serverId# [2:1346:2417], sessionId# [0:0:0] 2025-04-04T17:12:59.759378Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:12:59.759493Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:12:59.759842Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:12:59.759891Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 
2025-04-04T17:12:59.759959Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:12:59.760227Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:12:59.760386Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025 ... Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. Tasks execution finished 2025-04-04T17:13:13.970014Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1646:2980], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jr0wnp0j6enhmpt2g3re5g9m. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YmU3YTQ2YWYtNzM2NTdiYjMtZTE5OGMxNTYtYmI3ZmYwNTQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T17:13:13.970061Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2025-04-04T17:13:13.970111Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T17:13:13.970205Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T17:13:13.970378Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1643:2941] TxId: 281474976715664. Ctx: { TraceId: 01jr0wnp0j6enhmpt2g3re5g9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmU3YTQ2YWYtNzM2NTdiYjMtZTE5OGMxNTYtYmI3ZmYwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1646:2980], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1360 Tasks { TaskId: 1 CpuTimeUs: 388 FinishTimeMs: 1743786793969 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 55 BuildCpuTimeUs: 333 HostName: "ghrun-mup5cka3jq" NodeId: 3 CreateTimeMs: 1743786793967 } MaxMemoryUsage: 1048576 } 2025-04-04T17:13:13.970428Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jr0wnp0j6enhmpt2g3re5g9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmU3YTQ2YWYtNzM2NTdiYjMtZTE5OGMxNTYtYmI3ZmYwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1646:2980] 2025-04-04T17:13:13.971078Z node 3 :KQP_EXECUTER INFO: ActorId: [3:1643:2941] TxId: 281474976715664. Ctx: { TraceId: 01jr0wnp0j6enhmpt2g3re5g9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmU3YTQ2YWYtNzM2NTdiYjMtZTE5OGMxNTYtYmI3ZmYwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 2461 DurationUs: 1743786792013973 ExecuterCpuTimeUs: 1101 StartTimeMs: 1956 FinishTimeMs: 1743786793970 Stages { StageGuid: "b0ecf86b-1f80001f-e06a60db-aeb4703e" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (OptionalType (DataType \'Uint64))))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 1360 Tasks { TaskId: 1 CpuTimeUs: 388 FinishTimeMs: 1743786793969 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 55 BuildCpuTimeUs: 333 HostName: "ghrun-mup5cka3jq" NodeId: 3 CreateTimeMs: 1743786793967 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786793968 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"b0ecf86b-1f80001f-e06a60db-aeb4703e\",\"Stats\":{\"BaseTimeMs\":1743786793968,\"ComputeNodes\":[{\"CpuTimeUs\":1360,\"Tasks\":[{\"ComputeTimeUs\":55,\"FinishTimeMs\":1743786793969,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":3,\"OutputBytes\":6,\"OutputRows\":1,\"ResultBytes\":6,\"ResultRows\":1,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 667 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\320\n\020\320\n\030\320\n \001" } } 2025-04-04T17:13:13.971131Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1643:2941] TxId: 281474976715664. 
Ctx: { TraceId: 01jr0wnp0j6enhmpt2g3re5g9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmU3YTQ2YWYtNzM2NTdiYjMtZTE5OGMxNTYtYmI3ZmYwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:13:13.971167Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1643:2941] TxId: 281474976715664. Ctx: { TraceId: 01jr0wnp0j6enhmpt2g3re5g9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmU3YTQ2YWYtNzM2NTdiYjMtZTE5OGMxNTYtYmI3ZmYwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-04T17:13:13.971201Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1643:2941] TxId: 281474976715664. Ctx: { TraceId: 01jr0wnp0j6enhmpt2g3re5g9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmU3YTQ2YWYtNzM2NTdiYjMtZTE5OGMxNTYtYmI3ZmYwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001360s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:13:13.971906Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-04T17:13:13.971993Z node 3 :TX_PROXY DEBUG: actor# [3:206:2172] Handle TEvProposeTransaction 2025-04-04T17:13:13.972033Z node 3 :TX_PROXY DEBUG: actor# [3:206:2172] TxId# 0 ProcessProposeTransaction 2025-04-04T17:13:13.972132Z node 3 :TX_PROXY DEBUG: actor# [3:206:2172] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1648:2981] SnapshotReq marker# P0 2025-04-04T17:13:13.972936Z node 3 :TX_PROXY DEBUG: Actor# [3:1651:2981] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-04T17:13:13.973103Z node 3 :TX_PROXY DEBUG: Actor# [3:1651:2981] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-04T17:13:13.973182Z node 3 :TX_PROXY DEBUG: Actor# [3:1648:2981] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-04T17:13:21.560416Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:21.560898Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:21.561339Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:13:21.562401Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:21.562813Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:21.563133Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00173c/r3tmp/tmp26HDGw/pdisk_1.dat 2025-04-04T17:13:21.935729Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:22.116184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:22.203486Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:22.203625Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:22.209153Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:22.209252Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:22.226359Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-04-04T17:13:22.227039Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:22.227524Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:22.556285Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:23.258747Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1398:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:23.258871Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1408:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:23.258964Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:23.264903Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:13:23.854716Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1412:2843], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:13:23.963615Z node 5 :TX_PROXY ERROR: Actor# [5:1542:2914] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:24.450855Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wp01rfxbtczm4q1km7f5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MmUyZTg0YTMtOWRiOWQ1Y2YtYjkwNWIzZGEtZWQ2YWYz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:13:25.079050Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wp17y22z9akw2vrtk1cra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ODJlZGQ5ZTgtM2MwMTQ2OTctYzg0MTkzOWItOWViOTM4NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:13:25.608326Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wp17y22z9akw2vrtk1cra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ODJlZGQ5ZTgtM2MwMTQ2OTctYzg0MTkzOWItOWViOTM4NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:13:25.611356Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-04-04T17:12:32.424042Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507072584560907:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.424123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fee/r3tmp/tmp6uGuGr/pdisk_1.dat 2025-04-04T17:12:32.823240Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2319, node 1 2025-04-04T17:12:32.863643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.863779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.865380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:32.990858Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.990878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.990884Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.990972Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61873 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.546632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:61873 2025-04-04T17:12:33.754771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.760288Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.762600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.775210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:12:33.782128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.929565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:33.974192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.057131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-04T17:12:34.061871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.096034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:34.176663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.239830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.289964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.337874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.368916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.710558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085469464241:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.710561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085469464233:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.710694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.714363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.725361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507085469464247:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.817198Z node 1 :TX_PROXY ERROR: Actor# [1:7489507085469464298:2915] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680127Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhkwcawtbzvtyk3hn156, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhmZjAxMjktNzE5NzA4N2EtNTFjNDFiYTMtZTkzYTMwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.707697Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhkwcawtbzvtyk3hn156, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhmZjAxMjktNzE5NzA4N2EtNTFjNDFiYTMtZTkzYTMwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.719009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhkwcawtbzvtyk3hn156, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhmZjAxMjktNzE5NzA4N2EtNTFjNDFiYTMtZTkzYTMwMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.792561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.830419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:36.866883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.896565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.928680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:36.965663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.995628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.023317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:37.049399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.117486Z node 1 :HTTP INFO: Listening on http://127.0.0.1:32759 2025-04-04T17:12:37.422943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507072584560907:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.423002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.117393Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.117396Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.117487Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-04T17:12:38.118571Z node 1 :SQS DEBUG: Enable scheme board sc ... node 7 :SQS DEBUG: Request SendMessageBatch working duration: 150ms 2025-04-04T17:13:26.085381Z node 7 :SQS TRACE: Request [6219f580-57741f59-cea90f62-48fceed1] Sending sqs response: { SendMessageBatch { RequestId: "6219f580-57741f59-cea90f62-48fceed1" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "1308c8dd-9ddc5eb4-29e20246-ad224010" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "5c6300e5-251824df-20b9fe7b-959f8fcd" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "6219f580-57741f59-cea90f62-48fceed1" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-04-04T17:13:26.085577Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7489507302321456006:3656]. Found: 1 2025-04-04T17:13:26.085620Z node 7 :SQS TRACE: HandleSqsResponse SendMessageBatch { RequestId: "6219f580-57741f59-cea90f62-48fceed1" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "1308c8dd-9ddc5eb4-29e20246-ad224010" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "5c6300e5-251824df-20b9fe7b-959f8fcd" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "6219f580-57741f59-cea90f62-48fceed1" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-04-04T17:13:26.085748Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7489507302321456000:2511]: SendMessageBatch { RequestId: "6219f580-57741f59-cea90f62-48fceed1" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "1308c8dd-9ddc5eb4-29e20246-ad224010" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "5c6300e5-251824df-20b9fe7b-959f8fcd" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "6219f580-57741f59-cea90f62-48fceed1" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-04-04T17:13:26.086007Z node 7 :SQS TRACE: Request [6219f580-57741f59-cea90f62-48fceed1] HandleResponse: { SendMessageBatch { RequestId: "6219f580-57741f59-cea90f62-48fceed1" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "1308c8dd-9ddc5eb4-29e20246-ad224010" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "5c6300e5-251824df-20b9fe7b-959f8fcd" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "6219f580-57741f59-cea90f62-48fceed1" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2025-04-04T17:13:26.086150Z node 7 :SQS DEBUG: Request [6219f580-57741f59-cea90f62-48fceed1] Sending reply from proxy actor: { SendMessageBatch { RequestId: "6219f580-57741f59-cea90f62-48fceed1" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "1308c8dd-9ddc5eb4-29e20246-ad224010" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "5c6300e5-251824df-20b9fe7b-959f8fcd" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "6219f580-57741f59-cea90f62-48fceed1" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"1308c8dd-9ddc5eb4-29e20246-ad224010"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"5c6300e5-251824df-20b9fe7b-959f8fcd"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} 2025-04-04T17:13:26.086951Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [6219f580-57741f59-cea90f62-48fceed1] Got succesfult GRPC response. 2025-04-04T17:13:26.087176Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [6219f580-57741f59-cea90f62-48fceed1] reply ok 2025-04-04T17:13:26.087345Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [6219f580-57741f59-cea90f62-48fceed1] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 644 SourceAddress: f846:2900:6050:0:e046:2900:6050:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2025-04-04T17:13:26.087433Z node 7 :HTTP DEBUG: (#37,[::1]:49350) <- (200 ) 2025-04-04T17:13:26.087519Z node 7 :HTTP DEBUG: (#37,[::1]:49350) connection closed 2025-04-04T17:13:26.088228Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2025-04-04T17:13:26.088270Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 3ms 2025-04-04T17:13:26.088325Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2025-04-04T17:13:26.088337Z node 7 :SQS DEBUG: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-04-04T17:13:26.088415Z node 7 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). Mode: COMPILE_AND_EXEC 2025-04-04T17:13:26.088480Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2025-04-04T17:13:26.088778Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? 
\006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> TestKinesisHttpProxy::ListShardsToken [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TopicService::OneConsumer_TheRangesOverlap [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-04-04T17:12:32.426163Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507071596173816:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.426337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f67/r3tmp/tmp51EsW0/pdisk_1.dat 2025-04-04T17:12:32.798461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.799162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.801808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:32.847658Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25821, node 1 2025-04-04T17:12:32.994302Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.994328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.994335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.994476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24222 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.630102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.643232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:24222 2025-04-04T17:12:33.842284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.846826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.875177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.037494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.079130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.119245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-04T17:12:34.123750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.161919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.199335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... waiting... 
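The TClient::Ls dump above is the raw PathDescription of /Root: the Self entry, the .sys child, and the (truncated) DomainDescription. For orientation, a sketch of retrieving the same listing through the SDK's scheme client; as before, the endpoint and database are placeholders, and the exact response fields are assumptions based on this dump:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
driver.wait(timeout=5)

# list_directory returns roughly what TClient::Ls prints: the entry for
# the path itself plus its children (".sys" in the dump above).
listing = driver.scheme_client.list_directory("/Root")
for child in listing.children:
    print(child.name, child.type)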
2025-04-04T17:12:34.237855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-04T17:12:34.278563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.311298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.389765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.682052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507084481077010:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.682214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507084481077001:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.682390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.686096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.701083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507084481077015:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.793154Z node 1 :TX_PROXY ERROR: Actor# [1:7489507084481077066:2919] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhjzbx1hvv1bn276zfxx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjI5ZDRmMmQtMjExMmZlM2UtNTQzM2JhNTMtOWI4ZmE4NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.703047Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhjzbx1hvv1bn276zfxx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjI5ZDRmMmQtMjExMmZlM2UtNTQzM2JhNTMtOWI4ZmE4NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.707336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhjzbx1hvv1bn276zfxx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjI5ZDRmMmQtMjExMmZlM2UtNTQzM2JhNTMtOWI4ZmE4NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.753398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.812694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.867762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.893197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.925834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.967081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.004856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.037891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
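The WARN/ERROR lines above are the usual bootstrap race for the default workload-manager pool: TPoolFetcherActor gets NOT_FOUND, a creator actor schedules a retry after "Transaction ... completed, doublechecking", and the losing TX_PROXY proposal is answered with "path exist, request accepts it", which is treated as success. A schematic sketch of that create-or-accept pattern; ensure_path, create, and AlreadyExists are hypothetical names standing in for the actors in the log, not a real API:

class AlreadyExists(Exception):
    """Stands in for the "path exist, request accepts it" reply above."""

def ensure_path(create, path):
    # Several concurrent callers race to create the same path; whoever
    # loses accepts the existing path as success, exactly as the
    # schemeshard reply above is interpreted.
    try:
        create(path)
    except AlreadyExists:
        pass  # a concurrent bootstrap already created it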
2025-04-04T17:12:37.076312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.142412Z node 1 :HTTP INFO: Listening on http://127.0.0.1:4952 2025-04-04T17:12:37.425870Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507071596173816:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.425947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.144089Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.144091Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.144171Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-04T17:12:38.144910Z node 1 :HTTP INFO: Listening on http://[::]:7004 2025-04-04T17:12:38.145108Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-04T17:12:38.157348Z node 1 :SQS INFO: Request SQS users list 202 ... a2296b5b] Sending execute request for query(idx=CHANGE_VISIBILITY_ID) to queue leader 2025-04-04T17:13:27.871025Z node 7 :SQS DEBUG: Request [5606af9f-353e2c24-1b2edbd3-a2296b5b] Executing compiled query(idx=CHANGE_VISIBILITY_ID) 2025-04-04T17:13:27.871106Z node 7 :SQS DEBUG: Request [5606af9f-353e2c24-1b2edbd3-a2296b5b] Starting executor actor for query(idx=CHANGE_VISIBILITY_ID). Mode: COMPILE_AND_EXEC 2025-04-04T17:13:27.871203Z node 7 :SQS TRACE: Request [5606af9f-353e2c24-1b2edbd3-a2296b5b] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 1, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 5923258363543965525, "NOW": 1743786807870, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1743786807755, "Offset": 1, "NewVisibilityDeadline": 1743786808870}, {"LockTimestamp": 1743786807792, "Offset": 2, "NewVisibilityDeadline": 1743786809870}]} 2025-04-04T17:13:27.871596Z node 7 :SQS TRACE: Request [5606af9f-353e2c24-1b2edbd3-a2296b5b] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t 
\000\001\205\000\000\000\000\001\030\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] >> JsonChangeRecord::Heartbeat [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TopicService::DifferentConsumers_TheRangesOverlap >> JsonChangeRecord::DataChangeVersion [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2025-04-04T17:12:32.429271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507073133833129:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.429317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f9d/r3tmp/tmp0RxKqd/pdisk_1.dat 2025-04-04T17:12:32.857809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:32.862150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.862277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.864137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1354, node 1 2025-04-04T17:12:32.991204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.991231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.991244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.991381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23158 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.543615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23158 2025-04-04T17:12:33.751866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.760316Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.762600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.773299Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:12:33.783200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.928725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.007728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-04T17:12:34.013287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.083428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.119078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:34.150042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.179175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.250446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.291371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.336961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.455283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507086018736460:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.455466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.455885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507086018736472:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.466677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.477137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507086018736474:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.564548Z node 1 :TX_PROXY ERROR: Actor# [1:7489507086018736525:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680429Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhbqbcysgay4bkp02rsc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBkNWM0NDItZmQ0NzNkZi0yZjdhOGFhMy1lYWJmOTM2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.703735Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhbqbcysgay4bkp02rsc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBkNWM0NDItZmQ0NzNkZi0yZjdhOGFhMy1lYWJmOTM2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.707336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhbqbcysgay4bkp02rsc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBkNWM0NDItZmQ0NzNkZi0yZjdhOGFhMy1lYWJmOTM2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root waiting... 2025-04-04T17:12:36.800441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.831012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.862031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.900778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:36.931114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.967758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.004601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.037684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:12:37.071237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:12:37.130129Z node 1 :HTTP INFO: Listening on http://127.0.0.1:24997 2025-04-04T17:12:37.429767Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507073133833129:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.429829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.131609Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.131630Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.131725Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-04T17:12:38.132498Z node 1 :HTTP INFO: Listening on http://[: ... ional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:27.603970Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 12ms 2025-04-04T17:13:27.604361Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { 
Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:27.604392Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-04T17:13:27.604534Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 13ms 2025-04-04T17:13:27.605169Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:27.607975Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:27.608003Z node 8 :SQS DEBUG: Request [] 
Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 6ms 2025-04-04T17:13:27.608200Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:27.608229Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-04T17:13:27.608297Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 7ms 2025-04-04T17:13:27.608558Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:27.738771Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7489507308951708400:2450]: Pool not found 2025-04-04T17:13:27.738973Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-04T17:13:28.200851Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7489507308951708349:2443]: Pool not found 2025-04-04T17:13:28.200963Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-04T17:13:28.202992Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489507313246675844:2472], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:28.203047Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7489507313246675845:2473], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-04T17:13:28.203068Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:28.434714Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7489507313246675842:2471]: Pool not found 2025-04-04T17:13:28.434974Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 2025-04-04T17:13:28.554681Z node 8 :HTTP DEBUG: (#37,[::1]:37234) incoming connection opened 2025-04-04T17:13:28.554774Z node 8 :HTTP DEBUG: (#37,[::1]:37234) -> (POST /) 2025-04-04T17:13:28.554908Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [5803:5800:6050:0:4003:5800:6050:0] request [CreateStream] url [/] database [] requestId: bdba0547-81b65366-3e47bbe8-ca20ca43 2025-04-04T17:13:28.555536Z node 8 :HTTP_PROXY WARN: http request [CreateStream] requestId [bdba0547-81b65366-3e47bbe8-ca20ca43] got new request with incorrect json from [5803:5800:6050:0:4003:5800:6050:0] database '' 2025-04-04T17:13:28.555753Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [bdba0547-81b65366-3e47bbe8-ca20ca43] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2025-04-04T17:13:28.555933Z node 8 :HTTP DEBUG: (#37,[::1]:37234) <- (400 InvalidArgumentException) 2025-04-04T17:13:28.555986Z node 8 :HTTP DEBUG: (#37,[::1]:37234) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 0 2025-04-04T17:13:28.556017Z node 8 :HTTP DEBUG: (#37,[::1]:37234) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: bdba0547-81b65366-3e47bbe8-ca20ca43 x-amz-crc32: 3053902336 Content-Type: application/x-amz-json-1.1 Content-Length: 135 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 2025-04-04T17:13:28.556110Z node 8 :HTTP DEBUG: (#37,[::1]:37234) connection closed |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2025-04-04T17:12:57.760702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:57.761216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:57.761420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:12:57.762952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:57.763099Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:12:57.763304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00170d/r3tmp/tmpAIx0ZS/pdisk_1.dat 2025-04-04T17:12:58.270709Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:58.480679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:12:58.587102Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:12:58.588879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:58.589011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:58.591580Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:12:58.592089Z node 2 :TX_PROXY DEBUG: actor# [2:236:2128] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:12:58.593543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:58.593631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:58.594912Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-04-04T17:12:58.607746Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:58.608338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:58.608699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:58.889435Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] Handle TEvProposeTransaction 2025-04-04T17:12:58.889513Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:12:58.894565Z node 1 :TX_PROXY DEBUG: actor# [1:206:2172] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1231:2745] 2025-04-04T17:12:59.028617Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:12:59.028745Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.029455Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1231:2745] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:12:59.029553Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.029981Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.030186Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.030325Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:12:59.030687Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:12:59.032717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.035585Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:12:59.035678Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2745] txid# 281474976715657 SEND to# [1:1141:2688] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:12:59.157622Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1309:2803] 2025-04-04T17:12:59.157934Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.209258Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1311:2804] 2025-04-04T17:12:59.209530Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.220761Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1313:2805] 2025-04-04T17:12:59.220958Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.234817Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:59.235490Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:12:59.237065Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:12:59.237158Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T17:12:59.237216Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:12:59.237633Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:12:59.237840Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:12:59.237930Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:1409:2803] in generation 1 2025-04-04T17:12:59.244663Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:59.244971Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:12:59.246631Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-04T17:12:59.246703Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037894 2025-04-04T17:12:59.246748Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-04T17:12:59.247090Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:12:59.248185Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:12:59.248255Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [1:1421:2804] in generation 1 2025-04-04T17:12:59.254344Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:59.255493Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:12:59.256855Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-04-04T17:12:59.256931Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037892 2025-04-04T17:12:59.256989Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037892 2025-04-04T17:12:59.257358Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:12:59.257747Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:12:59.257819Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037892 persisting started state actor id [1:1434:2805] in generation 1 2025-04-04T17:12:59.274314Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1412:2398] 2025-04-04T17:12:59.274553Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.326753Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [2:1417:2399] 2025-04-04T17:12:59.326974Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.341041Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [2:1425:2401] 2025-04-04T17:12:59.341279Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.354706Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:59.355046Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:12:59.356677Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:12:59.356746Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:12:59.356809Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:12:59.357121Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:12:59.357363Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:12:59.357431Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1493:2398] in generation 1 2025-04-04T17:12:59.358968Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [2:1427:2402] 2025-04-04T17:12:59.359160Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:12:59.401419Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:59.401742Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:12:59.403117Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-04T17:12:59.403195Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-04T17:12:59.403245Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-04T17:12:59.403590Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:12:59.404033Z node 
2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:12:59.404097Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [2:1502:2399] in generation 1 2025-04-04T17:12:59.411585Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:12:59.411777Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:12:59.413206Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: Q ... merSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T17:13:28.526560Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll inputs 2025-04-04T17:13:28.526578Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2025-04-04T17:13:28.526601Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2025-04-04T17:13:28.526642Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. ProcessOutputsState.Inflight: 0 2025-04-04T17:13:28.526688Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2025-04-04T17:13:28.526743Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-04-04T17:13:28.526895Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1974:2974] TxId: 281474976715667. Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [5:1977:3164], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 324 Tasks { TaskId: 1 CpuTimeUs: 150 FinishTimeMs: 1743786808525 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 98 HostName: "ghrun-mup5cka3jq" NodeId: 5 CreateTimeMs: 1743786808525 } MaxMemoryUsage: 1048576 } 2025-04-04T17:13:28.526964Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1974:2974] TxId: 281474976715667. Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1977:3164], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2025-04-04T17:13:28.527223Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1978:3164] 2025-04-04T17:13:28.527282Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-04-04T17:13:28.527317Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-04-04T17:13:28.527339Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2025-04-04T17:13:28.527444Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-04-04T17:13:28.527475Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll inputs 2025-04-04T17:13:28.527514Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2025-04-04T17:13:28.527555Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2025-04-04T17:13:28.527577Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
ProcessOutputsState.Inflight: 0 2025-04-04T17:13:28.527591Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2025-04-04T17:13:28.527618Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished 2025-04-04T17:13:28.527642Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1977:3164], TxId: 281474976715667, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wp2cqakbawz73gpgjex93. SessionId : ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-04-04T17:13:28.527697Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2025-04-04T17:13:28.527751Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T17:13:28.527840Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T17:13:28.528027Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1974:2974] TxId: 281474976715667. Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1977:3164], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1545 Tasks { TaskId: 1 CpuTimeUs: 154 FinishTimeMs: 1743786808527 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 56 BuildCpuTimeUs: 98 HostName: "ghrun-mup5cka3jq" NodeId: 5 CreateTimeMs: 1743786808525 } MaxMemoryUsage: 1048576 } 2025-04-04T17:13:28.528093Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1977:3164] 2025-04-04T17:13:28.528705Z node 5 :KQP_EXECUTER INFO: ActorId: [5:1974:2974] TxId: 281474976715667. Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 3108 DurationUs: 1743786804994120 ExecuterCpuTimeUs: 1563 StartTimeMs: 3534 FinishTimeMs: 1743786808528 Stages { StageGuid: "c84cd924-772372ba-b93b2a28-b4903f48" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (OptionalType (DataType \'Uint64))))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 1545 Tasks { TaskId: 1 CpuTimeUs: 154 FinishTimeMs: 1743786808527 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 56 BuildCpuTimeUs: 98 HostName: "ghrun-mup5cka3jq" NodeId: 5 CreateTimeMs: 1743786808525 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743786808525 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"c84cd924-772372ba-b93b2a28-b4903f48\",\"Stats\":{\"BaseTimeMs\":1743786808525,\"ComputeNodes\":[{\"CpuTimeUs\":1545,\"Tasks\":[{\"ComputeTimeUs\":56,\"FinishTimeMs\":1743786808527,\"Host\":\"ghrun-mup5cka3jq\",\"NodeId\":5,\"OutputBytes\":6,\"OutputRows\":1,\"ResultBytes\":6,\"ResultRows\":1,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 667 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\211\014\020\211\014\030\211\014 \001" } } 2025-04-04T17:13:28.528759Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1974:2974] TxId: 281474976715667. 
Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:13:28.528784Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1974:2974] TxId: 281474976715667. Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-04-04T17:13:28.528816Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1974:2974] TxId: 281474976715667. Ctx: { TraceId: 01jr0wp2cqakbawz73gpgjex93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MzYyZTY0MDgtM2ZmMDNhMmYtODI2OGU4NzQtZGQxOTg0MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001545s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 756 |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34+StreamLookupJoin-ColumnStore |88.7%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2025-04-04T17:12:32.424069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507072595550304:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.424921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ffe/r3tmp/tmpppvXSP/pdisk_1.dat 2025-04-04T17:12:32.783439Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:32.817535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.817652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.819663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5714, node 1 2025-04-04T17:12:32.993123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.993161Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.993170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.993297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.577550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.595290Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:16677 2025-04-04T17:12:33.799931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:33.811071Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.823471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.838752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:12:33.846008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.012723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.063269Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-04-04T17:12:34.066682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.109315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.176996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.249230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.285713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.322521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-04-04T17:12:34.368659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.417170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.648077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085480453642:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:35.648186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085480453634:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:35.648352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:35.652661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480
2025-04-04T17:12:35.668820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507085480453648:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.736433Z node 1 :TX_PROXY ERROR: Actor# [1:7489507085480453699:2915] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680475Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhhx9pw1pzqgwz39pag5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmViYWMwMTMtYzhlYjFkODEtZjUwYThiMWMtYTI2YzdiMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.702878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhhx9pw1pzqgwz39pag5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmViYWMwMTMtYzhlYjFkODEtZjUwYThiMWMtYTI2YzdiMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.713865Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhhx9pw1pzqgwz39pag5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmViYWMwMTMtYzhlYjFkODEtZjUwYThiMWMtYTI2YzdiMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.761003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.832565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.865297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.897955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.948931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.983937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.021544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.052005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:37.081929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.140575Z node 1 :HTTP INFO: Listening on http://127.0.0.1:29913 2025-04-04T17:12:37.423055Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507072595550304:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.423130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.141832Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.141858Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.141917Z node 1 :SQS DEBUG: ... cks: 0/0 2025-04-04T17:13:29.194740Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:13:29.194748Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976715694] PredicateAcks: 0/0 2025-04-04T17:13:29.194758Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] add an TxId 281474976715694 to the list for deletion 2025-04-04T17:13:29.194771Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976715694, NewState DELETING 2025-04-04T17:13:29.194785Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] delete key for TxId 281474976715694 2025-04-04T17:13:29.194818Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:13:29.195058Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2025-04-04T17:13:29.195414Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2025-04-04T17:13:29.195437Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:13:29.195450Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Try execute txs with state DELETING 2025-04-04T17:13:29.195461Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] TxId 281474976715694, State DELETING 2025-04-04T17:13:29.195474Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] delete TxId 281474976715694 2025-04-04T17:13:29.195622Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:13:29.195631Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:13:29.195633Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Try execute txs with state DELETING 2025-04-04T17:13:29.195642Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976715694, State DELETING 2025-04-04T17:13:29.195645Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Try execute txs with state DELETING 2025-04-04T17:13:29.195652Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] delete TxId 281474976715694 2025-04-04T17:13:29.195657Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] TxId 281474976715694, State DELETING 2025-04-04T17:13:29.195670Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] delete TxId 281474976715694 2025-04-04T17:13:29.195707Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Registered with mediator time cast Http output full {}2025-04-04T17:13:29.195856Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [64a608e2-5db506a8-768fe911-138daece] reply ok 2025-04-04T17:13:29.195893Z node 8 
:PERSQUEUE DEBUG: [PQ: 72075186224037908] Registered with mediator time cast 2025-04-04T17:13:29.196090Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2025-04-04T17:13:29.196112Z node 8 :HTTP DEBUG: (#37,[::1]:58830) <- (200 ) 2025-04-04T17:13:29.196215Z node 8 :HTTP DEBUG: (#37,[::1]:58830) connection closed 200 {} 2025-04-04T17:13:29.197214Z node 8 :HTTP DEBUG: (#40,[::1]:58840) incoming connection opened 2025-04-04T17:13:29.197269Z node 8 :HTTP DEBUG: (#40,[::1]:58840) -> (POST /Root) 2025-04-04T17:13:29.197442Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [38ec:2800:6050:0:20ec:2800:6050:0] request [ListShards] url [/Root] database [/Root] requestId: e4c3ad5f-80d4229b-73412923-7f4f36cd 2025-04-04T17:13:29.197813Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [e4c3ad5f-80d4229b-73412923-7f4f36cd] got new request from [38ec:2800:6050:0:20ec:2800:6050:0] database '/Root' stream 'teststream' 2025-04-04T17:13:29.198188Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [e4c3ad5f-80d4229b-73412923-7f4f36cd] [auth] Authorized successfully 2025-04-04T17:13:29.198257Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [e4c3ad5f-80d4229b-73412923-7f4f36cd] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1743786809.198316 548230 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-04T17:13:29.199873Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7489507317802237242:2534], now have 1 active actors on pipe 2025-04-04T17:13:29.199904Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7489507317802237243:2535], now have 1 active actors on pipe 2025-04-04T17:13:29.200465Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [e4c3ad5f-80d4229b-73412923-7f4f36cd] reply ok 2025-04-04T17:13:29.200718Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7489507317802237242:2534] destroyed 2025-04-04T17:13:29.200750Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7489507317802237243:2535] destroyed 2025-04-04T17:13:29.200777Z node 8 :HTTP DEBUG: (#40,[::1]:58840) <- (200 ) 2025-04-04T17:13:29.200854Z node 8 :HTTP DEBUG: (#40,[::1]:58840) connection closed Http output full {"NextToken":"CPCurI7gMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CPCurI7gMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-04T17:13:29.201976Z node 8 :HTTP DEBUG: (#37,[::1]:58848) incoming connection opened 2025-04-04T17:13:29.202041Z node 8 :HTTP DEBUG: (#37,[::1]:58848) -> (POST /Root) 
2025-04-04T17:13:29.202140Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [387c:2900:6050:0:207c:2900:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 8c44225a-32db08f5-2c39f828-2b5393cc 2025-04-04T17:13:29.202505Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [8c44225a-32db08f5-2c39f828-2b5393cc] got new request from [387c:2900:6050:0:207c:2900:6050:0] database '/Root' stream 'teststream' 2025-04-04T17:13:29.202862Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [8c44225a-32db08f5-2c39f828-2b5393cc] [auth] Authorized successfully 2025-04-04T17:13:29.202918Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [8c44225a-32db08f5-2c39f828-2b5393cc] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1743786809.202980 548230 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-04T17:13:29.203565Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7489507317802237254:2539], now have 1 active actors on pipe 2025-04-04T17:13:29.203577Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7489507317802237255:2540], now have 1 active actors on pipe Http output full {"NextToken":"CPOurI7gMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-04T17:13:29.203897Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7489507317802237254:2539] destroyed 2025-04-04T17:13:29.203935Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7489507317802237255:2540] destroyed 2025-04-04T17:13:29.204069Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [8c44225a-32db08f5-2c39f828-2b5393cc] reply ok 2025-04-04T17:13:29.204188Z node 8 :HTTP DEBUG: (#37,[::1]:58848) <- (200 ) 2025-04-04T17:13:29.204273Z node 8 :HTTP DEBUG: (#37,[::1]:58848) connection closed 200 {"NextToken":"CPOurI7gMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-04T17:13:29.205276Z node 8 :HTTP DEBUG: (#37,[::1]:58854) incoming connection opened 2025-04-04T17:13:29.205344Z node 8 :HTTP DEBUG: (#37,[::1]:58854) -> (POST /Root) 2025-04-04T17:13:29.205457Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [78e1:4100:6050:0:60e1:4100:6050:0] request [ListShards] url [/Root] database [/Root] requestId: c1cba13-435142cd-f8158fa0-b120c3de 2025-04-04T17:13:29.205803Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [c1cba13-435142cd-f8158fa0-b120c3de] got new request from [78e1:4100:6050:0:60e1:4100:6050:0] database '/Root' stream 'teststream' 2025-04-04T17:13:29.206093Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId 
[c1cba13-435142cd-f8158fa0-b120c3de] [auth] Authorized successfully 2025-04-04T17:13:29.206147Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [c1cba13-435142cd-f8158fa0-b120c3de] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1743786809.206204 548230 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-04-04T17:13:29.206900Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7489507317802237266:2544], now have 1 active actors on pipe 2025-04-04T17:13:29.206918Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7489507317802237267:2545], now have 1 active actors on pipe 2025-04-04T17:13:29.207227Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7489507317802237266:2544] destroyed 2025-04-04T17:13:29.207245Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7489507317802237267:2545] destroyed 2025-04-04T17:13:29.207401Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [c1cba13-435142cd-f8158fa0-b120c3de] reply ok 2025-04-04T17:13:29.207516Z node 8 :HTTP DEBUG: (#37,[::1]:58854) <- (200 ) Http output full {"NextToken":"CPeurI7gMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-04-04T17:13:29.207590Z node 8 :HTTP DEBUG: (#37,[::1]:58854) connection closed 200 {"NextToken":"CPeurI7gMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2025-04-04T17:12:32.424521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507074140112315:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.424575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f8a/r3tmp/tmp6F5Ak2/pdisk_1.dat 2025-04-04T17:12:32.812887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.813069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:32.816694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:32.857814Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14514, node 1 2025-04-04T17:12:32.991112Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.991153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.991162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.991283Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.551435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:7761 2025-04-04T17:12:33.770561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.789139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.793427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.803652Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.810312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:12:33.936155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:12:33.984961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.031421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:34.066019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.103040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.174579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.210418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.282210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.316367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:35.550876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507087025015647:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:35.551004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:35.551225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507087025015659:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:12:35.555113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480
2025-04-04T17:12:35.569181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507087025015661:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.630094Z node 1 :TX_PROXY ERROR: Actor# [1:7489507087025015712:2916] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680156Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhew28cr81k6kfxc5hsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM4MWYxMzAtMzY4YTgyYTAtNDU2Y2E4MzMtZjZkMTI4MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.702126Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhew28cr81k6kfxc5hsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM4MWYxMzAtMzY4YTgyYTAtNDU2Y2E4MzMtZjZkMTI4MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.712429Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhew28cr81k6kfxc5hsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM4MWYxMzAtMzY4YTgyYTAtNDU2Y2E4MzMtZjZkMTI4MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.801316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.842217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.875239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.919242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.990040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.027793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.068895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.103590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:37.170967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.236478Z node 1 :HTTP INFO: Listening on http://127.0.0.1:25957 2025-04-04T17:12:37.422918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507074140112315:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.422989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.238063Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.238071Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.238175Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-04T17:12:38.239057Z node 1 :HTTP INFO: Listening on http://[::]:5442 2025-04-04T17:12:38.239519Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-04T17:12:38.255965Z node 1 :SQS INFO: Request SQS users list 2025 ... e" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:28.545576Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 14ms 2025-04-04T17:13:28.545836Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:28.545879Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-04-04T17:13:28.545958Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 15ms 2025-04-04T17:13:28.546226Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: 
"Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:28.546501Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:28.546529Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 13ms 2025-04-04T17:13:28.546974Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: 
Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:28.547012Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-04-04T17:13:28.547123Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 15ms 2025-04-04T17:13:28.547545Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-04-04T17:13:28.783963Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7489507312470805182:2449]: Pool not found 2025-04-04T17:13:28.784159Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-04-04T17:13:29.139042Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7489507312470805177:2448]: Pool not found 2025-04-04T17:13:29.139198Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-04-04T17:13:29.142053Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489507316765772623:2472], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:29.142147Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7489507316765772624:2473], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-04-04T17:13:29.142210Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:29.435128Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7489507316765772621:2471]: Pool not found 2025-04-04T17:13:29.435385Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete 2025-04-04T17:13:29.510219Z node 8 :HTTP DEBUG: (#37,[::1]:39192) incoming connection opened 2025-04-04T17:13:29.510313Z node 8 :HTTP DEBUG: (#37,[::1]:39192) -> (POST /Root) 2025-04-04T17:13:29.510447Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [385c:1501:6050:0:205c:1501:6050:0] request [CreateStream] url [/Root] database [/Root] requestId: 1c7a0131-75eaae05-100908f3-6f7b28b2 2025-04-04T17:13:29.511093Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [1c7a0131-75eaae05-100908f3-6f7b28b2] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2025-04-04T17:13:29.511244Z node 8 :HTTP DEBUG: (#37,[::1]:39192) <- (400 MissingParameter) 2025-04-04T17:13:29.511310Z node 8 :HTTP DEBUG: (#37,[::1]:39192) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2025-04-04T17:13:29.511354Z node 8 :HTTP DEBUG: (#37,[::1]:39192) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 1c7a0131-75eaae05-100908f3-6f7b28b2 x-amz-crc32: 851558042 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-04-04T17:13:29.511455Z node 8 :HTTP DEBUG: (#37,[::1]:39192) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-04-04T17:12:32.424286Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507074141514704:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.424335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fd9/r3tmp/tmplHkJZc/pdisk_1.dat 2025-04-04T17:12:32.812880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:32.850664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.850759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30786, node 1 2025-04-04T17:12:32.852533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:32.992251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.992275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.992282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-04T17:12:32.992405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24311 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.546722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24311 2025-04-04T17:12:33.767476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.774707Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.776081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.794803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.946359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:33.988203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.038895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.082855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.139698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:34.181207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-04T17:12:34.220449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.264678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.291630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:12:35.483078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507087026418027:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.483096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507087026418035:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.483155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.486349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.496559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507087026418041:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.581345Z node 1 :TX_PROXY ERROR: Actor# [1:7489507087026418092:2916] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680497Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhcsa82jvh5e194a76s5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc3YTI2MDQtZGEzZTBlODMtNjIyNjE3ZjctM2QyNjZkYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.706111Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhcsa82jvh5e194a76s5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc3YTI2MDQtZGEzZTBlODMtNjIyNjE3ZjctM2QyNjZkYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.709912Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhcsa82jvh5e194a76s5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc3YTI2MDQtZGEzZTBlODMtNjIyNjE3ZjctM2QyNjZkYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.762289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.821750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:36.860800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.889395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:36.922400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:12:36.954308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.988834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.017519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:37.086115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.184474Z node 1 :HTTP INFO: Listening on http://127.0.0.1:6189 2025-04-04T17:12:37.422933Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507074141514704:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.423000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.192256Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.192285Z node 1 :SQS INFO: Start SQS proxy service actor 2025-04-04T17:12:38.192345Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-04T17:12:38.193194Z node 1 :HTTP INFO: Listening on http://[::]:10631 2025-04-04T17:12:38.193614Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-04T17:12:38.225303Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-04-04T17:12:38.225348Z node 1 :SQS INFO: Request SQS users list 2025-04-04T17:12:38.225385Z node 1 :SQS DEBUG: Request SQS ... retryable:0 2025-04-04T17:13:29.457571Z node 8 :TICKET_PARSER DEBUG: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2025-04-04T17:13:29.457680Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [eb54c26a-182cdcab-e427613e-327525cf] [auth] Authorized successfully 2025-04-04T17:13:29.457780Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [eb54c26a-182cdcab-e427613e-327525cf] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-04T17:13:29.459280Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7489507318792521138:2536], now have 1 active actors on pipe 2025-04-04T17:13:29.459304Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7489507318792521139:2537], now have 1 active actors on pipe 2025-04-04T17:13:29.459372Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7489507318792521137:2535], now have 1 active actors on pipe 2025-04-04T17:13:29.459430Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7489507318792521140:2538], now have 1 active actors on pipe 2025-04-04T17:13:29.459474Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7489507318792521141:2539], now have 1 active actors on pipe 2025-04-04T17:13:29.460133Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7489507318792521137:2535] destroyed 2025-04-04T17:13:29.460160Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7489507318792521138:2536] destroyed 2025-04-04T17:13:29.460180Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7489507318792521140:2538] destroyed 2025-04-04T17:13:29.460182Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7489507318792521139:2537] destroyed 2025-04-04T17:13:29.460197Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7489507318792521141:2539] destroyed 2025-04-04T17:13:29.461071Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [eb54c26a-182cdcab-e427613e-327525cf] reply ok 
2025-04-04T17:13:29.461309Z node 8 :HTTP DEBUG: (#37,[::1]:50774) <- (200 ) 2025-04-04T17:13:29.461406Z node 8 :HTTP DEBUG: (#37,[::1]:50774) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743786809,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743786809,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-04-04T17:13:29.462539Z 
node 8 :HTTP DEBUG: (#37,[::1]:50776) incoming connection opened 2025-04-04T17:13:29.462601Z node 8 :HTTP DEBUG: (#37,[::1]:50776) -> (POST /Root) 2025-04-04T17:13:29.462712Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [98a9:3b00:6050:0:80a9:3b00:6050:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: 63250161-5ff4fefe-81a24d40-6b8ebb1 2025-04-04T17:13:29.463030Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [63250161-5ff4fefe-81a24d40-6b8ebb1] got new request from [98a9:3b00:6050:0:80a9:3b00:6050:0] database '/Root' stream 'testtopic' 2025-04-04T17:13:29.463339Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [63250161-5ff4fefe-81a24d40-6b8ebb1] [auth] Authorized successfully 2025-04-04T17:13:29.463374Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [63250161-5ff4fefe-81a24d40-6b8ebb1] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-04T17:13:29.464183Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [63250161-5ff4fefe-81a24d40-6b8ebb1] reply ok 2025-04-04T17:13:29.464300Z node 8 :HTTP DEBUG: (#37,[::1]:50776) <- (200 ) 2025-04-04T17:13:29.464355Z node 8 :HTTP DEBUG: (#37,[::1]:50776) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1743786.809,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1743786.809,"StreamName":"testtopic"}} 2025-04-04T17:13:29.464846Z node 8 :HTTP DEBUG: (#37,[::1]:50782) incoming connection opened 2025-04-04T17:13:29.464896Z node 8 :HTTP DEBUG: (#37,[::1]:50782) -> (POST /Root) 2025-04-04T17:13:29.464996Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [987c:2f00:6050:0:807c:2f00:6050:0] request [DescribeStream] url [/Root] database [/Root] requestId: 73456768-893e095f-7adf4ca8-f36ccad4 2025-04-04T17:13:29.465248Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [73456768-893e095f-7adf4ca8-f36ccad4] got new request from [987c:2f00:6050:0:807c:2f00:6050:0] database '/Root' stream 'testtopic' 2025-04-04T17:13:29.465554Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [73456768-893e095f-7adf4ca8-f36ccad4] [auth] Authorized successfully 2025-04-04T17:13:29.465577Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [73456768-893e095f-7adf4ca8-f36ccad4] sending grpc request to '' database: '/Root' iam token size: 0 2025-04-04T17:13:29.466182Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7489507318792521165:2548], now have 1 active actors on pipe 2025-04-04T17:13:29.466184Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7489507318792521164:2547], now have 1 active actors on pipe 2025-04-04T17:13:29.466223Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7489507318792521168:2551], now have 1 active actors on pipe 2025-04-04T17:13:29.466236Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7489507318792521167:2550], now have 1 active actors on pipe 2025-04-04T17:13:29.466333Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7489507318792521166:2549], now 
have 1 active actors on pipe 2025-04-04T17:13:29.466848Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7489507318792521164:2547] destroyed 2025-04-04T17:13:29.466855Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7489507318792521165:2548] destroyed 2025-04-04T17:13:29.466876Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7489507318792521166:2549] destroyed 2025-04-04T17:13:29.466882Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7489507318792521167:2550] destroyed 2025-04-04T17:13:29.466893Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7489507318792521168:2551] destroyed 2025-04-04T17:13:29.467290Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [73456768-893e095f-7adf4ca8-f36ccad4] reply ok 2025-04-04T17:13:29.467485Z node 8 :HTTP DEBUG: (#37,[::1]:50782) <- (200 ) 2025-04-04T17:13:29.467582Z node 8 :HTTP DEBUG: (#37,[::1]:50782) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743786809,"StorageLimitMb":0,"StreamName":"testtopic"}} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD] >> TPersQueueTest::Cache >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/ut_fat/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2025-04-04T17:12:32.424033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507072744614373:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:32.424286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00101f/r3tmp/tmp1DWaTl/pdisk_1.dat 2025-04-04T17:12:32.769264Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:32.836599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:32.836709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9499, node 1 2025-04-04T17:12:32.840967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:32.992656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:32.992678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:32.992684Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:32.992786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10544 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:12:33.612566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:33.625528Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:10544 2025-04-04T17:12:33.807004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.817856Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:12:33.819660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.839484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:33.988509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.030725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.082893Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-04-04T17:12:34.090337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:12:34.131680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:12:34.199481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.242966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.311632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.389207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:34.462190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:35.702579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085629517702:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.702667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507085629517714:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.702722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:35.706875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-04-04T17:12:35.716850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507085629517716:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-04-04T17:12:35.785957Z node 1 :TX_PROXY ERROR: Actor# [1:7489507085629517767:2917] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:12:36.680647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0wmhkmd4k5gw7nzmj70d6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcxZmM3MTEtZGVlY2Q2YWEtMjAwYTA1YmEtMjg1NTBhZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.708863Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0wmhkmd4k5gw7nzmj70d6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcxZmM3MTEtZGVlY2Q2YWEtMjAwYTA1YmEtMjg1NTBhZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.712847Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0wmhkmd4k5gw7nzmj70d6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcxZmM3MTEtZGVlY2Q2YWEtMjAwYTA1YmEtMjg1NTBhZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:12:36.761180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.823285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.890817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.924902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.960820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:36.993675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.027313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.060206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:12:37.126764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:12:37.184098Z node 1 :HTTP INFO: Listening on http://127.0.0.1:1627 2025-04-04T17:12:37.423258Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507072744614373:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:37.423321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:12:38.188349Z node 1 :SQS INFO: Start SQS service actor 2025-04-04T17:12:38.188489Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-04-04T17:12:38.189672Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-04-04T17:12:38.194420Z node 1 :SQS INFO: Start SQS proxy serv ... 3\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?( \000\001\205\000\000\000\000\001\027\000\000\000\000\000\000\000?\030\005?4\003?*\010\003?,\016\003?.\030\003?0\020\003?2\014\037\013?:\t\351\000?6\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?b\003?d(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?8\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?x\003?z\036QUEUE_ID_NUMBER\003\022\000\000\013?\000\003?@\000\003?B\000\003?D\000\006\004?H\003\203\014\000\003\203\014\000\003\003?J\000\377\007\003?$\000\002\001\000/" } } 2025-04-04T17:13:31.872604Z node 7 :SQS DEBUG: Request [] Query(idx=GET_STATE_ID) Queue [cloud4/000000000000000301v0] compilation duration: 3ms 2025-04-04T17:13:31.872718Z node 7 :SQS DEBUG: Request [] Query(idx=GET_STATE_ID) has been prepared 2025-04-04T17:13:31.872741Z node 7 :SQS DEBUG: Request [] Executing compiled query(idx=GET_STATE_ID) 2025-04-04T17:13:31.872831Z node 7 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_STATE_ID). 
Mode: COMPILE_AND_EXEC 2025-04-04T17:13:31.872935Z node 7 :SQS TRACE: Request [] Query(idx=GET_STATE_ID) Queue [cloud4/000000000000000301v0] Serializing params: {"QUEUE_ID_NUMBER": 4, "QUEUE_ID_NUMBER_HASH": 2169371982377735806} 2025-04-04T17:13:31.873170Z node 7 :SQS TRACE: Request [] Query(idx=GET_STATE_ID) Queue [cloud4/000000000000000301v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\024\nFlags\010Name\010Args\016Payload\022Parameter CreatedTimestamp\024InflyCount\030InflyVersion\030MessageCount\"RetentionBoundary\006\002\206\202\t\211\004\202\203\005@\206\205\n\207\203\010\207\203\006\207\203\010\207\203\006\207\203\010\026\032\036\"&$SetResult\000\003?\002\nstate\t\211\004?\032\205\004?\032\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\"\203\005\004\200\205\n\203\004\203\004\203\004\203\004\203\004\026\032\036\"&\213\004\203\010\203\010\213\004?6?8\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?( \000\001\205\000\000\000\000\001\027\000\000\000\000\000\000\000?\030\005?4\003?*\010\003?,\016\003?.\030\003?0\020\003?2\014\037\013?:\t\351\000?6\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?b\003?d(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?8\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?x\003?z\036QUEUE_ID_NUMBER\003\022\000\000\013?\000\003?@\000\003?B\000\003?D\000\006\004?H\003\203\014\000\003\203\014\000\003\003?J\000\377\007\003?$\000\002\001\000/" } Params { Bin: "\037\000\005\205\004\203\010\203\010> TPersQueueTest::TopicServiceCommitOffset [GOOD] >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> KqpJoinOrder::Chain65Nodes [GOOD] >> KqpJoinOrder::DatetimeConstantFold+StreamLookupJoin-ColumnStore |88.8%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 |88.8%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2025-04-04T17:13:33.189645Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T17:13:33.193856Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T17:13:33.196526Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T17:13:33.197414Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T17:13:33.201845Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-04-04T17:13:33.206350Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty >> IncrementalRestoreScan::ChangeSenderSimple >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> IncrementalRestoreScan::Empty |88.8%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbProxy::ListDirectory >> YdbProxy::CreateTopic >> YdbProxy::RemoveDirectory >> YdbProxy::DropTable >> YdbProxy::CopyTable >> YdbProxy::ReadTopic >> YdbProxy::DescribePath >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> IncrementalRestoreScan::Empty [GOOD] >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TPersQueueTest::SchemeshardRestart [GOOD] >> TPersQueueTest::SameOffset >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2025-04-04T17:13:38.471518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:38.472012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:38.472104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f1c/r3tmp/tmp554WQW/pdisk_1.dat 2025-04-04T17:13:38.938637Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:596:2520] Exhausted 2025-04-04T17:13:38.938801Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:596:2520] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-04-04T17:13:38.938865Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:596:2520] Finish 0 >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] |88.9%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 >> TPersQueueTest::Init [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] |88.9%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2025-04-04T17:13:38.473018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:38.473469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:38.473528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f31/r3tmp/tmpYZC7tj/pdisk_1.dat 2025-04-04T17:13:39.068497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-04T17:13:39.069644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.071736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:13:39.072901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:13:39.073009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.073855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.075138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:13:39.075400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.075474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:13:39.075573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:13:39.075608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:13:39.077645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.077710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:13:39.077745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:13:39.078190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.078228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.078310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:13:39.078371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.082755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:13:39.083258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:13:39.084503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:13:39.086462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.086516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-04T17:13:39.086557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.124484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T17:13:39.124563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:39.170342Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:13:39.171258Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:13:39.173410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:39.173855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:39.186770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:39.262226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.262428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:13:39.262485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:13:39.264668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:13:39.264740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:13:39.264956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:39.265028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:13:39.267063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:13:39.267119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, 
LocalPathId: 1] 2025-04-04T17:13:39.267333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:13:39.267385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:570:2497], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-04T17:13:39.267768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.267827Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-04T17:13:39.267950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:13:39.267989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.268028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:13:39.268067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.268105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:13:39.268148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.268189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:13:39.268219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:13:39.268279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T17:13:39.268319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-04T17:13:39.268368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-04T17:13:39.270518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-04T17:13:39.270599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-04T17:13:39.270639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-04T17:13:39.270669Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-04T17:13:39.270715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:39.270817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-04T17:13:39.270854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:592:2517] 2025-04-04T17:13:39.271343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-04T17:13:39.276206Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:13:39.276274Z node 1 :TX_PROXY DEBUG: 
actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:13:39.279232Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:13:39.285572Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:13:39.285666Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:13:39.287068Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:13:39.287168Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:39.287536Z no ... " PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-04T17:13:39.921861Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-04T17:13:39.921943Z node 1 :TX_PROXY DEBUG: Actor# [1:827:2681] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-04T17:13:39.925063Z node 1 :TX_PROXY DEBUG: Actor# [1:827:2681] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:39.925167Z node 1 :TX_PROXY DEBUG: Actor# [1:827:2681] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-04-04T17:13:39.926137Z node 1 :TX_PROXY DEBUG: Actor# [1:827:2681] Handle TEvDescribeSchemeResult Forward to# [1:592:2517] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:13:39.928045Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 
2]][1:829:2683] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:13:39.928341Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2683] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:13:39.928663Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2683] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:13:39.928818Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:829:2683] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2025-04-04T17:13:38.680145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:13:38.680745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:38.680808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f16/r3tmp/tmpyCJYCX/pdisk_1.dat 2025-04-04T17:13:39.189409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-04T17:13:39.189671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.189885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:13:39.190097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:13:39.190156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.191022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.191185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:13:39.191408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.191466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:13:39.191507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:13:39.191536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:13:39.192036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.192094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:13:39.192126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:13:39.192497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.192538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.192594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:13:39.192653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.196047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:13:39.196483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:13:39.196703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:13:39.197804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.197872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-04T17:13:39.197912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.236733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T17:13:39.236795Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:39.274662Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:13:39.275408Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:13:39.275688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:39.275802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:39.290151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:39.364761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:39.364888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:13:39.364932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:13:39.365186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:13:39.365230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-04-04T17:13:39.365386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:39.365448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:13:39.366290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:13:39.366326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, 
LocalPathId: 1] 2025-04-04T17:13:39.366474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:13:39.366519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:570:2497], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-04-04T17:13:39.366781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.366823Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-04-04T17:13:39.366917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:13:39.366951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.366979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:13:39.367010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.367043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:13:39.367080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:13:39.367117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:13:39.367144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:13:39.367189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T17:13:39.367221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-04-04T17:13:39.367250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-04T17:13:39.375510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-04T17:13:39.375629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-04-04T17:13:39.375660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-04-04T17:13:39.375706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-04T17:13:39.375761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:39.375872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-04-04T17:13:39.375928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:592:2517] 2025-04-04T17:13:39.376476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-04-04T17:13:39.377018Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:13:39.377064Z node 1 :TX_PROXY DEBUG: 
actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:13:39.377210Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:13:39.383220Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:13:39.383294Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:13:39.383812Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:13:39.383903Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:39.384222Z no ... ress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.964492Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2025-04-04T17:13:39.964585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-04T17:13:39.964635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-04T17:13:39.964674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-04T17:13:39.964718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-04T17:13:39.964752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2025-04-04T17:13:39.964805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:592:2517] message: TxId: 281474976715658 2025-04-04T17:13:39.964844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-04-04T17:13:39.964880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-04T17:13:39.964909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2025-04-04T17:13:39.965014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:13:39.965578Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-04-04T17:13:39.965666Z node 1 :TX_PROXY DEBUG: Actor# [1:815:2675] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-04-04T17:13:39.968279Z node 1 :TX_PROXY DEBUG: Actor# [1:815:2675] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:39.968404Z node 1 :TX_PROXY DEBUG: Actor# [1:815:2675] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-04-04T17:13:39.969451Z node 1 :TX_PROXY DEBUG: Actor# [1:815:2675] Handle TEvDescribeSchemeResult Forward to# [1:592:2517] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:13:39.971464Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:825:2679], serverId# [1:826:2680], sessionId# [0:0:0] 2025-04-04T17:13:39.973089Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:13:39.973371Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:13:39.973707Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:13:39.973918Z node 1 :CHANGE_EXCHANGE DEBUG: 
[IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-04-04T17:13:39.974055Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:13:39.974309Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvGetProxyServicesRequest 2025-04-04T17:13:39.974406Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:831:2681] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-04-04T17:13:39.974835Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:832:2685], serverId# [1:833:2686], sessionId# [0:0:0] 2025-04-04T17:13:40.017621Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:831:2681] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T17:13:40.017739Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T17:13:40.017893Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:831:2681] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:13:40.017978Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-04-04T17:13:40.018119Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:827:2681] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.9%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 >> TopicService::UnknownConsumer >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] >> YdbProxy::DropTopic [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> YdbProxy::StaticCreds [GOOD] >> YdbProxy::DescribeTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> YdbProxy::DescribeTopic [GOOD] >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-04-04T17:13:36.006839Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507348773664409:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:36.006949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ca6/r3tmp/tmpzSg0GW/pdisk_1.dat 2025-04-04T17:13:36.445874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:36.461972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:36.462061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:36.469799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25509 TServer::EnableGrpc on GrpcPort 4247, node 1 2025-04-04T17:13:36.759959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:36.759983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:36.759989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:36.760108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25509 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:37.336012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:37.451259Z node 1 :TX_PROXY ERROR: Actor# [1:7489507353068632308:2296] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2025-04-04T17:13:39.286883Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507362605822707:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:39.286939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ca6/r3tmp/tmpjghcfd/pdisk_1.dat 2025-04-04T17:13:39.378600Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:39.421858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:39.421954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:39.423507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14810 TServer::EnableGrpc on GrpcPort 27242, node 2 2025-04-04T17:13:39.610799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:39.610826Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:39.610832Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:39.610949Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14810 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:39.935621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2025-04-04T17:13:36.007242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507346626039787:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:36.007309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cb4/r3tmp/tmpv2KAHL/pdisk_1.dat 2025-04-04T17:13:36.445690Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:36.464158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:36.464251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:36.467582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11464 TServer::EnableGrpc on GrpcPort 22244, node 1 2025-04-04T17:13:36.759946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:36.759971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:36.759984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:36.760122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11464 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:37.395539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:37.415160Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:37.510888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:13:39.353496Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507360701371088:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:39.353587Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cb4/r3tmp/tmpBP09yZ/pdisk_1.dat 2025-04-04T17:13:39.449629Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:39.480088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:39.480162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:39.481248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1062 TServer::EnableGrpc on GrpcPort 17953, node 2 2025-04-04T17:13:39.658854Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:39.658879Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:39.658886Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:39.659002Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:13:39.963400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:40.100832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:40.109576Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-04T17:13:40.111324Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-04T17:13:40.122855Z node 2 :TX_PROXY ERROR: Actor# [2:7489507364996339166:2396] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-04-04T17:13:36.005943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507346095241806:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:36.006054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cde/r3tmp/tmpRg9acA/pdisk_1.dat 2025-04-04T17:13:36.451165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:36.453340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:36.453451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:36.464866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62235 TServer::EnableGrpc on GrpcPort 16663, node 1 2025-04-04T17:13:36.762874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:36.762904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:36.762912Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:36.763012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:62235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:37.357741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:37.454270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:37.461349Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-04T17:13:37.480881Z node 1 :TX_PROXY ERROR: Actor# [1:7489507350390209757:2326] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T17:13:39.404449Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507362111485580:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:39.404603Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cde/r3tmp/tmpGpzGyf/pdisk_1.dat 2025-04-04T17:13:39.542835Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:39.572700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:39.572785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:39.574400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19294 TServer::EnableGrpc on GrpcPort 10781, node 2 2025-04-04T17:13:39.815308Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:39.815330Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:39.815337Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:39.815492Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19294 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:40.121842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:40.131934Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:13:40.162219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786820167 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786820167 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... 
(TRUNCATED) 2025-04-04T17:13:40.367178Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 |88.9%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2025-04-04T17:13:36.007337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507346220313378:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:36.007459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cd7/r3tmp/tmpMxZmpu/pdisk_1.dat 2025-04-04T17:13:36.408693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:36.456598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:36.456714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:36.458462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21873 TServer::EnableGrpc on GrpcPort 24255, node 1 2025-04-04T17:13:36.759963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:36.759991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:36.760002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:36.760141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:37.404147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:37.494528Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:13:39.388686Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507359497423906:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:39.388733Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cd7/r3tmp/tmpfUaZyN/pdisk_1.dat 2025-04-04T17:13:39.494622Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:39.512013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:39.512094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:39.513203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7247 TServer::EnableGrpc on GrpcPort 23347, node 2 2025-04-04T17:13:39.734213Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:39.734252Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:39.734260Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:39.734376Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:40.057952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:40.062491Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:13:42.332917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-04-04T17:13:36.006037Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507346382891260:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:36.006133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cb0/r3tmp/tmpXKIXhc/pdisk_1.dat 2025-04-04T17:13:36.447231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:36.447516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:36.448537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:36.458063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63745 TServer::EnableGrpc on GrpcPort 11167, node 1 2025-04-04T17:13:36.762411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:36.762436Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:36.762446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:36.762558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:37.365170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:37.387393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:39.023218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.154823Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:13:39.155968Z node 1 :TX_PROXY ERROR: Actor# [1:7489507359267793935:2398] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cb0/r3tmp/tmphpAYaS/pdisk_1.dat 2025-04-04T17:13:39.938367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:39.993437Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:40.031288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:40.031387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:40.032786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65205 TServer::EnableGrpc on GrpcPort 12409, node 2 2025-04-04T17:13:40.225885Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:40.225907Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:40.225914Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:40.226036Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:40.566177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:40.575446Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> TAccessServiceTest::Authenticate >> FolderServiceTest::TFolderServiceTransitional >> TAccessServiceTest::PassRequestId >> FolderServiceTest::TFolderService |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 >> FolderServiceTest::TFolderServiceAdapter >> TUserAccountServiceTest::Get |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> TCertificateCheckerTest::CheckSubjectDns |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 >> KqpJoinOrder::DatetimeConstantFold+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin-ColumnStore >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 14392251916883508006 >> TPersQueueTest::SetupLockSession [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs >> BSCRestartPDisk::RestartOneByOne [GOOD] >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> YdbProxy::AlterTopic [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 15509869837079616959 >> YdbProxy::ReadNonExistentTopic [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> KqpQueryService::DdlPermission |89.0%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> KqpDocumentApi::RestrictWrite >> KqpQueryService::TableSink_OltpLiteralUpsert >> KqpService::CloseSessionsWithLoad >> TAccessServiceTest::Authenticate [GOOD] >> KqpQueryService::PeriodicTaskInSessionPool >> KqpQueryServiceScripts::ParseScript >> KqpQueryService::AlterTempTable >> KqpQueryService::CreateAndDropTopic >> KqpQueryService::TableSink_OltpUpsert >> KqpQueryService::TableSink_HtapInteractive-withOltpSink |89.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TAccessServiceTest::PassRequestId [GOOD] >> KqpQueryService::ExecuteQueryPg >> KqpService::SessionBusy >> FolderServiceTest::TFolderService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-04-04T17:13:36.006793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507346651145763:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:36.006833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cc6/r3tmp/tmpaHcZA0/pdisk_1.dat 2025-04-04T17:13:36.446186Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:36.456137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:36.456240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:36.460497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16604 TServer::EnableGrpc on GrpcPort 17666, node 1 2025-04-04T17:13:36.760315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:36.760340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:36.760349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:36.760479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16604 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:37.336245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:39.018827Z node 1 :TX_PROXY ERROR: Actor# [1:7489507359536048309:2305] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T17:13:39.030527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:39.849691Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507359923584091:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:39.849795Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cc6/r3tmp/tmpjDunAa/pdisk_1.dat 2025-04-04T17:13:39.973100Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:40.005448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:40.005546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:40.007426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19524 TServer::EnableGrpc on GrpcPort 29810, node 2 2025-04-04T17:13:40.181445Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:40.181467Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:40.181474Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:40.181593Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:40.451119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:42.742963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:42.824410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:43.605843Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507378735459451:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:43.605887Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cc6/r3tmp/tmprw4CVp/pdisk_1.dat 2025-04-04T17:13:43.690882Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:43.731319Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:43.731411Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:43.739181Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19506 TServer::EnableGrpc on GrpcPort 5210, node 3 2025-04-04T17:13:43.926974Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:43.926996Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:43.927004Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:43.927131Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19506 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:44.237154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:44.382395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:44.409636Z node 3 :TX_PROXY ERROR: Actor# [3:7489507383030427530:2394] txid# 281474976715660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> TServiceAccountServiceTest::IssueToken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2025-04-04T17:13:36.024748Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507349669050123:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:36.028196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cc2/r3tmp/tmpoP5IYT/pdisk_1.dat 2025-04-04T17:13:36.406279Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:36.451009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:36.451171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:36.458878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3409 TServer::EnableGrpc on GrpcPort 24498, node 1 2025-04-04T17:13:36.760663Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:36.760693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:36.760707Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:36.760853Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:3409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:37.363133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:37.748286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T17:13:39.000504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507358258985575:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:39.000504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507358258985558:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:39.000580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507358258985574:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:39.000647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:39.006073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-04-04T17:13:39.011339Z node 1 :TX_PROXY ERROR: Actor# [1:7489507362553952877:2452] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:13:39.018184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507362553952874:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:13:39.018196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507362553952875:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:13:39.079962Z node 1 :TX_PROXY ERROR: Actor# [1:7489507362553952923:2482] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:39.080173Z node 1 :TX_PROXY ERROR: Actor# [1:7489507362553952924:2483] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:40.152990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:13:40.569292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:40.992043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:13:41.023901Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507349669050123:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:41.032804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:41.470019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:13:41.880011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:13:44.043881Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507383649474631:2289];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cc2/r3tmp/tmpjjPVhw/pdisk_1.dat 2025-04-04T17:13:44.082634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:44.196204Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:44.210936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:44.211026Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:44.212874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61510 TServer::EnableGrpc on GrpcPort 8955, node 2 2025-04-04T17:13:44.444574Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-04T17:13:44.444601Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:44.444612Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:44.444725Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:13:44.735241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-04-04T17:13:44.949958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507381440373489:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:44.950006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ad1/r3tmp/tmpOnw5ef/pdisk_1.dat 2025-04-04T17:13:45.334713Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:45.376533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:45.376640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:45.378857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25907 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:45.665079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:45.682789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:45.685782Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Connect to grpc://localhost:32421 2025-04-04T17:13:45.722865Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-04T17:13:45.740359Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32421: Failed to connect to remote host: Connection refused 2025-04-04T17:13:45.743415Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-04T17:13:45.744042Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:32421: Failed to connect to remote host: Connection refused 2025-04-04T17:13:46.744627Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-04T17:13:46.747790Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 5 Not Found 2025-04-04T17:13:46.748185Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-04T17:13:46.751103Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } >> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD] >> TPersQueueTest::TopicServiceReadBudget >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TServiceAccountServiceTest::IssueToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-04-04T17:13:44.862134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507384160248139:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:44.862554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001aaa/r3tmp/tmpk2tSx9/pdisk_1.dat 2025-04-04T17:13:45.239699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:45.239838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:45.242590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:45.246951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:45.550752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:45.563734Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:45.591908Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Connect to grpc://localhost:17070 2025-04-04T17:13:45.598779Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-04-04T17:13:45.649038Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Status 7 Permission Denied 2025-04-04T17:13:45.649770Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-04-04T17:13:45.659154Z node 1 :GRPC_CLIENT DEBUG: [51700007be08] Response AuthenticateResponse { subject { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-04-04T17:13:44.896685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507383370793716:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:44.898027Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a8c/r3tmp/tmpgzqzXW/pdisk_1.dat 2025-04-04T17:13:45.270695Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:45.275836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:45.275931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:45.277569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3753 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:45.590584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:45.610545Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:45.628168Z node 1 :GRPC_CLIENT DEBUG: [51700007be08]{trololo} Connect to grpc://localhost:29406 2025-04-04T17:13:45.629179Z node 1 :GRPC_CLIENT DEBUG: [51700007be08]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-04T17:13:45.649455Z node 1 :GRPC_CLIENT DEBUG: [51700007be08]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop >> KqpQueryService::TableSink_HtapComplex+withOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-04-04T17:13:45.059217Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507386287743318:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:45.059291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a85/r3tmp/tmpYPuzm9/pdisk_1.dat 2025-04-04T17:13:45.515283Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:45.517869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:45.517993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:45.523243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:45.873515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:45.898974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:45.900397Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Connect to grpc://localhost:6002 2025-04-04T17:13:45.951693Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-04T17:13:45.962466Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6002: Failed to connect to remote host: Connection refused 2025-04-04T17:13:45.964160Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-04T17:13:45.964845Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6002: Failed to connect to remote host: Connection refused 2025-04-04T17:13:46.965708Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-04T17:13:46.969308Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Status 5 Not Found 2025-04-04T17:13:46.969759Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-04T17:13:46.972899Z node 1 :GRPC_CLIENT DEBUG: [51700008c108] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } |89.1%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-04-04T17:13:45.210834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507385525394053:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:45.210862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ab2/r3tmp/tmpH4c7ix/pdisk_1.dat 2025-04-04T17:13:45.687672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:45.687784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:45.687994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:45.691843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17622 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:45.985922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:46.018692Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:46.069866Z node 1 :GRPC_CLIENT DEBUG: [51700006d008] Connect to grpc://localhost:1246 2025-04-04T17:13:46.070805Z node 1 :GRPC_CLIENT DEBUG: [51700006d008] Request ListFoldersRequest { id: "i_am_exists" } 2025-04-04T17:13:46.094183Z node 1 :GRPC_CLIENT DEBUG: [51700006d008] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-04-04T17:13:46.095392Z node 1 :GRPC_CLIENT DEBUG: [517000016988] Connect to grpc://localhost:61155 2025-04-04T17:13:46.096131Z node 1 :GRPC_CLIENT DEBUG: [517000016988] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-04-04T17:13:46.105913Z node 1 :GRPC_CLIENT DEBUG: [517000016988] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-04-04T17:13:46.110677Z node 1 :GRPC_CLIENT DEBUG: [517000016988] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-04-04T17:13:46.112993Z node 1 :GRPC_CLIENT DEBUG: [517000016988] Status 5 Not Found 2025-04-04T17:13:46.113539Z node 1 :GRPC_CLIENT DEBUG: [51700006d008] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-04-04T17:13:46.115000Z node 1 :GRPC_CLIENT DEBUG: [51700006d008] Status 5 Not Found ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-04-04T17:13:45.401398Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507388044952229:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:45.402037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ac0/r3tmp/tmpy3Obvm/pdisk_1.dat 2025-04-04T17:13:45.839594Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:45.860012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:45.860113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-04T17:13:45.861621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:46.154633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> KqpQueryService::Write >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> KqpQueryService::DdlColumnTable >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery >> KqpQueryService::ReadDatashardAndColumnshard >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> KqpQueryService::SessionFromPoolError >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager >> KqpQueryService::ExecuteCollectMeta >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 >> KqpQueryService::TableSink_OltpLiteralUpsert [GOOD] >> KqpQueryService::TableSink_OltpInsert >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpJoinOrder::TPCDS34+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::CTASWithoutPerStatement ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-04-04T17:13:44.997437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507382945470239:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:44.997481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a9b/r3tmp/tmpl8rJ1w/pdisk_1.dat 2025-04-04T17:13:45.351050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:45.371079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:45.371166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:45.373111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:45.686810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:45.710996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:48.263849Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507401512341162:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.263887Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a9b/r3tmp/tmpneYZY6/pdisk_1.dat 2025-04-04T17:13:48.551420Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.571692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.571769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.574794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5891 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:13:48.807126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:48.813631Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TopicService::UnknownConsumer [GOOD] |89.1%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> KqpQueryServiceScripts::ParseScript [GOOD] >> KqpQueryServiceScripts::ListScriptExecutions >> KqpDocumentApi::RestrictWrite [GOOD] >> KqpDocumentApi::AllowRead >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> KqpService::SessionBusy [GOOD] >> KqpService::SessionBusyRetryOperation >> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD] >> KqpQueryService::TableSink_OlapInsert >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> TopicService::UnknownTopic >> KqpQueryServiceScripts::ValidateScript >> KqpQueryService::DdlPermission [GOOD] >> KqpQueryService::DdlSecret >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] >> OlapEstimationRowsCorrectness::TPCH21 >> KqpQueryService::Write [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> TPersQueueTest::SameOffset [GOOD] >> TPersQueueTest::SchemeOperationsTest >> KqpQueryService::SessionFromPoolError [GOOD] >> KqpQueryService::SessionFromPoolSuccess >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> KqpQueryService::ExecuteCollectMeta [GOOD] >> KqpQueryService::ExecStatsPlan >> 
KqpQueryService::ReadDatashardAndColumnshard [GOOD] >> KqpQueryService::ReadManyRanges >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery [GOOD] >> KqpQueryService::CloseSessionsWithLoad >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> KqpQueryService::TableSink_OltpUpsert [GOOD] >> KqpQueryService::TableSink_OltpUpdate >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition >> TPersQueueTest::Cache [GOOD] >> TPersQueueTest::CacheHead >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpQueryService::CreateAndAlterTopic [GOOD] >> KqpQueryService::CreateOrDropTopicOverTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> KqpDocumentApi::AllowRead [GOOD] >> KqpDocumentApi::RestrictAlter >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> KqpQueryService::ExecuteQueryMultiResult [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> KqpService::SessionBusyRetryOperation [GOOD] >> KqpService::RangeCache-UseCache >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> KqpQueryServiceScripts::ValidateScript [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2025-04-04T17:10:16.314676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:16.314739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:16.379654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:17.387163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:17.387217Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:17.423554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:18.222920Z node 3 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:18.222987Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:18.263976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:21.025640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:21.025713Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:21.066018Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:23.735678Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:23.735735Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:23.776953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:24.697184Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:24.697266Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:24.749216Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:25.266371Z node 6 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 7 2025-04-04T17:10:25.266762Z node 6 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 7 2025-04-04T17:10:25.266837Z node 6 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 7 2025-04-04T17:10:25.267014Z node 7 :TX_PROXY WARN: actor# [7:342:2086] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-04T17:10:26.048310Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:26.048395Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:26.099746Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:26.619232Z node 8 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? 
true selector_config: [] 2025-04-04T17:10:27.281920Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:27.281978Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:27.332775Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:28.371561Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-04-04T17:10:28.489593Z node 14 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:28.490020Z node 14 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:28.490802Z node 14 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13146260698701895151 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:28.549979Z node 12 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:28.550556Z node 12 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-04-04T17:10:28.550865Z node 12 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4016576452848388299 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:10:28.554685Z node 12 :BS_LOCALRECOVERY CRIT: VDISK[80000002:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 
RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:10:28.622875Z node 13 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:10:28.623342Z node 13 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:10:28.623587Z node 13 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/00185a/r3tmp/tmpMhCovL/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5323833240825892666 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 Sect ... 00-s[16/16]o)]} 2025-04-04T17:13:08.939395Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:08.939630Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:12.904547Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:12.904836Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:17.100870Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:17.101117Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:21.270484Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:21.270831Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:21.454878Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:21.455380Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:25.422150Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:25.422565Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:29.461138Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:29.461338Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:33.555545Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:33.555857Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:37.596439Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:37.596780Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:41.735400Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:41.735699Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:46.048728Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:46.049056Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:50.041290Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:50.041627Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:54.264741Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:54.264988Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:58.420928Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-04-04T17:13:58.421251Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> KqpQueryService::TableSink_OltpInsert [GOOD] >> KqpQueryService::TableSink_OltpInteractive >> KqpQueryService::ExecStatsPlan [GOOD] >> KqpQueryService::ExecStatsAst >> KqpQueryService::ReadManyRanges [GOOD] >> 
KqpQueryService::ReadManyRangesAndPoints >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::SeveralCTAS+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9+StreamLookupJoin-ColumnStore >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD] >> TPersQueueTest::StreamReadCommitAndStatusMsgs >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged [GOOD] >> KqpQueryService::ExecuteQueryWithWorkloadManager >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> TPersQueueTest::TopicServiceReadBudget [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites >> KqpQueryService::Ddl >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapDelete >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle >> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD] >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] Test command err: Trying to start YDB, gRPC: 30014, MsgBus: 27668 2025-04-04T17:13:48.103090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507397819863577:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.103322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018ad/r3tmp/tmpfQKlFh/pdisk_1.dat 2025-04-04T17:13:48.599033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.602756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.602879Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.606358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30014, node 1 2025-04-04T17:13:48.825658Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.825687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.825692Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.825804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27668 TClient is connected to server localhost:27668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.403418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.422605Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:51.360657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507410704766001:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.360767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.361081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507410704766013:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.364975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.376776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507410704766015:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:13:51.444277Z node 1 :TX_PROXY ERROR: Actor# [1:7489507410704766070:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Topic created 2025-04-04T17:13:51.927951Z node 1 :TX_PROXY ERROR: Actor# [1:7489507410704766240:2429] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/TempTopic\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.973599Z node 1 :TX_PROXY ERROR: Actor# [1:7489507410704766256:2438] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/TempTopic\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.973846Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710662, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TempTopic', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypePersQueueGroup, state: EPathStateNoChanges) 2025-04-04T17:13:51.973957Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGRkY2QzNmUtYWFiYzkwZDEtODk5YmZkZGQtYTExODYxMWQ=, ActorId: [1:7489507410704765972:2328], ActorState: ExecuteState, TraceId: 01jr0wpw2q7gzx4tbc0gnj94cd, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/TempTopic', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypePersQueueGroup, state: EPathStateNoChanges) Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743786831416, tx_id: 281474976710658 } } Scheme entry: { name: TempTopic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1743786831864, tx_id: 281474976710660 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-04T17:13:52.018759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.025261Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:13:52.025282Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:13:52.026391Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-04T17:13:52.026442Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Topic dropped Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743786831416, tx_id: 281474976710658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-04T17:13:52.050200Z node 1 :TX_PROXY ERROR: Actor# [1:7489507414999733622:2488] txid# 281474976710665, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T17:13:52.068256Z node 1 :TX_PROXY ERROR: Actor# [1:7489507414999733630:2492] txid# 281474976710666, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T17:13:52.068518Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGRkY2QzNmUtYWFiYzkwZDEtODk5YmZkZGQtYTExODYxMWQ=, ActorId: [1:7489507410704765972:2328], ActorState: ExecuteState, TraceId: 01jr0wpw5vax7r1vr6qf1dkz2c, Create QueryResponse for error on request, msg: Query failed, status: SCHEME_ERROR:
: Error: Path does not exist, code: 2003
: Error: Query invalidated on scheme/internal error during Scheme execution, code: 2019 Trying to start YDB, gRPC: 6028, MsgBus: 8661 2025-04-04T17:13:52.867246Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507417181697800:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:52.867399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018ad/r3tmp/tmpEEqlu3/pdisk_1.dat 2025-04-04T17:13:53.028742Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:53.046206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:53.046426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:53.048531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6028, node 2 2025-04-04T17:13:53.142566Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:53.142589Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:53.142599Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:53.142724Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8661 TClient is connected to server localhost:8661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:53.490497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:53.496488Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17 ... 
completed, doublechecking } 2025-04-04T17:14:02.340884Z node 3 :TX_PROXY ERROR: Actor# [3:7489507460785146611:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:03.286912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.433183Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507443605275136:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:03.433248Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:03.435265Z node 3 :TX_PROXY ERROR: Actor# [3:7489507465080114275:3713] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-04-04T17:14:03.435508Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710672, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-04-04T17:14:03.435664Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWMwYTYwZDUtNzRmYjAxYzUtODI2OWNmOTgtNTc4NTkz, ActorId: [3:7489507465080114263:2505], ActorState: ExecuteState, TraceId: 01jr0wq78j1zxeph85ethm9wbv, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743786842280, tx_id: 281474976710668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839886, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839613, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842189, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842245, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842021, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842049, tx_id: 281474976710663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842084, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839704, tx_id: 281474976710660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842147, tx_id: 281474976710665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786843365, tx_id: 281474976710671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839459, tx_id: 281474976710658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-04T17:14:03.487904Z node 3 :TX_PROXY ERROR: Actor# [3:7489507465080114298:3726] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-04-04T17:14:03.489124Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-04-04T17:14:03.489271Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWMwYTYwZDUtNzRmYjAxYzUtODI2OWNmOTgtNTc4NTkz, ActorId: [3:7489507465080114263:2505], ActorState: ExecuteState, TraceId: 01jr0wq7ae9y7x7akkr3b0akpj, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743786842280, tx_id: 281474976710668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839886, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839613, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842189, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842245, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842021, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842049, tx_id: 281474976710663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842084, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839704, tx_id: 281474976710660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842147, tx_id: 281474976710665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786843365, tx_id: 281474976710671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839459, tx_id: 281474976710658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-04T17:14:03.540134Z node 3 :TX_PROXY ERROR: Actor# [3:7489507465080114333:3752] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:03.540429Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710676, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T17:14:03.540563Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWMwYTYwZDUtNzRmYjAxYzUtODI2OWNmOTgtNTc4NTkz, ActorId: [3:7489507465080114263:2505], ActorState: ExecuteState, TraceId: 01jr0wq7c20hwvnpy020ttqha1, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T17:14:03.567575Z node 3 :TX_PROXY ERROR: Actor# [3:7489507465080114350:3761] txid# 281474976710678, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:03.567764Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710678, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T17:14:03.567887Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OWMwYTYwZDUtNzRmYjAxYzUtODI2OWNmOTgtNTc4NTkz, ActorId: [3:7489507465080114263:2505], ActorState: ExecuteState, TraceId: 01jr0wq7d38w9e7npb6ey65z0a, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743786842280, tx_id: 281474976710668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839886, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839613, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842189, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842245, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842021, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842049, tx_id: 281474976710663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842084, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839704, tx_id: 281474976710660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786842147, tx_id: 281474976710665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786843365, tx_id: 281474976710671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786839459, tx_id: 281474976710658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-04-04T17:14:03.600931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 >> KqpQueryServiceScripts::ExplainScript [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecution >> TopicService::UnknownTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] Test command err: Trying to start YDB, gRPC: 32696, MsgBus: 61157 2025-04-04T17:13:48.313123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507401164604348:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.318420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189a/r3tmp/tmpMxuFyv/pdisk_1.dat 2025-04-04T17:13:48.738550Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.741715Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.741796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.746183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32696, node 1 2025-04-04T17:13:48.899413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.899438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.899456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.899586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61157 TClient is connected to server localhost:61157 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.504962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.531088Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.545110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:49.736582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.890410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.977304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.635381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414049508025:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.635500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.853330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.887373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.956432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.024472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.063229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.131177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.181082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507418344475840:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.181149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.181344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507418344475845:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.184757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:52.196204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507418344475847:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:52.278003Z node 1 :TX_PROXY ERROR: Actor# [1:7489507418344475901:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:53.313487Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507401164604348:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.313584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2866, MsgBus: 8608 2025-04-04T17:13:54.187539Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507425370267133:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:54.187665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189a/r3tmp/tmptU3mD7/pdisk_1.dat 2025-04-04T17:13:54.344804Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2866, node 2 2025-04-04T17:13:54.376594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:54.376683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:54.384052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:54.432444Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:54.432476Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:54.432484Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:54.432590Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8608 TClient is connected to server localhost:8608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:13:54.843695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.852450Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:13:54.870859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.952174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:55.096611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:55.180854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.189657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507438255170784:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:57.189754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:57.230879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:57.267786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:57.300320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:57.333654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:57.399683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:57.485476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:57.578570Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507438255171310:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:57.578682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:57.578948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507438255171315:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:57.582905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:57.594526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507438255171317:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:13:57.655906Z node 2 :TX_PROXY ERROR: Actor# [2:7489507438255171372:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5780, MsgBus: 29332 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189a/r3tmp/tmp7TmyKx/pdisk_1.dat 2025-04-04T17:13:59.872299Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:59.878935Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:59.892628Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:59.892729Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:59.894260Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5780, node 3 2025-04-04T17:14:00.062822Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:00.062851Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:00.062860Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:00.062981Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29332 TClient is connected to server localhost:29332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:00.540539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:00.551648Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:00.564980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:00.631395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:00.828606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:00.905284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.175196Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507465015629995:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.175308Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.207232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.242907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.276285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.324034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.369559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.407990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.468616Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507465015630507:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.468736Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.473135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507465015630512:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.476912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:03.497493Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507465015630514:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:03.568393Z node 3 :TX_PROXY ERROR: Actor# [3:7489507465015630567:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> KqpQueryService::ReadManyRangesAndPoints [GOOD] >> KqpQueryService::TableSink_Htap+withOltpSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> KqpQueryService::TableSink_OltpUpdate [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> KqpQueryServiceScripts::ListScriptExecutions [GOOD] >> KqpQueryServiceScripts::Tcl >> KqpQueryService::TableSink_OltpInteractive [GOOD] >> KqpQueryService::ExecStatsAst [GOOD] >> TopicService::UseDoubleSlashInTopicPath >> KqpQueryService::SeveralCTAS+UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyRangesAndPoints [GOOD] Test command err: Trying to start YDB, gRPC: 2809, MsgBus: 2074 2025-04-04T17:13:50.985069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507407996153477:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:50.986005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188d/r3tmp/tmpykMAy3/pdisk_1.dat 2025-04-04T17:13:51.518816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:51.533215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:51.533288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:51.537051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2809, node 1 2025-04-04T17:13:51.698734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:51.698753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:51.698759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:51.698888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2074 TClient is connected to server localhost:2074 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:52.325185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.336431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:54.227394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507425176023178:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.227490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.227582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507425176023205:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.231800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:13:54.249663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507425176023207:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:13:54.317982Z node 1 :TX_PROXY ERROR: Actor# [1:7489507425176023258:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:54.575451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.692580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.839402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:54.839641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:54.839939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:54.840110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:54.840244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:54.840373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:54.840492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:54.840633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:13:54.840764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:13:54.840876Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:13:54.841019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:13:54.841155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507425176023502:2349];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:13:54.848374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:54.848460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:54.848909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:54.849045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:54.849159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:54.849292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:54.849400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:54.849536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:13:54.849672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:13:54.849851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:13:54.850004Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:13:54.850141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507425176023501:2348];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:13:54.883046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507425176023524:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normal ... 976710661; 2025-04-04T17:13:55.287560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-04-04T17:13:55.975725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507407996153477:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:55.975810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9139, MsgBus: 64943 2025-04-04T17:13:57.235689Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507438902246520:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:57.238925Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188d/r3tmp/tmpCXkecq/pdisk_1.dat 2025-04-04T17:13:57.363624Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:57.387687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:57.387760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:57.390757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9139, node 2 2025-04-04T17:13:57.437136Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:57.437157Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:57.437164Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:57.437282Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64943 TClient is connected to server localhost:64943 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:57.895686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.903576Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:00.371013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507451787148892:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.371106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.400788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:00.581850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507451787149444:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.581919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.582073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507451787149449:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.585007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:14:00.595809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507451787149451:2381], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:14:00.689192Z node 2 :TX_PROXY ERROR: Actor# [2:7489507451787149502:2686] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22968, MsgBus: 15886 2025-04-04T17:14:02.521292Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507457624219472:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:02.521342Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188d/r3tmp/tmpHhEGeC/pdisk_1.dat 2025-04-04T17:14:02.595624Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:02.624489Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:02.624576Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:02.626153Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22968, node 3 2025-04-04T17:14:02.710758Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:02.710782Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:02.710790Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:02.710894Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15886 TClient is connected to server localhost:15886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:03.213476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:03.222260Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:06.231559Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507474804089288:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.231673Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.253712Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:06.414379Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507474804089741:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.414483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.418539Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507474804089746:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.422712Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:14:06.436224Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507474804089748:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:14:06.501263Z node 3 :TX_PROXY ERROR: Actor# [3:7489507474804089799:2615] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryServiceScripts::ExecuteScript >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 13690, MsgBus: 2554 2025-04-04T17:13:48.087961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507398882034799:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.088008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189f/r3tmp/tmplljb7a/pdisk_1.dat 2025-04-04T17:13:48.536175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.536249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.545216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:48.573254Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13690, node 1 2025-04-04T17:13:48.727045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.727062Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.727068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.727177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2554 TClient is connected to server localhost:2554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:13:49.355918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.370784Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:51.256028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507411766937342:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.256131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.484742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.630863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507411766937446:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.630946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.631187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507411766937451:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.634697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.646006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507411766937453:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:13:51.702179Z node 1 :TX_PROXY ERROR: Actor# [1:7489507411766937504:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30251, MsgBus: 30086 2025-04-04T17:13:52.663933Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507418848202916:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:52.664003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189f/r3tmp/tmpjJypp5/pdisk_1.dat 2025-04-04T17:13:52.785529Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:52.811958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:52.812049Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:52.813768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30251, node 2 2025-04-04T17:13:52.934868Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:52.934902Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:52.934914Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:52.935037Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30086 TClient is connected to server localhost:30086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:13:53.372581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.669196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507431733105441:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.669352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.680596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.739532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507431733105544:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.739617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.739971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507431733105549:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.743900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:13:55.755709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507431733105551:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:13:55.814862Z node 2 :TX_PROXY ERROR: Actor# [2:7489507431733105602:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:56.144172Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-04T17:13:56.155248Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:13:56.155429Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:13:56.155645Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489507436028072985:2366], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [2:7489507436028072969:2366]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[2:7489507436028072985:2366].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:13:56.156244Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489507436028072978:2366], SessionActorId: [2:7489507436028072969:2366], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7489507436028072969:2366]. isRollback=0 2025-04-04T17:13:56.156489Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjdiM2UzODMtNTBhMDY0MjctNDRiYjFiZGUtMjMwNGFlMjY=, ActorId: [2:7489507436028072969:2366], ActorState: ExecuteState, TraceId: 01jr0wq03fawh9tsczjwn68n8s, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7489507436028072979:2366] from: [2:7489507436028072978:2366] 2025-04-04T17:13:56.156576Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489507436028072979:2366] TxId: 281474976715663. Ctx: { TraceId: 01jr0wq03fawh9tsczjwn68n8s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiM2UzODMtNTBhMDY0MjctNDRiYjFiZGUtMjMwNGFlMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:13:56.157482Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjdiM2UzODMtNTBhMDY0MjctNDRiYjFiZGUtMjMwNGFlMjY=, ActorId: [2:7489507436028072969:2366], ActorState: ExecuteState, TraceId: 01jr0wq03fawh9tsczjwn68n8s, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:13:57.666650Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507418848202916:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:57.666743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 6621, MsgBus: 24330 2025-04-04T17:14:02.122259Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507458533130596:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:02.122320Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189f/r3tmp/tmpMdWYx1/pdisk_1.dat 2025-04-04T17:14:02.321423Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:02.332370Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:02.332460Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:02.335407Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6621, node 3 2025-04-04T17:14:02.449417Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:02.449448Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:02.449457Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:02.449595Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24330 TClient is connected to server localhost:24330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:14:02.878750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.432783Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507471418033143:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:05.433021Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:05.454550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.725416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:14:06.077585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507475713001805:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.077680Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.083442Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507475713001816:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.083517Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.083706Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507475713001821:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.087684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:14:06.108021Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-04-04T17:14:06.108247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507475713001823:2449], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:14:06.188401Z node 3 :TX_PROXY ERROR: Actor# [3:7489507475713001901:3266] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:07.126355Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507458533130596:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:07.126435Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecStatsAst [GOOD] Test command err: Trying to start YDB, gRPC: 22993, MsgBus: 7789 2025-04-04T17:13:51.565107Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507413591433306:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:51.565285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001880/r3tmp/tmpxtCycg/pdisk_1.dat 2025-04-04T17:13:52.008923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:52.025281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:52.025341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:52.027014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22993, node 1 2025-04-04T17:13:52.138802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:52.138824Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:52.138830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:52.138948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7789 TClient is connected to server localhost:7789 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:52.669702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.690215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.852760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:53.034395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:53.101378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.557167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426476336755:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.557297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.832185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.862347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.889105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.914179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.947534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.997998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.055609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507430771304562:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.055692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.055904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507430771304567:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.059217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:55.071218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507430771304569:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:55.164224Z node 1 :TX_PROXY ERROR: Actor# [1:7489507430771304624:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14024, MsgBus: 18019 2025-04-04T17:13:57.011454Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507436338719763:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:57.011496Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001880/r3tmp/tmptMlNQW/pdisk_1.dat 2025-04-04T17:13:57.097291Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:57.118800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:57.118889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:57.121396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14024, node 2 2025-04-04T17:13:57.175597Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:57.175620Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:57.175626Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:57.175725Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18019 TClient is connected to server localhost:18019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:57.607419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:57.615943Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:13:57.627236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.690263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.843975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.908545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:59.903331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507444928656093:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T ... 46644480 2025-04-04T17:14:00.089808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:00.171220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:00.216550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:00.286301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507449223623906:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.286379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.286547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507449223623911:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:00.290380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:00.301086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507449223623913:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:00.380708Z node 2 :TX_PROXY ERROR: Actor# [2:7489507449223623968:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31702, MsgBus: 2589 2025-04-04T17:14:02.353221Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507460363877657:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001880/r3tmp/tmpW8Hzed/pdisk_1.dat 2025-04-04T17:14:02.407061Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:02.436363Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31702, node 3 2025-04-04T17:14:02.472416Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:02.472550Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:02.485890Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:02.519476Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:02.519489Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:02.519494Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:02.519580Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2589 TClient is connected to server localhost:2589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:02.892734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.908422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:02.974312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:03.121875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:03.201816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.531483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507473248781129:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:05.531568Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:05.570445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.609170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.645324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.675553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.709870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.750368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.815965Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507473248781639:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:05.816074Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:05.816331Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507473248781644:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:05.820712Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:05.839610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507473248781646:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:05.933135Z node 3 :TX_PROXY ERROR: Actor# [3:7489507473248781701:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:07.178255Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507481838716570:2495], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: test_ast_column 2025-04-04T17:14:07.178484Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjEyYzFjNTQtOTBlNmE2MTYtNWYyM2M1ZGEtOGNhNDZmNTQ=, ActorId: [3:7489507481838716568:2494], ActorState: ExecuteState, TraceId: 01jr0wqax645gqekvfrxv0png0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:14:07.249903Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489507481838716605:2499], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=M2QxMmVlYTItNTRhZTVkZGYtMzQ4MzQ5MjQtNmUwZTNjYmY=. TraceId : 01jr0wqayjfm5p2f8b3709pyah. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional }. 2025-04-04T17:14:07.251655Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2QxMmVlYTItNTRhZTVkZGYtMzQ4MzQ5MjQtNmUwZTNjYmY=, ActorId: [3:7489507481838716598:2499], ActorState: ExecuteState, TraceId: 01jr0wqayjfm5p2f8b3709pyah, Create QueryResponse for error on request, msg: 2025-04-04T17:14:07.348278Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507460363877657:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:07.348424Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61782, MsgBus: 2046 2025-04-04T17:13:51.358023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507412932935256:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:51.358711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001882/r3tmp/tmpBAQnS6/pdisk_1.dat 2025-04-04T17:13:51.781869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:51.781987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:51.783821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:51.808226Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61782, node 1 2025-04-04T17:13:51.968777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:51.968799Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:51.968805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:51.968924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2046 TClient is connected to server localhost:2046 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:52.550161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.562950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:52.576165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.709168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.896234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.965245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.550159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507425817838863:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.550279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.797805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.832420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.886656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.925905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.955005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.030943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.087701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507430112806678:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.087798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.088153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507430112806683:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.091692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:55.102103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507430112806685:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:55.166559Z node 1 :TX_PROXY ERROR: Actor# [1:7489507430112806738:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1356, MsgBus: 64680 2025-04-04T17:13:56.757842Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507434684592438:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:56.757898Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001882/r3tmp/tmpwDRkiF/pdisk_1.dat 2025-04-04T17:13:56.859197Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1356, node 2 2025-04-04T17:13:56.899167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:56.899246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:56.901360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:56.934724Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:56.934742Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:56.934749Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:56.934833Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64680 TClient is connected to server localhost:64680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:57.351865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.358721Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:57.367862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:57.446079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.576916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:57.653390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:59.634474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507447569496086:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.634559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.665295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.701591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.734064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.772889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.807516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.857087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.905840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507447569496599:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.905913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.906092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507447569496604:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.909358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:59.916875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507447569496606:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:00.004855Z node 2 :TX_PROXY ERROR: Actor# [2:7489507451864463955:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:01.761859Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507434684592438:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:01.761923Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19519, MsgBus: 13215 2025-04-04T17:14:02.636337Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507459352372723:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:02.636378Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001882/r3tmp/tmpjT1kph/pdisk_1.dat 2025-04-04T17:14:02.759633Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19519, node 3 2025-04-04T17:14:02.797707Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:02.797780Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:02.798906Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:02.870128Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:02.870148Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:02.870156Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:02.870284Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13215 TClient is connected to server localhost:13215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:03.395756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:03.401432Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:06.219418Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507476532242543:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.230696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.231986Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507476532242578:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:06.236363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:14:06.249273Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507476532242580:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:14:06.325223Z node 3 :TX_PROXY ERROR: Actor# [3:7489507476532242631:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:06.427060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-04T17:14:06.635571Z node 3 :TX_PROXY ERROR: Actor# [3:7489507476532242913:2502] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:06.652793Z node 3 :TX_PROXY ERROR: Actor# [3:7489507476532242920:2507] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NTExMTQ2MTQtM2I3Mzc4MTctYWNlMzZiYTUtMjg5YmNmMTI=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:06.656107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:06.880016Z node 3 :TX_PROXY ERROR: Actor# [3:7489507476532243113:2624] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:06.881999Z node 3 :TX_PROXY ERROR: Actor# [3:7489507476532243120:2629] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NTExMTQ2MTQtM2I3Mzc4MTctYWNlMzZiYTUtMjg5YmNmMTI=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:06.884809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.638418Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507459352372723:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:07.638499Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] >> KqpQueryService::Ddl [GOOD] >> KqpQueryService::CreateTempTable >> KqpQueryService::ExecuteQueryPure >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery >> KqpDocumentApi::RestrictDrop [GOOD] >> KqpService::SwitchCache-UseCache >> TPersQueueTest::WriteNonExistingPartition [GOOD] >> 
TPersQueueTest::WriteNonExistingTopic >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] Test command err: Trying to start YDB, gRPC: 30480, MsgBus: 14609 2025-04-04T17:13:48.115753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507399315301893:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.115825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a1/r3tmp/tmprsdWkJ/pdisk_1.dat 2025-04-04T17:13:48.628220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.644941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.645039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.648383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30480, node 1 2025-04-04T17:13:48.729981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.730006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.730014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.730153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14609 TClient is connected to server localhost:14609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.454470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.473988Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:51.375988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507412200204453:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.376021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507412200204441:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.376191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.379903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.389797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507412200204455:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:13:51.494945Z node 1 :TX_PROXY ERROR: Actor# [1:7489507412200204506:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.737356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-04T17:13:51.892804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.932769Z node 1 :TX_PROXY ERROR: Actor# [1:7489507412200204751:2484] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.944424Z node 1 :TX_PROXY ERROR: Actor# [1:7489507412200204758:2489] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/Mjk5MWJmZDktNmZkYmQ1ZTctYzE1MjY1OTYtZjMwNmI0YzE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.967912Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:13:51.989254Z node 1 :TX_PROXY ERROR: Actor# [1:7489507412200204818:2536] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.991082Z node 1 :TX_PROXY ERROR: Actor# [1:7489507412200204825:2541] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/Mjk5MWJmZDktNmZkYmQ1ZTctYzE1MjY1OTYtZjMwNmI0YzE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.993458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.239809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.344155Z node 1 :TX_PROXY ERROR: Actor# [1:7489507416495172299:2647] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:52.346163Z node 1 :TX_PROXY ERROR: Actor# [1:7489507416495172306:2652] txid# 281474976710675, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/Mjk5MWJmZDktNmZkYmQ1ZTctYzE1MjY1OTYtZjMwNmI0YzE=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, 
state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:52.368397Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:13:52.380673Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507416495172362:2409], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:52.380969Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzRmNTlkYTktZjQ0YmFkY2YtMTE4Zjc1YzAtNmYzZTk4ZTI=, ActorId: [1:7489507416495172360:2408], ActorState: ExecuteState, TraceId: 01jr0wpwfgfjy13xj9jjetrahw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:52.407254Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507416495172372:2414], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:52.407581Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGZiMTUwZTQtZWQzYmY3MDItMzMyYjQyNzYtYzM4NTAzOGU=, ActorId: [1:7489507416495172370:2413], ActorState: ExecuteState, TraceId: 01jr0wpwgcbtcmrbhh4fqmgdfm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 1973, MsgBus: 5557 2025-04-04T17:13:53.144435Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507423139228026:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.167167Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a1/r3tmp/tmplqvtFG/pdisk_1.dat 2025-04-04T17:13:53.267765Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:53.279309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:53.279385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:53.280580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1973, node 2 2025-04-04T17:13:53.338676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:53.338716Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:53.338723Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:53.338816Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5557 TClient is connected to server localhost:5557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 ... 
on part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.157032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.201974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.279733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.336107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507456176997393:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.336221Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.336441Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507456176997398:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.340337Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:01.352285Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507456176997400:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:01.447351Z node 3 :TX_PROXY ERROR: Actor# [3:7489507456176997458:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:02.378322Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507438997125908:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:02.378530Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:02.520595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.613046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.704519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.089253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.290013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.395558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.596846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.792690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.919616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5378, MsgBus: 9828 2025-04-04T17:14:05.122992Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507474671937898:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a1/r3tmp/tmpWx0H45/pdisk_1.dat 2025-04-04T17:14:05.182657Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:05.289026Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:05.303658Z node 4 :HIVE WARN: 
HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:05.303749Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:05.311250Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5378, node 4 2025-04-04T17:14:05.382829Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:05.382856Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:05.382871Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:05.383005Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9828 TClient is connected to server localhost:9828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:05.978912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.990687Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:09.055009Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489507491851807542:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.055096Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.055158Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489507491851807577:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.059528Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:14:09.075428Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:14:09.076581Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489507491851807579:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:14:09.164885Z node 4 :TX_PROXY ERROR: Actor# [4:7489507491851807630:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:09.261639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-04T17:14:09.538268Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7489507491851807779:2357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:14:09.539971Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Yjg3ZjE2YTUtZDMzYTA0OGMtZmU3YTE3Ny01M2E0ZGMwMQ==, ActorId: [4:7489507491851807777:2356], ActorState: ExecuteState, TraceId: 01jr0wqd7a5wkf2stdjtnnykc6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:14:09.577485Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.619875Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:10.108523Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489507474671937898:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:10.108605Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::DdlUser >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 28743, MsgBus: 23929 2025-04-04T17:13:48.101989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507400898519711:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.102453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018b9/r3tmp/tmpmvMNfY/pdisk_1.dat 2025-04-04T17:13:48.536580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.536696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.539485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:48.581737Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28743, node 1 2025-04-04T17:13:48.706899Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.706924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.706931Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.707054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23929 TClient is connected to server localhost:23929 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.414351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.466141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.616132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.763245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.826004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.422304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507413783423235:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.422396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.637454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.694569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.726485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.756527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.824561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.858088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.920373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507413783423748:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.920442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.921055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507413783423753:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.925317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.934930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507413783423756:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:52.029373Z node 1 :TX_PROXY ERROR: Actor# [1:7489507418078391106:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:52.910109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.018674Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507418078391429:2498], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-04T17:13:53.018906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzZmYmIwOGItZDBmY2NmNTctN2M4Y2VlY2EtMmRhM2FjMmI=, ActorId: [1:7489507418078391359:2488], ActorState: ExecuteState, TraceId: 01jr0wpx30bw810e2p7zry7kq7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-04T17:13:53.097973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507400898519711:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.098018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30998, MsgBus: 27898 2025-04-04T17:13:53.913838Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507419312844282:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.913877Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018b9/r3tmp/tmpAAs8g5/pdisk_1.dat 2025-04-04T17:13:54.055570Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:54.069015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:54.069091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:54.071820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30998, node 2 2025-04-04T17:13:54.118070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:54.118093Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:54.118100Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:54.118209Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27898 TClient is connected to server localhost:27898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:54.560205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterS ... 
boperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.887935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.915297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.984518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.030550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.072714Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507464006841604:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.072814Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.073088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507464006841609:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:03.076792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:03.087071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507464006841611:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:03.166045Z node 3 :TX_PROXY ERROR: Actor# [3:7489507464006841665:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:04.066987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-04-04T17:14:04.150892Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31664, MsgBus: 1386 2025-04-04T17:14:05.239521Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507474406786392:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:05.239572Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018b9/r3tmp/tmpHp4jC8/pdisk_1.dat 2025-04-04T17:14:05.426765Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:05.426858Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:05.443647Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:05.445257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31664, node 4 2025-04-04T17:14:05.522430Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:05.522454Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:05.522463Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:05.522591Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1386 TClient is connected to server localhost:1386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:06.072503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.079100Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:06.088717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:06.172608Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.392301Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.487147Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:08.848490Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489507487291690046:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.848593Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.879640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:08.930001Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.011034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.056252Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.135252Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.193745Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.369493Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489507491586657869:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.369606Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.369861Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489507491586657874:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.373365Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:09.391356Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489507491586657876:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:09.462479Z node 4 :TX_PROXY ERROR: Actor# [4:7489507491586657932:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:10.242368Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489507474406786392:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:10.242441Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:10.687555Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive >> KqpQueryService::TableSink_Olap_Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 27960, MsgBus: 22925 2025-04-04T17:13:50.471446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507408865911089:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:50.471561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001891/r3tmp/tmpbtDaWC/pdisk_1.dat 2025-04-04T17:13:50.794757Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27960, node 1 2025-04-04T17:13:50.883279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:50.883403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:50.885318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:50.910254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:50.910278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:50.910285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:50.910407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22925 TClient is connected to server localhost:22925 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:51.549183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.567154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:51.577647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:51.760805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.914176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.005584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:53.763482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507421750814748:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:53.763633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.042347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.071954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.099359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.125849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.157226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.228035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.320618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426045782566:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.320691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.320919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426045782571:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.324813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:54.338900Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:13:54.339132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507426045782573:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:54.442031Z node 1 :TX_PROXY ERROR: Actor# [1:7489507426045782630:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:55.474181Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507408865911089:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:55.474264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8431, MsgBus: 9478 2025-04-04T17:13:56.094753Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507432644034646:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:56.094799Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001891/r3tmp/tmpFFlw0e/pdisk_1.dat 2025-04-04T17:13:56.191990Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8431, node 2 2025-04-04T17:13:56.239062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:56.239143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:56.241109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:56.252716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:56.252746Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:56.252753Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:56.252860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9478 TClient is connected to server localhost:9478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:13:56.687695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:56.704271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:56.750796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:56.889514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2 ... operation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.464763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:59.525654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507445528938801:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.525729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.525853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507445528938806:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:59.528975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:59.542484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507445528938808:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:13:59.632819Z node 2 :TX_PROXY ERROR: Actor# [2:7489507445528938864:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:00.524309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:00.525724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:00.527164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.237346Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507432644034646:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:01.237505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11665, MsgBus: 12902 2025-04-04T17:14:04.060499Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507468434891456:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:04.060549Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001891/r3tmp/tmpeKqpyz/pdisk_1.dat 2025-04-04T17:14:04.260447Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:04.274364Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:04.274453Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:04.276212Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11665, node 3 2025-04-04T17:14:04.362775Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:04.362800Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:04.362810Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:04.362933Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12902 TClient is connected to server localhost:12902 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:04.865392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.898223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.006737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.182744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.306646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:07.689647Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507481319795093:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:07.689750Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:07.722578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.796279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.833622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.879188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.962234Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.999752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:08.090394Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507485614762907:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.090514Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.091043Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507485614762912:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.095230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:08.106730Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507485614762914:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:08.166265Z node 3 :TX_PROXY ERROR: Actor# [3:7489507485614762969:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:09.064755Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507468434891456:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:09.064837Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:09.155824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.157359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.163509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.058354Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786852080, txId: 281474976715708] shutting down >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] Test command err: Trying to start YDB, gRPC: 19713, MsgBus: 22843 2025-04-04T17:13:51.016058Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507414053381534:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:51.016130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188a/r3tmp/tmpOsLBKw/pdisk_1.dat 2025-04-04T17:13:51.421783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:51.453040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:51.453146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:51.456286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19713, node 1 2025-04-04T17:13:51.576539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:51.576561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:51.576568Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:51.576685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:22843 TClient is connected to server localhost:22843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:52.084616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.111130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.283162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.441955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.515053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.220395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426938285173:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.220479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.470200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.505233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.538069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.576948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.649773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.721589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.819896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426938285695:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.819976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.820331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426938285700:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.823146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:54.832769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507426938285702:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:54.929931Z node 1 :TX_PROXY ERROR: Actor# [1:7489507426938285758:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:55.762442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.763869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.764992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:13:56.015832Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507414053381534:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:56.015901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20842, MsgBus: 6014 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188a/r3tmp/tmpKw8jIL/pdisk_1.dat 2025-04-04T17:13:58.369142Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:58.371542Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:58.390089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:58.390164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:58.391506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20842, node 2 2025-04-04T17:13:58.452897Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:58.452914Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:58.452922Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:58.453019Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6014 TClient is connected to server localhost:6014 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:13:58.883553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.891139Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:13:58.899228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:58.991102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... le, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.392196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.461276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.503435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.557278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507455493225015:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.557355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.557661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507455493225020:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.561349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:01.573563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507455493225022:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:01.672960Z node 2 :TX_PROXY ERROR: Actor# [2:7489507455493225077:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:02.594016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.595452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.596500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7518, MsgBus: 15129 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188a/r3tmp/tmp01WZL4/pdisk_1.dat 2025-04-04T17:14:05.594382Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507472296624857:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:05.651912Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:05.736647Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:05.742968Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:05.743045Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:05.746531Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7518, node 3 2025-04-04T17:14:05.840798Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:05.840817Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:05.840824Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:05.840935Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15129 TClient is connected to server localhost:15129 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:06.300382Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.310904Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:06.332860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.439565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.649889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.745518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:08.971402Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507485181528348:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.971476Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.014687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.058488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.094752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.168321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.205842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.238696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.329297Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507489476496162:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.329357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.331612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507489476496167:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:09.335501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:09.350422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507489476496170:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:09.425378Z node 3 :TX_PROXY ERROR: Actor# [3:7489507489476496225:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:10.538350Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507472296624857:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:10.542851Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:10.672147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:10.673492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:10.675185Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] >> KqpQueryService::ExecStats >> KqpService::RangeCache-UseCache [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30058, MsgBus: 12693 2025-04-04T17:13:48.129381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507401711940846:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.129442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189d/r3tmp/tmp7RdwqR/pdisk_1.dat 2025-04-04T17:13:48.604022Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.606361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.606488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.611961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30058, node 1 2025-04-04T17:13:48.722724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.722746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.722763Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T17:13:48.722902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12693 TClient is connected to server localhost:12693 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.405474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.435491Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:51.151693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414596843380:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.151811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.376961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.505291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414596843485:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.505395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.505726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414596843490:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.509183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.519782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507414596843492:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:13:51.603475Z node 1 :TX_PROXY ERROR: Actor# [1:7489507414596843543:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:51.946895Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507414596843635:2373], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-04-04T17:13:51.948108Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2UyN2U2ZmUtNTZlOTBkOGUtMzdjYTQyZjItNDhkMDQyMTU=, ActorId: [1:7489507414596843633:2372], ActorState: ExecuteState, TraceId: 01jr0wpw1na41bnbs3sbthgpsg, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:13:53.134383Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507401711940846:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.134453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 4025, MsgBus: 22552 2025-04-04T17:13:57.762094Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507436623743735:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:57.762136Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189d/r3tmp/tmpZ6Vv4a/pdisk_1.dat 2025-04-04T17:13:57.958677Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:57.960945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:57.961021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:57.967215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4025, node 2 2025-04-04T17:13:58.065785Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:58.065805Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:58.065827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:58.065923Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22552 TClient is connected to server localhost:22552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:13:58.650984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:58.670556Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:01.058356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507453803613584:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.058437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.067727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.159161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507453803613687:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.159250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.159394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507453803613692:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:01.163184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:14:01.174139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507453803613694:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:14:01.248968Z node 2 :TX_PROXY ERROR: Actor# [2:7489507453803613745:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } WAIT_INDEXATION: 0 2025-04-04T17:14:02.763409Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507436623743735:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:02.763479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 11505, MsgBus: 27191 2025-04-04T17:14:07.876654Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507479742529020:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:07.876711Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189d/r3tmp/tmp4T9qg5/pdisk_1.dat 2025-04-04T17:14:08.174791Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:08.210786Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:08.210868Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:08.219239Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11505, node 3 2025-04-04T17:14:08.438798Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:08.438821Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:08.438828Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:08.438944Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27191 TClient is connected to server localhost:27191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:09.123523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:09.138595Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:11.739265Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507496922398855:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:11.739344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:11.767490Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:11.975083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.278585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507501217367513:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.278686Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.278916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507501217367518:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.283770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:14:12.301471Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507501217367520:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:14:12.351523Z node 3 :TX_PROXY ERROR: Actor# [3:7489507501217367587:3241] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:12.878342Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507479742529020:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:12.884858Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::StreamExecuteQuery >> KqpQueryService::CreateTempTable [GOOD] >> KqpQueryService::DdlCache >> KqpQueryService::TableSink_OlapDelete [GOOD] >> TPersQueueTest::DirectReadCleanCache [GOOD] >> TPersQueueTest::DirectReadRestartPQRB ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 63592, MsgBus: 8818 2025-04-04T17:13:48.354106Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507398629119744:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.354150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001895/r3tmp/tmpph6owp/pdisk_1.dat 2025-04-04T17:13:48.821360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.821464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.833800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:48.866013Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63592, node 1 2025-04-04T17:13:49.046336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:49.046359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:49.046366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:49.046499Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8818 TClient is connected to server localhost:8818 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.582852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.624450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.822719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.997165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:50.076689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.741173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507411514023401:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.741269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.025629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.059681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.093085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.123411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.160664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.223620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.318994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507415808991215:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.319093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.319329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507415808991220:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.323520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:52.337029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507415808991222:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:52.430587Z node 1 :TX_PROXY ERROR: Actor# [1:7489507415808991280:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:53.354678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507398629119744:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.355222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:53.355627Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2025-04-04T17:13:53.355702Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-04-04T17:13:53.355734Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-04-04T17:13:53.355762Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-04-04T17:13:53.355784Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-04-04T17:13:53.355806Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-04-04T17:13:53.355830Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2025-04-04T17:13:53.358555Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2025-04-04T17:13:53.366030Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODgyOTI0ZWEtODhjMmVlYjYtNDZhOTIwMWItOTVhZTUwNTY=, 
ActorId: [1:7489507420103958829:2488], ActorState: ExecuteState, TraceId: 01jr0wpxeaf35jyy6xs9c8jr6k, Reply query error, msg: Pending previous query completion proxyRequestId: 15 Trying to start YDB, gRPC: 7611, MsgBus: 5215 2025-04-04T17:13:54.297414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507427320739523:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:54.297476Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001895/r3tmp/tmp18ICtL/pdisk_1.dat 2025-04-04T17:13:54.437668Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7611, node 2 2025-04-04T17:13:54.484300Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:54.484403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:54.490483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:5 ... ng previous query completion proxyRequestId: 49 2025-04-04T17:13:59.125997Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmI1NjQ1NGQtMTJkMTBjYS1iODM0N2I0Yi0zYzlhYzEzOQ==, ActorId: [2:7489507448795578882:2562], ActorState: ExecuteState, TraceId: 01jr0wq32n96gfj9phqks2r5y8, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-04-04T17:13:59.138908Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmI1NjQ1NGQtMTJkMTBjYS1iODM0N2I0Yi0zYzlhYzEzOQ==, ActorId: [2:7489507448795578882:2562], ActorState: ExecuteState, TraceId: 01jr0wq32n96gfj9phqks2r5y8, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-04-04T17:13:59.148828Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmI1NjQ1NGQtMTJkMTBjYS1iODM0N2I0Yi0zYzlhYzEzOQ==, ActorId: [2:7489507448795578882:2562], ActorState: ExecuteState, TraceId: 01jr0wq32n96gfj9phqks2r5y8, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-04-04T17:13:59.154880Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmI1NjQ1NGQtMTJkMTBjYS1iODM0N2I0Yi0zYzlhYzEzOQ==, ActorId: [2:7489507448795578882:2562], ActorState: ExecuteState, TraceId: 01jr0wq32n96gfj9phqks2r5y8, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-04-04T17:13:59.277005Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y4ZGNkOWYtNjY2MjlhOTMtMzQwNjc0MzgtNjIzNWZlNA==, ActorId: [2:7489507448795578928:2573], ActorState: ExecuteState, TraceId: 01jr0wq37c1a5sapwnseq10qz4, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-04-04T17:13:59.277080Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y4ZGNkOWYtNjY2MjlhOTMtMzQwNjc0MzgtNjIzNWZlNA==, ActorId: [2:7489507448795578928:2573], ActorState: ExecuteState, TraceId: 01jr0wq37c1a5sapwnseq10qz4, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-04-04T17:13:59.277111Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y4ZGNkOWYtNjY2MjlhOTMtMzQwNjc0MzgtNjIzNWZlNA==, ActorId: [2:7489507448795578928:2573], ActorState: ExecuteState, TraceId: 01jr0wq37c1a5sapwnseq10qz4, Reply query error, msg: Pending 
previous query completion proxyRequestId: 60 2025-04-04T17:13:59.298457Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507427320739523:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:59.298514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:59.418090Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDc2OTY3Yi04ZTUwNzU5ZC1hMDY1NWMyMS1iYWNkM2UxMg==, ActorId: [2:7489507448795578969:2583], ActorState: ExecuteState, TraceId: 01jr0wq3brd86g225neq1h5f6q, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-04-04T17:13:59.426752Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDc2OTY3Yi04ZTUwNzU5ZC1hMDY1NWMyMS1iYWNkM2UxMg==, ActorId: [2:7489507448795578969:2583], ActorState: ExecuteState, TraceId: 01jr0wq3brd86g225neq1h5f6q, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-04-04T17:13:59.525431Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDFhOTE2OGItMmFkODgxNzctNjcxNzk5OWMtZTcyMGQxYTY=, ActorId: [2:7489507448795578995:2593], ActorState: ExecuteState, TraceId: 01jr0wq3f46sym48kvwr4vdjjp, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 8308, MsgBus: 23547 2025-04-04T17:14:00.713456Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507450073454434:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:00.713504Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001895/r3tmp/tmpCxUuEW/pdisk_1.dat 2025-04-04T17:14:00.935566Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:00.941454Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:00.941542Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:00.942703Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8308, node 3 2025-04-04T17:14:00.988266Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:00.988286Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:00.988293Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:00.988420Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23547 TClient is connected to server localhost:23547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:01.572503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.578481Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:01.583249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:01.643637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:01.839141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:01.905911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:04.082862Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507467253325387:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:04.082964Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:04.125654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.200511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.236828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.290935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.335315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.384189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.438532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507467253325898:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:04.438645Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:04.439138Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507467253325903:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:04.443053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:04.453013Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507467253325905:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:04.508531Z node 3 :TX_PROXY ERROR: Actor# [3:7489507467253325958:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:05.715031Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507450073454434:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:05.715090Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 8.978363s took: 8.974388s took: 8.987229s took: 8.948608s took: 8.949575s took: 8.993093s took: 8.934139s took: 8.983143s took: 8.952230s took: 9.002016s >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> KqpQueryServiceScripts::Tcl [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] >> KqpQueryService::TempTablesDrop >> KqpQueryService::ExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryPgTableSelect >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript >> TPersQueueTest::SchemeOperationsTest [GOOD] >> TPersQueueTest::SchemeOperationFirstClassCitizen ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: Trying to start YDB, gRPC: 26006, MsgBus: 8146 2025-04-04T17:13:48.092766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507399903618211:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.092832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a6/r3tmp/tmpYuTiGv/pdisk_1.dat 2025-04-04T17:13:48.583128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.583432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.593437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:48.600857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26006, node 1 2025-04-04T17:13:48.706954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.706974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.706980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.707097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8146 TClient is connected to server localhost:8146 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.466186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.487268Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:51.180712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507412788520772:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.180835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.406805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.578030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:51.579060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:51.579295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:51.579399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:51.579503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:51.579623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:51.579730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:51.579864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:13:51.579983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:13:51.580081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:13:51.580169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:13:51.580254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507412788520907:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-04T17:13:51.588174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:51.588238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:51.588453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:51.588559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:51.588643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:51.588760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:51.588854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:51.588955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:13:51.589058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:13:51.589149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:13:51.589252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:13:51.589342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507412788520925:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:13:51.627843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507412788521036:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:51.627899Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7489507412788521036:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:51.628049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507412788521036:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:51.628106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507412788521036:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:51.628180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507412788521036:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:51.628260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507412788521036:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:51.628329Z node 1 :TX_COLUMNS ... ARN: tablet_id=72075186224037888;self_id=[3:7489507484909201274:2337];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582090Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7489507484909201275:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582142Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7489507484909201275:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582235Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7489507484909201271:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582273Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7489507484909201271:2335];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582341Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7489507484909201276:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582364Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7489507484909201276:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582425Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7489507484909201273:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 
2025-04-04T17:14:10.582448Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7489507484909201273:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582549Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7489507484909201412:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582570Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7489507484909201412:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582653Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7489507484909201295:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582687Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7489507484909201295:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582783Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7489507484909201282:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582814Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7489507484909201282:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582893Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7489507484909201290:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582915Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7489507484909201290:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.582993Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7489507484909201283:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.583034Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7489507484909201283:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-04-04T17:14:10.636317Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7489507484909201273:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.636842Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[3:7489507484909201273:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.645545Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7489507484909201295:2343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.645886Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7489507484909201295:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.646098Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7489507484909201283:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.646714Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7489507484909201271:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.646957Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507484909201274:2337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.647204Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7489507484909201276:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.649608Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7489507484909201271:2335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.649949Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507484909201274:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.650084Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7489507484909201276:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.650968Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7489507484909201412:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.651423Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7489507484909201283:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.651716Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7489507484909201412:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.651869Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[3:7489507484909201282:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.654125Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7489507484909201282:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.655434Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[3:7489507484909201290:2342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.656863Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[3:7489507484909201290:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.666590Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=3979982a-117811f0-9b024cac-fde67799;tablet_id=72075186224037897;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.666885Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.669570Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7489507484909201275:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.670625Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7489507484909201275:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:10.679069Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-04-04T17:14:10.680488Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715672;commit_lock_id=281474976715671;fline=manager.cpp:94;broken_lock_id=281474976715669; 2025-04-04T17:14:10.680681Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> KqpQueryService::IssuesInCaseOfSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 16899, MsgBus: 32101 2025-04-04T17:13:49.454630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507402051091532:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:49.455570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001892/r3tmp/tmplfVT1h/pdisk_1.dat 2025-04-04T17:13:49.878576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:49.884093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:49.884193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:49.885868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16899, node 1 2025-04-04T17:13:49.999365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:49.999383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:49.999388Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:49.999494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32101 TClient is connected to server localhost:32101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:50.572446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.511482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414935993992:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.511597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.756035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.901142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:52.901335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:52.901621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:52.901760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:52.901897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:52.901911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:52.901984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:52.902057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:52.902146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:52.902197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:52.902234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:52.902323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:52.902328Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:13:52.902429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:13:52.902487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:52.902541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:13:52.902663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:13:52.902675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:52.902768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507414935994170:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:13:52.902779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:13:52.902889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:13:52.903033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:13:52.903190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:13:52.903318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489507414935994175:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:13:52.938560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489507414935994183:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:52.938646Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7489507414935994183:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:52.938662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507414935994179:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:52.938699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507414935994179:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:52.939478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507414935994179:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:52.939527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489507414935994183:2344];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:52.939638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507414935994179:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... ateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:14:14.520626Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:14:14.520828Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:14:14.520867Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:14:14.521048Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:14:14.521107Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:14:14.521327Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:14.521359Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:14.521490Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:14.521523Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:14.522578Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:14:14.522626Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:14:14.522721Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:14:14.522748Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:14:14.522991Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:14:14.523021Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:14:14.523104Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:14:14.523155Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:14:14.523221Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:14:14.523247Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:14:14.524348Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:14:14.524391Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:14:14.525033Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:14:14.525080Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:14:14.525325Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:14:14.525356Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:14:14.525550Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:14:14.525584Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:14:14.525828Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:14.525858Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:14.526005Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:14.526034Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:14.552803Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507488372328285:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:14.552949Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:14.581716Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.585776Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.589726Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.593196Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.596280Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.604883Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.613179Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.615483Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.622721Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.630992Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:14.640925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.800436Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507509847166023:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:14.800556Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:14.809753Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507509847166028:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:14.818625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T17:14:14.832912Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507509847166030:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:14:14.921533Z node 3 :TX_PROXY ERROR: Actor# [3:7489507509847166084:2663] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:15.160018Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-04-04T17:14:15.388307Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710667;tx_id=281474976710667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710667; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::Tcl [GOOD] Test command err: Trying to start YDB, gRPC: 27286, MsgBus: 12805 2025-04-04T17:13:48.157485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507401387618794:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.157618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018b3/r3tmp/tmpqC6R5V/pdisk_1.dat 2025-04-04T17:13:48.637973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.647713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.647790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.655225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27286, node 1 2025-04-04T17:13:48.800272Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.800290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.800296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.800399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12805 TClient is connected to server localhost:12805 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.417199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.431096Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:49.445337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.604812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.769259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.841551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.361301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414272522215:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.361401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.632187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.661436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.688688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.760613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.793771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.841480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.913320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414272522728:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.913397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.913407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414272522733:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.916853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.927059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507414272522735:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:52.013985Z node 1 :TX_PROXY ERROR: Actor# [1:7489507418567490087:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:53.157379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507401387618794:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.157490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20170, MsgBus: 11543 2025-04-04T17:13:53.810319Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507419585372848:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.810488Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018b3/r3tmp/tmprqPcVD/pdisk_1.dat 2025-04-04T17:13:53.935944Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:53.951057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:53.951145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:53.953117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20170, node 2 2025-04-04T17:13:54.002294Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:54.002316Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:54.002323Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:54.002528Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11543 TClient is connected to server localhost:11543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:13:54.440611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.449449Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:13:54.465408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.520534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.653524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... chemeshard: 72057594046644480 2025-04-04T17:13:57.140794Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T17:13:57.141355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507436765244269:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:13:57.223508Z node 2 :TX_PROXY ERROR: Actor# [2:7489507436765244321:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:58.184394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.186586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.187892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.809901Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507419585372848:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:58.809950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17393, MsgBus: 10694 2025-04-04T17:14:08.136037Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507483648718560:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:08.136113Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018b3/r3tmp/tmp3H1MEx/pdisk_1.dat 2025-04-04T17:14:08.452314Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:08.487563Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:08.487670Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:08.489111Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17393, node 3 2025-04-04T17:14:08.690518Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:08.690541Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:08.690550Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:08.690715Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10694 TClient is connected to server localhost:10694 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:09.455924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:09.470738Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:09.497298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:09.609415Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:09.892572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:09.990947Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:12.543378Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507500828589461:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.543474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.592296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.632219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.676191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.755503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.830304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.870823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.954696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507500828589975:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.954822Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.966413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507500828589980:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:12.973063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:12.987741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507500828589982:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:13.045141Z node 3 :TX_PROXY ERROR: Actor# [3:7489507505123557334:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:13.138436Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507483648718560:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:13.138500Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:14.211028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.212803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.214101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.616346Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507509418525188:2514], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2025-04-04T17:14:14.618144Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Njg4YTE0YmQtMjg5ZTNmMjUtMzQzYWVjMy03YzM2OGFkYg==, ActorId: [3:7489507509418525185:2513], ActorState: ExecuteState, TraceId: 01jr0wqhsx1ywvsyf78f6bj00w, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:14:16.017432Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507518008460345:2683], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2025-04-04T17:14:16.017706Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWNjY2ZhYjMtNDlkOWQzZmEtNzI0NWJhYWQtODFjYmM2ZjE=, ActorId: [3:7489507513713493046:2682], ActorState: ExecuteState, TraceId: 01jr0wqkh733gz7nmwatbkpwvb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlTx >> KqpService::Shutdown >> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> KqpQueryService::FlowControllOnHugeLiteralAsTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::TableSink_OlapUpsert >> KqpQueryService::ExecuteQueryExplicitTxTLI >> TopicService::UseDoubleSlashInTopicPath [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::DmlNoTx >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] >> KqpQueryService::ExecuteQueryScalar >> KqpJoinOrder::CanonizedJoinOrderTPCH9+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore >> KqpQueryService::TempTablesDrop [GOOD] >> KqpQueryService::Tcl >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> KqpQueryService::DdlCache [GOOD] >> TopicService::RelativePath >> KqpQueryService::DdlSecret [GOOD] >> KqpQueryService::DdlMixedDml >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryService::StreamExecuteCollectMeta >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlCache [GOOD] Test command err: Trying to start YDB, gRPC: 19182, MsgBus: 29909 
2025-04-04T17:14:03.940051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507463990457233:2241];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:03.955794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00187a/r3tmp/tmpeDDnjM/pdisk_1.dat 2025-04-04T17:14:04.387054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:04.387154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:04.391191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:04.393029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19182, node 1 2025-04-04T17:14:04.522426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:04.522442Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:04.522445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:04.522549Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29909 TClient is connected to server localhost:29909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:05.307342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.319142Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:05.336567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.490110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:05.648520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:05.747799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.479798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507481170327999:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:07.479926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:07.746481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.779047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.815042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.864590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:07.959362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:08.012823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:08.125861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507485465295814:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.125944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.126255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507485465295819:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:08.130223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:08.141560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507485465295821:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:08.228398Z node 1 :TX_PROXY ERROR: Actor# [1:7489507485465295876:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:08.977590Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507463990457233:2241];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:08.977848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:09.208513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:09.528245Z node 1 :TX_PROXY ERROR: Actor# [1:7489507489760263597:3758] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:09.528414Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl_0', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T17:14:09.528543Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDAyZmE2N2UtYzE0MjcyNzYtYzBlODMxMDMtMjJkNDA1Yzk=, ActorId: [1:7489507489760263585:2515], ActorState: ExecuteState, TraceId: 01jr0wqd724fw4wx7c6t5s9zfc, Create QueryResponse for error on request, msg: 2025-04-04T17:14:09.571631Z node 1 :TX_PROXY ERROR: Actor# [1:7489507489760263620:3769] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:09.661636Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-04T17:14:09.677728Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507489760263720:2540], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:09.678578Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjEyYWU4YmMtZjE4NTc0NTYtZTIzYzk4N2QtNzkwZjllNWQ=, ActorId: [1:7489507489760263716:2539], ActorState: ExecuteState, TraceId: 01jr0wqdbv531bkt7jb35gt3p8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:14:09.699544Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507489760263729:2544], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiDropTable!
:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:09.700696Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDkyMGRjYTItMzRiOWFkYTctYzA3ZTE2MDctZDEyYTM2ZmQ=, ActorId: [1:7489507489760263727:2543], ActorState: ExecuteState, TraceId: 01jr0wqdcp9kg971r2g9e9vr4p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:14:09.720080Z node 1 :TX_PROXY ERROR: Actor# [1:7489507489760263743:3850] txid# 281474976710679, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T17:14:09.739214Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507489760263749:2552], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:09.73971 ... Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:14:14.260000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507512338132179:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:14:14.354070Z node 2 :TX_PROXY ERROR: Actor# [2:7489507512338132232:2340] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:14.387277Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-04-04T17:14:14.404997Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-04-04T17:14:14.408730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:1, at schemeshard: 72057594046644480 2025-04-04T17:14:14.576880Z node 2 :TX_PROXY ERROR: Actor# [2:7489507512338132465:2464] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NjY2NjM2ZmEtMTZkYTdhZmUtNmJjOTNjZDMtNWUyMDY2NDM=\', error: path is temporary (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:14.577230Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjY2NjM2ZmEtMTZkYTdhZmUtNmJjOTNjZDMtNWUyMDY2NDM=, ActorId: [2:7489507512338132157:2328], ActorState: ExecuteState, TraceId: 01jr0wqj4y5p39akcf48yrp971, Create QueryResponse for error on request, msg: 2025-04-04T17:14:14.625028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.634004Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507512338132494:2367], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:14.634279Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2M2M2MwMDYtZWNiYjIwNjQtMTA2MGYyN2UtYmQ4ZTQ1MTA=, ActorId: [2:7489507512338132492:2366], ActorState: ExecuteState, TraceId: 01jr0wqj6ffk1ttktn2nwakn4r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:14:14.635211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.638721Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found Trying to start YDB, gRPC: 2185, MsgBus: 29302 2025-04-04T17:14:16.561912Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507520100693301:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:16.562032Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00187a/r3tmp/tmpUMP5d9/pdisk_1.dat 2025-04-04T17:14:16.755418Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:16.784087Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:16.784171Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:16.785683Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2185, node 3 2025-04-04T17:14:16.918764Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:16.918785Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:16.918794Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:16.918913Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29302 TClient is connected to server localhost:29302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:14:17.315257Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.340380Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:17.450123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.673434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.783431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:19.963185Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507532985596889:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:19.963247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.002710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.038887Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.073602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.112623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.160237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.218113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.307720Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507537280564701:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.307825Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.308087Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507537280564706:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.312845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:20.337115Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507537280564708:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:20.441266Z node 3 :TX_PROXY ERROR: Actor# [3:7489507537280564764:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:21.570363Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507520100693301:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:21.570444Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:21.684249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:21.793906Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2025-04-04T17:14:21.823697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD] Test command err: Trying to start YDB, gRPC: 6944, MsgBus: 8967 2025-04-04T17:13:49.532698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507403703031932:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:49.532742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001893/r3tmp/tmpCwBGNQ/pdisk_1.dat 2025-04-04T17:13:50.090852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:50.093802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:50.093878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:50.099768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6944, node 1 2025-04-04T17:13:50.207447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:50.207466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:50.207472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:50.207558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8967 TClient is connected to server localhost:8967 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:50.741359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:50.790645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:50.915279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.073019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.146098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.691189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507416587935587:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.691289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.925950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.958626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.050000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.083006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.126647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.201236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.255780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507420882903401:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:53.255855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:53.256252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507420882903406:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:53.259429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:53.269433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507420882903408:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:53.366024Z node 1 :TX_PROXY ERROR: Actor# [1:7489507420882903463:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:54.196848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.533277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507403703031932:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:54.533333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61561, MsgBus: 15216 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001893/r3tmp/tmpuZLZQs/pdisk_1.dat 2025-04-04T17:14:03.614554Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:14:03.664836Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61561, node 2 2025-04-04T17:14:03.703441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:03.703554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:03.711544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:03.739987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:03.740024Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:03.740032Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:03.740140Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15216 TClient is connected to server localhost:15216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:04.192864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:04.199487Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:04.212091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:04.296766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:04.495998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:04.645195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation t ... missions } 2025-04-04T17:14:09.161741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [2:7489507491512994376:2503], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=2&id=OTRmMzJjYzgtYTU2MTI2MmEtMTU0NDQ1NTItMjllY2FlOWE=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-04T17:14:09.161793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-04T17:14:09.161876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [2:7489507491512994373:2501]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-04T17:14:09.161967Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTRmMzJjYzgtYTU2MTI2MmEtMTU0NDQ1NTItMjllY2FlOWE=, ActorId: [2:7489507491512994373:2501], ActorState: ExecuteState, TraceId: 01jr0wqcw51vz99jzms4e6as5x, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-04T17:14:09.162173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7489507491512994373:2501]: Pool another_pool_id not found Trying to start YDB, gRPC: 1672, MsgBus: 9076 2025-04-04T17:14:10.330465Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507494190587663:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:10.330541Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001893/r3tmp/tmpym89OD/pdisk_1.dat 2025-04-04T17:14:10.566394Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:10.566462Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:10.566639Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:10.569963Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1672, node 3 2025-04-04T17:14:10.626785Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:10.626808Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:10.626816Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:10.626921Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9076 TClient is connected to server localhost:9076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:11.132016Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:11.147678Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:11.156544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:11.257124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:11.509860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:11.597647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.882386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507507075491317:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:13.882495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:13.964797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.005651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.051211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.084395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.118478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.195151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.261958Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507511370459128:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:14.262043Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:14.262402Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507511370459133:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:14.266273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:14.281541Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507511370459135:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:14.386469Z node 3 :TX_PROXY ERROR: Actor# [3:7489507511370459190:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:15.307134Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507494190587663:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:15.307206Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:15.491425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.503064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-04-04T17:14:16.068157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.754852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:1, at schemeshard: 72057594046644480 2025-04-04T17:14:17.395679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:14:18.003665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-04-04T17:14:18.541586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710697:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.866082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710715:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.210193s: status = SUCCESS, issues = 2025-04-04T17:14:22.098718Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWUwYmY0ZmEtN2QxMmU0MjAtMThiNTQyMWItMWQwZWFmODI=, ActorId: [3:7489507545730199212:2829], ActorState: ExecuteState, TraceId: 01jr0wqsgc1qk96m2tamqhy4gr, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> KqpQueryService::DdlTx [GOOD] >> KqpQueryService::DdlWithExplicitTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] Test command err: Trying to start YDB, gRPC: 19500, MsgBus: 7690 2025-04-04T17:13:51.581134Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507413428774068:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:51.581351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00187d/r3tmp/tmpSwwlTc/pdisk_1.dat 2025-04-04T17:13:52.028371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:52.028471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:52.030602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:52.052998Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19500, node 1 2025-04-04T17:13:52.166349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:52.166371Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:52.166384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:52.166503Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7690 TClient is connected to server localhost:7690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:52.686690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.698819Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:52.713974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.869238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:53.066049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:53.147827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.733967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426313677526:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.734068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.020111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.047006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.075731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.152801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.221796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.254376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.300825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507430608645337:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.300899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.301124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507430608645342:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:55.305094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:55.316186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507430608645344:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:55.375707Z node 1 :TX_PROXY ERROR: Actor# [1:7489507430608645396:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:56.229996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:13:56.231262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:13:56.232640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:56.616756Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507413428774068:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:56.617234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:58.400157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786838430, txId: 281474976710699] shutting down 2025-04-04T17:13:58.440280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507443493548618:2750], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-04T17:13:58.440353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507443493548616:2748], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-04T17:13:58.440444Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507443493548617:2749], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=Y2JmMGYyYzEtY2I3ZGFmZDQtMmJjZjIyYmEtODI0NjQ3NDQ=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-04T17:13:58.440507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507443493548617:2749], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=Y2JmMGYyYzEtY2I3ZGFmZDQtMmJjZjIyYmEtODI0NjQ3NDQ=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-04T17:13:58.440545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-04-04T17:13:58.440608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7489507443493548614:2747]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-04-04T17:13:58.440683Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2JmMGYyYzEtY2I3ZGFmZDQtMmJjZjIyYmEtODI0NjQ3NDQ=, ActorId: [1:7489507443493548614:2747], ActorState: ExecuteState, TraceId: 01jr0wq2cjcjkg3pfs892d2ewj, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-04-04T17:13:58.440881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7489507443493548614:2747]: Pool another_pool_id not found Trying to start YDB, gRPC: 26979, MsgBus: 6740 2025-04-04T17:13:59.299120Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507445763149100:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:59.299161Z node 2 :METADATA_PROVIDER ERROR: ... D WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:02.940816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507458648053290:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:02.940926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:02.940935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507458648053295:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:02.943982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:02.953269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507458648053297:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:03.031525Z node 2 :TX_PROXY ERROR: Actor# [2:7489507462943020646:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:03.920763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.922285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:03.923399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:04.333776Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507445763149100:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:04.344381Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28693, MsgBus: 28445 2025-04-04T17:14:06.980248Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507476720356426:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:06.980370Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00187d/r3tmp/tmpIgWUaB/pdisk_1.dat 2025-04-04T17:14:07.140514Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:07.169580Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:07.169671Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:07.171419Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28693, node 3 2025-04-04T17:14:07.287231Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:07.287259Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:07.287272Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:07.287402Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28445 TClient is connected to server localhost:28445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:07.835563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:07.843113Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:07.850018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:07.923680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:08.107150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:08.188316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:10.670316Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507493900227245:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:10.670400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:10.725888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:10.761578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:10.858284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:10.931126Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:10.972813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:11.058108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:11.124560Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507498195195063:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:11.124647Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:11.124837Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507498195195068:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:11.128690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:11.144073Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507498195195070:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:11.246661Z node 3 :TX_PROXY ERROR: Actor# [3:7489507498195195125:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:11.979533Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507476720356426:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:11.979600Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:12.405127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.407218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.409330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.135059Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:14:22.135096Z node 3 :IMPORT WARN: Table profiles were not loaded >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> KqpQueryService::TableSink_OlapUpdate >> KqpQueryServiceScripts::ExecuteMultiScript [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithTimeout >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> TPersQueueTest::WriteAfterAlter >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx >> KqpQueryService::DmlNoTx [GOOD] >> KqpQueryService::Ddl_Dml >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryService::ExecuteRetryQuery >> KqpQueryService::Followers >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> KqpQueryService::Tcl [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap >> KqpService::Shutdown [GOOD] >> KqpService::SessionBusyRetryOperationSync >> KqpQueryService::ExecuteQueryScalar [GOOD] >> KqpQueryService::StreamExecuteCollectMeta [GOOD] >> KqpQueryService::SeveralCTAS-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> KqpQueryService::DdlMixedDml [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: Trying to start YDB, gRPC: 27356, MsgBus: 5359 2025-04-04T17:14:11.265432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507496978462160:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:11.265594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001863/r3tmp/tmp7ush0a/pdisk_1.dat 2025-04-04T17:14:11.806505Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:11.814628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:11.814731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:11.817039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27356, node 1 2025-04-04T17:14:12.004632Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:12.004652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:12.004662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:12.004792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5359 TClient is connected to server localhost:5359 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:12.656817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:12.681847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:12.845228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.015300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.088300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:14.859297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507509863365694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:14.859411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.187103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.265873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.335319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.375295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.415013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.460315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.504794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507514158333506:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.504854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.504995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507514158333511:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.507826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:15.534132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507514158333513:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:15.592497Z node 1 :TX_PROXY ERROR: Actor# [1:7489507514158333566:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:16.266361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507496978462160:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:16.266422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11179, MsgBus: 25908 2025-04-04T17:14:17.506707Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507525231222150:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:17.506743Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001863/r3tmp/tmpXeg6vB/pdisk_1.dat 2025-04-04T17:14:17.732825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:17.732899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:17.736993Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:17.744025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11179, node 2 2025-04-04T17:14:17.910564Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:17.910585Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:17.910592Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:17.910699Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25908 TClient is connected to server localhost:25908 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:18.571270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:18.589309Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:21.453723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507542411091930:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.453816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.465398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:21.549937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507542411092033:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.550016Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.550411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507542411092039:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.554707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:14:21.566521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507542411092041:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:14:21.624831Z node 2 :TX_PROXY ERROR: Actor# [2:7489507542411092094:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11182, MsgBus: 24042 2025-04-04T17:14:22.579299Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507547167309051:2093];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001863/r3tmp/tmph1Qct4/pdisk_1.dat 2025-04-04T17:14:22.644219Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:22.719580Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11182, node 3 2025-04-04T17:14:22.754286Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:22.758776Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:22.849058Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:22.898755Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:22.898776Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:22.898787Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:22.898903Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24042 TClient is connected to server localhost:24042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:23.481695Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:23.486946Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:23.490878Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.599253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.868568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.993938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:25.970403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507560052212671:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:25.970522Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.021119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.106065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.145542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.184900Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.222369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.311572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.396377Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507564347180487:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.396480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.396723Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507564347180492:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.400541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:26.417642Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507564347180494:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:26.505874Z node 3 :TX_PROXY ERROR: Actor# [3:7489507564347180549:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:27.575122Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507547167309051:2093];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:27.575182Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay >> KqpQueryService::DdlWithExplicitTransaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: Trying to start YDB, gRPC: 8622, MsgBus: 24474 2025-04-04T17:13:48.082951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507401271708285:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.083003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189b/r3tmp/tmpWSLRp7/pdisk_1.dat 2025-04-04T17:13:48.545954Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.557620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.557698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.578068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8622, node 1 2025-04-04T17:13:48.710285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.710316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.710323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.710459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24474 TClient is connected to server localhost:24474 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.477302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.508964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:49.665003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:49.831058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.908948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.279089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414156611932:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.279189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.547001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.576325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.602020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.628282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.658778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.688315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.755264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414156612443:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.755348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.755567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414156612448:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.759570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.772534Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:13:51.772998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507414156612450:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:51.866088Z node 1 :TX_PROXY ERROR: Actor# [1:7489507414156612506:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:52.935446Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507418451580149:2502], status: GENERIC_ERROR, issues:
:2:18: Error: Unexpected token 'ROW' : cannot match to any predicted input... 2025-04-04T17:13:52.935692Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmFlMTk5MTktYjQxZGRmODMtNjY1YzI3ZTUtZDgxZTdjNWY=, ActorId: [1:7489507418451580147:2501], ActorState: ExecuteState, TraceId: 01jr0wpx0zdgcxzba5d86tfjgw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:13:52.993788Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507418451580160:2506], status: GENERIC_ERROR, issues:
:2:18: Error: Unexpected token '`ydb.database.connect`' : cannot match to any predicted input... 2025-04-04T17:13:52.994574Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjhlY2Q1MDAtMjYxNTIwZWMtMmU5YjI0OGYtYzllOTQxMzM=, ActorId: [1:7489507418451580158:2505], ActorState: ExecuteState, TraceId: 01jr0wpx2w46p65grb7fjpq02p, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:13:53.034823Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507422746547471:2510], status: GENERIC_ERROR, issues:
:2:27: Error: Unexpected token 'READ' : Extraneous input - expected nothing ... 2025-04-04T17:13:53.035025Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDljYTFhMGQtMTRkMWNjNjgtZjE5MWJkNzMtZDAzMWNiMmI=, ActorId: [1:7489507422746547469:2509], ActorState: ExecuteState, TraceId: 01jr0wpx433dwn4vs8ef3rvesk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:13:53.082635Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507401271708285:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.082726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:53.171429Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507422746547491:2514], status: GENERIC_ERROR, issues:
: Error: Unknown permission name: 2025-04-04T17:13:53.172682Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWNhMDI0OWQtNTdjZTM2M2MtMjAyYTc1ZGEtZTc2ZWUxMzQ=, ActorId: [1:7489507422746547489:2513], ActorState: ExecuteState, TraceId: 01jr0wpx4s4qj5fnbab7sa20k6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:13:53.224579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.284025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.343038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.402461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.457168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.460086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.519491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:13:53.581112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is u ... CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty"); 2025-04-04T17:14:19.866096Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489507531162797835:4173], TxId: 281474976710894, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODgxYTI2Ny1lNmU5MGFkYi1lMzYyZmZiNi1lN2MxMDUzNA==. TraceId : 01jr0wqpwnbqbr4hpyek5xmy12. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:14:19.866683Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489507531162797836:4174], TxId: 281474976710894, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wqpwnbqbr4hpyek5xmy12. SessionId : ydb://session/3?node_id=2&id=ODgxYTI2Ny1lNmU5MGFkYi1lMzYyZmZiNi1lN2MxMDUzNA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489507531162797829:4065], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:14:19.866985Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODgxYTI2Ny1lNmU5MGFkYi1lMzYyZmZiNi1lN2MxMDUzNA==, ActorId: [2:7489507526867830208:4065], ActorState: ExecuteState, TraceId: 01jr0wqpwnbqbr4hpyek5xmy12, Create QueryResponse for error on request, msg: 2025-04-04T17:14:19.885798Z node 2 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jr0wqpjjetne9jgaaagjt0zn" } } } } ;request=session_id: "ydb://session/3?node_id=2&id=ODgxYTI2Ny1lNmU5MGFkYi1lMzYyZmZiNi1lN2MxMDUzNA==" tx_control { tx_id: "01jr0wqpjjetne9jgaaagjt0zn" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2025-04-04T17:14:19.890406Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODgzNDdhMjUtYzQ0Zjc4NDQtZDQwMDJkNTctMTQxNDA0MTE=, ActorId: [2:7489507526867830200:4060], ActorState: ExecuteState, TraceId: 01jr0wqp497n3k0e11w8ypqg93, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 18844, MsgBus: 6536 2025-04-04T17:14:22.878835Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507546794838478:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:22.878876Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00189b/r3tmp/tmpOhwci8/pdisk_1.dat 2025-04-04T17:14:23.048097Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:23.089014Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:23.089097Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:23.095132Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18844, node 3 2025-04-04T17:14:23.193543Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:23.193564Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:23.193572Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:23.193698Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6536 TClient is connected to server localhost:6536 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:23.849897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.867841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.973931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:24.158115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:24.252298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:26.985278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507563974709410:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.985365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:27.018630Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:27.090266Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:27.125775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:27.164221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:27.236564Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:27.323954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:27.384697Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507568269677227:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:27.384787Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:27.385163Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507568269677232:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:27.389232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:27.399430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507568269677234:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:27.485613Z node 3 :TX_PROXY ERROR: Actor# [3:7489507568269677288:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:27.879926Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507546794838478:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:27.879979Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:28.627436Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507572564644853:2493], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2025-04-04T17:14:28.629749Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Njk0MDA1ZmEtODZmZmRhOWUtMjE0NTI3ZGItMzAwYmY4ZDk=, ActorId: [3:7489507572564644846:2489], ActorState: ExecuteState, TraceId: 01jr0wqzvq16vzasp6yeyjmc3h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues >> KqpQueryService::TableSink_OlapUpsert [GOOD] >> KqpQueryService::TableSink_OltpDelete >> KqpQueryServiceScripts::ExecuteScriptStatsProfile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlWithExplicitTransaction [GOOD] Test command err: Trying to start YDB, gRPC: 4569, MsgBus: 6897 2025-04-04T17:14:12.479776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507504601683100:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:12.491109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001841/r3tmp/tmpKn4I3u/pdisk_1.dat 2025-04-04T17:14:12.951504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:12.953293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:12.958562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:12.959491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4569, node 1 2025-04-04T17:14:13.166405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:13.166436Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:13.166449Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:13.166551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6897 TClient is connected to server localhost:6897 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:13.948965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.974369Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:13.993269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:14.122224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:14.291072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:14.392045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:16.261851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507521781554017:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.261973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.528144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.606716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.639619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.677896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.717872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.779112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.837100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507521781554531:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.837171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.837388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507521781554536:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.840791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:16.852597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507521781554538:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:16.963002Z node 1 :TX_PROXY ERROR: Actor# [1:7489507521781554594:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:17.481539Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507504601683100:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:17.481632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:18.087735Z node 1 :TX_PROXY ERROR: Actor# [1:7489507530371489475:3667] txid# 281474976710672, issues: { message: "User already exists" severity: 1 } 2025-04-04T17:14:18.106404Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjliYTE4NTctZDk4NjY2NjMtNDBkMDMwYTctOTQyNmRmMDY=, ActorId: [1:7489507530371489464:2495], ActorState: ExecuteState, TraceId: 01jr0wqnjv37epn01dp325regj, Create QueryResponse for error on request, msg: 2025-04-04T17:14:18.207461Z node 1 :TX_PROXY ERROR: Actor# [1:7489507530371489535:3703] txid# 281474976710676, issues: { message: "User not found" severity: 1 } 2025-04-04T17:14:18.207635Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjVmMzE4ZDgtNTdlMTczM2YtZDBiYTZmY2MtZDk4ZGQ4Y2E=, ActorId: [1:7489507530371489527:2504], ActorState: ExecuteState, TraceId: 01jr0wqnpn0pshb0swqjq1q1vt, Create QueryResponse for error on request, msg: 2025-04-04T17:14:18.237147Z node 1 :TX_PROXY ERROR: Actor# [1:7489507530371489551:3710] txid# 281474976710678, issues: { message: "User not found" severity: 1 } 2025-04-04T17:14:18.237315Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzQwZDFiZGUtZDk4NDQ4YTctNzY2MGNmM2MtNDQ4MGJiYzY=, ActorId: [1:7489507530371489545:2508], ActorState: ExecuteState, TraceId: 01jr0wqnqgbpqh0fc3jye4bv3f, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16342, MsgBus: 18267 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001841/r3tmp/tmpdimzsE/pdisk_1.dat 2025-04-04T17:14:19.370139Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:19.378703Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:14:19.391905Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:19.391982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:19.393120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16342, node 2 2025-04-04T17:14:19.534671Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:19.534690Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:19.534697Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:19.534798Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:18267 TClient is connected to server localhost:18267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.105564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.195924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507548582522687:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.196021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.196259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507548582522692:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.200717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:23.222829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507548582522694:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:23.296547Z node 2 :TX_PROXY ERROR: Actor# [2:7489507548582522750:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:24.365849Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjYwOTEyYTgtNDgyMjBiMTktNTQ0MjRlZTctMWIzNWFkNzA=, ActorId: [2:7489507552877490304:2488], ActorState: ExecuteState, TraceId: 01jr0wqvpq6wjmcvr8drkaxxy8, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction Trying to start YDB, gRPC: 23572, MsgBus: 25538 2025-04-04T17:14:25.210439Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507557423147971:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:25.211055Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001841/r3tmp/tmpHkt33W/pdisk_1.dat 2025-04-04T17:14:25.328897Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:25.368148Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:25.368217Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:25.370000Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23572, node 3 2025-04-04T17:14:25.437125Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:25.437151Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:25.437158Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:25.437289Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25538 TClient is connected to server localhost:25538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:14:25.906794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:25.923952Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:25.936688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:26.033514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:26.189485Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:26.266181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:28.497600Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507570308051598:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:28.497688Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:28.557135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.598074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.660720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.702941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.739391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.826985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.930835Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507570308052118:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:28.930933Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:28.931219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507570308052123:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:28.934837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:28.951547Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T17:14:28.951869Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507570308052125:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:29.052798Z node 3 :TX_PROXY ERROR: Actor# [3:7489507574603019476:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:29.874623Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDFhY2U3MjQtYmNhMWIwOGItNDMyZDEwNDctNGY1M2E1NjE=, ActorId: [3:7489507574603019727:2489], ActorState: ExecuteState, TraceId: 01jr0wr12t90f8wjz2392hdrcw, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction 2025-04-04T17:14:29.924517Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzEyZjNjZWUtZjM2MGI4NjQtMWI2YzU4NzYtYjhjYjNmY2Q=, ActorId: [3:7489507574603019754:2498], ActorState: ExecuteState, TraceId: 01jr0wr14a1qpxcdzbf1774vcw, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction 2025-04-04T17:14:29.964123Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.054005Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507578897987168:2519], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:7:17: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2025-04-04T17:14:30.055463Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTJlNDQ5OTQtZDgwZGFhMzYtYWRjMWQyNTAtM2JjOTRlNzg=, ActorId: [3:7489507578897987165:2517], ActorState: ExecuteState, TraceId: 01jr0wr17p10c0aqhv4tc9txqv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] >> KqpQueryService::MaterializeTxResults >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery >> KqpQueryServiceScripts::TestPaging >> KqpQueryService::ExecuteRetryQuery [GOOD] >> KqpQueryService::Explain >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> KqpService::CloseSessionsWithLoad [GOOD] >> KqpService::PatternCache ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 2982, MsgBus: 62968 2025-04-04T17:13:48.147201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507397732919223:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.147238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a2/r3tmp/tmpuySyIG/pdisk_1.dat 2025-04-04T17:13:48.548514Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.591573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.591687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.595709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2982, node 1 2025-04-04T17:13:48.705994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.706028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.706035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.706172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62968 TClient is connected to server localhost:62968 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.353014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.378030Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:49.394869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:49.520711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:13:49.680214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.742771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.351351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507410617822695:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.351456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.650665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.711709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.745337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.823323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.857978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.907553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:52.001637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414912790513:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.001730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.002016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414912790518:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:52.005696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:52.019973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507414912790520:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:52.089461Z node 1 :TX_PROXY ERROR: Actor# [1:7489507414912790576:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:53.147636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507397732919223:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:53.154394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:03.535323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:14:03.535356Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 10717, MsgBus: 65424 2025-04-04T17:14:05.006271Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507469998626006:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:05.068596Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a2/r3tmp/tmpyafRzg/pdisk_1.dat 2025-04-04T17:14:05.234078Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:05.261454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:05.261538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:05.263810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10717, node 2 2025-04-04T17:14:05.394762Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:05.394786Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:05.394793Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:05.394907Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65424 TClient is connected to server localhost:65424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:05.920114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:05.976858Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:05.994084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:06.068846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:06.258040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Oper ... ntroller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.494528Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.494543Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.500273Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.500273Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.505815Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.506848Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.511439Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.512294Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.517364Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.517887Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.523399Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.523696Z node 3 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.528991Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.528994Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.534631Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.534633Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.541731Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.541732Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.547627Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.547640Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.553827Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.553834Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.560012Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.560821Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.566043Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.566647Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.572446Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 
2025-04-04T17:14:29.572523Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.578100Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.578102Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.583807Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.583819Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.589343Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.593772Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.603161Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.604772Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.608647Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.610017Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.613595Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.615145Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.619997Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.625091Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.630171Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.633019Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.637832Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.646099Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:14:29.939025Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:14:29.939070Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:14:29.939396Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7489507565484586416:2414];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037907;receive=72075186224037947; 2025-04-04T17:14:29.939413Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:14:29.939653Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[3:7489507565484586416:2414];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037907; 2025-04-04T17:14:29.939965Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; >> TPersQueueTest::CacheHead [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite >> KqpQueryServiceScripts::ExecuteScriptWithParameters >> KqpQueryService::SeveralCTAS-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> TopicService::RelativePath [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] >> KqpService::SessionBusyRetryOperationSync [GOOD] >> KqpService::SwitchCache+UseCache >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10608, MsgBus: 
28426 2025-04-04T17:14:16.627384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507520829138924:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:16.627448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001817/r3tmp/tmpRs7hMn/pdisk_1.dat 2025-04-04T17:14:17.172764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:17.181494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:17.181605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:17.184611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10608, node 1 2025-04-04T17:14:17.332547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:17.332570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:17.332576Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:17.332680Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28426 TClient is connected to server localhost:28426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:17.905605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.961072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:18.152876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:18.317883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:18.397713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:19.915964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507533714042579:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:19.916052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.180046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.219118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.293276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.362466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.404967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.482428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:20.585510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507538009010405:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.585578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.585777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507538009010410:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.589511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:20.603542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:14:20.603779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507538009010412:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:20.705945Z node 1 :TX_PROXY ERROR: Actor# [1:7489507538009010469:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:21.628030Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507520829138924:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:21.628118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65532, MsgBus: 24246 2025-04-04T17:14:22.923361Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507544450718838:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:22.936402Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001817/r3tmp/tmp2Pr7Pa/pdisk_1.dat 2025-04-04T17:14:23.059630Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65532, node 2 2025-04-04T17:14:23.086359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:23.086439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:23.090857Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:23.201497Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:23.201516Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:23.201523Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:23.203145Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24246 TClient is connected to server localhost:24246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:23.727075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.755872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.842466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.990808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:24.069327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:26.371671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507561630589649:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.387653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.398528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.465344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.497632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.572693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.606737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.683171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:26.754647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507561630590170:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.754744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.754995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507561630590175:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.759575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:26.771546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507561630590177:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:26.841285Z node 2 :TX_PROXY ERROR: Actor# [2:7489507561630590231:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:27.913275Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507544450718838:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:27.913330Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10500, MsgBus: 26647 2025-04-04T17:14:29.030122Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507575397085629:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:29.030170Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001817/r3tmp/tmp8KZL6b/pdisk_1.dat 2025-04-04T17:14:29.176421Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:29.180377Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:29.180482Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:29.181780Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10500, node 3 2025-04-04T17:14:29.238851Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:29.238876Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:29.238885Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:29.239003Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26647 TClient is connected to server localhost:26647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:29.696173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:32.486460Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507588281988147:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:32.486590Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:32.490719Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507588281988182:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:32.495754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:14:32.509370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507588281988184:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:14:32.608119Z node 3 :TX_PROXY ERROR: Actor# [3:7489507588281988235:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:32.684189Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-04-04T17:14:32.688552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480 2025-04-04T17:14:32.886196Z node 3 :TX_PROXY ERROR: Actor# [3:7489507588281988519:2501] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:32.900237Z node 3 :TX_PROXY ERROR: Actor# [3:7489507588281988526:2506] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NGI5ODZhZi1kZDBlMDU5Ni0yNTUxN2NlOS1kMDIzMGQ0Nw==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:32.903209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.081690Z node 3 :TX_PROXY ERROR: Actor# [3:7489507592576956009:2619] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:33.083785Z node 3 :TX_PROXY ERROR: Actor# [3:7489507592576956016:2624] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/NGI5ODZhZi1kZDBlMDU5Ni0yNTUxN2NlOS1kMDIzMGQ0Nw==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:33.087079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpService::SwitchCache-UseCache [GOOD] >> KqpService::ToDictCache+UseCache >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> KqpQueryService::StreamExecuteQueryPure >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::TableSink_DisableSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 4686, MsgBus: 32148 2025-04-04T17:14:09.535104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507490147620976:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:09.535829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00186b/r3tmp/tmp8Qh1wd/pdisk_1.dat 2025-04-04T17:14:10.138110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:10.142461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:10.151276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:10.155309Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4686, node 1 2025-04-04T17:14:10.375119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:10.375169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:10.375183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:10.375295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32148 TClient is connected to server localhost:32148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:10.981753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:11.023092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:11.043312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:11.293314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-04-04T17:14:11.523333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:11.644805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.156174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507507327491899:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:13.156270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:13.418518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.478306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.515034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.547146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.582559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.639089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.703120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507507327492409:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:13.703217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:13.703675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507507327492414:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:13.707512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:13.717541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507507327492416:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:13.772861Z node 1 :TX_PROXY ERROR: Actor# [1:7489507507327492469:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:14.538479Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507490147620976:2098];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:14.538530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:14.701588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.707621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:14.708732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.927116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786856959, txId: 281474976710699] shutting down Trying to start YDB, gRPC: 65300, MsgBus: 26414 2025-04-04T17:14:17.973737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507523120341737:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00186b/r3tmp/tmpsQvNJL/pdisk_1.dat 2025-04-04T17:14:18.093584Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:18.140395Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:18.170073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:18.170144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:18.171700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65300, node 2 2025-04-04T17:14:18.310512Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:18.310533Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:18.310539Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:18.310668Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26414 TClient is connected to server localhost:26414 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:18.795407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17: ... tch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.872261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.872591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507540300213052:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:21.877122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:21.892186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507540300213054:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:21.968911Z node 2 :TX_PROXY ERROR: Actor# [2:7489507540300213107:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:22.906507Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507523120341737:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:22.906585Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:23.108500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.110069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.111706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:25.515023Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786865548, txId: 281474976715704] shutting down 2025-04-04T17:14:25.756828Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786865793, txId: 281474976715707] shutting down Trying to start YDB, gRPC: 9650, MsgBus: 21668 2025-04-04T17:14:26.715587Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507564387111197:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:26.764089Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00186b/r3tmp/tmp3Znj2q/pdisk_1.dat 2025-04-04T17:14:26.862808Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:26.889437Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:26.889517Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9650, node 3 2025-04-04T17:14:26.892603Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:27.020416Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:27.020436Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:27.020446Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:27.020565Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21668 TClient is connected to server localhost:21668 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:27.528694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.543148Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:27.561217Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.633596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.858651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.953773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:30.213574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507581566982046:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.213663Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.255176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.288810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.318936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.352998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.386101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.471859Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.570716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507581566982565:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.570813Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.570906Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507581566982570:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.575631Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:30.590792Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507581566982572:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:30.669208Z node 3 :TX_PROXY ERROR: Actor# [3:7489507581566982627:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:31.683093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:31.684505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:31.686627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:31.715830Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507564387111197:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:31.715930Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:34.180077Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786874207, txId: 281474976715704] shutting down >> TopicService::AccessRights >> KqpQueryService::Followers [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> KqpQueryService::Ddl_Dml [GOOD] >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28119, MsgBus: 15205 2025-04-04T17:14:17.348258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507522519937987:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:17.348293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001815/r3tmp/tmpQn1pVk/pdisk_1.dat 2025-04-04T17:14:17.901372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:17.901484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-04T17:14:17.902977Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:17.906145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28119, node 1 2025-04-04T17:14:18.150485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:18.150516Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:18.150522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:18.150661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15205 TClient is connected to server localhost:15205 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:18.803393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:20.811459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507535404840538:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.811571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.818462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507535404840550:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:20.826176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:14:20.846662Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:14:20.850448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507535404840552:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:14:20.930350Z node 1 :TX_PROXY ERROR: Actor# [1:7489507535404840603:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:21.312808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-04-04T17:14:21.582145Z node 1 :TX_PROXY ERROR: Actor# [1:7489507539699808140:2470] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:21.603713Z node 1 :TX_PROXY ERROR: Actor# [1:7489507539699808147:2475] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZmY1ODFlMDAtMWMwMGUxZDktNGQ4MmI1MjEtZjEzOWRjNmY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:21.626339Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:14:21.639614Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507539699808198:2367], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:21.640832Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmY1ODFlMDAtMWMwMGUxZDktNGQ4MmI1MjEtZjEzOWRjNmY=, ActorId: [1:7489507535404840511:2329], ActorState: ExecuteState, TraceId: 01jr0wqs1s3yn9qgzm518vdy9c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:14:21.716159Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507539699808211:2374], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:21.717175Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWVjNTBiNWUtMTM0YzYwNGItZjBlNDMwNy1hNGZkNzVhNg==, ActorId: [1:7489507539699808207:2371], ActorState: ExecuteState, TraceId: 01jr0wqs423nnwv701vgnrq06k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 24140, MsgBus: 29775 2025-04-04T17:14:22.582664Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507545364935930:2205];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001815/r3tmp/tmpTRXyJV/pdisk_1.dat 2025-04-04T17:14:22.764259Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:22.831066Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:22.836967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:22.837041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:22.838900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24140, node 2 2025-04-04T17:14:22.986685Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:22.986702Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:22.986708Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:22.986790Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29775 TClient is connected to server localhost:29775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:14:23.536945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.625256Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:23.637961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.744936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.933878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:24.013705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:26.119267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507562544806717:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:26.119347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool d ... ; 2025-04-04T17:14:34.680943Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976710669;tx_id=281474976710669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710669; 2025-04-04T17:14:34.682007Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976710669;tx_id=281474976710669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710669; 2025-04-04T17:14:34.682261Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976710669;tx_id=281474976710669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710669; 2025-04-04T17:14:35.098089Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.098295Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.098311Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.098468Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.098778Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.098914Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.098966Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.099136Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.099340Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.099645Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.100616Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 
2025-04-04T17:14:35.100646Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.101201Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=23;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101294Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=24;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101365Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101436Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=26;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101502Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=27;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101561Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=28;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101617Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=29;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101672Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=30;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101724Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=31;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101750Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037915;local_tx_no=21;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-04T17:14:35.101774Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037915;local_tx_no=22;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976710672; 2025-04-04T17:14:35.101782Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=32;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910,72075186224037915;receive=72075186224037909; 2025-04-04T17:14:35.101920Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=34;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.101978Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=35;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102038Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=36;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102095Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=37;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102149Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=38;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102201Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=39;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102270Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=40;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102329Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=41;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102383Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=42;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102441Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=43;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102498Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=44;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102552Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489507588885790111:2487];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=45;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037915;receive=72075186224037910; 2025-04-04T17:14:35.102686Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; 2025-04-04T17:14:35.103273Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710672; >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] >> TPersQueueTest::SetupWriteSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: Trying to start YDB, gRPC: 2469, MsgBus: 2568 2025-04-04T17:14:15.299663Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507517201122365:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:15.299697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001819/r3tmp/tmpU6PFhD/pdisk_1.dat 2025-04-04T17:14:15.873886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:15.873967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:15.883145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:15.895136Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2469, node 1 2025-04-04T17:14:16.222349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:16.222368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:16.222374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:16.222485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2568 TClient is connected to server localhost:2568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:16.948765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:16.979844Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:17.001399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.152997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.357021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.448865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:19.235536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507534380993322:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:19.235648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:19.523589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:19.561146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:19.608108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:19.656318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:19.690522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:19.726998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:19.801226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507534380993835:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:19.801296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:19.801476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507534380993840:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:19.804604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:19.823694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507534380993842:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:19.880579Z node 1 :TX_PROXY ERROR: Actor# [1:7489507534380993895:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:20.302373Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507517201122365:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:20.302468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14439, MsgBus: 21317 2025-04-04T17:14:21.842332Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507542126206914:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001819/r3tmp/tmpouaZeI/pdisk_1.dat 2025-04-04T17:14:21.895940Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:21.959795Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:21.976750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:21.976798Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14439, node 2 2025-04-04T17:14:21.980301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:22.138354Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:22.138374Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:22.138381Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:22.138480Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21317 TClient is connected to server localhost:21317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
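The cluster of warnings above is a single bootstrap sequence rather than a failure: the first query finds no /Root/.metadata/workload_manager/pools/default pool (the NOT_FOUND fetches), TPoolCreatorActor creates it and then double-checks after transaction 281474976710668 completes, and the TX_PROXY "path exist, request accepts it" message shows a creation that raced with an already-materialized path being treated as success. A minimal YQL sketch of the same tolerate-existing-path idea, assuming the IF NOT EXISTS form and a hypothetical table name (the pool itself is created internally by the workload service, not by user DDL):

    -- Hypothetical illustration: an idempotent CREATE that treats an
    -- already-existing path as success instead of an error, mirroring
    -- how the pool creator handles "path exist, request accepts it".
    CREATE TABLE IF NOT EXISTS bootstrap_probe (
        Key Uint64,
        PRIMARY KEY (Key)
    );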
2025-04-04T17:14:22.575607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.579899Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:22.594103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.692491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.857330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2 ... WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:31.111164Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507582155488418:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:31.115277Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:31.129278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507582155488420:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:31.216654Z node 3 :TX_PROXY ERROR: Actor# [3:7489507582155488475:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:32.172709Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:32.427741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:14:32.448331Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzIyMmJjMDAtOWZkMzExYTItNDNhOTUxMmMtN2IyMWM3NzI=, ActorId: [3:7489507586450456109:2502], ActorState: ExecuteState, TraceId: 01jr0wr3d0cqtp7bt1122fd6xf, Create QueryResponse for error on request, msg: 2025-04-04T17:14:32.581594Z node 3 :KQP_COMPILE_SERVICE WARN: queryId in recompile request and queryId in cache are different, queryId in request: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, \"1\");\n SELECT * FROM TestDdlDml2;\n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, \"2\");\n SELECT * FROM TestDdlDml2;\n CREATE TABLE TestDdlDml33 (\n Key Uint64,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }}, queryId in cache: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1, Value2) VALUES (1, \"1\", \"1\");\n SELECT * FROM TestDdlDml2;\n ALTER TABLE TestDdlDml2 DROP COLUMN Value2;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-04-04T17:14:32.827655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.033957Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.332201Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507590745423903:2585], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:11:17: Error: At function: KiReadTable!
:11:17: Error: Cannot find table 'db.[/Root/TestDdlDml5]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:33.333534Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2NhMTIzZC03NTQ3ZmIyNS01MjY4ZWZkNC0yZDM4YTljZQ==, ActorId: [3:7489507590745423749:2562], ActorState: ExecuteState, TraceId: 01jr0wr45f9na9k9az35chpqzm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:14:33.462279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.608492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.146768Z node 3 :TX_PROXY ERROR: Actor# [3:7489507595040391516:4168] txid# 281474976710697, issues: { message: "Check failed: path: \'/Root/TestDdl1\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:34.147071Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710697, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T17:14:34.147217Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjRkZTAxYTMtMjI5MTFmNjAtZDJkZGYwNjYtNDlkMDNkMQ==, ActorId: [3:7489507595040391503:2641], ActorState: ExecuteState, TraceId: 01jr0wr58a0emrjw9ja1aergm5, Create QueryResponse for error on request, msg: 2025-04-04T17:14:34.180548Z node 3 :TX_PROXY ERROR: Actor# [3:7489507595040391542:4181] txid# 281474976710699, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:34.180854Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710699, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T17:14:34.181015Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWNiMzQ1OTgtYWZhZjliNzAtNzJjZDU1OTQtODZhNzQ4ZWU=, ActorId: [3:7489507595040391527:2648], ActorState: ExecuteState, TraceId: 01jr0wr59d117hpmjpe0t772tg, Create QueryResponse for error on request, msg: 2025-04-04T17:14:34.582706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.733487Z node 3 :TX_PROXY ERROR: Actor# [3:7489507595040391730:4285] txid# 281474976710705, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:34.733820Z node 3 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710705, ProxyStatus: 
ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T17:14:34.733952Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjNiNWUwNmMtYTU1Mjg0MmUtMWRkNDYwNzEtZmQ4NTE0Njk=, ActorId: [3:7489507595040391616:2673], ActorState: ExecuteState, TraceId: 01jr0wr5k0ffsjshqz1z2efx89, Create QueryResponse for error on request, msg: 2025-04-04T17:14:34.916746Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507595040391788:2713], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:14:34.918028Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTI3ZmNmOTctZjlmMWU1NzMtMWM2ZjkwYzctNjJhODg4N2E=, ActorId: [3:7489507595040391785:2711], ActorState: ExecuteState, TraceId: 01jr0wr60k7gcepcf7zcw72hzj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:14:35.148614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-04-04T17:14:35.701268Z node 3 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [3:7489507599335359321:2764], owner: [3:7489507577860520562:2395], statement id: 1 2025-04-04T17:14:35.701489Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTViMDI2OWEtNTVlZjAxZjQtZWU1NWY3ZDktMTJiYzM2OGU=, ActorId: [3:7489507599335359319:2763], ActorState: ExecuteState, TraceId: 01jr0wr6sf70b9w4vb3kht1rfh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:14:35.945987Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507599335359371:2782], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:14:35.946434Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MWYxYzAwODUtZGQyY2UxOTgtZWI1NThlY2MtMzJhMGM4Nw==, ActorId: [3:7489507599335359349:2775], ActorState: ExecuteState, TraceId: 01jr0wr6y7axzd754w47mw308x, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:14:36.005092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710722:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.160716Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489507603630326792:2805], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:14:36.162017Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2FkM2NhZGYtNzMxNTZlZjQtNGNhNzEyZjQtZWZmNTRiOTE=, ActorId: [3:7489507599335359399:2791], ActorState: ExecuteState, TraceId: 01jr0wr7279g094fm6xg1eacp5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQueryService::Explain [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 10199, MsgBus: 65134 2025-04-04T17:14:11.843022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507498934921332:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:11.843184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001858/r3tmp/tmpv23gXx/pdisk_1.dat 2025-04-04T17:14:12.268557Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10199, node 1 2025-04-04T17:14:12.438415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:12.438531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:12.512839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:12.554815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:12.554845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:12.554859Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:12.554966Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65134 TClient is connected to server localhost:65134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
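Two things in the Ddl_Dml output above are worth unpacking. First, the KQP_COMPILE_SERVICE recompile warning at 17:14:32 quotes the mixed DML/DDL script the test submits as a single generic query; unescaped and reflowed from the log, it is:

    UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, "1");
    SELECT * FROM TestDdlDml2;
    UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, "2");
    SELECT * FROM TestDdlDml2;
    CREATE TABLE TestDdlDml33 (
        Key Uint64,
        PRIMARY KEY (Key)
    );

Second, both GENERIC_ERROR compilations with code 2031 report the same conversion failure: the VALUES tuple produces Struct<'Key':Int32,'Value':String>, the target columns are Struct<'Key':Uint64?,'Value':Uint64?>, and while the Int32 key widens, String has no implicit conversion to Optional<Uint64>. A minimal sketch that reproduces it, using a hypothetical table name with column types taken from the target struct in the message:

    CREATE TABLE ConvertProbe (
        Key Uint64,
        Value Uint64,
        PRIMARY KEY (Key)
    );
    -- Key: the Int32 literal widens to Uint64?.
    -- Value: the String literal does not convert to Optional<Uint64>,
    -- so compilation fails with "Failed to convert input columns types
    -- to scheme types, code: 2031".
    UPSERT INTO ConvertProbe (Key, Value) VALUES (1, "1");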
2025-04-04T17:14:13.292372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.317482Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:13.333535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.510047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:13.702337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.786179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:15.538482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507516114792131:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.538572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.826399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.864183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.896131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.975731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.056130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.139369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.216161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507520409759950:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.216245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.216669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507520409759955:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.220677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:16.233442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507520409759957:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:16.334047Z node 1 :TX_PROXY ERROR: Actor# [1:7489507520409760012:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:16.838512Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507498934921332:2224];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:16.838626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:17.226687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:17.228129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:17.229325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:18.228475Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.259387Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.278395Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.307327Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.394056Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.415349Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.455717Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.489201Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.513782Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.530397Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.552687Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.571474Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.594039Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 24676650-4cc5ce2f-f44298f2-e7d1c747, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-04-04T17:14:18.633728Z node 1 ... ol info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.619267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.619545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:24.635775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507553716140908:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:24.716772Z node 2 :TX_PROXY ERROR: Actor# [2:7489507553716140964:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:25.688546Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507536536269485:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:25.688782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:25.712152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:25.714578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:25.716023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.285597Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 6ec8f6e6-8b6dc68c-64d3e11f-95c5edfd, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ZmQ0ZmEyNjEtNjJmZWJmNzUtN2M4YjM5OWItNWUzYjEzMTU=, TxId: 2025-04-04T17:14:28.306322Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 6ec8f6e6-8b6dc68c-64d3e11f-95c5edfd, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NjRlYzMyOTEtMTE0OTdlYjgtYTRiODk2MmUtMmFlNDc3NjU=, TxId: 2025-04-04T17:14:28.317028Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 6ec8f6e6-8b6dc68c-64d3e11f-95c5edfd, reply NOT_FOUND, issues: {
: Error: No such execution } Trying to start YDB, gRPC: 5662, MsgBus: 15594 2025-04-04T17:14:29.554949Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507576987420545:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:29.555005Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001858/r3tmp/tmpxDHJWU/pdisk_1.dat 2025-04-04T17:14:29.691285Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:29.720629Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:29.720702Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:29.721842Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5662, node 3 2025-04-04T17:14:29.802766Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:29.802789Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:29.802796Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:29.802913Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15594 TClient is connected to server localhost:15594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:30.271568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:30.279692Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:30.289073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:14:30.362417Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.552527Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:30.631141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.118092Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507594167291493:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:33.118235Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:33.164267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.243405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.283919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.367029Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.416298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.465207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:33.518234Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507594167292012:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:33.518324Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:33.518566Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507594167292017:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:33.523069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:33.536869Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507594167292019:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:33.641066Z node 3 :TX_PROXY ERROR: Actor# [3:7489507594167292078:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:34.557454Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507576987420545:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:34.557528Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:34.876189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.878654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.880059Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] Test command err: Trying to start YDB, gRPC: 23067, MsgBus: 13555 2025-04-04T17:09:26.288924Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506276438951851:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:26.305490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014c6/r3tmp/tmpQFq666/pdisk_1.dat 2025-04-04T17:09:26.854633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:26.854748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:26.867463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:09:26.941025Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23067, node 1 2025-04-04T17:09:27.149220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:27.149245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:27.149258Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:27.149417Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13555 TClient is connected to server localhost:13555 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:27.662160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.693281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:27.850349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:09:28.020884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.100200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:29.748246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506289323855378:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.748392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.966551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.064706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.089253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.117470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.152668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.188329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.269325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506293618823187:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.269386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.269635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506293618823192:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:30.273739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:30.293497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506293618823194:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:30.368144Z node 1 :TX_PROXY ERROR: Actor# [1:7489506293618823251:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:31.203369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.229785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.268170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.274710Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506276438951851:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:31.275082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:31.300808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.325958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.476104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.506557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.575541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.645171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.691333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.760561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.809289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:31.871862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.369061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:09:32.406509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.434491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.467366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:09:32.494961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:14:16.453349Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:14:16.453384Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:14:16.453456Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:14:16.453487Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:14:16.453532Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:14:16.453564Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:14:16.454161Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:14:16.454257Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:14:16.454464Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:14:16.454500Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:14:16.454652Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:14:16.454690Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:14:16.454864Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:16.454896Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:16.455010Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:16.455038Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038724;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:16.471657Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:14:16.471733Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:14:16.471898Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:14:16.471955Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:14:16.472154Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:14:16.472199Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:14:16.472302Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:14:16.472344Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:14:16.472426Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:14:16.472458Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:14:16.472502Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:14:16.472537Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:14:16.473259Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:14:16.473310Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:14:16.473529Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:14:16.473566Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:14:16.473718Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:14:16.473756Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:14:16.473948Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:16.473982Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:16.474106Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:16.474136Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:16.524955Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.526907Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.539770Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.541618Z node 5 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.548510Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.550630Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.557851Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.559265Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.567394Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.567888Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.576722Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.576842Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.587099Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.591263Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.601627Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:16.608972Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722;
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13
>> KqpQueryService::MaterializeTxResults [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50
>> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD]
>> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD]
Test command err: Trying to start YDB, gRPC: 27160, MsgBus: 10320
2025-04-04T17:14:20.395082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507538911353990:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:20.403466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180d/r3tmp/tmpM0uPyO/pdisk_1.dat 2025-04-04T17:14:20.868657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:20.892019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:20.892104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:20.894169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27160, node 1 2025-04-04T17:14:20.992475Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:20.992505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:20.992513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:20.992620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10320 TClient is connected to server localhost:10320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:21.740276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:21.755016Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:21.773900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:21.941201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:22.115251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.199236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:24.155178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507556091224891:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.155334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.418724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.464616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.499399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.580181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.642919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.729836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.809955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507556091225413:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.810030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.810276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507556091225418:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.818629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:24.834678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507556091225420:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:24.900033Z node 1 :TX_PROXY ERROR: Actor# [1:7489507556091225474:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:25.393735Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507538911353990:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:25.393846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8507, MsgBus: 23226 2025-04-04T17:14:27.438021Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507568311434960:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:27.438069Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180d/r3tmp/tmpYFsKQv/pdisk_1.dat 2025-04-04T17:14:27.673285Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:27.685614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:27.685889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:27.695180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8507, node 2 2025-04-04T17:14:27.822720Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:27.822738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:27.822747Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:27.822859Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23226 TClient is connected to server localhost:23226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:28.226296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:28.257383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:28.342977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:28.501629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:28.581433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:30.535384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507581196338616:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.535462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.573652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.606095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.638341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.669452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.723530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.772460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.825237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507581196339125:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.825323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.825580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507581196339130:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.828993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:30.841934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507581196339132:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:30.916710Z node 2 :TX_PROXY ERROR: Actor# [2:7489507581196339185:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11326, MsgBus: 12554 2025-04-04T17:14:33.054465Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507590640455222:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180d/r3tmp/tmpLMYZQ5/pdisk_1.dat 2025-04-04T17:14:33.115380Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:33.201321Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:33.216496Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:33.216579Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11326, node 3 2025-04-04T17:14:33.221091Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:33.326769Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:33.326791Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:33.326801Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:33.326915Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12554 TClient is connected to server localhost:12554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:33.824251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.840159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:33.916393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:34.109225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:34.182773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:36.322413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507607820326013:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.322521Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.401970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.438719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.516503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.556427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.601340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.653889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.735703Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507607820326528:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.735785Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.736112Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507607820326533:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.740186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:36.754741Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T17:14:36.755033Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507607820326535:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:36.814693Z node 3 :TX_PROXY ERROR: Actor# [3:7489507607820326588:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:38.017654Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507590640455222:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:38.017737Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88-StreamLookupJoin+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 15790, MsgBus: 26245
2025-04-04T17:10:06.185255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506443997491303:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:06.185888Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001378/r3tmp/tmpdW6taj/pdisk_1.dat 2025-04-04T17:10:06.591501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:06.591591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:06.595179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:10:06.619472Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15790, node 1 2025-04-04T17:10:06.786734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:06.786761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:06.786768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:06.786874Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26245 TClient is connected to server localhost:26245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-04T17:10:07.416980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.446978Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:07.455875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.668792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.865390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.956521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:09.704154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506456882394831:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.704282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.981251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.011784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.041624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.109038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.144925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.186081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.261535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506461177362643:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.261620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.261995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506461177362648:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:10.265519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:10.277321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506461177362650:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:10.350861Z node 1 :TX_PROXY ERROR: Actor# [1:7489506461177362706:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:11.174146Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506443997491303:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:11.174198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:11.787002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.821060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.853310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.890113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.919780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.044078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.083460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.138608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.166717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.232057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.267001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.296773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:10:12.325022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.787539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:10:12.831839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.906025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.946529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... r;id=9; 2025-04-04T17:13:49.120938Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:13:49.121540Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:13:49.121596Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:13:49.121832Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:13:49.121879Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:13:49.122031Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:13:49.122071Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:13:49.122306Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:13:49.122340Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:13:49.122452Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:13:49.122491Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:13:49.194389Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.194562Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.204222Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.204878Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.214591Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.214676Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.223815Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.223829Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.233920Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.233963Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.242892Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.243216Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.251857Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.251964Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.260490Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:13:49.261204Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:14:28.339223Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr0wpsj71db1mrzskd51pyq0", SessionId: ydb://session/3?node_id=5&id=MTlkOWM2ODktNWVkZmJkNzEtOTg1NTFjZjgtMTEzMzZiNjM=, Slow query, duration: 38.954539s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 
and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;\", parameters: 0b
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD]
Test command err: Trying to start YDB, gRPC: 19315, MsgBus: 22176
2025-04-04T17:14:20.771153Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507537265746384:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:20.772762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001809/r3tmp/tmpkwl18j/pdisk_1.dat 2025-04-04T17:14:21.230378Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:21.233195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:21.233254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:21.236391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19315, node 1 2025-04-04T17:14:21.482075Z node 1 :NET_CLASSIFIER
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:21.482093Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:21.482110Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:21.490398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22176 TClient is connected to server localhost:22176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:22.077456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.090859Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:22.098104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:22.266994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.432193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.529558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:24.430260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507554445617287:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.430351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.652768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.690385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.764790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.803232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.849918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.933116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:25.022376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507558740585102:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:25.022460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:25.022677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507558740585107:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:25.026667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:25.047410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507558740585109:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:25.118741Z node 1 :TX_PROXY ERROR: Actor# [1:7489507558740585164:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:25.771346Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507537265746384:2080];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:25.771407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:26.442929Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmMyMTY4MjItOGEyMzBmNWQtZTM4MzhhYTgtYzkxNmQ3NGU=, ActorId: [1:7489507563035552716:2489], ActorState: ExecuteState, TraceId: 01jr0wqxqv19akfjm70nnmw23g, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4755, MsgBus: 23451 2025-04-04T17:14:27.191662Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507567173392573:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:27.191714Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001809/r3tmp/tmp1c8KGT/pdisk_1.dat 2025-04-04T17:14:27.317192Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4755, node 2 2025-04-04T17:14:27.364681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:27.366709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:27.369071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:27.406701Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:27.406722Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:27.406729Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:27.406829Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23451 TClient is connected to server localhost:23451 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:27.828933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.833243Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:27.844285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.917802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147 ... 46644480 waiting... 2025-04-04T17:14:29.871220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507575763328923:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:29.871300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:29.909823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:29.943052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:29.973060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.004048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.048611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.127655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.215396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507580058296742:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.215457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.215532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507580058296747:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.219042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:30.227504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507580058296749:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:30.308096Z node 2 :TX_PROXY ERROR: Actor# [2:7489507580058296804:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 2583, MsgBus: 5407 2025-04-04T17:14:32.846247Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507587877881440:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:32.846310Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001809/r3tmp/tmpoDaU3V/pdisk_1.dat 2025-04-04T17:14:33.017724Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:33.020261Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:33.020334Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:33.022016Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2583, node 3 2025-04-04T17:14:33.126093Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:33.126115Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:33.126130Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:33.126271Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5407 TClient is connected to server localhost:5407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:33.667305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
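The 38.954539s KQP_SLOW_LOG entry at the top of this section is TPC-DS query 88: eight copies of one counting subquery, cross-joined, varying only the hour and the minute bound. A single half-hour bucket, lifted from the logged text, looks like the sketch below (it assumes the same /Root/test/ds tables the test populates):

  pragma TablePathPrefix = "/Root/test/ds/";
  -- count store_sales rows between 8:30 and 9:00 for qualifying households in store 'ese'
  select count(*) as h8_30_to_9
  from store_sales
  cross join household_demographics
  cross join time_dim
  cross join store
  where ss_sold_time_sk = time_dim.t_time_sk
    and ss_hdemo_sk = household_demographics.hd_demo_sk
    and ss_store_sk = s_store_sk
    and time_dim.t_hour = 8
    and time_dim.t_minute >= 30
    and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count <= 2 + 2)
      or (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count <= 4 + 2)
      or (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count <= 3 + 2))
    and store.s_store_name = 'ese';

The full query cross-joins eight such buckets (t_hour 8 through 12, alternating t_minute >= 30 and t_minute < 30), which helps explain why a single run under asan exceeds the slow-query threshold here.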
2025-04-04T17:14:33.679094Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:33.692115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.749317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.924537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.991673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:36.191255Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507605057752366:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.191413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.251361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.296135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.337191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.370455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.405432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.480859Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.590596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507605057752888:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.590718Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.594480Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507605057752893:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.602025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:36.617942Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:14:36.618221Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507605057752895:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:36.712988Z node 3 :TX_PROXY ERROR: Actor# [3:7489507605057752951:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:37.846452Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507587877881440:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:37.846539Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MaterializeTxResults [GOOD] Test command err: Trying to start YDB, gRPC: 7864, MsgBus: 62539 2025-04-04T17:14:18.603928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507529201737164:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:18.603988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001813/r3tmp/tmpTS1MDB/pdisk_1.dat 2025-04-04T17:14:19.218352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:19.219463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:19.219536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:19.224933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7864, node 1 2025-04-04T17:14:19.426793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:19.426826Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:19.426836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:19.426959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62539 TClient is connected to server localhost:62539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
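The startup sequences above all repeat the same default-pool bootstrap: TPoolFetcherActor reports NOT_FOUND until TPoolCreatorActor provisions /Root/.metadata/workload_manager/pools/default, and any concurrent creator then loses the race with the benign "path exist" TX_PROXY error. In YQL terms the provisioning step amounts to roughly the following sketch (syntax assumed from the workload-manager feature; the pool name and limit values are illustrative, not taken from this log):

  CREATE RESOURCE POOL example_pool WITH (
      CONCURRENT_QUERY_LIMIT = -1, -- illustrative: no cap on concurrent queries
      QUEUE_SIZE = -1              -- illustrative: unbounded wait queue
  );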
2025-04-04T17:14:20.209320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:20.252959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:20.393385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:20.550361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:20.640441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.236116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507546381608105:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:22.236219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:22.506149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.593599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.634866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.718711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.746310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.785488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:22.890674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507546381608627:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:22.890764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:22.894700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507546381608632:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:22.899535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:22.913083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507546381608634:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:22.997434Z node 1 :TX_PROXY ERROR: Actor# [1:7489507546381608690:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:23.606383Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507529201737164:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:23.606500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:24.207768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.281709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.368077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63453, MsgBus: 30989 2025-04-04T17:14:27.129659Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507565815790282:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:27.130227Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001813/r3tmp/tmprKaZM0/pdisk_1.dat 2025-04-04T17:14:27.279696Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:27.317284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:27.317368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:27.323440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63453, node 2 2025-04-04T17:14:27.578837Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:27.578862Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:27.578870Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:27.578984Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30989 TClient is connected to server localhost:30989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:28.127604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:28.139048Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:28.148500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, ... s: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.692982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:30.728097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.764997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.798866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.833912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.869390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.939987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:31.009452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507582995661732:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:31.009560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:31.009780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507582995661737:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:31.013795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:31.023486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507582995661739:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:31.101353Z node 2 :TX_PROXY ERROR: Actor# [2:7489507582995661792:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:32.124748Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507565815790282:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:32.124846Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4116, MsgBus: 3881 2025-04-04T17:14:32.960153Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507589536488506:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:32.960203Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001813/r3tmp/tmpzq2m6T/pdisk_1.dat 2025-04-04T17:14:33.175641Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:33.206368Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:33.206513Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:33.208251Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4116, node 3 2025-04-04T17:14:33.330880Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:33.330901Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:33.330908Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:33.331030Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3881 TClient is connected to server localhost:3881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
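Once the pool object exists, its presence can be checked directly instead of waiting for the fetcher to retry; a sketch, assuming this build exposes a resource_pools system view (the view path is an assumption and is not shown anywhere in this log):

  -- list resource pools in the database; `.sys/resource_pools` is assumed, not confirmed by this log
  SELECT * FROM `.sys/resource_pools`;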
2025-04-04T17:14:33.976377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.984118Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:33.996847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:34.108924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:34.284799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:34.378358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:36.699330Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507606716359462:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.699407Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.752732Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.787453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.823529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.852061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.888491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.969308Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:37.068703Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507611011327279:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:37.068850Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:37.069363Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507611011327284:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:37.103071Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:37.116108Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507611011327286:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:37.205034Z node 3 :TX_PROXY ERROR: Actor# [3:7489507611011327342:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:37.962495Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507589536488506:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:37.962575Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapOrder >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> TTicketParserTest::AuthorizationRetryError >> TTicketParserTest::AuthenticationWithUserAccount >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TTicketParserTest::LoginBad >> KqpQueryService::TableSink_OltpDelete [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TTicketParserTest::LoginGood >> TTicketParserTest::LoginRefreshGroupsWithError >> TTicketParserTest::NebiusAuthenticationUnavailable >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult >> TTicketParserTest::BulkAuthorizationRetryError >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter >> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow >> KqpQueryService::TableSink_DisableSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 >> TTicketParserTest::AccessServiceAuthenticationOk >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: Trying to start YDB, gRPC: 23086, MsgBus: 5686 2025-04-04T17:14:14.311490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507511270127145:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:14.311522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00181b/r3tmp/tmpsRJSzR/pdisk_1.dat 2025-04-04T17:14:14.761366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:14.764905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:14.764997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:14.773577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23086, node 1 2025-04-04T17:14:15.026448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:15.026474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:15.026481Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:15.028215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5686 TClient is connected to server localhost:5686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:15.737211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:17.995406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507524155029477:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:17.995499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:18.238534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:18.465275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:18.465603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:18.465855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:18.465960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:18.466045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:18.466165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:18.466508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:18.466703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:18.466826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:18.466916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:18.467023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:18.467119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489507528449996972:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-04T17:14:18.470047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:18.470136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:18.470389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:18.470544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:18.470685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:18.470792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:18.470915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:18.471013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:18.471157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:18.471270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:18.471389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:18.471514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507528449996920:2335];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:14:18.519263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507528449996923:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:18.519356Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7489507528449996923:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:18.519568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507528449996923:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:18.519676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507528449996923:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:18.519771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507528449996923:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:18.519876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507528449996923:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:18.519955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489507528449996923:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;eve ... .842833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489507554555033919:2335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.843375Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489507554555033919:2335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.843632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7489507554555033927:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.844125Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7489507554555033920:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.844554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7489507554555033922:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.846285Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7489507554555033922:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.846510Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7489507554555033927:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.846737Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[2:7489507554555033920:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.869682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7489507554555033942:2343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.877622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7489507554555033933:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.879880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489507554555033934:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.880395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7489507554555033942:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.882513Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7489507554555033921:2337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.884311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7489507554555033956:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.884748Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489507554555033934:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.885818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7489507554555033933:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.896462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7489507554555033921:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.907576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7489507554555033956:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.920381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;task_id=428824c2-117811f0-895a5732-8b112d6f;tablet_id=72075186224037890;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.920749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.923108Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;task_id=428e63b4-117811f0-b5584b48-4fe81772;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:14:25.923337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 64768, MsgBus: 30443 2025-04-04T17:14:31.816302Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507582249066556:2116];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00181b/r3tmp/tmpx8DWEa/pdisk_1.dat 2025-04-04T17:14:31.979931Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:32.042060Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:32.049300Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:32.049381Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:32.050786Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64768, node 3 2025-04-04T17:14:32.102706Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:32.102728Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:32.102735Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:32.102835Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30443 TClient is connected to server localhost:30443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:32.632043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:32.642861Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:35.378374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507599428936327:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:35.378491Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:35.399465Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:35.490011Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507599428936431:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:35.490124Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:35.490375Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507599428936436:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:35.493422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:14:35.532399Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507599428936438:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:14:35.612996Z node 3 :TX_PROXY ERROR: Actor# [3:7489507599428936489:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } WAIT_INDEXATION: 0 2025-04-04T17:14:36.817571Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507582249066556:2116];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:36.817642Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] Test command err: Trying to start YDB, gRPC: 3931, MsgBus: 2462 2025-04-04T17:13:54.970929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507425706184821:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:54.973220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00187b/r3tmp/tmpKdfgT4/pdisk_1.dat 2025-04-04T17:13:55.409146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:55.436820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:55.436908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3931, node 1 2025-04-04T17:13:55.440542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:55.488001Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:55.488042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:55.488050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:55.488173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2462 TClient is connected to server localhost:2462 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:56.033128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:56.053074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:56.153905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:56.316320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:56.396890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:58.048084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507442886055694:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:58.048185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:58.392218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.423566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.456127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.489573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.520987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.598595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:58.688973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507442886056212:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:58.689070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:58.689353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507442886056217:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:58.693303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:58.710286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507442886056219:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:58.797541Z node 1 :TX_PROXY ERROR: Actor# [1:7489507442886056273:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:59.974340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507425706184821:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:59.974436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23390, MsgBus: 10545 2025-04-04T17:14:00.804243Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507452774494871:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:00.820889Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00187b/r3tmp/tmphiIGqQ/pdisk_1.dat 2025-04-04T17:14:01.067903Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:01.071536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:01.071617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:01.075685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23390, node 2 2025-04-04T17:14:01.238276Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:01.238297Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:01.238304Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:01.238408Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10545 TClient is connected to server localhost:10545 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:01.655919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:01.675994Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:01.684808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:01.758321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:01.912702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:02.013670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... idx:1785|>,<|idx:1786|>,<|idx:1787|>,<|idx:1788|>,<|idx:1789|>,<|idx:1790|>,<|idx:1791|>,<|idx:1792|>,<|idx:1793|>,<|idx:1794|>,<|idx:1795|>,<|idx:1796|>,<|idx:1797|>,<|idx:1798|>,<|idx:1799|>,<|idx:1800|>,<|idx:1801|>,<|idx:1802|>,<|idx:1803|>,<|idx:1804|>,<|idx:1805|>,<|idx:1806|>,<|idx:1807|>,<|idx:1808|>,<|idx:1809|>,<|idx:1810|>,<|idx:1811|>,<|idx:1812|>,<|idx:1813|>,<|idx:1814|>,<|idx:1815|>,<|idx:1816|>,<|idx:1817|>,<|idx:1818|>,<|idx:1819|>,<|idx:1820|>,<|idx:1821|>,<|idx:1822|>,<|idx:1823|>,<|idx:1824|>,<|idx:1825|>,<|idx:1826|>,<|idx:1827|>,<|idx:1828|>,<|idx:1829|>,<|idx:1830|>,<|idx:1831|>,<|idx:1832|>,<|idx:1833|>,<|idx:1834|>,<|idx:1835|>,<|idx:1836|>,<|idx:1837|>,<|idx:1838|>,<|idx:1839|>,<|idx:1840|>,<|idx:1841|>,<|idx:1842|>,<|idx:1843|>,<|idx:1844|>,<|idx:1845|>,<|idx:1846|>,<|idx:1847|>,<|idx:1848|>,<|idx:1849|>,<|idx:1850|>,<|idx:1851|>,<|idx:1852|>,<|idx:1853|>,<|idx:1854|>,<|idx:1855|>,<|idx:1856|>,<|idx:1857|>,<|idx:1858|>,<|idx:1859|>,<|idx:1860|>,<|idx:1861|>,<|idx:1862|>,<|idx:1863|>,<|idx:1864|>,<|idx:1865|>,<|idx:1866|>,<|idx:1867|>,<|idx:1868|>,<|idx:1869|>,<|idx:1870|>,<|idx:1871|>,<|idx:1872|>,<|idx:1873|>,<|idx:1874|>,<|idx:1875|>,<|idx:1876|>,<|idx:1877|>,<|idx:1878|>,<|idx:1879|>,<|idx:1880|>,<|idx:1881|>,<|idx:1882|>,<|idx:1883|>,<|idx:1884|>,<|idx:1885|>,<|idx:1886|>,<|idx:1887|>,<|idx:1888|>,<|idx:1889|>,<|idx:1890|>,<|idx:1891|>,<|idx:1892|>,<|idx:1893|>,<|idx:1894|>,<|idx:1895|>,<|idx:1896|>,<|idx:1897|>,<|idx:1898|>,<|idx:1899|>,<|idx:1900|>,<|idx:1901|>,<|idx:1902|>,<|idx:1903|>,<|idx:1904|>,<|idx:1905|>,<|idx:1906|>,<|idx:1907|>,<|idx:1908|>,<|idx:1909|>,<|idx:1910|>,<|idx:1911|>,<|idx:1912|>,<|idx:1913|>,<|idx:1914|>,<|idx:1915|>,<|idx:1916|>,<|idx:1917|>,<|idx:1918|>,<|idx:1919|>,<|idx:1920|>,<|idx:1921|>,<|idx:1922|>,<|idx:1923|>,<|idx:1924|>,<|idx:1925|>,<|idx:1926|>,<|idx:1927|>,<|idx:1928|>,<|idx:1929|>,<|idx:1930|>,<|idx:1931|>,<|idx:1932|>,<|idx:1933|>,<|idx:1934|>,<|idx:1935|>,<|idx:1936|>,<|idx:1937|>,<|idx:1938|>,<|idx:1939|>,<|idx:1940|>,<|idx:1941|>,<|idx:1942|>,<|idx:1943|>,<|idx:1944|>,<|idx:1945|>,<|idx:1946|>,<|idx:1947|>,<|idx:1948|>,<|idx:1949|>,<|idx:1950|>,<|idx:1951|>,<|idx:1952|>,<|idx:1953|>,<|idx:1954|>,<|idx:1955|>,<|idx:1956|>,<|idx:1957|>,<|idx:1958|>,<|idx:1959|>,<|idx:196
0|>,<|idx:1961|>,<|idx:1962|>,<|idx:1963|>,<|idx:1964|>,<|idx:1965|>,<|idx:1966|>,<|idx:1967|>,<|idx:1968|>,<|idx:1969|>,<|idx:1970|>,<|idx:1971|>,<|idx:1972|>,<|idx:1973|>,<|idx:1974|>,<|idx:1975|>,<|idx:1976|>,<|idx:1977|>,<|idx:1978|>,<|idx:1979|>,<|idx:1980|>,<|idx:1981|>,<|idx:1982|>,<|idx:1983|>,<|idx:1984|>,<|idx:1985|>,<|idx:1986|>,<|idx:1987|>,<|idx:1988|>,<|idx:1989|>,<|idx:1990|>,<|idx:1991|>,<|idx:1992|>,<|idx:1993|>,<|idx:1994|>,<|idx:1995|>,<|idx:1996|>,<|idx:1997|>,<|idx:1998|>,<|idx:1999|>]);", parameters: 0b 2025-04-04T17:14:29.160525Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786869139, txId: 281474976711137] shutting down Trying to start YDB, gRPC: 24516, MsgBus: 18707 2025-04-04T17:14:30.131730Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507581150148948:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:30.131812Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00187b/r3tmp/tmpxFNxDE/pdisk_1.dat 2025-04-04T17:14:30.347934Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:30.366074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:30.366173Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:30.369423Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24516, node 3 2025-04-04T17:14:30.462838Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:30.462860Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:30.462868Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:30.463015Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18707 TClient is connected to server localhost:18707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:31.072820Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:31.092455Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:31.131614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:31.227789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:31.422589Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:31.540138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:34.393008Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507598330019910:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:34.393103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:34.448435Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.484040Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.525194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.571914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.622188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.728436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:34.828332Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507598330020431:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:34.828439Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:34.828736Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507598330020436:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:34.833093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:34.849356Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507598330020438:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:34.929361Z node 3 :TX_PROXY ERROR: Actor# [3:7489507598330020493:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:35.131793Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507581150148948:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:35.131861Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:36.150952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.152925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.155581Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:40.970211Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786881004, txId: 281474976710742] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: Trying to start YDB, gRPC: 62395, MsgBus: 20113 2025-04-04T17:14:07.909099Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507481396000442:2100];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:07.910427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001873/r3tmp/tmppfxHni/pdisk_1.dat 2025-04-04T17:14:08.383981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:08.419224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:08.419302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:08.420864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62395, node 1 2025-04-04T17:14:08.734792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:08.734820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:08.734827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:08.734945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20113 TClient is connected to server localhost:20113 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:09.488144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:09.522357Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:11.689866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507498575870246:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:11.689968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:11.946026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:12.175384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:12.175588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:12.175818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:12.175926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:12.176020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:12.176134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:12.176224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:12.176352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:12.176459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:12.176555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:12.176639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:12.176733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507502870837734:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-04T17:14:12.179198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:12.179269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:12.179427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:12.179527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:12.179620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:12.179734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:12.179822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:12.179900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:12.179990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:12.180080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:12.180159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:12.180240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489507502870837713:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:14:12.236439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507502870837714:2342];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:12.236494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7489507502870837714:2342];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:12.236662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507502870837714:2342];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:12.236748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507502870837714:2342];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:12.236849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507502870837714:2342];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:12.236942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507502870837714:2342];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:12.237023Z node 1 :TX_COLU ... 86224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:14:41.060597Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:14:41.060643Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:41.060679Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:41.060707Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:14:41.060747Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:14:41.060810Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:41.060853Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:41.060961Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:14:41.060990Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:14:41.061106Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 
2025-04-04T17:14:41.061169Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:14:41.061235Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:14:41.061262Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:14:41.061300Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:14:41.061323Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:14:41.061804Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:14:41.061856Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:14:41.062057Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:14:41.062102Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:14:41.062264Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:14:41.062304Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:14:41.062504Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:41.062550Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:41.062666Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:41.062690Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:41.111567Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.111597Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.122977Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.123663Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.129577Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.132774Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.136695Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.139659Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.143202Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.146032Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.148373Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.150092Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.156663Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.156664Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.164179Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.164197Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-04-04T17:14:41.182924Z 
node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507626634014414:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:41.183046Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:41.183090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507626634014419:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:41.188754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:14:41.200192Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507626634014421:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:14:41.290273Z node 3 :TX_PROXY ERROR: Actor# [3:7489507626634014472:2708] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:41.399228Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489507626634014497:2436] TxId: 281474976710661. Ctx: { TraceId: 01jr0wrc4v2j9zx13r1c5hp4ax, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzhhNDQ5NWYtZjg3M2EzMDktZGQzYWMwMDUtYzIyZGEzMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2025-04-04T17:14:41.399476Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzhhNDQ5NWYtZjg3M2EzMDktZGQzYWMwMDUtYzIyZGEzMGM=, ActorId: [3:7489507626634014412:2436], ActorState: ExecuteState, TraceId: 01jr0wrc4v2j9zx13r1c5hp4ax, Create QueryResponse for error on request, msg: 2025-04-04T17:14:41.661549Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507605159176578:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:41.661622Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> TPersQueueTest::WriteAfterAlter [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> KqpQueryService::TableSink_BadTransactions >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] >> TPersQueueTest::CheckACLForGrpcRead >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType >> TopicService::AccessRights [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 
>> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> HttpRequest::ProbeServerless [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedQueue >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay >> KqpWorkloadService::TestQueueSizeSimple >> KqpWorkloadServiceActors::TestPoolFetcher >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> TopicService::ThereAreGapsInTheOffsetRanges >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2025-04-04T17:12:03.905391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:12:03.905562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:12:03.905675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000893/r3tmp/tmpAd5MSy/pdisk_1.dat 2025-04-04T17:12:04.329167Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4250, node 1 2025-04-04T17:12:04.666472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:04.666530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:04.666561Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:04.667075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:12:04.669502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:12:04.774071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:04.774207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:04.794108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19625 2025-04-04T17:12:05.391111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:08.969553Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:12:09.012863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:09.012987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:09.057116Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:09.061748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:09.295509Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.296079Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.296505Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.296630Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.296894Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.296986Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.297065Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.297142Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.297218Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:12:09.487920Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:09.488037Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:09.507852Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:09.671197Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:09.726569Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:12:09.726708Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:12:09.808194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:12:09.809431Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:12:09.809646Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:12:09.809708Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:12:09.809780Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:12:09.809840Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:12:09.809889Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:12:09.809939Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:12:09.810367Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:12:09.863356Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:12:09.863486Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:12:09.877233Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:12:09.892250Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1914:2621] 2025-04-04T17:12:09.892941Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1914:2621], schemeshard id = 72075186224037897 2025-04-04T17:12:09.899638Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:12:09.950644Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:12:09.950709Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:12:09.950802Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:12:09.965423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:12:09.973451Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:12:09.973654Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:12:10.175674Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:12:10.402510Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:12:10.514981Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:12:11.466746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:12:12.411970Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:12.676853Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:12:12.676916Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:12:12.676992Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2591:2949], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:12:12.678023Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2592:2950] 2025-04-04T17:12:12.679578Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2592:2950], schemeshard id = 72075186224037899 2025-04-04T17:12:14.128310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2719:3239], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.128472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:12:14.172093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-04T17:12:14.597211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:12:14.597486Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:12:14.597795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:12:14.597924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:12:14.598043Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:12:14.598165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:12:14.598299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:12:14.598426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:12:14.598581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2852:3077];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17: ... 894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802320Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802367Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802390Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802411Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:14:46.802455Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 
2025-04-04T17:14:47.945969Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:14:47.946109Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-04T17:14:47.946149Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:14:47.946896Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:14:47.962829Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:14:47.963325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:14:47.963396Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:14:47.963895Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:14:47.990390Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:14:47.990577Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:14:47.991490Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9878:7438], server id = [2:9883:7443], tablet id = 72075186224037905, status = OK 2025-04-04T17:14:47.991595Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9878:7438], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:47.992971Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9879:7439], server id = [2:9884:7444], tablet id = 72075186224037906, status = OK 2025-04-04T17:14:47.993048Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9879:7439], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:47.993843Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9880:7440], server id = [2:9885:7445], tablet id = 72075186224037907, status = OK 2025-04-04T17:14:47.993909Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9880:7440], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:47.994102Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9881:7441], server id = [2:9886:7446], tablet id = 72075186224037908, status = OK 2025-04-04T17:14:47.994157Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9881:7441], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:47.995418Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9882:7442], server id = [2:9888:7448], tablet id = 72075186224037909, status = OK 2025-04-04T17:14:47.995480Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9882:7442], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:47.996252Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:14:47.997190Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9878:7438], server id = [2:9883:7443], tablet id = 72075186224037905 2025-04-04T17:14:47.997223Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:47.997555Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:14:47.997721Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:14:47.998819Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = 
[2:9879:7439], server id = [2:9884:7444], tablet id = 72075186224037906 2025-04-04T17:14:47.998853Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:47.999067Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9881:7441], server id = [2:9886:7446], tablet id = 72075186224037908 2025-04-04T17:14:47.999093Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:47.999194Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:14:47.999651Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-04T17:14:47.999779Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9893:7453], server id = [2:9894:7454], tablet id = 72075186224037910, status = OK 2025-04-04T17:14:47.999839Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9893:7453], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:48.000709Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9880:7440], server id = [2:9885:7445], tablet id = 72075186224037907 2025-04-04T17:14:48.000741Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:48.000817Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9895:7455], server id = [2:9897:7457], tablet id = 72075186224037911, status = OK 2025-04-04T17:14:48.000876Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9895:7455], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:48.001579Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9882:7442], server id = [2:9888:7448], tablet id = 72075186224037909 2025-04-04T17:14:48.001611Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:48.002079Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9896:7456], server id = [2:9899:7459], tablet id = 72075186224037912, status = OK 2025-04-04T17:14:48.002142Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9896:7456], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:48.002398Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9898:7458], server id = [2:9901:7461], tablet id = 72075186224037913, status = OK 2025-04-04T17:14:48.002457Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9898:7458], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:48.003416Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-04T17:14:48.004053Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9900:7460], server id = [2:9903:7463], tablet id = 72075186224037914, status = OK 2025-04-04T17:14:48.004114Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9900:7460], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:14:48.004182Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9893:7453], server id = [2:9894:7454], tablet id = 72075186224037910 2025-04-04T17:14:48.004205Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:48.005174Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-04T17:14:48.005333Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-04T17:14:48.005885Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9895:7455], server id = [2:9897:7457], tablet id = 
72075186224037911 2025-04-04T17:14:48.005917Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:48.006196Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9896:7456], server id = [2:9899:7459], tablet id = 72075186224037912 2025-04-04T17:14:48.006227Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:48.006369Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-04-04T17:14:48.006658Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-04T17:14:48.006702Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:14:48.006971Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9898:7458], server id = [2:9901:7461], tablet id = 72075186224037913 2025-04-04T17:14:48.007011Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:48.007116Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:14:48.007330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:14:48.007665Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:14:48.009629Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9900:7460], server id = [2:9903:7463], tablet id = 72075186224037914 2025-04-04T17:14:48.009658Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:14:48.010469Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:14:48.035745Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzEyZjM1OTItNmE0YTc4OWEtNDlkZjczMDQtODE1YWYwZTk=, TxId: 2025-04-04T17:14:48.035815Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzEyZjM1OTItNmE0YTc4OWEtNDlkZjczMDQtODE1YWYwZTk=, TxId: 2025-04-04T17:14:48.036483Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:14:48.056011Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:14:48.056081Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete.
Send TEvAnalyzeResponse, OperationId=<4tfç, ActorId=[1:4626:3496] 2025-04-04T17:14:48.057189Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9925:5737]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:14:48.057447Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:14:48.057496Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:14:48.057688Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:14:48.057733Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:14:48.057790Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-04T17:14:48.072132Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> TPersQueueTest::DirectReadRestartPQRB [GOOD] >> TPersQueueTest::DirectReadRestartTablet >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> KqpQueryService::TableSink_BadTransactions [GOOD] >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> KqpQueryService::ClosedSessionRemovedFromPool [GOOD] >> KqpQueryService::CloseConnection >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> KqpService::ToDictCache+UseCache [GOOD] >> KqpService::ToDictCache-UseCache >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD] >> TPersQueueTest::TestWriteStat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_BadTransactions [GOOD] Test command err: Trying to start YDB, gRPC: 32125, MsgBus: 11447 2025-04-04T17:14:36.691106Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507605289029178:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:36.691273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017f9/r3tmp/tmpVL3U4s/pdisk_1.dat 2025-04-04T17:14:37.105735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:37.110115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:37.110218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:37.114900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32125, node 1 2025-04-04T17:14:37.222560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:37.222579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:37.222585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:37.222699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11447 TClient is connected to server localhost:11447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:37.834780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:37.849385Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:37.857073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:38.028295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:38.209404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:38.276373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:39.952663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507618173932840:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:39.952766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:40.218920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:40.252192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:40.279188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:40.322277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:40.363238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:40.436771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:40.493126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507622468900653:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:40.493231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:40.493415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507622468900658:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:40.497184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:40.507559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507622468900660:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:40.563297Z node 1 :TX_PROXY ERROR: Actor# [1:7489507622468900712:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15904, MsgBus: 10361 2025-04-04T17:14:42.157026Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507632733510876:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.157117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017f9/r3tmp/tmpvq9qL9/pdisk_1.dat 2025-04-04T17:14:42.342854Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:42.343789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.343852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.345630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15904, node 2 2025-04-04T17:14:42.481120Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:42.481147Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:42.481154Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:42.481262Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10361 TClient is connected to server localhost:10361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.051830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:43.059075Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:43.062616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.203740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.353864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.436375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:45.345566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507645618414503:2406], DatabaseId: /Root, ... orePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:14:51.538885Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:14:51.538934Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:14:51.539073Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:14:51.539097Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:14:51.539194Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:14:51.539217Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:14:51.539343Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:51.539364Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:51.539441Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:51.539458Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:51.586910Z node 3 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.587956Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.593262Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.593293Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.598425Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.598426Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.603654Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.603654Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.608689Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.608740Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:14:51.617689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:14:51.701827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507672032450342:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:51.701896Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:51.704820Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507672032450347:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:51.708803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:14:51.733555Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507672032450349:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-04-04T17:14:51.832733Z node 3 :TX_PROXY ERROR: Actor# [3:7489507672032450402:2682] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:14:51.968705Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-04-04T17:14:51.968776Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-04-04T17:14:51.969020Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-04-04T17:14:51.969534Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663;
2025-04-04T17:14:52.338668Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmIyODU4NjUtZGUxOTBmYjYtMjA2Nzg1YWItNjdiMWJiMjI=, ActorId: [3:7489507676327417837:2496], ActorState: ExecuteState, TraceId: 01jr0wrpt91vs3p8bwmtw4en2k, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time.
2025-04-04T17:14:52.463532Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODkwNzNhMWItMTA2N2Q2ZWMtOTJjNWExYjktNTA3MjY1MjU=, ActorId: [3:7489507676327417858:2504], ActorState: ExecuteState, TraceId: 01jr0wrq22a36mtf6b4vfwhza5, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time.
2025-04-04T17:14:52.616780Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjQyOWEyOGUtY2E1ZmU3YTAtODE3NzQ0NmEtMmM5ZjNmYmY=, ActorId: [3:7489507676327417877:2511], ActorState: ExecuteState, TraceId: 01jr0wrq5y76j4xcdgwx4nweqk, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time.
2025-04-04T17:14:52.747279Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507654852579873:2065];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:52.747657Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:14:52.950850Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjNlZjZkYzAtOTA3ZWE3MGItODVmOGM4NDktZDI1MWQ5ZDk=, ActorId: [3:7489507676327417900:2520], ActorState: ExecuteState, TraceId: 01jr0wrqak1f2evrcyqemfg93d, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time.
2025-04-04T17:14:53.177559Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmIwNTg2ZjEtNWQ0NTZlNTYtOWFmYjI5NmUtNTdjYjNmN2I=, ActorId: [3:7489507676327417922:2528], ActorState: ExecuteState, TraceId: 01jr0wrqn23eskx9t2rkykxc5e, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time.
2025-04-04T17:14:53.339510Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671;
2025-04-04T17:14:53.339624Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671;
2025-04-04T17:14:53.339932Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671;
2025-04-04T17:14:53.340724Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7489507672032449843:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=21;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889,72075186224037897;receive=72075186224037893;
2025-04-04T17:14:53.340791Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7489507672032449843:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=22;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889,72075186224037897;receive=72075186224037893;
2025-04-04T17:14:53.340968Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7489507672032449843:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=24;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889;receive=72075186224037897;
2025-04-04T17:14:53.341060Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7489507672032449843:2341];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037894;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889;receive=72075186224037897;
2025-04-04T17:14:53.341414Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671;
>> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64
>> TTicketParserTest::BulkAuthorizationRetryError [GOOD]
>> TTicketParserTest::BulkAuthorizationRetryErrorImmediately
>> TTicketParserTest::AuthorizationRetryError [GOOD]
>> TTicketParserTest::AuthorizationRetryErrorImmediately
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64
>> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD]
>> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier
>> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD]
>> TTicketParserTest::BulkAuthorizationUnavailable
>> KqpQueryServiceScripts::TestAstWithCompression [GOOD]
>> TTicketParserTest::Authorization [GOOD]
>> TTicketParserTest::AuthorizationModify
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD]
Test command err:
Trying to start YDB, gRPC: 65180, MsgBus: 15791
2025-04-04T17:14:30.587459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:14:30.587935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:14:30.588009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001804/r3tmp/tmpuG0UDI/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 65180, node 1
2025-04-04T17:14:31.084799Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:14:31.088977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:31.089028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:31.089075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:31.089373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:14:31.124880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:31.125008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:14:31.136498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:15791
TClient is connected to server localhost:15791
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:14:31.459962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:31.581200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:32.013591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:32.410530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:32.727117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:33.525732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1814:3408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:33.525928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:33.549208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:14:33.763554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:14:34.051456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:14:34.353088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:14:34.624495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:14:35.000108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:14:35.324460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2399:3857], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:35.324587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:35.324896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2404:3862], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:35.330059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:14:35.509891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2406:3864], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:14:35.564997Z node 1 :TX_PROXY ERROR: Actor# [1:2469:3908] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 16509, MsgBus: 31851
2025-04-04T17:14:37.161014Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507608549743864:2059];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:37.161057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001804/r3tmp/tmpHzzIE2/pdisk_1.dat
2025-04-04T17:14:37.416550Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 16509, node 2
2025-04-04T17:14:37.514379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:37.514530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:14:37.516557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:14:37.522772Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:37.522796Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:37.522804Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:37.522913Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:31851
TClient is connected to server localhost:31851
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:14:37.939722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:37.946851Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:14:37.960826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:38.024664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:38.170822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:38.248126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:40.184097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ... have access permissions }
2025-04-04T17:14:40.184200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:40.223736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:14:40.255394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:14:40.286317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:14:40.313532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:14:40.347377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:14:40.382413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:14:40.434200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507621434648035:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:40.434313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:40.434595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507621434648040:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:40.438370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:14:40.450160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507621434648042:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:14:40.541127Z node 2 :TX_PROXY ERROR: Actor# [2:7489507621434648096:3436] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:14:41.464209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
10
Trying to start YDB, gRPC: 28045, MsgBus: 62697
2025-04-04T17:14:42.851414Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507632029156908:2211];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001804/r3tmp/tmptMd1pN/pdisk_1.dat
2025-04-04T17:14:43.011726Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T17:14:43.108437Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:14:43.135199Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:43.135290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:14:43.139627Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28045, node 3
2025-04-04T17:14:43.228452Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:43.228469Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:43.228475Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:43.228586Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:62697
TClient is connected to server localhost:62697
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:14:43.752399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:43.770479Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:14:43.797508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:43.888762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:44.021131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:44.085013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:45.983298Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507644914060400:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:45.983371Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:46.019510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.093009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.126833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.162847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.233491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.285621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.331135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507649209028213:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:46.331218Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:46.331287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507649209028218:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:46.334404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:14:46.343443Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507649209028220:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:14:46.428342Z node 3 :TX_PROXY ERROR: Actor# [3:7489507649209028272:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:14:47.407330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:14:47.811810Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507632029156908:2211];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:47.811868Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
121
>> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD]
>> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64
>> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD]
>> TTicketParserTest::NebiusAuthenticationRetryError
>> KqpWorkloadService::TestQueueSizeSimple [GOOD]
>> KqpWorkloadService::TestQueueSizeManyQueries
>> TPersQueueTest::SetupWriteSession [GOOD]
>> TPersQueueTest::StoreNoMoreThanXSourceIDs
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD]
Test command err:
Trying to start YDB, gRPC: 4907, MsgBus: 17633
2025-04-04T17:14:33.099243Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507594364524267:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:33.103092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017ff/r3tmp/tmpgzWktj/pdisk_1.dat
2025-04-04T17:14:33.560956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:33.561049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:14:33.563014Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:14:33.565154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 4907, node 1
2025-04-04T17:14:33.762816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:33.762855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:33.762862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:33.762976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17633
TClient is connected to server localhost:17633
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:14:34.504273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:34.528415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:34.708933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:34.878315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:34.944120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:36.547943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507607249427894:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:36.548031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:36.796459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:14:36.829446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:14:36.859460Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480
2025-04-04T17:14:36.862110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:14:36.891297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:14:36.924196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:14:36.976169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:14:37.036820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507611544395702:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:37.036877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:37.037040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507611544395707:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:37.041144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:14:37.061806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507611544395709:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:14:37.136324Z node 1 :TX_PROXY ERROR: Actor# [1:7489507611544395762:3438] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:14:38.099681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507594364524267:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:38.099809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:14:38.114071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.117978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.131022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-04T17:14:40.221120Z node 1 :RPC_REQUEST WARN: Client lost
2025-04-04T17:14:40.221349Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489507624429298950:2744] TxId: 281474976710701. Ctx: { TraceId: 01jr0wrazrdfhqycc4xk4rqjne, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVlYzA1MDctZjdhOWRmYTUtYmQ2MGViOTMtYWU4NTBlODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-04T17:14:40.222792Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489507624429298955:2751], TxId: 281474976710701, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wrazrdfhqycc4xk4rqjne. SessionId : ydb://session/3?node_id=1&id=YWVlYzA1MDctZjdhOWRmYTUtYmQ2MGViOTMtYWU4NTBlODU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489507624429298950:2744], status: ABORTED, reason: {
: Error: Terminate execution }
2025-04-04T17:14:40.223487Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWVlYzA1MDctZjdhOWRmYTUtYmQ2MGViOTMtYWU4NTBlODU=, ActorId: [1:7489507620134331615:2744], ActorState: ExecuteState, TraceId: 01jr0wrazrdfhqycc4xk4rqjne, Create QueryResponse for error on request, msg:
2025-04-04T17:14:40.224027Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786880248, txId: 281474976710700] shutting down
2025-04-04T17:14:40.439506Z node 1 :RPC_REQUEST WARN: Client lost
2025-04-04T17:14:40.439674Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489507624429299017:2762] TxId: 281474976710705. Ctx: { TraceId: 01jr0wrb7pdnj34jsg0cjt9jsd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzgxYzVkNDQtYjA3NjJlZjgtZmY2YmRjOGMtNDM2ODQwNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost }
2025-04-04T17:14:40.440675Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzgxYzVkNDQtYjA3NjJlZjgtZmY2YmRjOGMtNDM2ODQwNDc=, ActorId: [1:7489507624429298994:2762], ActorState: ExecuteState, TraceId: 01jr0wrb7pdnj34jsg0cjt9jsd, Create QueryResponse for error on request, msg:
2025-04-04T17:14:40.441016Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786880472, txId: 281474976710704] shutting down
2025-04-04T17:14:40.441025Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489507624429299022:2767], TxId: 281474976710705, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NzgxYzVkNDQtYjA3NjJlZjgtZmY2YmRjOGMtNDM2ODQwNDc=. TraceId : 01jr0wrb7pdnj34jsg0cjt9jsd. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489507624429299017:2762], status: ABORTED, reason: {
: Error: Terminate execution }
2025-04-04T17:14:40.652259Z node 1 :RPC_REQUEST WARN: Client lost
2025-04-04T17:14:40.652409Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489507624429299083:2778] TxId: 281474976710709. Ctx: { TraceId: 01jr0wrbef7rch64gdwvswdzbx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjZhMTRiMj ... uboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:14:45.254507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507644202777781:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:45.254634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:45.255013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507644202777786:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:45.259292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:14:45.273951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507644202777788:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:14:45.371631Z node 2 :TX_PROXY ERROR: Actor# [2:7489507644202777843:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:14:46.232206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.233499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.243816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-04T17:14:46.389312Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507627022906480:2208];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:46.389397Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:14:48.521122Z node 2 :RPC_REQUEST WARN: Client lost
2025-04-04T17:14:48.522549Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786888550, txId: 281474976715705] shutting down
Trying to start YDB, gRPC: 3587, MsgBus: 11788
2025-04-04T17:14:49.362445Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507662925272622:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:49.362491Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017ff/r3tmp/tmpTc6hJl/pdisk_1.dat
2025-04-04T17:14:49.527294Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:14:49.529290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:49.529358Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 3587, node 3
2025-04-04T17:14:49.556681Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:14:49.602793Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:49.602816Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:49.602824Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:49.602942Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11788
TClient is connected to server localhost:11788
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:14:50.170025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:50.184337Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:14:50.196570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:14:50.278532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-04-04T17:14:50.461099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:50.547484Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:52.995366Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507675810176250:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:52.995467Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:53.028343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:14:53.063101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:14:53.104337Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:14:53.143561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:14:53.194660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:14:53.266040Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:14:53.330610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507680105144063:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:53.330696Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:53.330892Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507680105144068:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:53.342444Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:14:53.363980Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507680105144070:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:14:53.429797Z node 3 :TX_PROXY ERROR: Actor# [3:7489507680105144125:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:14:54.362994Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507662925272622:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:54.363093Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:14:54.478633Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:14:54.479784Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-04T17:14:54.481000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD]
>> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD]
>> TTicketParserTest::NebiusAuthorizationUnavailable
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16
>> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD]
>> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad
>> TTicketParserTest::NebiusAuthorizationRetryError [GOOD]
>> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersSysViewOnServerless
>> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD]
>> TTicketParserTest::AuthorizationWithRequiredPermissions
>> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD]
>> TTicketParserTest::BulkAuthorization
>> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits
>> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD]
>> TTicketParserTest::AuthorizationModify [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65
>> TTicketParserTest::BulkAuthorizationUnavailable [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD]
>> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables
>> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD]
>> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions
>> TopicService::ThereAreGapsInTheOffsetRanges [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD]
Test command err:
Trying to start YDB, gRPC: 18875, MsgBus: 4911
2025-04-04T17:14:34.589021Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507596466762641:2198];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:34.596790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017fc/r3tmp/tmp7axxuz/pdisk_1.dat
2025-04-04T17:14:35.059965Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:14:35.068730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:35.068845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:14:35.072127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18875, node 1
2025-04-04T17:14:35.144543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:35.144561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:35.144567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:35.144660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4911
TClient is connected to server localhost:4911
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:14:35.704357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:35.726922Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:14:35.745078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:35.892712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:14:36.081742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:14:36.180841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T17:14:37.900717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507609351666145:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:37.900826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:38.168438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.205635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.236799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.265435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.289989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.366552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:14:38.428721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507613646633955:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:38.428845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:38.429159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507613646633961:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:14:38.433517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:14:38.445010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507613646633963:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:14:38.508228Z node 1 :TX_PROXY ERROR: Actor# [1:7489507613646634015:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:14:39.389368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:14:39.390792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-04-04T17:14:39.391805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-04-04T17:14:39.580601Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507596466762641:2198];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:39.628841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:14:41.395764Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786881431, txId: 281474976710701] shutting down
Trying to start YDB, gRPC: 15756, MsgBus: 5808
2025-04-04T17:14:42.359184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507632077080131:2069];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:42.359229Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017fc/r3tmp/tmp1AkroL/pdisk_1.dat
2025-04-04T17:14:42.545339Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:14:42.577856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:42.577936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:14:42.583463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15756, node 2
2025-04-04T17:14:42.786876Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:42.786897Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:42.786907Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:42.787059Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5808
TClient is connected to server localhost:5808
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.391827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:4 ... :7489507632077080131:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:47.386906Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:48.374923Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: 2486deed-6faa8077-e85fc5a2-841b815, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ZWM0MjI2ZTktYmUxZmEyMTAtZGUzMjNkMGEtMzM4OTkyOQ==, TxId: 2025-04-04T17:14:49.110336Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 2486deed-6faa8077-e85fc5a2-841b815, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ZjY5ZDIzNmUtNTQ0N2RjNjEtOWJhOTI2NTktMmJiZWRlNzQ=, TxId: 2025-04-04T17:14:49.240713Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 2486deed-6faa8077-e85fc5a2-841b815, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-04-04T17:14:49.266289Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 2486deed-6faa8077-e85fc5a2-841b815, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NDkzNTU0NC0xYjk5Njg3YS0xZGIxYjI0Ni05ZTBiM2E0ZA==, TxId: 2025-04-04T17:14:49.266416Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: 2486deed-6faa8077-e85fc5a2-841b815, check lease failed 2025-04-04T17:14:49.656774Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 2486deed-6faa8077-e85fc5a2-841b815, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=2&id=YzhiN2E0MC04YmY0Zjk5OC1hNDY3Mjk1ZC03YTI1NzEwYQ==, TxId: Trying to start YDB, gRPC: 27618, MsgBus: 28764 2025-04-04T17:14:50.959673Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507665960155974:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:50.969367Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017fc/r3tmp/tmpTbaO4n/pdisk_1.dat 2025-04-04T17:14:51.183823Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:51.185823Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:51.185889Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:51.188903Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27618, node 3 2025-04-04T17:14:51.293249Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:51.293271Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:51.293279Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:51.293401Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28764 TClient is connected to server localhost:28764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:51.903634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:51.912490Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:51.920677Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:52.035618Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:52.224576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:52.308629Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:54.553497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507683140026839:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:54.553580Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:54.593025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:54.665013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:54.711178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:54.756221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:54.792290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:54.833005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:54.879664Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507683140027353:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:54.879768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:54.880168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507683140027358:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:54.883295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:54.906883Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507683140027360:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:54.973473Z node 3 :TX_PROXY ERROR: Actor# [3:7489507683140027413:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:55.940860Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507665960155974:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:55.940914Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:55.967843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:55.969136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:55.970694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:58.346989Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786898385, txId: 281474976710701] shutting down 2025-04-04T17:14:58.593382Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786898630, txId: 281474976710704] shutting down 2025-04-04T17:14:58.859626Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786898889, txId: 281474976710707] shutting down 2025-04-04T17:14:59.144889Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786899183, txId: 281474976710710] shutting down 2025-04-04T17:14:59.470315Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786899505, txId: 281474976710713] shutting down 2025-04-04T17:14:59.503353Z node 3 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: a202869f-43dc5b04-1ec739e6-1705154d, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=3&id=OGJhZGI5MzctMWNmMmI4ZDYtMTBlNTBiY2YtNmI2NGE3ZGU=, TxId: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2025-04-04T17:14:42.262957Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507630956467664:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.262989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c30/r3tmp/tmpV5DHue/pdisk_1.dat 2025-04-04T17:14:42.928177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:42.937554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.937678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.939404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2880, node 1 2025-04-04T17:14:43.086975Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.087010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.087027Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.087158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.615855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:43.667252Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:43.667288Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:43.667296Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:43.667730Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:43.667846Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:7799 2025-04-04T17:14:43.686143Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-04T17:14:43.712496Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:43.718470Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-04T17:14:43.719726Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Connect to grpc://localhost:27954 2025-04-04T17:14:43.720560Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-04T17:14:43.728410Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-04T17:14:43.730926Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-04T17:14:46.074712Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507650323437608:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c30/r3tmp/tmpF3w4ye/pdisk_1.dat 2025-04-04T17:14:46.194560Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:46.288353Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:46.291039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:46.291140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:46.292212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7902, node 2 2025-04-04T17:14:46.347606Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:46.347622Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:46.347628Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:46.347717Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:46.579580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:46.592742Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:46.597053Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2025-04-04T17:14:49.415183Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507663572735552:2205];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c30/r3tmp/tmpsKBZMt/pdisk_1.dat 2025-04-04T17:14:49.461095Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:49.544619Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31571, node 3 2025-04-04T17:14:49.570968Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:49.571048Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:49.573980Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:49.696365Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:49.696391Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:49.696398Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:49.696570Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29808 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:49.947467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:49.953261Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:49.958328Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2025-04-04T17:14:53.239317Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507680277118583:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:53.239359Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c30/r3tmp/tmpmWIL5e/pdisk_1.dat 2025-04-04T17:14:53.434450Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:53.458400Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:53.458481Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:53.460130Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22332, node 4 2025-04-04T17:14:53.570682Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:53.570701Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:53.570706Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (em ... 
enied" retryable:0 2025-04-04T17:14:53.859748Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2025-04-04T17:14:53.860349Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:53.860378Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:53.860388Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:53.860409Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:53.860567Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:53.865654Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Status 16 Access Denied 2025-04-04T17:14:53.865785Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-04T17:14:53.865813Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-04-04T17:14:53.866488Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:53.866522Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:53.866532Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:53.866555Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:53.866711Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-04T17:14:53.871753Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Status 16 Access Denied 2025-04-04T17:14:53.871855Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-04T17:14:53.871880Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-04T17:14:53.872396Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:53.872427Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:53.872436Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:53.872462Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:53.872617Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:53.874254Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:53.882981Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-04T17:14:53.883069Z node 4 :TICKET_PARSER DEBUG: Ticket **** 
(8E120919) () has now valid token of user1@as 2025-04-04T17:14:53.884460Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:53.884481Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:53.884490Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:53.884530Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:53.884686Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-04-04T17:14:53.886382Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:53.886498Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-04T17:14:53.886560Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:14:53.887765Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:53.887800Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:53.887809Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:53.887843Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2025-04-04T17:14:53.887991Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2025-04-04T17:14:53.891219Z node 4 :GRPC_CLIENT DEBUG: [517000090388] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:53.891309Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2025-04-04T17:14:53.891389Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:14:57.435775Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507697880850662:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:57.435834Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c30/r3tmp/tmpI3Odud/pdisk_1.dat 2025-04-04T17:14:57.563073Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:57.601316Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:57.601418Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3057, node 5 2025-04-04T17:14:57.607093Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:57.665329Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:57.665346Z node 5 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-04-04T17:14:57.665353Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:57.665475Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:57.943466Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:57.950511Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:57.954346Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:57.954370Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:57.954386Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:57.954421Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:57.954503Z node 5 :GRPC_CLIENT DEBUG: [51700002e008] Connect to grpc://localhost:32735 2025-04-04T17:14:57.955362Z node 5 :GRPC_CLIENT DEBUG: [51700002e008] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:57.967602Z node 5 :GRPC_CLIENT DEBUG: [51700002e008] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:57.967845Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-04T17:14:57.967941Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:14:57.968481Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:57.968511Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:57.968524Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:57.968549Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:57.968598Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-04T17:14:57.968755Z node 5 :GRPC_CLIENT DEBUG: [51700002e008] 
Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:57.969286Z node 5 :GRPC_CLIENT DEBUG: [51700002e008] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:57.974534Z node 5 :GRPC_CLIENT DEBUG: [51700002e008] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:57.975030Z node 5 :GRPC_CLIENT DEBUG: [51700002e008] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:57.975217Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-04T17:14:57.978902Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-04T17:14:57.979018Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2025-04-04T17:14:42.224824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507632381068184:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.224884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c50/r3tmp/tmp5hvmVt/pdisk_1.dat 2025-04-04T17:14:42.791417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:42.802355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.802461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.822290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22259, node 1 2025-04-04T17:14:43.035822Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.035855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.035864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.035950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16154 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.611665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.651296Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:43.898562Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:14:43.898948Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:43.898969Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:43.899665Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2025-04-04T17:14:43.899683Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2025-04-04T17:14:43.899727Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c50/r3tmp/tmpLUPzk6/pdisk_1.dat 2025-04-04T17:14:45.865505Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:14:45.872420Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11626, node 2 2025-04-04T17:14:45.903239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:45.903315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:45.905520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:45.953002Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:45.953020Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:45.953025Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:45.953099Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1162 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:46.135206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:46.139186Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:46.140879Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:46.140920Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:46.140927Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:46.141015Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-04T17:14:46.141056Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Connect to grpc://localhost:20126 2025-04-04T17:14:46.143813Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-04T17:14:46.162952Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-04T17:14:46.163415Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-04T17:14:46.163510Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:14:46.164001Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:46.164037Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:46.164045Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:46.164084Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 
2025-04-04T17:14:46.164218Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-04T17:14:46.167527Z node 2 :GRPC_CLIENT DEBUG: [517000031b88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-04-04T17:14:46.167652Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-04T17:14:46.167694Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2025-04-04T17:14:49.136232Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507661459023644:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:49.136313Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c50/r3tmp/tmp4M2sdj/pdisk_1.dat 2025-04-04T17:14:49.269731Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:49.284875Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:49.284966Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:49.286634Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12828, node 3 2025-04-04T17:14:49.354791Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:49.354812Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:49.354820Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:49.354935Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeE ... 
7365Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507677399116235:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:53.397420Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c50/r3tmp/tmpK3HSPC/pdisk_1.dat 2025-04-04T17:14:53.590376Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:53.617389Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:53.617485Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:53.619436Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24275, node 4 2025-04-04T17:14:53.703492Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:53.703518Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:53.703527Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:53.703715Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3016 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:53.963817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:53.975138Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:53.981218Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:53.981251Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:53.981258Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:53.981384Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2025-04-04T17:14:53.981433Z node 4 :GRPC_CLIENT DEBUG: [517000110d88] Connect to grpc://localhost:21065 2025-04-04T17:14:53.982817Z node 4 :GRPC_CLIENT DEBUG: [517000110d88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2025-04-04T17:14:54.008820Z node 4 :GRPC_CLIENT DEBUG: [517000110d88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2025-04-04T17:14:54.009030Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2025-04-04T17:14:54.009050Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2025-04-04T17:14:54.009061Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2025-04-04T17:14:54.009079Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2025-04-04T17:14:54.009107Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-04T17:14:54.009312Z node 4 :GRPC_CLIENT DEBUG: [517000111108] Connect to grpc://localhost:9165 2025-04-04T17:14:54.010106Z node 4 :GRPC_CLIENT DEBUG: [517000111108] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-04T17:14:54.044832Z node 4 :GRPC_CLIENT DEBUG: [517000111108] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-04T17:14:54.045374Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-04T17:14:57.306295Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507694554116838:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:57.314932Z node 5 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c50/r3tmp/tmp6Su5nd/pdisk_1.dat 2025-04-04T17:14:57.441242Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19814, node 5 2025-04-04T17:14:57.461396Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:57.461483Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:57.465033Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:57.484108Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:57.484130Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:57.484137Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:57.484246Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:57.743456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:57.753165Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:57.761424Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:57.761465Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:57.761475Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:57.761607Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-04T17:14:57.761671Z node 5 :GRPC_CLIENT DEBUG: [5170000eb708] Connect to grpc://localhost:6666 2025-04-04T17:14:57.762881Z node 5 :GRPC_CLIENT DEBUG: [5170000eb708] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-04T17:14:57.778403Z node 5 :GRPC_CLIENT DEBUG: [5170000eb708] Status 14 Service Unavailable 2025-04-04T17:14:57.779483Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:57.779528Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:57.779562Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:57.779701Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-04T17:14:57.780082Z node 5 :GRPC_CLIENT DEBUG: [5170000eb708] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-04T17:14:57.784198Z node 5 :GRPC_CLIENT DEBUG: [5170000eb708] Status 1 CANCELLED 2025-04-04T17:14:57.784363Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-04-04T17:14:57.784400Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-04-04T17:14:57.784433Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> 
KqpWorkloadServiceDistributed::TestNodeDisconnect >> KqpService::SwitchCache+UseCache [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] Test command err: 2025-04-04T17:14:42.650561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507630488303263:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.652495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1a/r3tmp/tmpAHMbyu/pdisk_1.dat 2025-04-04T17:14:43.276589Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:43.279025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:43.279302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:43.287724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13086, node 1 2025-04-04T17:14:43.490524Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.490552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.490560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.490678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.925580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:43.955041Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:43.958415Z node 1 :TICKET_PARSER DEBUG: Ticket 3E3D7572CDD2532C09B2CBFA8FF57C2F1A9F8722665A08624EC22808901059EA () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:14:47.007094Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507652226633117:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:47.007137Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1a/r3tmp/tmpSJefe7/pdisk_1.dat 2025-04-04T17:14:47.230963Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:47.252044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:47.252128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:47.253589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27102, node 2 2025-04-04T17:14:47.325316Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:47.325341Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:47.325348Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:47.325447Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:47.573259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:47.588003Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:47.596298Z node 2 :TICKET_PARSER DEBUG: Ticket FA357FB761887546E8A2FC3BDD44F1980CD8F6613B23D14FEDB7CEBC5F7B45FA () has now permanent error message 'Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers' 2025-04-04T17:14:47.596890Z node 2 :TICKET_PARSER ERROR: Ticket FA357FB761887546E8A2FC3BDD44F1980CD8F6613B23D14FEDB7CEBC5F7B45FA: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2025-04-04T17:14:51.032104Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507664497888556:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:51.095789Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1a/r3tmp/tmp4jfk9Z/pdisk_1.dat 2025-04-04T17:14:51.249011Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:51.252802Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:51.252887Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:51.259319Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18880, node 3 2025-04-04T17:14:51.375044Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:51.375069Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:51.375076Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:51.375217Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:51.599854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:51.611437Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:51.616902Z node 3 :TICKET_PARSER DEBUG: Ticket 0D6A7C89F02BE1322516AA3B07B8BE3098527E87EEF56DB17D4E9F5780D44018 () has now permanent error message 'Cannot create token from certificate. 
Client certificate failed verification' 2025-04-04T17:14:51.617500Z node 3 :TICKET_PARSER ERROR: Ticket 0D6A7C89F02BE1322516AA3B07B8BE3098527E87EEF56DB17D4E9F5780D44018: Cannot create token from certificate. Client certificate failed verification 2025-04-04T17:14:54.833334Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507683432502134:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:54.833470Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1a/r3tmp/tmpCEJbRB/pdisk_1.dat 2025-04-04T17:14:55.020765Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:55.040447Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:55.040526Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:55.047206Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10584, node 4 2025-04-04T17:14:55.139711Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:55.139734Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:55.139742Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:55.139871Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 S ... CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:55.387574Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:55.394984Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:55.397538Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:55.397570Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:55.397584Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:55.398087Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-04T17:14:55.398151Z node 4 :GRPC_CLIENT DEBUG: [517000081208] Connect to grpc://localhost:8538 2025-04-04T17:14:55.401642Z node 4 :GRPC_CLIENT DEBUG: [517000081208] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-04T17:14:55.414621Z node 4 :GRPC_CLIENT DEBUG: [517000081208] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-04T17:14:55.414914Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "" 2025-04-04T17:14:55.415041Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:14:55.415570Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:55.415590Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:55.415598Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:55.415658Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-04T17:14:55.415889Z node 4 :GRPC_CLIENT DEBUG: [517000081208] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: 
"user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-04T17:14:55.424099Z node 4 :GRPC_CLIENT DEBUG: [517000081208] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-04-04T17:14:55.424285Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-04-04T17:14:55.424357Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2025-04-04T17:14:58.743884Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507698516330168:2143];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:58.761836Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1a/r3tmp/tmpWdiXAm/pdisk_1.dat 2025-04-04T17:14:58.875345Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:58.901512Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:58.901603Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:58.902950Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11401, node 5 2025-04-04T17:14:58.946827Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:58.946854Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:58.946862Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:58.947002Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:59.213054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:59.228719Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:59.231203Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:59.231235Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:59.231242Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:59.231308Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-04T17:14:59.231349Z node 5 :GRPC_CLIENT DEBUG: [5170000b9588] Connect to grpc://localhost:4080 2025-04-04T17:14:59.232291Z node 5 :GRPC_CLIENT DEBUG: [5170000b9588] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 14: "Service Unavailable" 2025-04-04T17:14:59.241186Z node 5 :GRPC_CLIENT DEBUG: [5170000b9588] Status 14 Service Unavailable 2025-04-04T17:14:59.241533Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:59.241557Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:59.241589Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:59.241651Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-04T17:14:59.241968Z node 5 :GRPC_CLIENT DEBUG: [5170000b9588] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-04-04T17:14:59.249546Z node 5 :GRPC_CLIENT DEBUG: [5170000b9588] Status 1 CANCELLED 2025-04-04T17:14:59.249909Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-04-04T17:14:59.249929Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-04-04T17:14:59.249950Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount >> TPersQueueTest::CheckACLForGrpcRead [GOOD] >> TPersQueueTest::CheckKillBalancer >> 
TopicService::OnePartitionAndNoGapsInTheOffsets >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 26762, MsgBus: 18422 2025-04-04T17:14:19.236312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507533433544458:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:19.236838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001810/r3tmp/tmpwfJtxg/pdisk_1.dat 2025-04-04T17:14:19.821582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:19.821663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:19.829584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:19.881333Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26762, node 1 2025-04-04T17:14:19.985962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:19.985994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:19.986003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:19.986114Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18422 TClient is connected to server localhost:18422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:20.777986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:20.813486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:20.995523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:14:21.234952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:21.317555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:23.049297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507550613415290:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.049401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.370451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.449756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.489250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.526039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.566649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.642988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:23.732524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507550613415813:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.732593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.732784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507550613415819:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:23.736839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:23.759147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507550613415821:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:23.826346Z node 1 :TX_PROXY ERROR: Actor# [1:7489507550613415876:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:24.230419Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507533433544458:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:24.230512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28339, MsgBus: 32546 2025-04-04T17:14:28.691175Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507571575892744:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:28.691223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001810/r3tmp/tmpsLhiMT/pdisk_1.dat 2025-04-04T17:14:28.964177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:28.964266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:28.978968Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:28.979345Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28339, node 2 2025-04-04T17:14:29.010613Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:29.010633Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:29.010641Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:29.010754Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32546 TClient is connected to server localhost:32546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:29.446876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:29.450926Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:29.463135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:29.538503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:29.737496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:29.808969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... g previous query completion proxyRequestId: 49 2025-04-04T17:14:34.103947Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTBlZGI5YWYtNjUxNDkyNjYtNTllNmFjY2YtNDU1M2Q3YWY=, ActorId: [2:7489507597345699376:2563], ActorState: ExecuteState, TraceId: 01jr0wr57q599js7j3ev364jaz, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-04-04T17:14:34.112351Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTBlZGI5YWYtNjUxNDkyNjYtNTllNmFjY2YtNDU1M2Q3YWY=, ActorId: [2:7489507597345699376:2563], ActorState: ExecuteState, TraceId: 01jr0wr57q599js7j3ev364jaz, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-04-04T17:14:34.112739Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTBlZGI5YWYtNjUxNDkyNjYtNTllNmFjY2YtNDU1M2Q3YWY=, ActorId: [2:7489507597345699376:2563], ActorState: ExecuteState, TraceId: 01jr0wr57q599js7j3ev364jaz, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-04-04T17:14:34.115642Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTBlZGI5YWYtNjUxNDkyNjYtNTllNmFjY2YtNDU1M2Q3YWY=, ActorId: [2:7489507597345699376:2563], ActorState: ExecuteState, TraceId: 01jr0wr57q599js7j3ev364jaz, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-04-04T17:14:34.222701Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTllYTc5ZjItOWM2ZTczNDMtNDhkN2U2OWMtNDdhMzY3NmU=, ActorId: [2:7489507597345699406:2574], ActorState: ExecuteState, TraceId: 01jr0wr5bd24eqeqr895zjdajq, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-04-04T17:14:34.222810Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTllYTc5ZjItOWM2ZTczNDMtNDhkN2U2OWMtNDdhMzY3NmU=, ActorId: [2:7489507597345699406:2574], ActorState: ExecuteState, TraceId: 01jr0wr5bd24eqeqr895zjdajq, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-04-04T17:14:34.222847Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTllYTc5ZjItOWM2ZTczNDMtNDhkN2U2OWMtNDdhMzY3NmU=, ActorId: [2:7489507597345699406:2574], ActorState: ExecuteState, TraceId: 01jr0wr5bd24eqeqr895zjdajq, Reply query error, msg: Pending previous query 
completion proxyRequestId: 60 2025-04-04T17:14:34.342850Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmMzZmViN2UtZjc0NGQ4MjMtOTY0NzU3YTUtZmUxZDZhOTM=, ActorId: [2:7489507597345699447:2584], ActorState: ExecuteState, TraceId: 01jr0wr5f5eymdz9bp57z4wppc, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-04-04T17:14:34.343358Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmMzZmViN2UtZjc0NGQ4MjMtOTY0NzU3YTUtZmUxZDZhOTM=, ActorId: [2:7489507597345699447:2584], ActorState: ExecuteState, TraceId: 01jr0wr5f5eymdz9bp57z4wppc, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-04-04T17:14:34.444654Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2M1MDQ5MjAtNzNjMjZmMy00MTA1YjkxLTVlYzNjN2U4, ActorId: [2:7489507597345699472:2593], ActorState: ExecuteState, TraceId: 01jr0wr5jc3zvtwfbkzedym7sc, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 17089, MsgBus: 8321 2025-04-04T17:14:35.670079Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507602143113622:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:35.670143Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001810/r3tmp/tmpdkxmiX/pdisk_1.dat 2025-04-04T17:14:35.831053Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:35.831790Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:35.831872Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:35.836320Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17089, node 3 2025-04-04T17:14:35.966850Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:35.966880Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:35.966887Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:35.967012Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8321 TClient is connected to server localhost:8321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:36.481567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:36.488555Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:36.501733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:36.597607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:36.782597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.854657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:39.349179Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507619322984544:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:39.349240Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:39.369446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:39.400046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:39.469127Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:39.503813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:39.541775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:39.616773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:39.681048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507619322985061:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:39.681132Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:39.681333Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507619322985066:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:39.686084Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:39.700251Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507619322985068:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:39.764919Z node 3 :TX_PROXY ERROR: Actor# [3:7489507619322985123:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:40.670253Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507602143113622:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:40.670318Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:40.837582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:50.826362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:14:50.826399Z node 3 :IMPORT WARN: Table profiles were not loaded took: 21.119787s took: 21.120728s took: 21.121549s took: 21.125997s took: 21.126726s took: 21.126335s took: 21.128035s took: 21.128782s took: 21.130838s took: 21.135526s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2025-04-04T17:14:42.368347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507632757650677:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.369053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c6f/r3tmp/tmpQQNYs3/pdisk_1.dat 2025-04-04T17:14:42.908839Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:42.931685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.931781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.939456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3585, node 1 2025-04-04T17:14:43.190636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.190665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.190672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.190789Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2308 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.711536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.745350Z node 1 :TICKET_PARSER DEBUG: Ticket 3B2FAEF8896E9743CA0D5E58D6B64227F161C1D43654D2EB9A3DA4AC0BD8B558 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:14:46.684109Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507646772680473:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:46.684154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c6f/r3tmp/tmpftQDyv/pdisk_1.dat 2025-04-04T17:14:46.827143Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:46.845262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:46.845372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12626, node 2 2025-04-04T17:14:46.846617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:46.896023Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:46.896041Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:46.896047Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:46.896170Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:47.114118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:47.124590Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:47.129543Z node 2 :TICKET_PARSER DEBUG: Ticket 36F1E84493FAF8E125F6CAAD40D3128E6B9DC5176C304D4AFE55BE5A679A9173 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:14:50.478581Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507667234416436:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:50.478633Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c6f/r3tmp/tmp1LmkT6/pdisk_1.dat 2025-04-04T17:14:50.653147Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:50.673151Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:50.673248Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:50.675412Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20774, node 3 2025-04-04T17:14:50.821636Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:50.821651Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:50.821658Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:50.821772Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:51.085184Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:51.099275Z node 3 :TICKET_PARSER DEBUG: Ticket AC76ED50D2CD229A26670570916F400F7E836D761AFF891CB4F86A68FCC0E142 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-04T17:14:51.099879Z node 3 :TICKET_PARSER ERROR: Ticket AC76ED50D2CD229A26670570916F400F7E836D761AFF891CB4F86A68FCC0E142: Cannot create token from certificate. Client certificate failed verification 2025-04-04T17:14:54.863470Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507684500455991:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:54.863534Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c6f/r3tmp/tmpP0Vz4G/pdisk_1.dat 2025-04-04T17:14:55.021074Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:55.034926Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:55.035013Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:55.037432Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23013, node 4 2025-04-04T17:14:55.131057Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:55.131079Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:55.131086Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:55.131237Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9238 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:55.373679Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:55.394905Z node 4 :TICKET_PARSER DEBUG: Ticket 898B7BE79A2769A3AEE41BE05354EB7A5080BD68F54AF80BBF256D72E71324EC () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-04-04T17:14:59.620516Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507702663099218:2157];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:59.620961Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c6f/r3tmp/tmpGHmcNd/pdisk_1.dat 2025-04-04T17:14:59.798556Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:59.803420Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:59.803516Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:59.807508Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18041, node 5 2025-04-04T17:14:59.850456Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:59.850509Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:59.850519Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:59.850684Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32690 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:00.119613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:00.141189Z node 5 :TICKET_PARSER DEBUG: Ticket 9081BCF08E5E14CFA2857AE745FE676DCB4F53DD83AC098EB132709F84141407 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-04-04T17:15:00.141710Z node 5 :TICKET_PARSER ERROR: Ticket 9081BCF08E5E14CFA2857AE745FE676DCB4F53DD83AC098EB132709F84141407: Cannot create token from certificate. Client certificate failed verification >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] >> KqpQueryService::CloseConnection [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> KqpQueryService::TableSink_OlapOrder [GOOD] >> KqpQueryService::TableSink_OlapRWQueries >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 26452, MsgBus: 11099 2025-04-04T17:14:20.716754Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507535532253822:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:20.724315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180b/r3tmp/tmp7QgtCy/pdisk_1.dat 2025-04-04T17:14:21.389292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:21.389395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:21.391735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:21.394639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26452, node 1 2025-04-04T17:14:21.610897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:21.610920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:21.610940Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:21.611101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11099 TClient is connected to server localhost:11099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:22.317175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.336792Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:22.346190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.529206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.712510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:22.784700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:24.499074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507552712124651:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.499164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:24.767973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.798196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.830438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.900654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.931831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:24.976561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:25.084848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507557007092468:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:25.084930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:25.085382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507557007092473:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:25.089033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:25.103759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507557007092475:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:25.173313Z node 1 :TX_PROXY ERROR: Actor# [1:7489507557007092531:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:25.694334Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507535532253822:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:25.694404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20448, MsgBus: 22265 2025-04-04T17:14:26.790804Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507560699423625:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:26.791046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180b/r3tmp/tmpGJElDU/pdisk_1.dat 2025-04-04T17:14:26.928925Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:26.944886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:26.944960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:26.946617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20448, node 2 2025-04-04T17:14:27.010718Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:27.010742Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:27.010749Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:27.010850Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22265 TClient is connected to server localhost:22265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:27.391435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.411476Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:27.424737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.504248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.645929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting ... 4:48.316367Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:48.318074Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:50.927574Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786890944, txId: 281474976710706] shutting down 2025-04-04T17:14:51.228520Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786891252, txId: 281474976710709] shutting down 2025-04-04T17:14:51.573832Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786891602, txId: 281474976710712] shutting down 2025-04-04T17:14:51.875630Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743786891910, txId: 281474976710715] shutting down 2025-04-04T17:14:51.912853Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 649076b6-6f05f8f5-9d89e91d-ad1fa4da, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=4&id=MmY1MjdhNDQtNjRkMWM5NTAtYjAzNzZjYS0yYmU2Mzg0Yw==, TxId: Trying to start YDB, gRPC: 1972, MsgBus: 22003 2025-04-04T17:14:53.832234Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507678114560023:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:53.833142Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180b/r3tmp/tmpPfBkix/pdisk_1.dat 2025-04-04T17:14:54.046794Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:54.050077Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:54.050161Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:54.051882Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1972, node 5 2025-04-04T17:14:54.125949Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:54.125975Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:54.125985Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:54.126134Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22003 TClient is connected to server localhost:22003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:54.668762Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:54.680816Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:54.694708Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:54.783463Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:54.978442Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:55.066829Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:57.811045Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507695294430928:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:57.811170Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:57.861965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:57.938006Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:57.985633Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:58.029425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:58.075149Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:58.147050Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:58.210749Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507699589398744:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.210874Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.210943Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507699589398749:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.216023Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:58.228342Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489507699589398751:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:58.315886Z node 5 :TX_PROXY ERROR: Actor# [5:7489507699589398807:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:58.832691Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489507678114560023:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:58.910869Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:59.402130Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:59.403955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:14:59.405222Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:15:01.806898Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: c6870140-81973fdb-5cd0d2f5-c7421607, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=YjZiYTViMy00ZWJiODFmMS04N2EzNjE4YS1iNmVjNGM1Nw==, TxId: 2025-04-04T17:15:02.863484Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: c6870140-81973fdb-5cd0d2f5-c7421607, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=N2MxMjA4OWYtZWY1ZDNlY2ItMjk5YjRjZjQtMzNhOTdkN2U=, TxId: 2025-04-04T17:15:03.048797Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: c6870140-81973fdb-5cd0d2f5-c7421607, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-04-04T17:15:03.086007Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: c6870140-81973fdb-5cd0d2f5-c7421607, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=ZmFlNGQwOWQtZGJlMWU1MzUtNDRhNjJlNzMtNDQ3NWMyNjY=, TxId: 2025-04-04T17:15:03.086182Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: c6870140-81973fdb-5cd0d2f5-c7421607, check lease failed 2025-04-04T17:15:03.692714Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: c6870140-81973fdb-5cd0d2f5-c7421607, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=5&id=YmI4YzViNDEtZTc0NGExMGUtMTEwNjEwZDAtNGJkZjc5NjU=, TxId: >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CloseConnection [GOOD] Test command err: Trying to start YDB, gRPC: 65365, MsgBus: 15498 2025-04-04T17:13:50.990395Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507406671858190:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:50.990435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001883/r3tmp/tmpsrLOW5/pdisk_1.dat 2025-04-04T17:13:51.450673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:51.450768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:51.452494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:51.464886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65365, node 1 2025-04-04T17:13:51.598323Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:51.598352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:51.598358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:51.598456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15498 TClient is connected to server localhost:15498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:52.129076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.191924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:52.368903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.552506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.633841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.179420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507423851728998:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.179536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.436386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.470430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.504277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.545214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.581795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.615733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.682258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507423851729511:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.682332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.682684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507423851729516:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.686063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:54.695774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507423851729518:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:54.748584Z node 1 :TX_PROXY ERROR: Actor# [1:7489507423851729571:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:55.990649Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507406671858190:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:55.990735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5233, MsgBus: 8552 2025-04-04T17:13:57.535737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507438966902393:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:57.535789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001883/r3tmp/tmpidA0A8/pdisk_1.dat 2025-04-04T17:13:57.647378Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:57.666111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:57.666195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:57.668085Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5233, node 2 2025-04-04T17:13:57.754552Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:57.754574Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:57.754581Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:57.754725Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8552 TClient is connected to server localhost:8552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:13:58.192844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:58.212322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:58.269069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:58.445755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:58.532062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:00.493670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TP ... -04T17:15:01.999279Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTY5OGM5OS0zMzYxMTVmZC1hZjMwNGE1Yi1iMDEyMjA5Yg==, ActorId: [4:7489507711579279376:2540], ActorState: ExecuteState, TraceId: 01jr0ws0eqevs2sea8nrmbqwpz, Create QueryResponse for error on request, msg: 2025-04-04T17:15:02.014662Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:02.043070Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:02.043616Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489507715874246712:2550] TxId: 281474976715677. Ctx: { TraceId: 01jr0ws0g4654vkhz9ms9hdadt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzFhYTViMzUtZmU3OGU4MjAtOGE0OTE1NjEtMTlmMjBjNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:15:02.045150Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874246718:2552], TxId: 281474976715677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NzFhYTViMzUtZmU3OGU4MjAtOGE0OTE1NjEtMTlmMjBjNzA=. CustomerSuppliedId : . TraceId : 01jr0ws0g4654vkhz9ms9hdadt. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489507715874246712:2550], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.045493Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874246719:2553], TxId: 281474976715677, task: 2. Ctx: { TraceId : 01jr0ws0g4654vkhz9ms9hdadt. SessionId : ydb://session/3?node_id=4&id=NzFhYTViMzUtZmU3OGU4MjAtOGE0OTE1NjEtMTlmMjBjNzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489507715874246712:2550], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.045675Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874246720:2554], TxId: 281474976715677, task: 3. Ctx: { TraceId : 01jr0ws0g4654vkhz9ms9hdadt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzFhYTViMzUtZmU3OGU4MjAtOGE0OTE1NjEtMTlmMjBjNzA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489507715874246712:2550], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.045847Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874246721:2555], TxId: 281474976715677, task: 4. Ctx: { TraceId : 01jr0ws0g4654vkhz9ms9hdadt. SessionId : ydb://session/3?node_id=4&id=NzFhYTViMzUtZmU3OGU4MjAtOGE0OTE1NjEtMTlmMjBjNzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489507715874246712:2550], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.045997Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874246722:2556], TxId: 281474976715677, task: 5. Ctx: { TraceId : 01jr0ws0g4654vkhz9ms9hdadt. SessionId : ydb://session/3?node_id=4&id=NzFhYTViMzUtZmU3OGU4MjAtOGE0OTE1NjEtMTlmMjBjNzA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489507715874246712:2550], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.047760Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzFhYTViMzUtZmU3OGU4MjAtOGE0OTE1NjEtMTlmMjBjNzA=, ActorId: [4:7489507715874246709:2550], ActorState: ExecuteState, TraceId: 01jr0ws0g4654vkhz9ms9hdadt, Create QueryResponse for error on request, msg: 2025-04-04T17:15:02.078395Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:02.107679Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:02.196696Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:02.197326Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489507715874246885:2592] TxId: 281474976715683. Ctx: { TraceId: 01jr0ws0mx1bf4ma5bgaczp90f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NWYxNzY0YzQtZjljYTI2MWQtNTRmNmEyMmQtOTNjMzIyMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:15:02.197450Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874246895:2598], TxId: 281474976715683, task: 5. Ctx: { TraceId : 01jr0ws0mx1bf4ma5bgaczp90f. SessionId : ydb://session/3?node_id=4&id=NWYxNzY0YzQtZjljYTI2MWQtNTRmNmEyMmQtOTNjMzIyMzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489507715874246885:2592], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.269318Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874246891:2594], TxId: 281474976715683, task: 1. Ctx: { TraceId : 01jr0ws0mx1bf4ma5bgaczp90f. SessionId : ydb://session/3?node_id=4&id=NWYxNzY0YzQtZjljYTI2MWQtNTRmNmEyMmQtOTNjMzIyMzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489507715874246885:2592], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.269855Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NWYxNzY0YzQtZjljYTI2MWQtNTRmNmEyMmQtOTNjMzIyMzE=, ActorId: [4:7489507715874246882:2592], ActorState: ExecuteState, TraceId: 01jr0ws0mx1bf4ma5bgaczp90f, Create QueryResponse for error on request, msg: 2025-04-04T17:15:02.413702Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:02.414213Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489507715874247113:2648] TxId: 281474976715691. Ctx: { TraceId: 01jr0ws0vgd6yte2wrc169f7a8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZTllOWRkYjUtMTZhZTczYmQtM2E0NDAyMTAtYTJhNjg5MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:15:02.414535Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874247133:2654], TxId: 281474976715691, task: 5. Ctx: { TraceId : 01jr0ws0vgd6yte2wrc169f7a8. SessionId : ydb://session/3?node_id=4&id=ZTllOWRkYjUtMTZhZTczYmQtM2E0NDAyMTAtYTJhNjg5MDc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489507715874247113:2648], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.460871Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874247131:2652], TxId: 281474976715691, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=ZTllOWRkYjUtMTZhZTczYmQtM2E0NDAyMTAtYTJhNjg5MDc=. TraceId : 01jr0ws0vgd6yte2wrc169f7a8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489507715874247113:2648], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.461291Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTllOWRkYjUtMTZhZTczYmQtM2E0NDAyMTAtYTJhNjg5MDc=, ActorId: [4:7489507715874247110:2648], ActorState: ExecuteState, TraceId: 01jr0ws0vgd6yte2wrc169f7a8, Create QueryResponse for error on request, msg: 2025-04-04T17:15:02.687216Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:02.687737Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489507715874247419:2720] TxId: 281474976715701. Ctx: { TraceId: 01jr0ws13k6c01cdhrmbmyq3ne, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Mzc3N2NjOGYtMWM2MzgyMTgtYzE3ODI0YWYtYTMwOWI1Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:15:02.777047Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507715874247430:2726], TxId: 281474976715701, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jr0ws13k6c01cdhrmbmyq3ne. SessionId : ydb://session/3?node_id=4&id=Mzc3N2NjOGYtMWM2MzgyMTgtYzE3ODI0YWYtYTMwOWI1Y2U=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489507715874247419:2720], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:02.778836Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mzc3N2NjOGYtMWM2MzgyMTgtYzE3ODI0YWYtYTMwOWI1Y2U=, ActorId: [4:7489507715874247416:2720], ActorState: ExecuteState, TraceId: 01jr0ws13k6c01cdhrmbmyq3ne, Create QueryResponse for error on request, msg: 2025-04-04T17:15:03.262579Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:03.263187Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489507720169215245:2859] TxId: 281474976715719. Ctx: { TraceId: 01jr0ws1n24142mywem9r0rvjx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NzQ2YmU5OTktNDk0MmQ5ZjMtMTc0ZjA2MWQtMzNhNmQ4NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:15:03.263329Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507720169215256:2865], TxId: 281474976715719, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jr0ws1n24142mywem9r0rvjx. SessionId : ydb://session/3?node_id=4&id=NzQ2YmU5OTktNDk0MmQ5ZjMtMTc0ZjA2MWQtMzNhNmQ4NzE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489507720169215245:2859], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:03.344551Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507720169215253:2862], TxId: 281474976715719, task: 2. Ctx: { TraceId : 01jr0ws1n24142mywem9r0rvjx. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NzQ2YmU5OTktNDk0MmQ5ZjMtMTc0ZjA2MWQtMzNhNmQ4NzE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489507720169215245:2859], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:03.345226Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzQ2YmU5OTktNDk0MmQ5ZjMtMTc0ZjA2MWQtMzNhNmQ4NzE=, ActorId: [4:7489507720169215242:2859], ActorState: ExecuteState, TraceId: 01jr0ws1n24142mywem9r0rvjx, Create QueryResponse for error on request, msg: 2025-04-04T17:15:03.851928Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:15:03.852431Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489507720169215689:2971] TxId: 281474976715734. Ctx: { TraceId: 01jr0ws272e235g6mgcyec6ynd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDAyMjJjNTQtYmUwYmQ0MjctZjQ2MzMyYjMtNzhmYjRjNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:15:03.852529Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507720169215700:2977], TxId: 281474976715734, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NDAyMjJjNTQtYmUwYmQ0MjctZjQ2MzMyYjMtNzhmYjRjNWM=. TraceId : 01jr0ws272e235g6mgcyec6ynd. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489507720169215689:2971], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:03.866187Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489507720169215697:2974], TxId: 281474976715734, task: 2. Ctx: { TraceId : 01jr0ws272e235g6mgcyec6ynd. SessionId : ydb://session/3?node_id=4&id=NDAyMjJjNTQtYmUwYmQ0MjctZjQ2MzMyYjMtNzhmYjRjNWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489507720169215689:2971], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:15:03.868153Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDAyMjJjNTQtYmUwYmQ0MjctZjQ2MzMyYjMtNzhmYjRjNWM=, ActorId: [4:7489507720169215686:2971], ActorState: ExecuteState, TraceId: 01jr0ws272e235g6mgcyec6ynd, Create QueryResponse for error on request, msg:
>> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationUnavailable
>> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66
>> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless
>> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42
>> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66
>> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit
>> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD]
>> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD]
>> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled
>> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginCheckRemovedUser
|89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD]
|89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD]
|89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66
>> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit
>> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49
>> TTicketParserTest::BulkAuthorizationModify [GOOD]
>> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD]
>> TTicketParserTest::NebiusAuthorizationModify [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55
>> TTicketParserTest::AuthorizationUnavailable [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18
>> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD]
Test command err:
2025-04-04T17:14:42.222806Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507632967800275:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.223847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c33/r3tmp/tmpD8fHRL/pdisk_1.dat 2025-04-04T17:14:42.777202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:42.829039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.829137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.835762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16834, node 1 2025-04-04T17:14:43.034830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.034861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.034869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.034972Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-04T17:14:43.668696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.703759Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-04T17:14:43.703832Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Connect to grpc://localhost:22585 2025-04-04T17:14:43.708328Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-04T17:14:43.750394Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-04T17:14:43.750845Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:43.750899Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:43.750970Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-04T17:14:43.751194Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-04T17:14:43.752819Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-04T17:14:43.753006Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:43.753032Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:45.250361Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-04T17:14:45.254675Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-04T17:14:45.255147Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-04T17:14:45.264628Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-04-04T17:14:45.264829Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:45.264849Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:46.253713Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-04T17:14:46.253784Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 
2025-04-04T17:14:46.254162Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-04T17:14:46.259334Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:46.259790Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-04T17:14:47.223181Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507632967800275:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:47.223252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:56.384698Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507690473276437:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:56.385181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c33/r3tmp/tmpQwT0XH/pdisk_1.dat 2025-04-04T17:14:56.563663Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:56.570835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:56.570912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:56.573461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4436, node 2 2025-04-04T17:14:56.694091Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:56.694108Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:56.694112Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:56.694207Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:56.936963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:56.951117Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:56.962518Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-04T17:14:56.962591Z node 2 :GRPC_CLIENT DEBUG: [5170000a6c08] Connect to grpc://localhost:22553 2025-04-04T17:14:56.963491Z node 2 :GRPC_CLIENT DEBUG: [5170000a6c08] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-04T17:14:56.977789Z node 2 :GRPC_CLIENT DEBUG: [5170000a6c08] Status 14 Service Unavailable 2025-04-04T17:14:56.982348Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:56.982393Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:56.982448Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-04-04T17:14:56.982699Z node 2 :GRPC_CLIENT DEBUG: [5170000a6c08] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-04T17:14:56.990741Z node 2 :GRPC_CLIENT DEBUG: [5170000a6c08] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } ... ed 2025-04-04T17:15:03.873009Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:03.874934Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11704, node 4 2025-04-04T17:15:03.968719Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:03.968735Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:03.968742Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:03.968844Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5680 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:04.220497Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:04.228823Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:04.231878Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:04.231908Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:04.231915Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:04.231948Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:15:04.232012Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2025-04-04T17:15:04.232038Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2025-04-04T17:15:04.232075Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-04T17:15:04.232100Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2025-04-04T17:15:04.232156Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Connect to grpc://localhost:6356 2025-04-04T17:15:04.235163Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.235430Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.235543Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.235701Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 
2025-04-04T17:15:04.235813Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.251027Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Status 16 Access Denied 2025-04-04T17:15:04.251275Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Status 16 Access Denied 2025-04-04T17:15:04.251420Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Status 16 Access Denied 2025-04-04T17:15:04.251643Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:04.251716Z node 4 :GRPC_CLIENT DEBUG: [51700008f588] Status 16 Access Denied 2025-04-04T17:15:04.252098Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2025-04-04T17:15:04.252138Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2025-04-04T17:15:04.252159Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2025-04-04T17:15:04.252201Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-04T17:15:04.252235Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-04-04T17:15:04.252258Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-04T17:15:04.253191Z node 4 :GRPC_CLIENT DEBUG: [51700008fc88] Connect to grpc://localhost:20719 2025-04-04T17:15:04.253833Z node 4 :GRPC_CLIENT DEBUG: [51700008fc88] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-04T17:15:04.263097Z node 4 :GRPC_CLIENT DEBUG: [51700008fc88] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-04T17:15:04.263469Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c33/r3tmp/tmp2FqVNb/pdisk_1.dat 2025-04-04T17:15:07.526116Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:07.587559Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9243, node 5 2025-04-04T17:15:07.617185Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:07.617285Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:07.622105Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:07.669830Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:07.669848Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:07.669863Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:07.669957Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24018 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:07.909001Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:07.913580Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:07.915533Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:07.915558Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:07.915565Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:07.915631Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-04-04T17:15:07.915666Z node 5 :GRPC_CLIENT DEBUG: [51700011ae88] Connect to grpc://localhost:63773 2025-04-04T17:15:07.916548Z node 5 :GRPC_CLIENT DEBUG: [51700011ae88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-04-04T17:15:07.926468Z node 5 :GRPC_CLIENT DEBUG: [51700011ae88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:07.926736Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:15:07.927303Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:07.927323Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:07.927330Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:07.927404Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-04-04T17:15:07.927589Z node 5 :GRPC_CLIENT DEBUG: [51700011ae88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-04-04T17:15:07.929023Z node 5 :GRPC_CLIENT DEBUG: 
[51700011ae88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:07.929220Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as
>> KqpQueryService::TableSink_OlapRWQueries [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD]
Test command err:
2025-04-04T17:14:43.540501Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507636511561502:2083];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:43.543115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c05/r3tmp/tmp6P8Jsv/pdisk_1.dat 2025-04-04T17:14:44.109954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:44.110037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:44.113240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:44.134846Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17628, node 1 2025-04-04T17:14:44.222510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:44.222529Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:44.222535Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:44.222651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:44.492864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-04T17:14:44.509351Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:44.515023Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:44.515108Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:61837 2025-04-04T17:14:44.523802Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-04T17:14:44.561861Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:44.562078Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:14:46.993860Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507646690595481:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:46.993884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c05/r3tmp/tmpuLnpji/pdisk_1.dat 2025-04-04T17:14:47.168143Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:47.197253Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:47.197339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 63234, node 2 2025-04-04T17:14:47.204639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:47.326695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:47.326717Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:47.326722Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:47.326804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:47.541733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:47.549391Z node 2 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2025-04-04T17:14:47.549452Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Connect to grpc://localhost:16192 2025-04-04T17:14:47.550236Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2025-04-04T17:14:47.562843Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2025-04-04T17:14:47.563179Z node 2 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2025-04-04T17:14:50.715343Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507666424695743:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:50.795102Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c05/r3tmp/tmpNwrEvB/pdisk_1.dat 2025-04-04T17:14:50.937803Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:50.966562Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:50.966666Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:50.969048Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28065, node 3 2025-04-04T17:14:51.150850Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:51.150874Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:51.150880Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:51.150995Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10055 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:51.380365Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:51.387079Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:51.389296Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:51.389324Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:51.389333Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:51.389361Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:51.389403Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Connect to grpc://localhost:26251 2025-04-04T17:14:51.390169Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-04T17:14:51.407106Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Status 14 Service Unavailable 2025-04-04T17:14:51.407432Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:51.407459Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:51.407598Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-04T17:14:51.411840Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Status 1 CANCELLED 2025-04-04T17:14:51.412418Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c05/r3tmp/tmpS8Xxiy/pdisk_1.dat 2025-04-04T17:14:54.674461Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:14:54.697684Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:54.701426Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:54.701505Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:54.704208Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3822, node 4 2025-04-04T17:14:54.767150Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:54.767174Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:54.767180Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:54.767305Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2436 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:14:55.032314Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:55.039275Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-04T17:14:55.039346Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Connect to grpc://localhost:14113 2025-04-04T17:14:55.040584Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-04T17:14:55.058386Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Status 14 Service Unavailable 2025-04-04T17:14:55.058772Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:55.058805Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-04T17:14:55.058982Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-04T17:14:55.064318Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Status 14 Service Unavailable 2025-04-04T17:14:55.064654Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:56.518866Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-04T17:14:56.518897Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-04T17:14:56.519057Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-04T17:14:56.522440Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Status 14 Service Unavailable 2025-04-04T17:14:56.522778Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:57.519317Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-04T17:14:57.519355Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-04T17:14:57.519528Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-04T17:14:57.521707Z node 4 :GRPC_CLIENT DEBUG: [51700010ff88] Response AuthenticateResponse { subject 
{ user_account { id: "user1" } } } 2025-04-04T17:14:57.522159Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-04T17:15:07.721844Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507738618489303:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:07.728951Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c05/r3tmp/tmpXRBTJf/pdisk_1.dat 2025-04-04T17:15:07.906020Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:07.906109Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:07.908034Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:07.910063Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18741, node 5 2025-04-04T17:15:08.014913Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:08.014940Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:08.014946Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:08.015054Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21510 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:08.286657Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:15:08.298400Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-04T17:15:08.298459Z node 5 :GRPC_CLIENT DEBUG: [517000078d08] Connect to grpc://localhost:3138 2025-04-04T17:15:08.299273Z node 5 :GRPC_CLIENT DEBUG: [517000078d08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-04T17:15:08.318185Z node 5 :GRPC_CLIENT DEBUG: [517000078d08] Status 14 Service Unavailable 2025-04-04T17:15:08.318463Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:15:08.318491Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-04-04T17:15:08.318726Z node 5 :GRPC_CLIENT DEBUG: [517000078d08] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-04-04T17:15:08.320949Z node 5 :GRPC_CLIENT DEBUG: [517000078d08] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:08.321149Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD]
Test command err:
2025-04-04T17:14:42.229254Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507633351231048:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.229301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c14/r3tmp/tmpLyBplE/pdisk_1.dat 2025-04-04T17:14:42.794288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.794385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.804439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:42.843891Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15654, node 1 2025-04-04T17:14:43.030766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.030788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.030795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.030903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6956 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.657639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.677490Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:43.678782Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:21643 2025-04-04T17:14:43.682147Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:43.710371Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-04T17:14:43.710762Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-04T17:14:43.710797Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:43.710823Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:43.711081Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:43.714851Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-04T17:14:43.715143Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-04T17:14:43.715168Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:44.246576Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-04T17:14:44.246662Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:44.247381Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } 
resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:44.252414Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-04T17:14:44.254348Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-04T17:14:44.254392Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:45.248132Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-04T17:14:45.248182Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:45.248460Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:45.254335Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-04T17:14:45.254706Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-04T17:14:45.254733Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:47.232869Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507633351231048:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:47.233002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:48.253447Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-04-04T17:14:48.253488Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:48.253706Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:48.266923Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:14:48.267109Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-04-04T17:14:48.267203Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-04-04T17:14:56.406415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507692555912055:2145];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c14/r3tmp/tmp8IAw4p/pdisk_1.dat 2025-04-04T17:14:56.432174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:56.522721Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:56.541711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:56.541791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:56.543422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3933, node 2 2025-04-04T17:14:56.607740Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:56.607776Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:56.607789Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:56.607916Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:56.852084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:56.874712Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:14:56.874824Z node 2 :GRPC_CLIENT DEBUG: [5170000c6408] Connect to grpc://localhost:15980 2025-04-04T17:14:56.875873Z node 2 :GRPC_CLIENT DEBUG: [5170000c6408] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:14:56.892181Z node 2 :GRPC_CLIENT DEBUG: [5170000c6408] Status 14 Service Unavailable 2025-04-04T17:14:56.894461Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-04-04T17:14:56.894497Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Se ... ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:15:04.274903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:04.283452Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:04.289679Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:04.289711Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:04.289720Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:04.289785Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:15:04.289875Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Connect to grpc://localhost:11907 2025-04-04T17:15:04.290890Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.314128Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:04.314433Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-04T17:15:04.314465Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-04T17:15:04.315869Z node 4 :GRPC_CLIENT DEBUG: [517000104288] Connect to grpc://localhost:9211 2025-04-04T17:15:04.316934Z node 4 :GRPC_CLIENT DEBUG: [517000104288] Request GetUserAccountRequest { user_account_id: "user1" } 2025-04-04T17:15:04.327373Z node 4 :GRPC_CLIENT DEBUG: [517000104288] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-04-04T17:15:04.328006Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-04T17:15:04.334147Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:04.334176Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:04.334207Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:04.334256Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-04T17:15:04.334454Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.347513Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Status 16 Access Denied 2025-04-04T17:15:04.347735Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2025-04-04T17:15:04.347769Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-04T17:15:04.348435Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:04.348459Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 
2025-04-04T17:15:04.348466Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:04.348495Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:15:04.348542Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-04T17:15:04.348705Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.349198Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:04.351820Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:04.351953Z node 4 :GRPC_CLIENT DEBUG: [517000103b88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:04.354328Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-04T17:15:04.354382Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-04-04T17:15:04.354400Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-04-04T17:15:04.354544Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-04-04T17:15:07.664606Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507740326832431:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:07.664675Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c14/r3tmp/tmpXSBwXb/pdisk_1.dat 2025-04-04T17:15:07.819743Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:07.825944Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:07.826028Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:07.831038Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14894, node 5 2025-04-04T17:15:07.932611Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:07.932633Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:07.932641Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:07.932763Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6339 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:08.199740Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:08.211912Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:08.211952Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:08.211960Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:08.211996Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:15:08.212056Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-04T17:15:08.212112Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Connect to grpc://localhost:30593 2025-04-04T17:15:08.212964Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:08.213293Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:08.234474Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-04-04T17:15:08.234649Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Status 14 Service Unavailable 2025-04-04T17:15:08.235296Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-04-04T17:15:08.235365Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-04-04T17:15:08.235392Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:15:08.235425Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-04-04T17:15:08.235472Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-04-04T17:15:08.235642Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" 
resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:08.236206Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-04-04T17:15:08.242387Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Status 1 CANCELLED 2025-04-04T17:15:08.242510Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2025-04-04T17:15:08.242627Z node 5 :GRPC_CLIENT DEBUG: [51700004e288] Status 1 CANCELLED 2025-04-04T17:15:08.242822Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2025-04-04T17:15:08.242851Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2025-04-04T17:14:42.225770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507631900056388:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.225811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c3f/r3tmp/tmpcdRzTQ/pdisk_1.dat 2025-04-04T17:14:42.813993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.814074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.818660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:42.852933Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12262, node 1 2025-04-04T17:14:43.036067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.036090Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.036107Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.036213Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31937 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.619937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:43.666489Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:43.666523Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:43.666533Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:43.667007Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:43.670380Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:28034 2025-04-04T17:14:43.672522Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-04T17:14:43.738737Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-04T17:14:43.746377Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:43.746417Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:43.746577Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-04T17:14:43.759078Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-04-04T17:14:43.759402Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:45.884971Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507645274862551:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:45.885035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c3f/r3tmp/tmp8zSCh3/pdisk_1.dat 2025-04-04T17:14:46.053916Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:46.079477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-04-04T17:14:46.079576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:46.081128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5067, node 2 2025-04-04T17:14:46.153196Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:46.153227Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:46.153233Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:46.153331Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:46.351712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:46.357266Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:46.361249Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:46.361280Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:46.361287Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:46.361361Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:14:46.361421Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Connect to grpc://localhost:23638 2025-04-04T17:14:46.363125Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-04T17:14:46.381413Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Status 14 Service Unavailable 2025-04-04T17:14:46.382216Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:46.382276Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-04T17:14:46.382343Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:14:46.382673Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-04-04T17:14:46.384975Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Status 14 Service Unavailable 2025-04-04T17:14:46.385327Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:46.385352Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-04-04T17:14:47.927105Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-04T17:14:47.927198Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:14:47.927447Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-04-04T17:14:47.929443Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Status 14 
Service Unavailable 2025-04-04T17:14:47.929637Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-04-04T17:14:47.929655Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:48.930520Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-04T17:14:48.930587Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:14:48.930812Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService ... G: [517000056e88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (BE2EA0D0)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "invalid-token1" } } NebiusAccessService::Authorize response results { key: 0 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-04T17:15:04.040589Z node 4 :GRPC_CLIENT DEBUG: [517000056e88] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-04-04T17:15:04.040740Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read access denied for subject "" 2025-04-04T17:15:04.040761Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-04-04T17:15:04.041178Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:04.041193Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:04.041198Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:04.041233Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:15:04.041362Z node 4 :GRPC_CLIENT DEBUG: [517000056e88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-04-04T17:15:04.042851Z node 4 :GRPC_CLIENT DEBUG: [517000056e88] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-04-04T17:15:04.042983Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "" 2025-04-04T17:15:04.043001Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-04-04T17:15:04.043404Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:04.043418Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:04.043423Z node 4 :TICKET_PARSER TRACE: 
CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:04.043457Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:15:04.043576Z node 4 :GRPC_CLIENT DEBUG: [517000056e88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-04T17:15:04.045137Z node 4 :GRPC_CLIENT DEBUG: [517000056e88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-04T17:15:04.045363Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:15:04.045760Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:04.045775Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:04.045786Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:04.045818Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:15:04.045951Z node 4 :GRPC_CLIENT DEBUG: [517000056e88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-04T17:15:04.047580Z node 4 :GRPC_CLIENT DEBUG: [517000056e88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-04T17:15:04.047771Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:15:07.551529Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507739194110561:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:07.551591Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c3f/r3tmp/tmpzX5YzW/pdisk_1.dat 2025-04-04T17:15:07.725038Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:07.751770Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:07.751840Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:07.754997Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18854, node 5 2025-04-04T17:15:07.854912Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-04T17:15:07.854936Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:07.854943Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:07.855075Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:08.076655Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:08.083425Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:08.088493Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:08.088526Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:08.088536Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:08.088610Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-04-04T17:15:08.088658Z node 5 :GRPC_CLIENT DEBUG: [517000103b88] Connect to grpc://localhost:12068 2025-04-04T17:15:08.089755Z node 5 :GRPC_CLIENT DEBUG: [517000103b88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-04T17:15:08.115066Z node 5 :GRPC_CLIENT DEBUG: [517000103b88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-04-04T17:15:08.118660Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:15:08.126543Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:08.126586Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:08.126609Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:08.126703Z node 5 :TICKET_PARSER TRACE: Ticket **** 
(8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-04-04T17:15:08.127074Z node 5 :GRPC_CLIENT DEBUG: [517000103b88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-04-04T17:15:08.138956Z node 5 :GRPC_CLIENT DEBUG: [517000103b88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } } 2025-04-04T17:15:08.142489Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] Test command err: Trying to start YDB, gRPC: 25822, MsgBus: 16634 2025-04-04T17:14:26.580733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507564087637220:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:26.580767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001806/r3tmp/tmpGP2ys4/pdisk_1.dat 2025-04-04T17:14:27.038215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:27.041476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:27.046070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:27.064131Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25822, node 1 2025-04-04T17:14:27.166722Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:27.166743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:27.166749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T17:14:27.166860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16634 TClient is connected to server localhost:16634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:27.969137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.994889Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:29.789975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507576972539776:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:29.790067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:29.983998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:30.164115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:30.164352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:30.164593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:30.164694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:30.164807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:30.164907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:30.165012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:30.165161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:30.165271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:30.165378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:30.165475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:30.165562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489507581267507203:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-04T17:14:30.166055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:30.166095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:30.166299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:30.166427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:30.166518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:30.166617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:30.166702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:30.166802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:30.166924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:30.167018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:30.167112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:30.167213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489507581267507206:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:14:30.201925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507581267507209:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:30.201990Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7489507581267507209:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:30.202218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507581267507209:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:30.202370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507581267507209:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:30.202470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507581267507209:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:30.202569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489507581267507209:2342];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:30.202697Z node 1 :TX_COLU ... 6224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:15:10.475078Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:15:10.475162Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:15:10.475472Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:15:10.475601Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:15:10.475716Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:15:10.475824Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:15:10.475968Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:15:10.476109Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 
2025-04-04T17:15:10.476233Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:15:10.476344Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:15:10.476497Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:15:10.476637Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7489507749847031319:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:15:10.481341Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:15:10.481412Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:15:10.481519Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:15:10.481552Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:15:10.481757Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:15:10.481791Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:15:10.481887Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:15:10.481935Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:15:10.482008Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:15:10.482035Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:15:10.482076Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:15:10.482107Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:15:10.482847Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:15:10.482895Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:15:10.483111Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:15:10.483149Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:15:10.483303Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:15:10.483335Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:15:10.483560Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:15:10.483595Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:15:10.483713Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:15:10.483746Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:15:10.509207Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:15:10.513345Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:15:10.519981Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:15:10.541252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507749847031414:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:10.541344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:10.541610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507749847031419:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:10.545532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:15:10.555351Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507749847031421:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:15:10.621245Z node 3 :TX_PROXY ERROR: Actor# [3:7489507749847031472:2432] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:11.789514Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715665; 2025-04-04T17:15:11.790030Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715665; 2025-04-04T17:15:11.790379Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7489507749847031284:2336];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037890;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037889;receive=72075186224037888; 2025-04-04T17:15:11.791142Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715665; 2025-04-04T17:15:11.926207Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507732667161572:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:11.926308Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 3404, MsgBus: 14343 2025-04-04T17:14:32.015013Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507586988329151:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:32.015063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001802/r3tmp/tmp1UWVjg/pdisk_1.dat 2025-04-04T17:14:32.578389Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:32.582133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:32.582221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:32.592816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3404, node 1 2025-04-04T17:14:32.801103Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:32.801124Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-04T17:14:32.801130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:32.801239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14343 TClient is connected to server localhost:14343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:33.528318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.566420Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:33.588607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.728536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.897259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:33.996851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:35.623196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507599873232617:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:35.623319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:35.884206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:35.955620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:35.988040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.020546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.055270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.120895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:36.225405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507604168200428:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.225492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.225767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507604168200433:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:36.229605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:36.245463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:14:36.245706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507604168200435:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:36.314289Z node 1 :TX_PROXY ERROR: Actor# [1:7489507604168200489:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:37.014384Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507586988329151:2269];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:37.014462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:37.256831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:37.258029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:14:37.260542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10202, MsgBus: 27504 2025-04-04T17:14:39.749179Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507618599974293:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:39.749848Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001802/r3tmp/tmpudvJCl/pdisk_1.dat 2025-04-04T17:14:39.881161Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10202, node 2 2025-04-04T17:14:39.924248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:39.924326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:39.925786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:39.959729Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:39.959748Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:39.959758Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:39.959868Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27504 TClient is connected to server localhost:27504 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:40.446328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:40.452341Z nod ... pool default not found or you don't have access permissions } 2025-04-04T17:14:59.224022Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:59.224390Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489507704359538023:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:59.229996Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:59.243948Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489507704359538025:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:14:59.340826Z node 4 :TX_PROXY ERROR: Actor# [4:7489507704359538081:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:00.018046Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489507687179666725:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:00.018126Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:00.617474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:15:00.618993Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:15:00.620055Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:15:00.959672Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njg3MzU4MzktOTI0NDdlMjMtN2IzMTMzNjUtZGE4MzRhNTQ=, ActorId: [4:7489507708654505931:2511], ActorState: ExecuteState, TraceId: 01jr0wrz43461248sfha1bzdsa, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 7958, MsgBus: 18989 2025-04-04T17:15:03.483522Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507721268454846:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:03.483584Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001802/r3tmp/tmpeIgQHb/pdisk_1.dat 2025-04-04T17:15:03.730141Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:03.750886Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:03.750997Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:03.752045Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7958, node 5 2025-04-04T17:15:03.867893Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:03.867915Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:03.867923Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:03.868101Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18989 TClient is connected to server localhost:18989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:15:04.632166Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:04.645423Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:04.673038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:04.775513Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:04.982528Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:05.074419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:07.676794Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507738448325794:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:07.676896Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:07.727413Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:15:07.794609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:15:07.849660Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:15:07.896777Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:15:07.969240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:15:08.010101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:15:08.076042Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507742743293602:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:08.076147Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:08.076338Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489507742743293607:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:08.079945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:15:08.094206Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489507742743293609:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:15:08.193316Z node 5 :TX_PROXY ERROR: Actor# [5:7489507742743293665:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:08.484067Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489507721268454846:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:08.484164Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:09.547763Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:15:09.550237Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:15:09.552333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:15:09.963500Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MmYyNjg4NGQtZmFmODZjZTYtZGQ2M2ZlMjktNmE2OTZjZDY=, ActorId: [5:7489507747038261528:2513], ActorState: ExecuteState, TraceId: 01jr0ws7v4642mqw2m3vqnnkkh, Create QueryResponse for error on request, msg: >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore [GOOD] >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> KqpService::ToDictCache-UseCache [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles >> TestDataErasure::DataErasureWithMerge >> TestDataErasure::DataErasureWithCopyTable >> TestDataErasure::DataErasureWithSplit >> TestDataErasure::SimpleDataErasureTest >> TestDataErasure::DataErasureManualLaunch >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] Test command err: 2025-04-04T17:12:45.672054Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507129278569217:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.672682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:45.694190Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507130809854949:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.694316Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:12:45.700051Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507131044663007:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:45.701378Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015ec/r3tmp/tmpdcIXp2/pdisk_1.dat 2025-04-04T17:12:46.261017Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:46.279709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.279867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.280612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.280730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.282149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:46.282213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:46.287657Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:12:46.288218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:46.290589Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:12:46.296042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:46.299081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8498 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:12:47.097137Z node 1 :TX_PROXY DEBUG: actor# [1:7489507129278569444:2142] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:47.097208Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507137868504526:2476] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:47.097368Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507129278569467:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:12:47.097464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133573537204:2462][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7489507129278569467:2155], cookie# 1 2025-04-04T17:12:47.099503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133573537208:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133573537205:2462], cookie# 1 2025-04-04T17:12:47.099581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133573537209:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133573537206:2462], cookie# 1 2025-04-04T17:12:47.099605Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133573537210:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133573537207:2462], cookie# 1 2025-04-04T17:12:47.099641Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129278569083:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133573537208:2462], cookie# 1 2025-04-04T17:12:47.099664Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129278569086:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133573537209:2462], cookie# 1 2025-04-04T17:12:47.099680Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489507129278569089:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489507133573537210:2462], cookie# 1 2025-04-04T17:12:47.099709Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133573537208:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129278569083:2053], cookie# 1 2025-04-04T17:12:47.099727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133573537209:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129278569086:2056], cookie# 1 2025-04-04T17:12:47.099759Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489507133573537210:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507129278569089:2059], cookie# 1 2025-04-04T17:12:47.099812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133573537204:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133573537205:2462], cookie# 1 2025-04-04T17:12:47.099835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133573537204:2462][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:12:47.099848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133573537204:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7489507133573537206:2462], cookie# 1 2025-04-04T17:12:47.099870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133573537204:2462][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:12:47.099916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133573537204:2462][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489507133573537207:2462], cookie# 1 2025-04-04T17:12:47.099942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489507133573537204:2462][/dc-1] Unexpected sync response: sender# [1:7489507133573537207:2462], cookie# 1 2025-04-04T17:12:47.100006Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507129278569467:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:12:47.134094Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507129278569467:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489507133573537204:2462] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:12:47.134221Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507129278569467:2155], cacheItem# { Subscriber: { Subscriber: [1:7489507133573537204:2462] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-04T17:12:47.141775Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489507137868504529:2479], recipient# [1:7489507137868504526:2476], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:12:47.141862Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507137868504526:2476] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:47.199456Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507137868504526:2476] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-04-04T17:12:47.208817Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507137868504526:2476] Handle TEvDescribeSchemeResult Forward to# [1:7489507137868504525:2475] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: ... 
ncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:15:12.547100Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489507762231107992:3626], recipient# [7:7489507762231107990:2630], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:12.547164Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489507762231107993:3627], recipient# [7:7489507762231107991:2631], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.450127Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7489507184654124530:2129], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.450286Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7489507184654124530:2129], cacheItem# { Subscriber: { Subscriber: [8:7489507188949092176:2345] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:15:13.450350Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7489507184654124530:2129], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.450430Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7489507184654124530:2129], cacheItem# { Subscriber: { Subscriber: [8:7489507193244059497:2350] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:15:13.450516Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7489507764474710495:2650], recipient# [8:7489507764474710493:2634], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.450573Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7489507764474710496:2651], recipient# [8:7489507764474710494:2635], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.550604Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489507182410520319:2250], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.550777Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489507182410520319:2250], cacheItem# { Subscriber: { Subscriber: [7:7489507195295422880:2760] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:15:13.550896Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489507182410520319:2250], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.550985Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489507182410520319:2250], cacheItem# { Subscriber: { Subscriber: [7:7489507186705488083:2607] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:15:13.551077Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489507766526075301:3631], recipient# [7:7489507766526075300:2633], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:13.551149Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489507766526075302:3632], recipient# [7:7489507766526075299:2632], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:14.550730Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489507182410520319:2250], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:14.550882Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489507182410520319:2250], cacheItem# { Subscriber: { Subscriber: [7:7489507195295422880:2760] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:15:14.550962Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489507182410520319:2250], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:14.551051Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489507182410520319:2250], cacheItem# { Subscriber: { Subscriber: [7:7489507186705488083:2607] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, 
props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:15:14.551127Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489507770821042612:3638], recipient# [7:7489507770821042610:2634], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:15:14.551188Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489507770821042613:3639], recipient# [7:7489507770821042611:2635], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 15166, MsgBus: 15531 2025-04-04T17:14:11.849738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507496460989937:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:11.850137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00184f/r3tmp/tmpyJRuLE/pdisk_1.dat 2025-04-04T17:14:12.374547Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:12.375583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:12.375624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:12.378801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15166, node 1 2025-04-04T17:14:12.530208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:12.530228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:12.530233Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:12.530343Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15531 TClient is connected to server localhost:15531 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:13.232049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:14:13.291536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:13.432467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.612991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:13.708776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:15.456603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507513640860746:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.456705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:15.693707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.736649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.816252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.887744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.947802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:14:15.994883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:14:16.080249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507517935828561:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.080318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.080629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507517935828566:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:16.084187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:14:16.095043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507517935828568:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:14:16.170995Z node 1 :TX_PROXY ERROR: Actor# [1:7489507517935828623:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:16.850360Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507496460989937:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:16.850439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:17.213056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:14:27.373950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:14:27.373980Z node 1 :IMPORT WARN: Table profiles were not loaded took: 17.905824s took: 17.957806s took: 17.951379s took: 17.962045s took: 17.889934s took: 17.937461s took: 17.960378s took: 17.953101s took: 17.938726s took: 17.975416s Trying to start YDB, gRPC: 21976, MsgBus: 8586 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00184f/r3tmp/tmp2wD7xy/pdisk_1.dat 2025-04-04T17:14:36.331331Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:14:36.419150Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:36.436431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:36.436527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:36.439355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21976, node 2 2025-04-04T17:14:36.564530Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:36.564564Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:36.564571Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:36.564708Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8586 TClient is connected to server localhost:8586 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:37.198870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:37.227189Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:39.903621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507620531990340:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:39.903718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resou ... ok: 0.945254s took: 0.947451s took: 0.795813s took: 0.795934s took: 0.796623s took: 0.797517s took: 0.759068s took: 0.761550s took: 0.762979s took: 0.765539s took: 0.775038s took: 0.776333s took: 0.776984s took: 0.777379s took: 0.868740s took: 0.869512s took: 0.870915s took: 0.876659s took: 0.776521s took: 0.778329s took: 0.779545s took: 0.781295s took: 0.800518s took: 0.800586s took: 0.801639s took: 0.866441s took: 1.102733s took: 1.103631s took: 1.105535s took: 1.106696s 2025-04-04T17:14:51.401034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:14:51.401070Z node 2 :IMPORT WARN: Table profiles were not loaded took: 0.915433s took: 0.917178s took: 0.920945s took: 0.923759s took: 0.903853s took: 0.906185s took: 0.909165s took: 0.910778s took: 0.831612s took: 0.833110s took: 0.835180s took: 0.838011s took: 0.878911s took: 0.879654s took: 0.881267s took: 0.881814s Trying to start YDB, gRPC: 13181, MsgBus: 32375 2025-04-04T17:14:55.308032Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507688007055449:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:55.309136Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00184f/r3tmp/tmpZw4CA2/pdisk_1.dat 2025-04-04T17:14:55.442193Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:55.452400Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:55.452501Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:55.453971Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13181, node 3 2025-04-04T17:14:55.515124Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:55.515148Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:55.515155Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:55.515295Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32375 TClient is connected to server localhost:32375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:14:56.044330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:56.056176Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:58.947798Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507700891957900:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.947882Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507700891957917:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.947958Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.948116Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507700891957919:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.948160Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507700891957918:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.955216Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507700891957929:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.955310Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507700891957931:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.955398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:58.960823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-04-04T17:14:58.969236Z node 3 :TX_PROXY ERROR: Actor# [3:7489507700891957933:2312] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:14:58.969319Z node 3 :TX_PROXY ERROR: Actor# [3:7489507700891957932:2311] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:14:58.969883Z node 3 :TX_PROXY ERROR: Actor# [3:7489507700891957934:2313] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:14:58.976829Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507700891957926:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T17:14:58.976880Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507700891957924:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T17:14:58.976902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507700891957925:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T17:14:58.976928Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507700891957940:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T17:14:59.050437Z node 3 :TX_PROXY ERROR: Actor# [3:7489507705186925307:2363] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:59.050647Z node 3 :TX_PROXY ERROR: Actor# [3:7489507705186925306:2362] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:59.059296Z node 3 :TX_PROXY ERROR: Actor# [3:7489507705186925327:2377] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:59.076688Z node 3 :TX_PROXY ERROR: Actor# [3:7489507705186925341:2383] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } took: 3.917643s took: 3.920641s took: 3.920536s took: 3.921856s 2025-04-04T17:15:00.307841Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507688007055449:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:00.307915Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 0.926636s took: 0.936694s took: 0.936733s took: 0.937334s took: 0.994549s took: 0.994424s took: 0.998331s took: 1.000771s took: 0.922967s took: 0.925749s took: 0.925767s took: 0.926608s took: 1.002268s took: 1.004850s took: 1.005220s took: 1.005485s took: 1.068789s took: 1.072495s took: 1.075788s took: 1.080402s took: 1.004560s took: 1.006376s took: 1.009138s took: 1.079613s took: 0.912901s took: 0.913283s took: 0.914060s took: 0.915402s took: 1.014394s took: 1.016088s took: 1.016130s took: 1.024620s took: 0.880394s took: 0.882160s took: 0.881997s took: 0.883389s took: 1.045820s took: 1.046031s took: 1.046743s took: 1.048172s 2025-04-04T17:15:10.386394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:15:10.386426Z node 3 :IMPORT WARN: Table profiles were not loaded took: 0.986455s took: 0.986460s took: 0.988715s took: 0.992170s took: 1.052847s took: 1.053506s took: 1.058329s took: 1.059023s took: 1.008618s took: 1.008642s took: 1.008625s took: 1.012372s took: 0.968982s took: 0.970530s took: 0.971888s took: 0.973689s took: 0.915794s took: 0.917023s took: 0.920302s took: 0.921054s >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 >> 
SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant |89.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets |89.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 64565, MsgBus: 23064 2025-04-04T17:10:05.519043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506443370731913:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:05.519465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001388/r3tmp/tmpUxKkBZ/pdisk_1.dat 2025-04-04T17:10:06.019719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:06.037889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:10:06.037985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:10:06.039703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64565, node 1 2025-04-04T17:10:06.144416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:10:06.144436Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:10:06.144448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:10:06.144540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23064 TClient is connected to server localhost:23064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:10:06.845229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:10:06.864638Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:10:06.870155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.110255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.280812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:07.362287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:10:09.038759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506460550602719:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.038879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.288597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.320736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.347523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.387490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.420569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.462613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:10:09.548414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506460550603234:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.548470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.548866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506460550603239:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:10:09.552198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:10:09.561842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506460550603241:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:10:09.624298Z node 1 :TX_PROXY ERROR: Actor# [1:7489506460550603296:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:10:10.510342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506443370731913:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:10:10.510440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:10:10.913272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:10:10.987204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.018300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.053946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.138832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.273020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.332581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.369691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.400848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.438678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.510187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:10:11.549338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:10:11.628121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.175721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2025-04-04T17:10:12.215123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.288623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:10:12.322830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 202 ... er_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:56.639366Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:56.639406Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:56.639454Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:14:56.639490Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:56.639524Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038728;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:56.639570Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:56.639603Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038723;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:56.643770Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:14:56.643834Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:14:56.643933Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:14:56.643972Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:14:56.644148Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:14:56.644190Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:14:56.644295Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:14:56.644338Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:14:56.644424Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:14:56.644464Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:14:56.644513Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:14:56.644554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:14:56.645254Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:14:56.645310Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:14:56.645511Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:14:56.645554Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:14:56.645703Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:14:56.645741Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:14:56.645940Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:14:56.645978Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 
2025-04-04T17:14:56.646093Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:14:56.646128Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:14:56.709352Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.714962Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.717604Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.723835Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.726871Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.735257Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.735290Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.743903Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.743916Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.752743Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.752902Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.762412Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.762641Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.771858Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.771907Z node 5 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:14:56.781407Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715722; 2025-04-04T17:15:08.673898Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jr0wrvgfa4px7kmffy0qa56n", SessionId: ydb://session/3?node_id=5&id=NjVkMjhiOC0yZjM0OTE1MC0zNWJhM2MzNS0zM2Y1MDM3NQ==, Slow query, duration: 11.761773s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query34.tpl and seed 1971067816\nselect c_last_name\n ,c_first_name\n ,c_salutation\n ,c_preferred_cust_flag\n ,ss_ticket_number\n ,cnt from\n (select store_sales.ss_ticket_number ss_ticket_number\n ,store_sales.ss_customer_sk ss_customer_sk\n ,count(*) cnt\n from store_sales as store_sales \n cross join date_dim as date_dim\n cross join store as store\n cross join household_demographics as household_demographics\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_store_sk = store.s_store_sk\n and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk\n and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28)\n and (household_demographics.hd_buy_potential = '>10000' or\n household_demographics.hd_buy_potential = 'Unknown')\n and household_demographics.hd_vehicle_count > 0\n and (case when household_demographics.hd_vehicle_count > 0\n\t then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count\n\t else null\n\t end) > 1.2\n and date_dim.d_year in (2000,2000+1,2000+2)\n and store.s_county in ('Salem County','Terrell County','Arthur County','Oglethorpe County',\n 'Lunenburg County','Perry County','Halifax County','Sumner County')\n group by store_sales.ss_ticket_number,store_sales.ss_customer_sk) dn \n cross join customer as customer\n where ss_customer_sk = c_customer_sk\n and cnt between 15 and 20\n order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number;\n\n-- end query 1 in stream 0 using template query34.tpl", parameters: 0b >> TestDataErasure::DataErasureManualLaunch3Cycles >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersSysViewFilters >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD] >> TPersQueueTest::ReadRuleDisallowDefaultServiceType >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> TestDataErasure::DataErasureManualLaunch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2025-04-04T17:14:42.256704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507633414979666:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.257127Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c24/r3tmp/tmpWlhled/pdisk_1.dat 2025-04-04T17:14:42.790874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.794496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.818054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:42.842649Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26253, node 1 2025-04-04T17:14:43.031093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.031113Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.031143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.031262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.611915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:43.794078Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 2025-04-04T17:14:43.809107Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:14:43.809143Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:43.809981Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****BjnA (FC724366) () has now retryable error message 'Security state is empty' 2025-04-04T17:14:43.810260Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:14:43.810273Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:43.810578Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****BjnA (FC724366) () has now retryable error message 'Security state is empty' 2025-04-04T17:14:43.810592Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-04T17:14:43.810624Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-04T17:14:43.810662Z node 1 :TICKET_PARSER ERROR: Ticket eyJh****BjnA (FC724366): Security state is empty 2025-04-04T17:14:46.264055Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****BjnA (FC724366) 2025-04-04T17:14:46.264395Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:14:46.264426Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:46.264695Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****BjnA (FC724366) () has now retryable error message 'Security state is empty' 2025-04-04T17:14:46.264719Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-04-04T17:14:46.812269Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:14:47.253445Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507633414979666:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:47.253503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:50.274292Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****BjnA (FC724366) 2025-04-04T17:14:50.274483Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:14:50.274496Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:50.275294Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****BjnA (FC724366) () has now valid token of user1 2025-04-04T17:14:50.275305Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-04T17:14:54.346735Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507681663758692:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:54.346873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c24/r3tmp/tmpZ37r6H/pdisk_1.dat 2025-04-04T17:14:54.517268Z node 2 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:14:54.529106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:54.529198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:54.535279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7178, node 2 2025-04-04T17:14:54.702307Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:54.702327Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:54.702333Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:54.702431Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:54.947639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:54.967774Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:54.967866Z node 2 :GRPC_CLIENT DEBUG: [517000037308] Connect to grpc://localhost:28349 2025-04-04T17:14:54.970528Z node 2 :GRPC_CLIENT DEBUG: [517000037308] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-04T17:14:54.996334Z node 2 :GRPC_CLIENT DEBUG: [517000037308] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-04T17:14:54.996585Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:14:57.938803Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507697250893548:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:57.939092Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c24/r3tmp/tmpJcvnYV/pdisk_1.dat 2025-04-04T17:14:58.126171Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:58.139231Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:58.139322Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:58.140723Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12793, node 3 2025-04-04T17:14:58.207541Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:58.207567Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:58.207575Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:58.207739Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:58.457999Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:58.464015Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:58.465907Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:14:58.465930Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:58.465938Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:14:58.465968Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:58.466012Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Connect to grpc://localhost:1428 2025-04-04T17:14:58.466742Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-04T17:14:58.486203Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Status 14 Service Unavailable 2025-04-04T17:14:58.490345Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' NebiusAccessService::Authenticate request 2025-04-04T17:14:58.490406Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication iam_token: "user1" 2025-04-04T17:14:58.490553Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-04T17:14:58.492859Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Status 14 Service Unavailable 2025-04-04T17:14:58.493076Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:14:59.956735Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-04T17:14:59.956770Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:14:59.956898Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-04-04T17:14:59.960677Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Status 14 Service Unavailable 2025-04-04T17:14:59.960801Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-04T17:15:00.957075Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-04-04T17:15:00.957111Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:15:00.957324Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-04T17:15:00.959990Z node 3 :GRPC_CLIENT DEBUG: [517000008d08] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-04T17:15:00.960508Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:15:02.942380Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507697250893548:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:02.942480Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:11.308030Z node 4 :METADATA_PROVIDER 
WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507756760010301:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c24/r3tmp/tmpxmv3e4/pdisk_1.dat 2025-04-04T17:15:11.385605Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:15:11.485622Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:11.509314Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:11.509407Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:11.514015Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16468, node 4 2025-04-04T17:15:11.630924Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:11.630959Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:11.630985Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:11.631128Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:11.900652Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
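The node 3 records above show the retry path: AccessService answers with gRPC status 14 (Service Unavailable), the ticket parser stores a retryable error, and a refresh re-sends AuthenticateRequest roughly once per second until a valid AuthenticateResponse arrives and the token becomes user1@as. A schematic, self-contained version of that loop; the names and the blocking sleep are assumptions, since the real ticket parser is an actor that schedules refreshes instead of blocking:

    #include <chrono>
    #include <cstdio>
    #include <optional>
    #include <string>
    #include <thread>

    struct TAuthResult { bool Retryable = false; std::optional<std::string> UserSid; };

    // Stub standing in for the AccessService gRPC call; fails twice with a
    // retryable "Service Unavailable", then succeeds, mirroring the log above.
    TAuthResult Authenticate(const std::string& token) {
        static int calls = 0;
        if (++calls <= 2) return {true, std::nullopt};
        return {false, token + "@as"};
    }

    std::optional<std::string> AuthenticateWithRetry(const std::string& token, int maxAttempts = 5) {
        for (int i = 0; i < maxAttempts; ++i) {
            TAuthResult r = Authenticate(token);
            if (r.UserSid) return r.UserSid;      // valid token obtained
            if (!r.Retryable) break;              // permanent failure, give up
            // Refresh interval seen in the log (~1 s between attempts).
            std::this_thread::sleep_for(std::chrono::seconds(1));
        }
        return std::nullopt;
    }

    int main() {
        if (auto sid = AuthenticateWithRetry("user1"))
            std::printf("has now valid token of %s\n", sid->c_str());
    }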
2025-04-04T17:15:11.917215Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-04-04T17:15:11.917245Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:11.917263Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-04-04T17:15:11.917298Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:15:11.917346Z node 4 :GRPC_CLIENT DEBUG: [517000112608] Connect to grpc://localhost:2643 2025-04-04T17:15:11.918318Z node 4 :GRPC_CLIENT DEBUG: [517000112608] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-04-04T17:15:11.930554Z node 4 :GRPC_CLIENT DEBUG: [517000112608] Status 14 Service Unavailable NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-04-04T17:15:11.931047Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-04-04T17:15:11.931086Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-04-04T17:15:11.931235Z node 4 :GRPC_CLIENT DEBUG: [517000112608] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-04-04T17:15:11.933216Z node 4 :GRPC_CLIENT DEBUG: [517000112608] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-04-04T17:15:11.933394Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-04-04T17:15:15.494757Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489507774155008182:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:15.494808Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c24/r3tmp/tmpXGljNQ/pdisk_1.dat 2025-04-04T17:15:15.673691Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:15.673798Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:15.674121Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:15.689605Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7558, node 5 2025-04-04T17:15:15.842429Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:15.842451Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:15.842459Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:15.842615Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11083 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-04-04T17:15:16.172048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T17:15:16.179169Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:15:16.181689Z node 5 :TICKET_PARSER ERROR: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:15:16.752368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:15:16.752457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:15:16.752492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:15:16.752515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:15:16.752545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:15:16.752564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:15:16.752602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:15:16.752644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
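Two details in this block are worth spelling out. Earlier, the TICKET_PARSER ERROR for ticket AKIA****MPLE shows an access-key-signature credential being rejected outright rather than retried, unlike the transient Service Unavailable errors above. And the [RootDataErasureManager] Created record lists the manager's effective settings. A toy struct holding those logged values, with the intervals converted to human units; the field names mirror the log labels, but the struct itself is illustrative, not YDB's:

    #include <cstdio>

    struct TDataErasureConfig {
        double DataErasureIntervalSeconds = 604800.0;    // time between full cycles
        double DataErasureBSCIntervalSeconds = 600.0;    // time between BSC requests
        int InflightLimit = 10;
        bool IsManualStartup = false;                    // logged as false here
    };

    int main() {
        TDataErasureConfig cfg;
        // 604800 s is 7 days; 600 s is 10 minutes.
        std::printf("cycle every %.0f days, BSC poll every %.0f min\n",
                    cfg.DataErasureIntervalSeconds / 86400.0,
                    cfg.DataErasureBSCIntervalSeconds / 60.0);
    }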
2025-04-04T17:15:16.752884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:16.820113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:16.820164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:16.831354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:16.831592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:16.831749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:16.840640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:16.841119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:16.841687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.842994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:16.847909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.849330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.849405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.849577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:16.849622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.849672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:16.849784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.855977Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:15:16.965883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:16.966062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.966205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:16.966408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:16.966446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.975082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-04-04T17:15:16.975249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:16.975476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.975557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:16.975610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:16.975654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:16.977561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.977623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:16.977667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:16.979367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.979416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.979449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.979505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.988096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:16.989954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:16.990117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:16.991090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.991201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:16.991241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.991466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:16.991509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.991655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:16.991733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:16.993748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.993787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.993910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.993951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:16.994206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.994263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:16.994344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.994390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.994430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.994466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.994498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:16.994542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.994573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:16.994620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:16.994699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:16.994732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:16.994758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:16.996281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:16.996380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:16.996412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
LAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1889:3557], Recipient [1:827:2709]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1890:3558] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-04T17:15:18.285863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:15:18.285928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409551 2025-04-04T17:15:18.287506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-04-04T17:15:18.310933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:632:2548], Recipient [1:455:2407]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409549 Status: OK 2025-04-04T17:15:18.310989Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-04T17:15:18.311039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409546 2025-04-04T17:15:18.311103Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409549, shardIdx# 72075186233409546:4 in# 64 ms, next wakeup in# 14.936000s, rate# 1, in queue# 0 shards, running# 1 shards at schemeshard 72075186233409546 2025-04-04T17:15:18.313051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# false 2025-04-04T17:15:18.326528Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:633:2549], Recipient [1:455:2407]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409550 Status: OK 2025-04-04T17:15:18.326588Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-04T17:15:18.326660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409546 2025-04-04T17:15:18.326725Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 66 ms, next wakeup in# 14.934000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-04-04T17:15:18.326818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. 
Send response to root schemeshard 2025-04-04T17:15:18.326853Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 1 2025-04-04T17:15:18.328821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2025-04-04T17:15:18.329207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1905:3573], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:18.329248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:18.329294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:15:18.329432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1904:3572], Recipient [1:455:2407]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1905:3573] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-04T17:15:18.329463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:15:18.329494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-04-04T17:15:18.329598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:455:2407], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 1 Status: COMPLETED 2025-04-04T17:15:18.329643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-04T17:15:18.329694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-04T17:15:18.329746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 67 ms, next wakeup# 599.933000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-04T17:15:18.329813Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-04-04T17:15:18.331683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-04T17:15:18.331729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-04T17:15:18.333169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1909:3577], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1910:3578] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:15:18.333215Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:15:18.333246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-04-04T17:15:18.333492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-04-04T17:15:18.333527Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:18.333562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:18.333619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:18.333683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-04T17:15:18.333738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-04T17:15:18.333804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-04T17:15:19.210550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:19.210636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:19.210686Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-04T17:15:19.210843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:19.210881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:19.211032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-04-04T17:15:19.211067Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:19.211096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:19.211165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:19.211203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 
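The records above poll the BlobStorage controller for shred progress about once a second (ScheduleRequestToBSC: Interval# 1.000000s); Progress10k runs from 0 to 10000, i.e. hundredths of a percent, and the cycle finishes when the response carries Completed: true. A schematic poll loop under those assumptions; the type and function names below are hypothetical stand-ins, not the actual schemeshard code:

    #include <algorithm>
    #include <chrono>
    #include <cstdio>
    #include <thread>

    struct TShredResponse { bool Completed = false; int Progress10k = 0; };

    // Stub for the controller request; advances by half each call, mirroring
    // the 0 -> 5000 -> 10000 progression seen in the log above.
    TShredResponse RequestShredStatus() {
        static int step = 0;
        step = std::min(step + 5000, 10000);
        return {step == 10000, step};
    }

    int main() {
        for (;;) {
            TShredResponse r = RequestShredStatus();
            std::printf("shred progress: %.2f%%\n", r.Progress10k / 100.0);
            if (r.Completed) break;  // Completed is reported at Progress10k == 10000
            std::this_thread::sleep_for(std::chrono::seconds(1));  // poll interval
        }
    }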
2025-04-04T17:15:19.211250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true
2025-04-04T17:15:19.211288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s
2025-04-04T17:15:19.535027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:15:19.535089Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T17:15:19.535205Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC
2025-04-04T17:15:19.535231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC
2025-04-04T17:15:19.535257Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1
2025-04-04T17:15:19.535369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:15:19.535398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:15:19.535513Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000
2025-04-04T17:15:19.535538Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse
2025-04-04T17:15:19.535561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944
2025-04-04T17:15:19.535605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944
2025-04-04T17:15:19.535630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed
2025-04-04T17:15:19.535661Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s
2025-04-04T17:15:19.537231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false
2025-04-04T17:15:19.537783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1989:3657], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T17:15:19.537858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T17:15:19.537890Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944
2025-04-04T17:15:19.538014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest
2025-04-04T17:15:19.538040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest
2025-04-04T17:15:19.538071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
>> TestDataErasure::SimpleDataErasureTest [GOOD]
>> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD]
>> KqpWorkloadServiceTables::TestPoolStateFetcherActor
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTest [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:15:16.770443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:15:16.770576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:15:16.770647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:15:16.770688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:15:16.770730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:15:16.770779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:15:16.770839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:15:16.770911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:15:16.771266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:15:16.854618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:15:16.854676Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:15:16.865912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:15:16.866173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:15:16.866364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:15:16.875864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:15:16.876147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:15:16.876819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:15:16.877635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:15:16.882441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:15:16.883769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:15:16.883851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:15:16.884065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-04T17:15:16.884118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.884174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:16.884295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.890743Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:15:17.024970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:17.025205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.025407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:17.025655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:17.025707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.028145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:17.028302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:17.028482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.028555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:17.028588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:17.028648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:17.031935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.032014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:17.032056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:17.033951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.033996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.034040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:15:17.034098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.037969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:17.040128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:17.040335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:17.041450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:17.041589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:17.041641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:17.041969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:17.042033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:17.042235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:17.042373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:17.044543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:17.044601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:17.044788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:17.044851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:17.045187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.045233Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:17.045331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:17.045392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.045447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
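The schemeshard trace above steps suboperation 1:0 through the numeric states 2 -> 3 -> 128 -> 240, with the transitions logged from TCreateParts, TConfigureParts, TPropose, and finally TDone ProgressState handlers. A toy walk through just the transitions observed in this log; the numeric codes are copied from the trace, while the stage labels are informal names taken from those handlers, not YDB's enum identifiers:

    #include <cstdio>

    int main() {
        // Observed in the trace: 2 -> 3 (TCreateParts, no shards to create),
        // 3 -> 128 (TConfigureParts), 128 -> 240 (TPropose on TEvOperationPlan),
        // then TDone reports progress 1/1.
        const int states[] = {2, 3, 128, 240};
        const char* labels[] = {"CreateParts", "ConfigureParts", "Propose", "Done"};
        for (int i = 0; i + 1 < 4; ++i)
            std::printf("Change state for txid 1:0 %d -> %d (%s -> %s)\n",
                        states[i], states[i + 1], labels[i], labels[i + 1]);
    }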
2025-04-04T17:15:17.045492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.045530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:17.045570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.045605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:17.045641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:17.045736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:17.045778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:17.045811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:17.047749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:17.047876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:17.047940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... , processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:19.619632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:15:19.619745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1979:3647], Recipient [1:827:2709]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1980:3648] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-04T17:15:19.619776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:15:19.619802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409551 2025-04-04T17:15:19.619908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:827:2709], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 1 Status: COMPLETED 2025-04-04T17:15:19.619932Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-04T17:15:19.619976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-04T17:15:19.620020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 63 ms, next wakeup# 599.937000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-04T17:15:19.620088Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-04-04T17:15:19.621881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-04T17:15:19.621935Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-04T17:15:19.622324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1984:3652], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1985:3653] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:15:19.622361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:15:19.622387Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-04-04T17:15:19.622532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-04-04T17:15:19.622568Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:19.622631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:19.622715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:19.622768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-04T17:15:19.622854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-04T17:15:19.622908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-04T17:15:19.969401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:19.969514Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:19.969637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:19.969667Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:19.969736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:19.969763Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:19.969836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:19.969876Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:19.969967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2407], Recipient [1:455:2407]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:19.969993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:19.970044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:827:2709], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:19.970068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:20.002633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:20.002716Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:20.002825Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-04T17:15:20.003124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-04-04T17:15:20.003159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:20.003188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:20.003270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:20.003335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-04T17:15:20.003386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-04T17:15:20.003433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-04T17:15:20.280509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:20.280597Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:20.280690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:20.280721Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:20.280770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:20.280792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:20.280846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2407], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:20.280872Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:20.280947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 271124999, Sender [1:827:2709], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:15:20.280975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:15:20.281030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:15:20.281054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T17:15:20.311966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC
2025-04-04T17:15:20.312029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC
2025-04-04T17:15:20.312066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1
2025-04-04T17:15:20.312261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000
2025-04-04T17:15:20.312292Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse
2025-04-04T17:15:20.312334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944
2025-04-04T17:15:20.312393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944
2025-04-04T17:15:20.312425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed
2025-04-04T17:15:20.312515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.936000s, Timestamp# 1970-01-01T00:00:05.108000Z
2025-04-04T17:15:20.312602Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s
2025-04-04T17:15:20.314652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false
2025-04-04T17:15:20.315745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:2006:3674], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T17:15:20.315841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T17:15:20.315910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944
2025-04-04T17:15:20.316058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest
2025-04-04T17:15:20.316115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest
2025-04-04T17:15:20.316154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944
>> DstCreator::SameOwner
>> DstCreator::ReplicationModeMismatch
>> DstCreator::NonExistentSrc
>> DstCreator::WithIntermediateDir
>> DstCreator::WithSyncIndex
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68
>> TTicketParserTest::LoginCheckRemovedUser [GOOD]
>> TTicketParserTest::LoginEmptyTicketBad
>> DstCreator::Basic
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68
>> DstCreator::WithSyncIndexAndIntermediateDir
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44
>> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD]
>> KqpWorkloadService::TestZeroQueueSizeManyQueries
>> DstCreator::ExistingDst
>> TestDataErasure::DataErasureRun3Cycles [GOOD]
>> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD]
>> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68
>> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD]
>> ResourcePoolsDdl::TestDropResourcePool
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:15:16.753408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:15:16.753501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:15:16.753550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:15:16.753598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:15:16.753638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:15:16.753666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:15:16.753717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:15:16.753790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:15:16.754105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:15:16.834007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:15:16.834077Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:15:16.850146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:15:16.850421Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:16.850626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:16.861104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:16.861392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:16.862090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.862932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:16.868183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.869496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.869580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.869771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:16.869828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.869880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:16.870004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.876414Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:15:17.015252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:17.015499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.015711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:17.015970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:17.016026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.019531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:17.019694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:17.019896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.019962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:17.020007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:17.020061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:17.021969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.022040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:17.022099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:17.025552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.025600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.025645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:17.025706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.040488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:17.045127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:17.045371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:17.046426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:17.046575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:17.046656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:17.046942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:17.046997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:17.047184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:17.047266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:17.051164Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:17.051216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:17.051381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:17.051449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:17.051816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:17.051865Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:17.051983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:17.052036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.052084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:17.052127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.052167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:17.052207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:17.052254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:17.052294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:17.052360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:17.052397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:17.052433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:17.056226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:17.056346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:17.056397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ipient [1:455:2407]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409550 Status: OK 2025-04-04T17:15:22.255636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-04T17:15:22.255680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-04-04T17:15:22.255737Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 63 ms, next wakeup in# 8.937000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-04-04T17:15:22.255791Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-04-04T17:15:22.255822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-04-04T17:15:22.257382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# false 2025-04-04T17:15:22.258153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# true 2025-04-04T17:15:22.258353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:455:2407], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 3 Status: COMPLETED 2025-04-04T17:15:22.258391Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-04T17:15:22.258437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-04T17:15:22.258489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 65 ms, next wakeup# 593.935000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-04T17:15:22.258542Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed.
Send request to BS controller 2025-04-04T17:15:22.259934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-04T17:15:22.259975Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-04T17:15:22.260149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-04-04T17:15:22.260183Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:22.260212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:22.260254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:22.260281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-04T17:15:22.260322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-04T17:15:22.260362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-04T17:15:22.632637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.632710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.632782Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.632807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.632860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.632885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.632961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2407], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.632993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.633062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:827:2709], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.633088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.633144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.633169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.671929Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:22.672022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:22.672066Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-04T17:15:22.672308Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-04-04T17:15:22.672343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:22.672375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:22.672439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:22.672475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-04T17:15:22.672532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-04T17:15:22.672579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-04T17:15:22.977351Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.977431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.977506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.977533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.977592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.977618Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:22.977678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2407], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.977706Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.977779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:827:2709], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.977803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.977856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:22.977879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.014551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.014645Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.014681Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-04T17:15:23.014923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-04-04T17:15:23.014962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:23.014995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:23.015058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:23.015088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-04T17:15:23.015135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.934000s, Timestamp# 1970-01-01T00:00:11.110000Z 2025-04-04T17:15:23.015165Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-04-04T17:15:23.023049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-04T17:15:23.023750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3584:4940], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.023823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.023864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:15:23.023968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-04T17:15:23.024000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-04T17:15:23.024040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] >> TestDataErasure::DataErasureWithMerge [GOOD] >> TestDataErasure::DataErasureWithSplit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:15:18.981531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:15:18.981646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:18.981704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:15:18.981742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:15:18.981785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:15:18.981814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:15:18.981870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:18.981951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:15:18.982297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:19.058701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:19.058755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:19.071709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:19.071960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:19.072130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:19.081627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:19.081916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:19.082570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:19.083429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:19.088398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:19.089667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:19.089749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:19.089989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:19.090040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:19.090082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:19.090202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.097535Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] 
recipient: [1:15:2062] 2025-04-04T17:15:19.222940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:19.223176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.223386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:19.223621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:19.223674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.225959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:19.226115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:19.226349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.226412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:19.226448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:19.226512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:19.228842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.228901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:19.228938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:19.230632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.230678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.230715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:19.230775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:19.239233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:19.242665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-04T17:15:19.242986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:19.244081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:19.244227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:19.244276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:19.244564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:19.244619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:19.244807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:19.244888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:19.247407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:19.247453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:19.247620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:19.247663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:19.247980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:19.248025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:19.248117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:19.248172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:19.248220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:19.248263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:19.248300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:19.248341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:19.248409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:19.248449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:19.248516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:19.248558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:19.248592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:19.250430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:19.250527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:19.250561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ing# 1 shards at schemeshard 72075186233409551 2025-04-04T17:15:23.085032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409551, NeedResponseComplete# false 2025-04-04T17:15:23.097768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:1005:2851], Recipient [1:827:2709]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409554 Status: OK 2025-04-04T17:15:23.097830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-04-04T17:15:23.097907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409551 2025-04-04T17:15:23.097975Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409554, shardIdx# 72075186233409551:4 in# 63 ms, next wakeup in# 10.802000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409551 2025-04-04T17:15:23.098056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-04-04T17:15:23.098084Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-04-04T17:15:23.099735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409551, NeedResponseComplete# true 2025-04-04T17:15:23.099962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:827:2709], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-04-04T17:15:23.100002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-04-04T17:15:23.100072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-04-04T17:15:23.100123Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 64 ms, next wakeup# 595.801000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-04-04T17:15:23.100188Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed.
Send request to BS controller 2025-04-04T17:15:23.101558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-04-04T17:15:23.101604Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-04T17:15:23.101811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-04-04T17:15:23.101848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:23.101882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:23.101933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:23.101969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-04-04T17:15:23.102056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-04T17:15:23.102123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-04T17:15:23.491195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.491292Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.491396Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.491428Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.501873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.501960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.502037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.502065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.502126Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2407], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.502153Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.502225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:827:2709], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.502267Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.534041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.534116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.534183Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-04T17:15:23.534501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-04-04T17:15:23.534541Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:23.534574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:23.534650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:23.534683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-04-04T17:15:23.534740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-04-04T17:15:23.534783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-04-04T17:15:23.799570Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.799650Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.799732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.799763Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.810121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.810185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.810254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.810281Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.810346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2407], Recipient [1:455:2407]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.810376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.810445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:827:2709], Recipient [1:827:2709]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.810474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.846894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.846972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.847009Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-04-04T17:15:23.847253Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-04-04T17:15:23.847285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:23.847316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:23.847376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:23.847408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-04T17:15:23.847454Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-04-04T17:15:23.849610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-04T17:15:23.850196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3557:4913], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.850274Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.850311Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:15:23.850446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:2773:4285], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-04T17:15:23.850480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-04T17:15:23.850517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithSplit [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:64:2058] recipient: [1:57:2099] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:64:2058] recipient: [1:57:2099] Leader for TabletID 72057594046678944 is [1:68:2103] sender: [1:72:2058] recipient: [1:57:2099] 2025-04-04T17:15:16.572643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:15:16.572738Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:16.572778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:15:16.572811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:15:16.573841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:15:16.573893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:15:16.573957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:16.574058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:15:16.582683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:16.700962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:16.701017Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:16.707731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:16.707978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:16.708144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:16.710563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:16.715109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:16.715857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.716494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:16.717448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.718723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.718793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.718896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:16.718949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.718996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:16.719185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.721986Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:68:2103] sender: [1:147:2058] recipient: [1:16:2063] 2025-04-04T17:15:16.850167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:16.850404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.850627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:16.850901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:16.850952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.851876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.851998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:16.852156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.852214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:16.852256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:16.852310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:16.852839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.853022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:16.853059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:16.853452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.853490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.853526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.853566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.864870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:16.865577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:16.865796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:16.866892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.867049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 73 RawX2: 4294969403 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:16.867105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.867365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:16.867434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.867601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:16.867679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:16.868462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.868529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.868683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.868738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:123:2134], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:16.869098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.869143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:16.869233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.869264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.869302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.869335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.869372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:16.869426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.869461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:16.869499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:16.869563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:16.869604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-04-04T17:15:16.869653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:16.871434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:16.871536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:16.871570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... _SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.800453Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.811220Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.811290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.811393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.811422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.846072Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.846131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.846776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.846817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.861158Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1007:2871], Recipient [1:278:2237]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 1062 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 1062 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1108 Memory: 94107 Storage: 5020600 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-04T17:15:23.861460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:15:23.862005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.1108 2025-04-04T17:15:23.862309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 1062 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 1062 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:15:23.862371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T17:15:23.862714Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1012:2873], Recipient [1:278:2237]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 1083 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 1083 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1013 Memory: 94152 Storage: 5123060 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-04T17:15:23.862753Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:15:23.862799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.1013 2025-04-04T17:15:23.862892Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 1083 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 1083 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:15:23.873371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 
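A small decoding note on the two TEvPeriodicTableStats messages above: the TabletMetrics CPU field and the cpuUsage value the schemeshard then logs differ by exactly a factor of 10000 in both pairs (CPU: 1108 -> cpuUsage 0.1108, CPU: 1013 -> cpuUsage 0.1013), so CPU here looks like a per-ten-thousand counter. This is an inference from the log values, not a statement about the YDB sources; a one-line check of the observed pairs:

    #include <cassert>

    int main() {
        // Pairs copied from the trace: TabletMetrics CPU vs logged cpuUsage.
        // Assumption being tested: cpuUsage = CPU / 10000.0.
        const int cpu[] = {1108, 1013};
        const double usage[] = {0.1108, 0.1013};
        for (int i = 0; i < 2; ++i) {
            assert(cpu[i] / 10000.0 == usage[i]);
        }
        return 0;
    }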
2025-04-04T17:15:23.873452Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.873564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.873597Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.894567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.894660Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.894702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-04T17:15:23.894829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:23.894857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:23.894887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72075186233409546, queue size# 2 2025-04-04T17:15:23.894939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-04T17:15:23.894968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-04T17:15:23.895083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:5 data size 5019511 row count 49 2025-04-04T17:15:23.895138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72075186233409546:5 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T17:15:23.895174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 2: RowCount 49, DataSize 5019511 2025-04-04T17:15:23.895238Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:5 with partCount# 1, rowCount# 49, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-04T17:15:23.895325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 data size 5121950 row count 50 2025-04-04T17:15:23.895360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T17:15:23.895384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409551 followerId=0, pathId 2: RowCount 50, DataSize 5121950 2025-04-04T17:15:23.895420Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-04T17:15:23.895480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 
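The stats flow above (TEvPeriodicTableStats -> "Will delay TTxStoreTableStats" -> TEvPersistTableStats -> TTxStoreStats) matches the StatsBatching configuration these tests print at startup (StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100 -- visible in the startup blocks below): incoming stats are queued and flushed either when the 0.1 s batch window elapses or, presumably, when the batch fills. A minimal sketch of that decision, with the function and parameter names invented for illustration (the real scheduling lives in NKikimr::NSchemeShard and is more involved):

    #include <chrono>
    #include <cstddef>

    using namespace std::chrono_literals;

    constexpr auto StatsBatchTimeout = 100ms;       // StatsBatchTimeout# 0.100000s
    constexpr std::size_t StatsMaxBatchSize = 100;  // StatsMaxBatchSize# 100

    // Hypothetical: delay before persisting queued table stats.
    std::chrono::milliseconds PersistDelay(std::size_t queueSize, bool wakeupFired) {
        // Flush immediately when the persist wakeup fired or the batch is full;
        // otherwise wait out the batching window.
        if (wakeupFired || queueSize >= StatsMaxBatchSize) {
            return 0ms;
        }
        return StatsBatchTimeout;
    }

    int main() {
        // Mirrors the trace: "on# 0.100000s, queue# 1" while waiting,
        // "on# 0.000000s, queue# 2" once TEvPersistTableStats has fired.
        return (PersistDelay(1, false) == 100ms && PersistDelay(2, true) == 0ms) ? 0 : 1;
    }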
2025-04-04T17:15:23.895690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:185:2177], Recipient [1:182:2175]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-04T17:15:23.895726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:23.895751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:23.895793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:23.895818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-04T17:15:23.895872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-04-04T17:15:23.895907Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s 2025-04-04T17:15:23.896401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-04T17:15:23.900033Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1194:3028], Recipient [1:182:2175]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.900095Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.900127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:15:23.900295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:168:2168], Recipient [1:182:2175]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-04T17:15:23.900332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-04T17:15:23.900370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:64:2058] recipient: [1:57:2099] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:64:2058] recipient: [1:57:2099] Leader for TabletID 72057594046678944 is [1:68:2103] sender: [1:72:2058] recipient: [1:57:2099] 2025-04-04T17:15:16.572347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:15:16.572443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:16.572478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:15:16.572510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:15:16.573511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:15:16.573563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxSplitTablePartition, limit 10000 2025-04-04T17:15:16.573622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:16.573716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:15:16.576423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:16.680473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:16.680521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:16.686564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:16.686782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:16.687664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:16.691405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:16.695951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:16.698677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.699978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:16.704222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.711684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.711753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.711850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:16.711898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.712004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:16.712208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.715905Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:68:2103] sender: [1:147:2058] recipient: [1:16:2063] 2025-04-04T17:15:16.872682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:16.873183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.873441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:16.873749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:16.873853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.874753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.874892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:16.875119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.875192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:16.875230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:16.875288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:16.875890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.875942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:16.875977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:16.876388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.876425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.876464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.876512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.880421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:16.880900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:16.881112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:16.882042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.882171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 73 RawX2: 4294969403 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-04T17:15:16.882228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.882853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:16.882924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.883107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:16.883195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:16.883915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.883975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.884133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.884187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:123:2134], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:16.884533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.884578Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:16.884670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.884703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.884743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.884793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.884840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:16.884880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.884915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:16.884952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:16.885026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:16.885064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:16.885113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:16.887978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:16.888080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
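For orientation, the numeric states this trace moves through ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240") line up with the sub-operation phases named in the adjacent ProgressState lines. A rough map, with the state names inferred from this log rather than taken from the YDB sources (the real enum in ydb/core/tx/schemeshard has many more states):

    // Hypothetical mapping of the state codes printed by the schemeshard trace.
    enum class ESubOpState {
        CreateParts    = 2,   // "TCreateParts opId# 1:0 ProgressState"
        ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ... ProgressState"
        Propose        = 128, // "NSubDomainState::TPropose ProgressState"
        Done           = 240, // "[...] TDone opId# 1:0 ProgressState"
    };
    // Observed progression for the AlterSubDomain operation:
    // 2 -> 3 (no shards to create) -> 128 (proposed to coordinator
    // 72057594046316545) -> 240 once TEvOperationPlan arrives at step 5000001.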
2025-04-04T17:15:16.888119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... chemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.770557Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.770695Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.770727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.781219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.781295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.781414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.781448Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.813717Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.813795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.813903Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.813935Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.824775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.824852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.824981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.825009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.857135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.857202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.857290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.857316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.867785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, 
Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.867866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.867971Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.867998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.900888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.900947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.901136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.901164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.912048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1218:3031], Recipient [1:278:2237]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 2 Generation: 2 Round: 2 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 2112 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 2112 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3359 Memory: 96381 Storage: 10143600 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-04-04T17:15:23.912116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:15:23.912163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.3359 2025-04-04T17:15:23.912261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 2] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 2112 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 2112 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:15:23.912296Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 
0.100000s, queue# 1 2025-04-04T17:15:23.933290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.933362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:23.933477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.933520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:23.943990Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.944048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:23.944080Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-04T17:15:23.944154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:23.944172Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:23.944192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72075186233409546, queue size# 1 2025-04-04T17:15:23.944253Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:15:23.944273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-04T17:15:23.944427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72075186233409546:6 data size 10141461 row count 99 2025-04-04T17:15:23.944470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T17:15:23.944496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409551 followerId=0, pathId 2: RowCount 99, DataSize 10141461 2025-04-04T17:15:23.944551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 99, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72075186233409546 2025-04-04T17:15:23.944639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-04T17:15:23.944798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:185:2177], Recipient [1:182:2175]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-04T17:15:23.944829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:23.944850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:23.944887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 
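The stats in the DataErasureWithMerge output are worth cross-checking: the two shard-level reports earlier in this log (shard 72075186233409550 with 49 rows / 5019511 bytes, shard 72075186233409551 with 50 rows / 5121950 bytes) and the single post-merge report above (shard 72075186233409551 with 99 rows / 10141461 bytes) add up exactly for rows and data size, consistent with the two source shards being merged into one. Index size is not additive (1062 + 1083 = 2145 vs 2112 afterwards), which fits the merged shard carrying a single rebuilt part. A quick arithmetic check:

    #include <cassert>
    #include <cstdint>

    int main() {
        // Pre-merge stats as reported via TEvPeriodicTableStats.
        const std::uint64_t rowsA = 49, bytesA = 5019511, indexA = 1062;
        const std::uint64_t rowsB = 50, bytesB = 5121950, indexB = 1083;
        // Post-merge stats from shard 72075186233409551.
        assert(rowsA + rowsB == 99);
        assert(bytesA + bytesB == 10141461);
        // Index size is rebuilt, not summed: 2145 != 2112.
        assert(indexA + indexB == 2145);
        return 0;
    }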
2025-04-04T17:15:23.944912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-04T17:15:23.944957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 19.899500s, Timestamp# 1970-01-01T00:01:20.100500Z 2025-04-04T17:15:23.944986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 30 s 2025-04-04T17:15:23.945418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-04T17:15:23.947699Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1395:3185], Recipient [1:182:2175]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.947748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:23.947776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:15:23.947843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:168:2168], Recipient [1:182:2175]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-04T17:15:23.947868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-04T17:15:23.947900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> TestDataErasure::DataErasureWithCopyTable [GOOD] >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> DstCreator::WithSyncIndex [GOOD] >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureWithCopyTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:64:2058] recipient: [1:57:2099] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:64:2058] recipient: [1:57:2099] Leader for TabletID 72057594046678944 is [1:68:2103] sender: [1:72:2058] recipient: [1:57:2099] 2025-04-04T17:15:16.571924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:15:16.572098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:16.572137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:15:16.572171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:15:16.573092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
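Two details of the data-erasure bookkeeping above are easy to misread. First, Progress10k: 10000 appears to be progress in units of 1/10000, i.e. 100% (hence "Data shred in BSC is completed"). Second, the wakeup intervals look computed back from a fixed deadline rather than restarted from "now": in the test's fake clock, 80.1005 s + 19.8995 s = 100 s for both occurrences above, and the DataErasureWithCopyTable run below shows 85.0005 s + 14.9995 s = 100 s. Both are inferences from the printed values, not confirmed semantics; a sketch under that assumption:

    #include <cstdio>

    int main() {
        // Assumed semantics: ScheduleDataErasureWakeup targets a fixed deadline
        // and logs the remaining time, not a full fresh interval.
        const double deadline = 100.0;                 // inferred from the sums above
        const double now1 = 80.1005, now2 = 85.0005;   // Timestamp# values, seconds
        std::printf("%.6f %.6f\n", deadline - now1, deadline - now2);
        // Prints 19.899500 14.999500 -- matching the Interval# values in the log.
        return 0;
    }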
2025-04-04T17:15:16.573158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:15:16.573223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:16.573287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:15:16.574586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:16.680293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:16.680353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:16.686506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:16.686749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:16.687644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:16.691410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:16.696060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:16.698697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.699980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:16.703306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.711596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.711760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.711852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:16.711897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.712044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:16.712214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.715139Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:68:2103] sender: [1:147:2058] recipient: [1:16:2063] 2025-04-04T17:15:16.842364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:16.844035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.846322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
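The "[RootDataErasureManager] Created:" line above is easier to parse with the units spelled out: DataErasureInterval# 604800.000000s is exactly 7 days and DataErasureBSCInterval# 600.000000s is 10 minutes. (The tests clearly run on a compressed fake clock, since a full cycle is reported as "Complete: Generation# 1, duration# 30 s" / "duration# 35 s".) A compile-time check of the conversions:

    #include <chrono>

    // Unit conversions for the intervals copied from the
    // "[RootDataErasureManager] Created:" log line.
    static_assert(std::chrono::seconds(604800) == std::chrono::hours(24) * 7,
                  "DataErasureInterval is 7 days");
    static_assert(std::chrono::seconds(600) == std::chrono::minutes(10),
                  "DataErasureBSCInterval is 10 minutes");

    int main() { return 0; }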
2025-04-04T17:15:16.848376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:16.848451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.849771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.849917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:16.851611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.851689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:16.851786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:16.851832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:16.852456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.852515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:16.852551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:16.853010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.853042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.853074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.853115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.857992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:16.858550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:16.858816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:16.859782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:16.859896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 73 RawX2: 4294969403 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:16.859944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.862547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:16.862631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:16.862796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:16.862867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:16.863547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:16.863594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:16.863755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:16.863808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:123:2134], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:16.864128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:16.864179Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:16.864267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.864297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.864332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:16.864359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.864390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:16.864436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:16.864472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:16.864507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:16.864575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:16.864612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:16.864655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:16.870333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:16.870463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:16.870498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, a ... 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:15:24.504290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:24.504366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:24.504399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72075186233409546, queue size# 2 2025-04-04T17:15:24.504478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-04-04T17:15:24.504513Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-04-04T17:15:24.504650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72075186233409546:6 data size 5019511 row count 49 2025-04-04T17:15:24.504718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409551 maps to shardIdx: 72075186233409546:6 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0 2025-04-04T17:15:24.504763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409551 followerId=0, pathId 3: RowCount 49, DataSize 5019511 2025-04-04T17:15:24.504843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:6 with partCount# 1, rowCount# 49, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-04-04T17:15:24.504928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72075186233409546:7 data size 5121950 row count 50 2025-04-04T17:15:24.504972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], pathId map=SimpleCopy, is column=0, is olap=0 2025-04-04T17:15:24.504998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409552 followerId=0, pathId 3: RowCount 50, DataSize 5121950 2025-04-04T17:15:24.505034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-04-04T17:15:24.505100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-04-04T17:15:24.517371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:24.517454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:15:24.517487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2025-04-04T17:15:24.540719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.540799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.540937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.540969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.555115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.555207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.555321Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.555368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.590711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.590768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.590840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.590857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.602652Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.602733Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.602871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.602904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.635329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.635412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.635518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.635545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.646432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.646507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.646591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.646629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.680776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.680852Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.680943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.680970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.691563Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.691641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.691748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:278:2237], Recipient [1:278:2237]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.691776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.724249Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.724323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:15:24.724446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:182:2175], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.724475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:15:24.735094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:182:2175]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:24.735169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-04-04T17:15:24.735201Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-04-04T17:15:24.735437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:185:2177], Recipient [1:182:2175]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-04-04T17:15:24.735471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-04-04T17:15:24.735500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-04-04T17:15:24.735564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-04-04T17:15:24.735597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-04-04T17:15:24.735657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2025-04-04T17:15:24.735693Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 35 s 2025-04-04T17:15:24.736272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-04-04T17:15:24.739742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1759:3463], Recipient [1:182:2175]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:24.739803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:15:24.739843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:15:24.740021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:168:2168], Recipient [1:182:2175]: NKikimrScheme.TEvDataErasureInfoRequest 2025-04-04T17:15:24.740062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-04-04T17:15:24.740097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> DstCreator::ColumnsSizeMismatch >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 |89.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-04-04T17:15:21.655639Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507797040695244:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:21.658142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00172f/r3tmp/tmp7X4e89/pdisk_1.dat 2025-04-04T17:15:22.122663Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:22.151865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:22.151992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.159439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9056 TServer::EnableGrpc on GrpcPort 16837, node 1 2025-04-04T17:15:22.437330Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:22.437353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:22.437362Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:22.437477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:23.001102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:23.015460Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:23.033611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
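The REPLICATION_CONTROLLER trace that follows walks DstCreator through its usual sequence: resolve the source path (TEvNavigateKeySetResult), fetch table profiles, describe the source table, propose ESchemeOpCreateTable, then subscribe to and wait out the transaction. A minimal sketch for pulling that sequence out of a raw log like this one, assuming Python 3; the script and its regex are illustrative helpers written against the line format above, not part of the YDB tooling:

import re
import sys

# Illustrative only: matches DstCreator trace entries of the form
#   ... [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse ...
DST = re.compile(r'\[DstCreator\]\[rid (\d+)\]\[tid (\d+)\] (Handle|Subscribe|Get|Success)\S*\s*(\S*)')

def dst_event_sequence(log_text):
    """Yield (rid, tid, verb, detail) in log order for every DstCreator entry."""
    for m in DST.finditer(log_text):
        rid, tid, verb, detail = m.groups()
        yield int(rid), int(tid), verb, detail

if __name__ == '__main__':
    for event in dst_event_sequence(sys.stdin.read()):
        print(*event)

Run over this section, it would show rid 1/tid 1 moving from TEvNavigateKeySetResult through TEvModifySchemeTransactionResult to TEvNotifyTxCompletionResult, which is the expected happy path for both WithSyncIndex tests here.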
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923382 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923060 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923382 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-04-04T17:15:23.435006Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.435145Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.435169Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:23.435721Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:24.884261Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786923382, tx_id: 281474976710658 } } } 2025-04-04T17:15:24.884680Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:24.886568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:15:24.889778Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:15:24.889802Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-04T17:15:24.923956Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T17:15:24.925615Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924964 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeT ... ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 
HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2025-04-04T17:15:24.944474Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls request: /Root/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924964 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924964 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924964 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... 
(TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924964 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false 
DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2025-04-04T17:15:22.397367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507804825176566:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:22.397405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00171a/r3tmp/tmpxqFqhz/pdisk_1.dat 2025-04-04T17:15:22.786164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:22.786354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.814861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:22.815063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12896 TServer::EnableGrpc on GrpcPort 6155, node 1 2025-04-04T17:15:23.144004Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:23.144028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:23.144035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:23.144153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:12896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:23.532428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:23.550454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:23.555565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923886 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923585 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923886 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-04-04T17:15:23.945407Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.945681Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.945712Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:23.946278Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:25.587156Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786923886, tx_id: 281474976710658 } } } 2025-04-04T17:15:25.587617Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:25.590084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-04T17:15:25.593209Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:15:25.596652Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-04T17:15:25.664657Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T17:15:25.666105Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786925699 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns 
{ Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 7 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForce ... 
ionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2025-04-04T17:15:25.684774Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 7 SchemeshardId: 
72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786925699 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786925699 ParentPathId: 7 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Dir/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786925699 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786925699 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" 
} ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2025-04-04T17:14:42.230824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507632874645320:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:42.230859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c52/r3tmp/tmpiwhddH/pdisk_1.dat 2025-04-04T17:14:42.809089Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:42.827440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:42.827524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:42.830149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30768, node 1 2025-04-04T17:14:43.036341Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:43.036361Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:43.036370Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:43.036464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:43.609247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:43.797891Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:14:43.815162Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:14:43.815232Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:43.818087Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****xLdQ (AF51A4C4) () has now valid token of user1 2025-04-04T17:14:43.818128Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-04T17:14:45.940835Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507642752993054:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:45.940881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c52/r3tmp/tmpR8BPjM/pdisk_1.dat 2025-04-04T17:14:46.100607Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:46.140479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:46.140556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:46.145389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23190, node 2 2025-04-04T17:14:46.199801Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:46.199823Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:46.199830Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:46.199924Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:46.420949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:14:46.427475Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:46.482792Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:14:46.489801Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:14:46.489833Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:46.490483Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****QiEg (6DAAB743) () has now valid token of user1 2025-04-04T17:14:46.490494Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-04T17:14:49.372401Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507662771016171:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:49.372447Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c52/r3tmp/tmpBN1TnQ/pdisk_1.dat 2025-04-04T17:14:49.486619Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25203, node 3 2025-04-04T17:14:49.524391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:49.524510Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:49.570906Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:49.595240Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:49.595256Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:49.595261Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:49.595374Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:14:49.843448Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:14:49.966060Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:14:49.972142Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:14:49.972174Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:14:49.972862Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****DbqQ (6D205DEA) () has now valid token of user1 2025-04-04T17:14:49.972878Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-04T17:14:49.973341Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:14:54.372628Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507662771016171:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:54.372697Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:14:55.394372Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****DbqQ (6D205DEA) 2025-04-04T17:14:55.394772Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****DbqQ (6D205DEA) () has now valid token of user1 2025-04-04T17:14:59.973771Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:15:00.400791Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****DbqQ (6D205DEA) 2025-04-04T17:15:00.401131Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****DbqQ (6D205DEA) () has now valid token of user1 2025-04-04T17:15:04.462014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:15:04.462044Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:05.416114Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****DbqQ (6D205DEA) 2025-04-04T17:15:05.416477Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****DbqQ (6D205DEA) () has now valid token of user1 2025-04-04T17:15:08.415840Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****DbqQ (6D205DEA) 2025-04-04T17:15:08.416273Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****DbqQ (6D205DEA) () has now valid token of user1 2025-04-04T17:15:10.683273Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507750938692198:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:10.685448Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c52/r3tmp/tmp9lTbGe/pdisk_1.dat 2025-04-04T17:15:10.867409Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:10.883837Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:10.883944Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:10.891623Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32194, node 4 2025-04-04T17:15:11.030932Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-04-04T17:15:11.030961Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:11.030973Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:11.031107Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:11.306001Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:11.315534Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:11.370412Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:15:11.379700Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-04-04T17:15:11.379737Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-04-04T17:15:11.380505Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****KDYQ (A3363BCB) () has now valid token of user1 2025-04-04T17:15:11.380518Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-04-04T17:15:11.380998Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:15:15.686375Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489507750938692198:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:15.686468Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:16.702470Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****KDYQ (A3363BCB) 2025-04-04T17:15:16.702790Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****KDYQ (A3363BCB) () has now permanent error message 'User not found' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c52/r3tmp/tmp1FfOxE/pdisk_1.dat 2025-04-04T17:15:22.298956Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:22.300555Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:22.327309Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-04-04T17:15:22.327418Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.330326Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1878, node 5 2025-04-04T17:15:22.394802Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:22.394827Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:22.394837Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:22.394962Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:22.688056Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:22.890393Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-04-04T17:15:22.902700Z node 5 :TICKET_PARSER ERROR: Ticket **** (00000000): Ticket is empty >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 |89.5%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> DstCreator::KeyColumnsSizeMismatch [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple >> GroupWriteTest::TwoTables >> DstCreator::WithAsyncIndex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 >> DstCreator::SamePartitionCount [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD] >> DstCreator::CannotFindColumn [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2025-04-04T17:15:21.665755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507799258514009:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:21.665805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001727/r3tmp/tmpLjd4S7/pdisk_1.dat 2025-04-04T17:15:22.132689Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:22.136576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:22.136761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.140823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23094 TServer::EnableGrpc on GrpcPort 11484, node 1 2025-04-04T17:15:22.411097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:22.411128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:22.411137Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:22.411241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23094 WaitRootIsUp 'Root'... 
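[editor's note] Interleaved with the test dumps, ya prints one status arrow per test (">> Name [GOOD]") and |NN.N%| progress markers. A small, SDK-free sketch that tallies verdicts from a captured log like this one; the verdict list is an assumption based on the markers visible in this run.

    # Sketch: tally test verdicts from a captured ya log (verdict set is an assumption).
    import collections
    import re
    import sys

    pattern = re.compile(r">> (\S+) \[(GOOD|FAILED|FAIL|TIMEOUT|CRASHED|SKIPPED)\]")
    verdicts = collections.Counter()
    for line in sys.stdin:
        # findall returns (test_name, verdict) tuples; a wrapped line may hold several.
        for _name, verdict in pattern.findall(line):
            verdicts[verdict] += 1
    print(dict(verdicts))  # e.g. {'GOOD': 14} for the stretch above
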
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:22.972231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... 
(TRUNCATED) 2025-04-04T17:15:23.005203Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.005439Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.005453Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:23.005957Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:24.754604Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-04-04T17:15:24.754664Z node 1 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2025-04-04T17:15:25.340170Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507816756396163:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:25.340215Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001727/r3tmp/tmpGebDI1/pdisk_1.dat 2025-04-04T17:15:25.501066Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:25.511338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:25.511417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:25.517475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4930 TServer::EnableGrpc on GrpcPort 29796, node 2 2025-04-04T17:15:25.687065Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:25.687088Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:25.687095Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:25.687200Z node 2 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:4930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:26.063250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:26.069155Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:26.072229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:15:26.273105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926343 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926112 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926343 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-04T17:15:26.314299Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.314467Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.314480Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:26.315078Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:28.246925Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786926182, tx_id: 281474976715658 } } } 2025-04-04T17:15:28.247251Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:28.248698Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:28.250540Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { 
CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:15:28.250775Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> GroupWriteTest::WithRead >> GroupWriteTest::ByTableName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2025-04-04T17:15:21.660196Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507800949360968:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:21.662529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00174b/r3tmp/tmpD29xM3/pdisk_1.dat 2025-04-04T17:15:22.093474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:22.123386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:22.123518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.127668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28947 TServer::EnableGrpc on GrpcPort 30054, node 1 2025-04-04T17:15:22.414815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:22.414834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:22.414840Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:22.414923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28947 WaitRootIsUp 'Root'... 
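[editor's note] The StatusSchemeError earlier on this stretch ("Key columns size mismatch: expected: 1, got: 2") follows directly from the described Dst schema, which carries two KeyColumnNames ("key" and "value") where the source table has one. A hedged sketch of the two schema shapes that produce this, assuming the ydb Python SDK; endpoint and paths are illustrative, not values from this run.

    # Sketch of the schema shapes behind the mismatch above; names are illustrative.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")  # placeholder
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def create_tables(session):
        # Src: single-column primary key, the shape DstCreator expects to reproduce.
        session.execute_scheme(
            "CREATE TABLE `/Root/Src` (key Uint32, value Utf8, PRIMARY KEY (key));")
        # Dst: composite primary key, described as KeyColumnNames: "key", "value",
        # which DstCreator rejects as 'Key columns size mismatch: expected: 1, got: 2'.
        session.execute_scheme(
            "CREATE TABLE `/Root/Dst` (key Uint32, value Utf8, PRIMARY KEY (key, value));")

    pool.retry_operation_sync(create_tables)
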
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:22.978267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:23.003308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:15:23.010537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786923144 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786923144 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-04T17:15:23.189737Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.189905Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.189919Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:23.191176Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:24.741480Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786923144, tx_id: 281474976710659 } } } 2025-04-04T17:15:24.741854Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:24.743424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:15:24.744664Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-04T17:15:24.744679Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 TClient::Ls request: /Root/Replicated 2025-04-04T17:15:24.774040Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2025-04-04T17:15:24.774081Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743786924817 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-04T17:15:25.437900Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507816735138572:2242];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:25.467842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00174b/r3tmp/tmpB1vrBO/pdisk_1.dat 2025-04-04T17:15:25.585488Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:25.608464Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:25.608545Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:25.614215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62978 TServer::EnableGrpc on GrpcPort 3021, node 2 2025-04-04T17:15:25.831830Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:25.831855Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:25.831862Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:25.831967Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:26.179846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:26.187756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:26.192412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
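[editor's note] DstCreator is the controller actor that materializes the replica table (note the "__async_replica": "true" user attribute in the earlier describe result, and the "Replicated" table it creates here). A sketch of the user-facing DDL that kicks off this machinery, assuming YDB's CREATE ASYNC REPLICATION syntax; the connection string and secret name are placeholders.

    # Sketch of the user-facing DDL behind this machinery; CREATE ASYNC REPLICATION
    # syntax is assumed per YDB docs, endpoint and secret name are placeholders.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    pool.retry_operation_sync(lambda session: session.execute_scheme("""
        CREATE ASYNC REPLICATION `/Root/my_replication`
        FOR `/Root/Table` AS `/Root/Replicated`
        WITH (
            CONNECTION_STRING = 'grpc://localhost:2135/?database=/Root',
            TOKEN_SECRET_NAME = 'replication_token'
        );
    """))
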
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786926294 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926231 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786926294 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-04T17:15:26.263820Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.263951Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.263965Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:26.264487Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:28.545840Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786926294, tx_id: 281474976710658 } } } 2025-04-04T17:15:28.546335Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:28.547986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:15:28.549307Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:15:28.549318Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-04T17:15:28.619971Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T17:15:28.619997Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786926294 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786928660 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-04-04T17:15:21.656605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507797331170051:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:21.656660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001737/r3tmp/tmpL3Ylba/pdisk_1.dat 2025-04-04T17:15:22.116278Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:22.145968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:22.146091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.151169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4155 TServer::EnableGrpc on GrpcPort 20061, node 1 2025-04-04T17:15:22.414908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:22.414940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:22.414947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:22.415054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:22.977757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:23.010535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:23.172970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
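[editor's note] This test drives the same propose-then-describe path as the key-columns case, but the field under test is the ReplicationConfig: later in this dump the pre-existing Dst is described with Mode: REPLICATION_MODE_NONE and ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN, which cannot satisfy a READ_ONLY/ROW replica. An illustrative comparison in the same spirit (plain Python, not the actual DstCreator C++ code):

    # Illustrative only, not the actual DstCreator implementation: compare the
    # replication config of an existing destination against what the controller expects.
    EXPECTED = {"mode": "REPLICATION_MODE_READ_ONLY", "consistency": "CONSISTENCY_LEVEL_ROW"}

    def check_replication_config(described: dict) -> None:
        for field, want in EXPECTED.items():
            got = described.get(field, "UNKNOWN")
            if got != want:
                # Error string modeled on the mismatch messages seen in these dumps.
                raise RuntimeError(f"StatusSchemeError: {field} mismatch: expected: {want}, got: {got}")

    # e.g. the Dst described later in this dump:
    check_replication_config({"mode": "REPLICATION_MODE_NONE",
                              "consistency": "CONSISTENCY_LEVEL_UNKNOWN"})
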
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786923256 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786923256 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-04T17:15:23.226337Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.226461Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.226473Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:23.226972Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:24.662385Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786923151, tx_id: 281474976710658 } } } 2025-04-04T17:15:24.663546Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:24.666693Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:24.668981Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786923256 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: ... 17:15:25.515137Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2211 TServer::EnableGrpc on GrpcPort 65082, node 2 2025-04-04T17:15:25.739553Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:25.739585Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:25.739595Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:25.739701Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:26.107682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:26.123007Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:26.132339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:15:26.216521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926154 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926308 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926154 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926308 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-04T17:15:26.275735Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.275875Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.275887Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:26.279860Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:28.709624Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786926245, tx_id: 281474976715658 } } } 2025-04-04T17:15:28.709950Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:28.711514Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:28.712636Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926308 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:15:28.712843Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# 
StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> DstCreator::EmptyReplicationConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-04-04T17:15:21.662835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507797200194017:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:21.662886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001726/r3tmp/tmp3FJjhG/pdisk_1.dat 2025-04-04T17:15:22.114896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:22.117182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.117539Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:22.123411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30692 TServer::EnableGrpc on GrpcPort 27312, node 1 2025-04-04T17:15:22.411934Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:22.411956Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:22.411962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:22.412083Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:22.983421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:15:23.010611Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:23.040606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923207 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923207 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-04-04T17:15:23.222427Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.222586Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.222613Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:23.223090Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:24.672223Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786923207, tx_id: 281474976710658 } } } 2025-04-04T17:15:24.672856Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:24.676136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-04T17:15:24.677203Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:15:24.677234Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-04T17:15:24.710276Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T17:15:24.710302Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls request: /Root/Dir/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924747 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-04T17:15:25.354404Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507818242406861:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:25.354474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001726/r3tmp/tmpSKEkOd/pdisk_1.dat 2025-04-04T17:15:25.563169Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:25.665429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:25.665478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:25.667046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3616 TServer::EnableGrpc on GrpcPort 9250, node 2 2025-04-04T17:15:25.830807Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:25.830827Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:25.830833Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:25.830937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:26.120198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:26.138885Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:26.144504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743786926504 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926168 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743786926504 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-04-04T17:15:26.522900Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.523193Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.523216Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:26.523681Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:28.578166Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786926504, tx_id: 281474976715658 } } } 2025-04-04T17:15:28.578546Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:28.580066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:15:28.581237Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-04-04T17:15:28.581252Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 TClient::Ls request: /Root/Replicated 2025-04-04T17:15:28.651997Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-04-04T17:15:28.652032Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786928695 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3728, MsgBus: 11644 2025-04-04T17:09:25.011590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506271653069343:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.011628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00152c/r3tmp/tmpfLpeEN/pdisk_1.dat 2025-04-04T17:09:25.392677Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.430563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.430679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.432452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3728, node 1 2025-04-04T17:09:25.643358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.643378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.643384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.643474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11644 TClient is connected to server localhost:11644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:09:26.473313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.495378Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.505935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:09:26.632650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.814534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.902681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.377341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506284537972970:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.377470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.744866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.811423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.842089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.896875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.942602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:29.043213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288832940785:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.043272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.043331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506288832940790:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:29.046109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:29.055214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506288832940792:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.130174Z node 1 :TX_PROXY ERROR: Actor# [1:7489506288832940849:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:30.014545Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489506271653069343:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:30.014622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:09:30.085682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.136555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.199143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.264683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.316911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.377610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.467980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.520220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.582996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.635303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.719271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.763721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 
2025-04-04T17:09:30.815401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.867521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.921597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.967046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:09:31.024980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025- ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:15:16.967600Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:15:16.967632Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:15:16.967691Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:15:16.967719Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:15:16.967760Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:15:16.967800Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:15:16.968487Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:15:16.968533Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:15:16.968733Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:15:16.968762Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:15:16.968899Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:15:16.968931Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:15:16.969117Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:15:16.969148Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:15:16.969273Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:15:16.969296Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038725;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:15:16.982633Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:15:16.982681Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:15:16.982782Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:15:16.982805Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:15:16.982899Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:15:16.982919Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:15:16.982968Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:15:16.982988Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:15:16.983029Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:15:16.983049Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:15:16.983075Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:15:16.983096Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:15:16.983467Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:15:16.983492Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:15:16.983634Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:15:16.983659Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:15:16.983743Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:15:16.983775Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:15:16.983911Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:15:16.983930Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:15:16.984017Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:15:16.984036Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038730;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:15:17.027153Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.030921Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.034729Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.038426Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.042368Z node 5 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.045784Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.051051Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.052854Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.058423Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.059919Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.065933Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.066875Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.072970Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.073888Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.080614Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:17.080727Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> GroupWriteTest::WriteHardRateDispatcher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-04-04T17:15:22.274653Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507801820310828:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:22.274769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001721/r3tmp/tmp7P1cgo/pdisk_1.dat 2025-04-04T17:15:22.704493Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:15:22.711288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:22.711922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:22.714739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22004 TServer::EnableGrpc on GrpcPort 23821, node 1 2025-04-04T17:15:23.066888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:23.066921Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:23.066931Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:23.067048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:23.463574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:15:23.481927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923592 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786923522 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743786923592 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-04T17:15:23.624533Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.624698Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:23.624710Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:23.625252Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:25.230860Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786923592, tx_id: 281474976710658 } } } 2025-04-04T17:15:25.231224Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:25.232714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:15:25.234009Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:15:25.234031Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-04T17:15:25.286996Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T17:15:25.287032Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786925314 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-04-04T17:15:25.973428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507816288692938:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:25.973541Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001721/r3tmp/tmpGgSvKh/pdisk_1.dat 2025-04-04T17:15:26.163237Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:26.182587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:26.182683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:26.183950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14735 TServer::EnableGrpc on GrpcPort 27109, node 2 2025-04-04T17:15:26.397051Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:26.397077Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:26.397092Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:26.397217Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14735 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:26.710673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:26.716424Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:26.720716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:26.808376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926756 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926882 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786926756 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926882 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-04T17:15:26.853184Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.853312Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:26.853328Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:26.853765Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:29.160528Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786926840, tx_id: 281474976715658 } } } 2025-04-04T17:15:29.160883Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:29.162414Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:29.163734Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786926882 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:15:29.164003Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# 
StatusSchemeError, reason# Cannot find column: name: value >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> TTopicYqlTest::DropTopicYql >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-04-04T17:15:23.296942Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507806171530172:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:23.297056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001719/r3tmp/tmpN6YLkf/pdisk_1.dat 2025-04-04T17:15:23.705432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:23.743354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:23.743476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:23.745122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63899 TServer::EnableGrpc on GrpcPort 12669, node 1 2025-04-04T17:15:23.950827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:23.950849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:23.950863Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:23.950966Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:24.316430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:15:24.343459Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:24.352965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:15:24.504499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786924383 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924579 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786924383 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924579 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-04T17:15:24.549356Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:24.549534Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:24.549548Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:24.550185Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:26.230702Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786924460, tx_id: 281474976710658 } } } 2025-04-04T17:15:26.231054Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:26.232409Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:26.234274Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786924579 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricat ... 
968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:27.013061Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:27.019130Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16720 TServer::EnableGrpc on GrpcPort 10109, node 2 2025-04-04T17:15:27.190236Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:27.190276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:27.190283Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:27.190387Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:27.551690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:27.560754Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:27.564608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:27.640816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786927603 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786927750 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786927603 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786927750 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-04T17:15:27.727226Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:27.727368Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:27.727381Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:27.727869Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:29.910818Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786927673, tx_id: 281474976715658 } } } 2025-04-04T17:15:29.911132Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:29.912435Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:29.913313Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786927750 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:15:29.913448Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config >> TPersQueueTest::CheckKillBalancer [GOOD] >> TPersQueueTest::CheckDeleteTopic >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> DstCreator::ColumnTypeMismatch [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::EraseRowsShouldSuccess >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-04-04T17:15:26.634733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507822390218044:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:26.641078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001708/r3tmp/tmp47ql4d/pdisk_1.dat 
2025-04-04T17:15:27.055096Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:27.094251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:27.094416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:27.100458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10192 TServer::EnableGrpc on GrpcPort 16901, node 1 2025-04-04T17:15:27.365501Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:27.365528Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:27.365542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:27.365654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:27.778760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:27.793371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:27.915105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786927834 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786928030 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786927834 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786928030 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-04-04T17:15:27.991047Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:27.991259Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:27.991287Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:27.991784Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:29.683747Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786927904, tx_id: 281474976710658 } } } 2025-04-04T17:15:29.684095Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:29.685599Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:29.687698Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743786928030 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 
BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { ... necting 2025-04-04T17:15:30.518218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28947 TServer::EnableGrpc on GrpcPort 30654, node 2 2025-04-04T17:15:30.722809Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:30.722837Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:30.722844Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:30.722951Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:31.000791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:31.008536Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:15:31.016759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:15:31.092933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786931047 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786931166 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743786931047 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786931166 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-04-04T17:15:31.130647Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:31.130811Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:15:31.130823Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:15:31.131554Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:15:33.136142Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743786931124, tx_id: 281474976715658 } } } 2025-04-04T17:15:33.136453Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:15:33.137659Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-04-04T17:15:33.138612Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743786931166 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-04-04T17:15:33.138789Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# 
StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32
|89.5%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log}
|89.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD]
>> KqpWorkloadServiceActors::TestCpuLoadActor
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD]
Test command err: Trying to start YDB, gRPC: 6746, MsgBus: 10895 2025-04-04T17:09:25.183797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489506268812626045:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:09:25.190762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ee/r3tmp/tmpMfV0vO/pdisk_1.dat 2025-04-04T17:09:25.656606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:09:25.656705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:09:25.658877Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:09:25.673509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6746, node 1 2025-04-04T17:09:25.762856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:09:25.762876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:09:25.762887Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:09:25.763024Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10895 TClient is connected to server localhost:10895 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:09:26.468921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.481391Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:09:26.499493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.679590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.867796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:26.960519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:09:28.508543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281697529642:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.508716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.711293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.739716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.767847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.790055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.819712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.857355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:09:28.910532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281697530149:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.910603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.910827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489506281697530154:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:09:28.914867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:09:28.922657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489506281697530156:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:09:29.009748Z node 1 :TX_PROXY ERROR: Actor# [1:7489506285992497506:3438] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:09:29.954745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:09:30.192043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:09:30.192354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:09:30.192688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:09:30.192817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:09:30.192950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:09:30.193129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:09:30.193260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:09:30.193412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:09:30.193567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:09:30.193748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:09:30.193866Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:09:30.193918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7489506290287465289:2510];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:09:30.193971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7489506290287465289:2510];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:09:30.193978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489506290287465296:2514];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:09:30.194163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7489506290287465289:2510];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:09:30.194 ... cription=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:15:04.654200Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:15:04.655128Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:15:04.655252Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:15:04.655295Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:15:04.655344Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:15:04.655380Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:15:04.655932Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:15:04.655967Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:15:04.656112Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:15:04.656135Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:15:04.656250Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:15:04.656274Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:15:04.656416Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:15:04.656441Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:15:04.656526Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:15:04.656549Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038732;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:15:04.663327Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:15:04.663401Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:15:04.663523Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:15:04.663562Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:15:04.663771Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:15:04.663816Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:15:04.663944Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:15:04.663992Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:15:04.664066Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:15:04.664105Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:15:04.664155Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:15:04.664196Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:15:04.664739Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:15:04.664801Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:15:04.664975Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:15:04.665020Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:15:04.665154Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:15:04.665201Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:15:04.665377Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:15:04.665419Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:15:04.665540Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:15:04.665581Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038722;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:15:04.715762Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038727;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.717860Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038731;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.725770Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038720;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.728732Z node 5 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038734;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.735151Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038722;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.737602Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038729;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.747911Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038735;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.747943Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038733;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.759111Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038725;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.759119Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038721;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.768781Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038726;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.768786Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038724;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.776419Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038730;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.777270Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038728;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.783932Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038723;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722; 2025-04-04T17:15:04.785049Z node 5 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038732;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710722;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710722;
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly
>> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70
>> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD]
>> ResourcePoolClassifiersDdl::TestDropResourcePool
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59
>> GroupWriteTest::WithRead [GOOD]
>> KqpQueryService::DdlColumnTable [GOOD]
>> KqpQueryService::DdlGroup
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD]
Test command err: RandomSeed# 6263787103777395109 2025-04-04T17:15:31.441221Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-04T17:15:31.465252Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-04T17:15:31.465324Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-04T17:15:31.467977Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-04T17:15:31.481560Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:31.484232Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-04T17:15:38.423945Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:15:38.424047Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:38.424103Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:15:38.424144Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:38.508209Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-04-04T17:15:38.508328Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: 
TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK}
>> ResourcePoolsDdl::TestResourcePoolAcl [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersSysViewFilters [GOOD]
>> ResourcePoolsDdl::TestCreateResourcePool
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64
>> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD]
>> EraseRowsTests::EraseRowsFromReplicatedTable
>> EraseRowsTests::EraseRowsShouldSuccess [GOOD]
>> EraseRowsTests::EraseRowsShouldFailOnVariousErrors
>> GroupWriteTest::TwoTables [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD]
Test command err: 2025-04-04T17:15:05.468714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507731621108473:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:05.468771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00178b/r3tmp/tmpqp2drA/pdisk_1.dat 2025-04-04T17:15:05.963911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:05.964018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:05.966498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:05.968979Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12831, node 1 2025-04-04T17:15:06.007444Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:15:06.007468Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:15:06.057076Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:06.057105Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:06.057111Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:06.057209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15009 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:06.376447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:08.403702Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:15:08.403957Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507744506011001:2326], Start check tables existence, number paths: 2 2025-04-04T17:15:08.406935Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:08.406971Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:15:08.409646Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjA4NDQ1ZmUtN2ZjNmQxZjQtYmZiMTVkZDQtZDJjNmIyMTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjA4NDQ1ZmUtN2ZjNmQxZjQtYmZiMTVkZDQtZDJjNmIyMTc= 2025-04-04T17:15:08.410065Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507744506011001:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:15:08.410102Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507744506011001:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:15:08.410126Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507744506011001:2326], Successfully finished 2025-04-04T17:15:08.421756Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:15:08.421798Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-04T17:15:08.421958Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjA4NDQ1ZmUtN2ZjNmQxZjQtYmZiMTVkZDQtZDJjNmIyMTc=, ActorId: [1:7489507744506011027:2329], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:08.433836Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507744506011029:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:08.436915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 
2025-04-04T17:15:08.439151Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507744506011029:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-04T17:15:08.440946Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507744506011029:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:15:08.448054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507744506011029:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:15:08.503030Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507744506011029:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:08.506198Z node 1 :TX_PROXY ERROR: Actor# [1:7489507744506011083:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:08.506369Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507744506011029:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:15:08.517308Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-04T17:15:08.517338Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:15:08.517464Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507744506011092:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:15:08.517531Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjA4NDQ1ZmUtN2ZjNmQxZjQtYmZiMTVkZDQtZDJjNmIyMTc=, ActorId: [1:7489507744506011027:2329], ActorState: ReadyState, TraceId: 01jr0ws6v4cyfpm4caykw7rbz3, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL default WITH ( CONCURRENT_QUERY_LIMIT=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-04T17:15:08.519186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507744506011092:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:08.519290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:08.711567Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YjA4NDQ1ZmUtN2ZjNmQxZjQtYmZiMTVkZDQtZDJjNmIyMTc=, ActorId: [1:7489507744506011027:2329], ActorState: ExecuteState, TraceId: 01jr0ws6v4cyfpm4caykw7rbz3, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7489507744506011101:2329] WorkloadServiceCleanup: 0 2025-04-04T17:15:08.712972Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjA4NDQ1ZmUtN2ZjNmQxZjQtYmZiMTVkZDQtZDJjNmIyMTc=, ActorId: [1:7489507744506011027:2329], ActorState: CleanupState, TraceId: 01jr0ws6v4cyfpm4caykw7rbz3, EndCleanup, isFinal: 0 2025-04-04T17:15:08.713147Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjA4NDQ1ZmUtN2ZjNmQxZjQtYmZiMTVkZDQtZDJjNmIyMTc=, ActorId: [1:7489507744506011027:2329], ActorState: CleanupState, TraceId: 01jr0ws6v4cyfpm4caykw7rbz3, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7489507731621108727:2277] 2025-04-04T17:15:08.718664Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjdmOTllZjMtNWU4YzViNDktNDA5MTc1NDMtNGZiNzMyMjM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjdmOTllZjMtNWU4YzViNDktNDA5MTc1NDMtNGZiNzMyMjM= 2025-04-04T17:15:08.718951Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-04T17:15:08.718983Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjdmOTllZjMtNWU4YzViNDktNDA5MTc1NDMtNGZiNzMyMjM=, ActorId: [1:7489507744506011103:2333], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:08.719124Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjdmOTllZjMtNWU4YzViNDktNDA5MTc1NDMtNGZiNzMyMjM=, ActorId: [1:7489507744506011103:2333], ActorState: ReadyState, TraceId: 01jr0ws71f3b93ea579qh5aynq, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7489507744506011102:2343] database: Root databaseId: /Root pool id: default 2025-04-04T17:15:08.719157Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7489507744506011103:2333], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=1&id=MjdmOTllZjMtNWU4YzViNDktNDA5MTc1NDMtNGZiNzMyMjM= 2025-04-04T17:15:08.719190Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507744506011105:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:15:08.719248Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507744506011106:2335], Database: /Root, Start database fetching 2025-04-04T17:15:08.719495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507744506011105:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:08.719544Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507744506011106:2335], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-04T17:15:08.719582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, Databas ... te: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, Sending to Executer TraceId: 0 8 2025-04-04T17:15:38.641528Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, Created new KQP executer: [5:7489507871155569317:2446] isRollback: 0 2025-04-04T17:15:38.648124Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=N2U1OTM4YTYtODgxNjVkNjgtZmVkYWQ0ZjItOWQ4OGVjMjk=, ActorId: [5:7489507858270666796:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:38.648197Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=N2U1OTM4YTYtODgxNjVkNjgtZmVkYWQ0ZjItOWQ4OGVjMjk=, ActorId: [5:7489507858270666796:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:38.648239Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=N2U1OTM4YTYtODgxNjVkNjgtZmVkYWQ0ZjItOWQ4OGVjMjk=, ActorId: [5:7489507858270666796:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:38.648279Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=N2U1OTM4YTYtODgxNjVkNjgtZmVkYWQ0ZjItOWQ4OGVjMjk=, ActorId: [5:7489507858270666796:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:38.648387Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=N2U1OTM4YTYtODgxNjVkNjgtZmVkYWQ0ZjItOWQ4OGVjMjk=, ActorId: [5:7489507858270666796:2330], ActorState: unknown state, Session actor destroyed 2025-04-04T17:15:38.651397Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:15:38.651647Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, txInfo Status: Committed Kind: ReadWrite TotalDuration: 17.273 ServerDuration: 17.108 QueriesCount: 2 2025-04-04T17:15:38.651817Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:15:38.651925Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:38.651959Z node 5 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, EndCleanup, isFinal: 0 2025-04-04T17:15:38.652026Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt489e3w041etzk7yqgx7, Sent query response back to proxy, proxyRequestId: 18, proxyId: [5:7489507841090797118:2200] 2025-04-04T17:15:38.652668Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, TxId: 2025-04-04T17:15:38.652788Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-04-04T17:15:38.653362Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ReadyState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, received request, proxyRequestId: 19 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [5:7489507871155569328:2454] database: /Root databaseId: /Root pool id: default 2025-04-04T17:15:38.653390Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ReadyState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, request placed into pool from cache: default 2025-04-04T17:15:38.654440Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, ExecutePhyTx, tx: 0x000050C000129898 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:38.654480Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, Sending to Executer TraceId: 0 8 2025-04-04T17:15:38.654537Z node 5 :KQP_SESSION 
DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, Created new KQP executer: [5:7489507871155569332:2446] isRollback: 0 2025-04-04T17:15:38.664037Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-04T17:15:38.664120Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, ExecutePhyTx, tx: 0x000050C000128BD8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:38.665118Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:15:38.665252Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, txInfo Status: Committed Kind: ReadOnly TotalDuration: 10.876 ServerDuration: 10.802 QueriesCount: 2 2025-04-04T17:15:38.665358Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:15:38.666024Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:38.666057Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, EndCleanup, isFinal: 0 2025-04-04T17:15:38.666112Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ExecuteState, TraceId: 01jr0wt48x1a4xvxs4m0kqrv7f, Sent query response back to proxy, proxyRequestId: 19, proxyId: [5:7489507841090797118:2200] 2025-04-04T17:15:38.666507Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, TxId: 2025-04-04T17:15:38.666579Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, TxId: 2025-04-04T17:15:38.666674Z node 5 :KQP_WORKLOAD_SERVICE TRACE: 
[WorkloadService] [TPoolHandlerActorBase] ActorId: [5:7489507858270666926:2337], DatabaseId: /Root, PoolId: my_pool, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-04T17:15:38.667104Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:38.667132Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:38.667156Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:38.667177Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:38.667245Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NjAwMTJiM2MtY2ZjMWRiNWQtOTkzMDBiNDItMmMwMjdjN2Q=, ActorId: [5:7489507871155569297:2446], ActorState: unknown state, Session actor destroyed
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD]
Test command err: RandomSeed# 13154718002845073102 2025-04-04T17:15:30.443617Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-04T17:15:30.443725Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-04T17:15:30.494419Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-04T17:15:30.494503Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-04T17:15:30.494632Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-04T17:15:30.494668Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-04T17:15:30.499053Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-04T17:15:30.499136Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-04T17:15:30.523144Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send 
TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:30.523271Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:30.527700Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-04T17:15:30.527776Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-04T17:15:39.764987Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:15:39.765079Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:39.765138Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:39.765172Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:15:39.765207Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:39.765244Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:39.765274Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:15:39.765309Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:39.765345Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 
18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:39.853466Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-04T17:15:39.853563Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-04-04T17:15:39.853610Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-04T17:15:39.853652Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-04-04T17:15:39.853695Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2025-04-04T17:15:39.853736Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 |89.6%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> KqpQueryService::DdlGroup [GOOD] >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> TPersQueueTest::SetupWriteSessionOnDisabledCluster >> KqpQueryService::DdlExecuteScript >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard |89.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-04T17:15:37.221067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.221623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.221676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00124a/r3tmp/tmp7b8nCE/pdisk_1.dat 2025-04-04T17:15:37.696087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.765560Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.806742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.808698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.821726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.921891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.977737Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:37.977994Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.030351Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.030459Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.032072Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.032142Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.032197Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.032533Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.032650Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.032712Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.043619Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.065881Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.067276Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.067405Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.067464Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.067496Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.067527Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.068957Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.069108Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.069233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.069326Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.069369Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.069423Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.069508Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.071696Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.072746Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.072866Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.074772Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.085594Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.086943Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.237842Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.258418Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.258502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.259331Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.259382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.259452Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.259719Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.259878Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.260457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.260546Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.264207Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.264618Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
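The TX_DATASHARD records above trace a fixed propose -> plan -> execute -> complete pipeline for scheme transactions: the shard prepares the transaction on propose, the coordinator assigns it a plan step, and the progress transaction drains the plan queue in step order. A minimal self-contained C++ sketch of that flow (illustrative only, not YDB source; all names are invented):

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <optional>
    #include <utility>

    struct Tx { uint64_t TxId = 0; std::optional<uint64_t> Step; bool Executed = false; };

    class MiniDataShard {
        std::map<uint64_t, Tx> Prepared;                        // txId -> tx, awaiting a plan step
        std::map<std::pair<uint64_t, uint64_t>, Tx> PlanQueue;  // (step, txId), executed in order
    public:
        void Propose(uint64_t txId) {           // corresponds to TTxProposeTransactionBase
            Prepared[txId] = Tx{txId, std::nullopt};
            std::cout << "Prepared scheme transaction txId " << txId << "\n";
        }
        void Plan(uint64_t step, uint64_t txId) {  // plan step arrives from the coordinator
            Tx tx = Prepared.extract(txId).mapped();
            tx.Step = step;
            PlanQueue[{step, txId}] = tx;
            std::cout << "Planned transaction txId " << txId << " at step " << step << "\n";
        }
        void Progress() {                       // corresponds to TTxProgressTransaction
            while (!PlanQueue.empty()) {
                auto node = PlanQueue.extract(PlanQueue.begin());
                node.mapped().Executed = true;  // run the execution units for this tx
                std::cout << "Complete [" << *node.mapped().Step << " : "
                          << node.mapped().TxId << "]\n";
            }
        }
    };

    int main() {
        MiniDataShard shard;
        shard.Propose(281474976715657ULL);
        shard.Plan(1000, 281474976715657ULL);
        shard.Progress();   // prints Complete [1000 : 281474976715657], as in the log
    }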
2025-04-04T17:15:38.270823Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.270872Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.271857Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.271929Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.273072Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.273405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.273441Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.273497Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.273552Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.273597Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.273699Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.288771Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.288961Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.289042Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.307078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.307204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.307275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.326263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.331836Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.491664Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.494385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.579479Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.208810Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3y0f4z6jhewabw9xjj1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ0M2EwZDYtMWFlNTkxMzAtZjdkMThjOS1iOTVlMzQzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.219080Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.219341Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.233666Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 37968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:43.069131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:43.092669Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:665:2569] 2025-04-04T17:15:43.092883Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:43.140055Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:43.140184Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:43.141808Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:43.141883Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:43.141941Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:43.142294Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:43.142423Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:43.142493Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:681:2569] in generation 1 2025-04-04T17:15:43.154735Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:43.154815Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:43.154914Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:43.155006Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:683:2579] 2025-04-04T17:15:43.155040Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:43.155072Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:43.155105Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.155459Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:43.155547Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:43.155606Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.155634Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.155664Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:43.155736Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.155797Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:661:2566], serverId# [2:670:2571], sessionId# [0:0:0] 2025-04-04T17:15:43.156092Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:43.156235Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:43.156285Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:43.157438Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.168039Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:43.168147Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:43.315970Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2592], serverId# [2:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:43.316420Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:43.316468Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.317037Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.317079Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:43.317120Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:43.317297Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:43.317381Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:43.317867Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.317930Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:43.318494Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:43.318892Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.320492Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-04-04T17:15:43.320540Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.321166Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:43.321276Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.322285Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.322333Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:43.322371Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:43.322426Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:43.322487Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:43.322605Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.323475Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.325323Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:43.325368Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:43.325536Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:43.329423Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:738:2620], serverId# [2:739:2621], sessionId# [0:0:0] 2025-04-04T17:15:43.329539Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:43.350665Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:43.350760Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.351163Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:738:2620], serverId# [2:739:2621], sessionId# [0:0:0] 2025-04-04T17:15:43.353077Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:744:2626], serverId# [2:745:2627], sessionId# [0:0:0] 2025-04-04T17:15:43.353219Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:43.353398Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:43.353447Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.353618Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:744:2626], serverId# [2:745:2627], sessionId# [0:0:0] 2025-04-04T17:15:43.356667Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:749:2631], serverId# [2:750:2632], sessionId# [0:0:0] 2025-04-04T17:15:43.356798Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 
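Each erase request running through this stretch of the log follows the same round trip: a client pipe connects to the shard (Server connected), one direct transaction runs (TTxDirectBase(48) Execute, then Complete), and the pipe disconnects. A hedged sketch of that loop, with invented names standing in for the real machinery:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    struct EraseBatch { std::vector<uint64_t> Keys; };

    // One connect -> execute -> complete -> disconnect cycle per batch,
    // mirroring the repeated record pattern above.
    bool ExecuteDirectErase(uint64_t tabletId, const EraseBatch& batch) {
        std::cout << "Server connected at leader tablet# " << tabletId << "\n";
        std::cout << "Execute: erasing " << batch.Keys.size() << " rows\n";
        std::cout << "Complete: at tablet# " << tabletId << "\n";
        std::cout << "Server disconnected\n";
        return true;
    }

    int main() {
        const uint64_t shard = 72075186224037888ULL;
        for (uint64_t base = 0; base < 30; base += 10)  // several small batches, as in the log
            ExecuteDirectErase(shard, EraseBatch{{base, base + 1, base + 2}});
    }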
2025-04-04T17:15:43.356983Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:43.357023Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.357253Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:749:2631], serverId# [2:750:2632], sessionId# [0:0:0] 2025-04-04T17:15:43.358657Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:754:2636], serverId# [2:755:2637], sessionId# [0:0:0] 2025-04-04T17:15:43.358754Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:43.358869Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:43.358895Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.358998Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:754:2636], serverId# [2:755:2637], sessionId# [0:0:0] 2025-04-04T17:15:43.360081Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:759:2641], serverId# [2:760:2642], sessionId# [0:0:0] 2025-04-04T17:15:43.360184Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:43.360351Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:43.360379Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.360496Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:759:2641], serverId# [2:760:2642], sessionId# [0:0:0] 2025-04-04T17:15:43.361549Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:764:2646], serverId# [2:765:2647], sessionId# [0:0:0] 2025-04-04T17:15:43.361638Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:43.361729Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:43.361753Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.361895Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:764:2646], serverId# [2:765:2647], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: 2025-04-04T17:15:37.222064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.222631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.222691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00125a/r3tmp/tmptX58Jc/pdisk_1.dat 2025-04-04T17:15:37.696455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.761423Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.806751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.807884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.821359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.922559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.977388Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:37.977616Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.029591Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.029740Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.031809Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.031893Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.031953Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.032370Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.032518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.032616Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.043608Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.077066Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.077253Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.077372Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.077422Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.077459Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.077494Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.077966Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.078089Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.078228Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.079491Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.079547Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.079618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.079715Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.080222Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.080535Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.080636Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.082387Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.095038Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.095201Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.239599Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.245108Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.245194Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.246043Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.246097Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.246178Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.246830Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.247014Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.247693Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.247791Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.252961Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.253420Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
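The ConditionalEraseRowsShouldEraseOn*Seconds tests bracketing this output all exercise one comparison: the stored column value is normalized to a common time unit before being checked against the expiration threshold. A sketch of that check under assumed semantics (not the actual YDB implementation):

    #include <cstdint>
    #include <iostream>

    enum class EUnit { Seconds, MilliSeconds, MicroSeconds, NanoSeconds };

    // Normalize a raw column value to microseconds before comparing.
    uint64_t ToMicroSeconds(uint64_t value, EUnit unit) {
        switch (unit) {
            case EUnit::Seconds:      return value * 1000000;
            case EUnit::MilliSeconds: return value * 1000;
            case EUnit::MicroSeconds: return value;
            case EUnit::NanoSeconds:  return value / 1000;
        }
        return value;
    }

    bool ShouldErase(uint64_t columnValue, EUnit unit, uint64_t thresholdUs) {
        return ToMicroSeconds(columnValue, unit) <= thresholdUs;  // expired rows are erased
    }

    int main() {
        const uint64_t thresholdUs = 1704067200000000ULL;  // 2024-01-01T00:00:00Z in microseconds
        // One day before the threshold, stored as milliseconds: erased (prints 1).
        std::cout << ShouldErase(1703980800000ULL, EUnit::MilliSeconds, thresholdUs);
        // One day after the threshold, stored as nanoseconds: kept (prints 0).
        std::cout << ShouldErase(1704153600000000000ULL, EUnit::NanoSeconds, thresholdUs) << "\n";
    }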
2025-04-04T17:15:38.255151Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.255203Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.256252Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.256330Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.257497Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.257842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.257877Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.257931Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.257989Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.258034Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.258114Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.273600Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.273765Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.273816Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.296098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.296211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.296283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.308078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.313790Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.467640Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.470370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.556834Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.208832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3xn6q8t7kaxpbeg73g3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQ3ZTM2ZWQtZDM4NTQxMzUtNWVhYTU1MGQtNWNlZDBmODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.234896Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.235157Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.247672Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 86224037888 2025-04-04T17:15:43.204575Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.204620Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:43.204666Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.204733Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:661:2566], serverId# [2:670:2571], sessionId# [0:0:0] 2025-04-04T17:15:43.205170Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:43.205399Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:43.205479Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:43.207386Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.218132Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:43.218274Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:43.372072Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2592], serverId# [2:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:43.372691Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:43.372743Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.373376Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.373426Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:43.373471Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:43.373699Z node 2 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:43.373831Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:43.374530Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.374610Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:43.375000Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:43.375368Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.376965Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:43.377031Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.377621Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:43.377692Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.379118Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.379161Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:43.379209Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:43.379269Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:43.379319Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:43.379394Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.380295Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.382602Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:43.382671Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:43.382932Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:43.390838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:43.390947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:747:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:43.391294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:43.396429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:43.402647Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.556109Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.558874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:43.593522Z node 2 :TX_PROXY ERROR: Actor# [2:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:43.690462Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt8wx3pnvykdajhm81k2n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTYwOGZiZGMtMjQ5NjY2NDAtZTc0YTIxZDYtYWI0NDg1MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:43.691038Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:851:2687], serverId# [2:852:2688], sessionId# [0:0:0] 2025-04-04T17:15:43.691243Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:43.703603Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:43.703748Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.709799Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:859:2694], serverId# [2:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:43.710723Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:43.722422Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:43.722485Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.722740Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:43.722778Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-04T17:15:43.723017Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.723073Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.723123Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:43.723188Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.723267Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:859:2694], serverId# [2:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:43.724908Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:43.725264Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:43.725494Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.725547Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:43.725590Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:43.725829Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:43.725894Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
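The read-table scan that follows is quota-gated: the shard may send only as many stream messages as the reader has granted, and it tracks unacknowledged messages in PendingAcks before finishing. A toy model of that handshake (invented names, not YDB source):

    #include <cstdint>
    #include <iostream>
    #include <queue>

    int main() {
        std::queue<int> rows({1, 2, 3});          // rows still to be streamed
        uint64_t messageQuota = 0, pendingAcks = 0;
        while (!rows.empty()) {
            messageQuota += 1;                    // "Got quota for read table scan"
            rows.pop();                           // "Send response data"
            --messageQuota; ++pendingAcks;        // one message consumed, awaiting ack
            --pendingAcks;                        // "Got stream data ack ... PendingAcks: 0"
        }
        // Matches the log's terminal state: quota spent, nothing outstanding.
        std::cout << "Finish scan, MessageQuota: " << messageQuota
                  << " PendingAcks: " << pendingAcks << "\n";
    }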
2025-04-04T17:15:43.726492Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:15:43.726745Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:43.726882Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-04T17:15:43.726922Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-04T17:15:43.746848Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:43.746913Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-04T17:15:43.747305Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.747349Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:43.747384Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:43.747508Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.747563Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.747612Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: 2025-04-04T17:15:37.223301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.223733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.223801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0008ac/r3tmp/tmpkCyn02/pdisk_1.dat 2025-04-04T17:15:37.696025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.756680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.808005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.808159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.823414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.928362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.990162Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:37.990416Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.036216Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.036361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.038270Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.038365Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.038424Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.038844Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.038991Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.039076Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.049947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.071865Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.072056Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.072229Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.072280Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.072319Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.072356Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.072794Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.072915Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.073053Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.073113Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.073157Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.073217Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.073293Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.073681Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.073947Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.074038Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.075946Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.086768Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.086919Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.242792Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.247950Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.248025Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.248789Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.248842Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.248919Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.249200Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.249376Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.250585Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.250697Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.252601Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.253005Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:38.262192Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.262271Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.263404Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.263499Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.265166Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.265532Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.265575Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.265630Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.265694Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.265764Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.265856Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.274316Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.274500Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.274562Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.295944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.296057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.296124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.307114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.313008Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.465035Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.467376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.542178Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.209031Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3xn68dk6bsvw8c1md2d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMzN2QwY2UtNjhhZDM2OTItYmQxNTJmZTUtZDQ0MDc5OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.216613Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.217723Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.233431Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 86224037888 2025-04-04T17:15:43.531761Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.531987Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:43.532034Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.532103Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:661:2566], serverId# [2:670:2571], sessionId# [0:0:0] 2025-04-04T17:15:43.532544Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:43.532745Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:43.532818Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:43.534367Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.545285Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:43.545394Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:43.702130Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2592], serverId# [2:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:43.702767Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:43.702818Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.703468Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.703514Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:43.703553Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:43.703804Z node 2 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:43.703937Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:43.704615Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.704682Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:43.705056Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:43.705435Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.707388Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:43.707453Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.707961Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:43.708028Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.708875Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.708917Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:43.708963Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:43.709022Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:43.709069Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:43.709141Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.710016Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.712487Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:43.712544Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:43.712785Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:43.722099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:43.722231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:747:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:43.722630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:43.729996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:43.734984Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.883902Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.886738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:43.923714Z node 2 :TX_PROXY ERROR: Actor# [2:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:44.019695Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt978534v41g6480trnrb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDAzNGZhNGEtZDlmYzk0ZTAtMmVhM2EyOGMtMTQxMDZmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:44.020192Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:851:2687], serverId# [2:852:2688], sessionId# [0:0:0] 2025-04-04T17:15:44.020375Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:44.032535Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:44.032692Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:44.036412Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:859:2694], serverId# [2:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:44.037580Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:44.048864Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:44.048945Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:44.049208Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:44.049245Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-04T17:15:44.049516Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:44.049568Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:44.049613Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:44.049682Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:44.049768Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:859:2694], serverId# [2:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:44.050671Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:44.051037Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:44.051231Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:44.051280Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:44.051328Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:44.051526Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:44.051584Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-04T17:15:44.052140Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:15:44.052356Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:44.052477Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-04T17:15:44.052520Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-04T17:15:44.071183Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:44.071258Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-04T17:15:44.071648Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:44.071688Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:44.071730Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:44.071863Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:44.071919Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:44.071961Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2025-04-04T17:15:37.321373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.321820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.321876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001231/r3tmp/tmpUTFNkM/pdisk_1.dat 2025-04-04T17:15:37.704880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.760268Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.806628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.807736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.820191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.922080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.991567Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:37.991861Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.065006Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.065155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.066944Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.067027Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.067083Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.067509Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.067646Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.067721Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.078575Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.100926Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.101120Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.101272Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.101319Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.101373Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.101413Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.101859Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.101974Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.102101Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.102153Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.102200Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.102303Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.102392Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.102868Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.103137Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.103237Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.105138Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.115987Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.116132Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.258852Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.265272Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.265385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.266221Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.266295Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.266370Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.266670Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.266806Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.267273Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.267336Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.268695Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.269030Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:38.272127Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.272200Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.273432Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.273523Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.274827Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.275166Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.275203Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.275257Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.275319Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.275375Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.275479Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.280554Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.280730Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.280791Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.298283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.298418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.298490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.312565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.321683Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.478052Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.480496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.543701Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.208817Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3xr2a1n2h2m4k76dst3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYwM2Y4NDgtNjc2ZTM2NjItNjY3NGRiMzgtNTUzZDk3Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.228785Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.229072Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.242535Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 4-04T17:15:39.357563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:39.357614Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:39.357664Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:39.357957Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:39.358025Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:39.362871Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:15:39.363173Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:39.363311Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-04T17:15:39.363360Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 0 2025-04-04T17:15:39.365019Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:39.365066Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-04T17:15:39.365476Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:39.365516Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:39.365553Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:39.365683Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:39.365759Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:39.365807Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:42.673440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], 
Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:42.673555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:15:42.673779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001231/r3tmp/tmpwmPvGU/pdisk_1.dat 2025-04-04T17:15:42.941320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:42.970191Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:43.008237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:43.008374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:43.019865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:43.101517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:43.123775Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:665:2569] 2025-04-04T17:15:43.124006Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:43.186959Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:43.187049Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:43.188342Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:43.188425Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:43.188479Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:43.188743Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:43.188835Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:43.188902Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:681:2569] in generation 1 2025-04-04T17:15:43.199537Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:43.199599Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:43.199686Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:43.199790Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:683:2579] 2025-04-04T17:15:43.199827Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:43.199865Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:43.199901Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.200248Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:43.200336Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:43.200410Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.200451Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:43.200484Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:43.200524Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.200580Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:661:2566], serverId# [2:670:2571], sessionId# [0:0:0] 2025-04-04T17:15:43.200988Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:43.201171Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:43.201233Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:43.202791Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.213467Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:43.213566Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:43.372092Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2592], serverId# [2:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:43.372694Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:43.372747Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.373373Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.373423Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:43.373468Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:43.373705Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:43.373831Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:43.374519Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:43.374604Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:43.374993Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:43.375394Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:43.377069Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:43.377115Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.377614Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:43.377693Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.378860Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:43.378907Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:43.378956Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:43.379021Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:43.379085Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:43.379175Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:43.380200Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:43.382664Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:43.382723Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:43.382955Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:43.388422Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:738:2620], serverId# [2:739:2621], sessionId# [0:0:0] 2025-04-04T17:15:43.388552Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2025-04-04T17:15:43.388742Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:738:2620], serverId# [2:739:2621], sessionId# [0:0:0] >> TTopicYqlTest::DropTopicYql [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD] >> TPersQueueTest::PartitionsMapping |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> KqpUserConstraint::KqpReadNull+UploadNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-04-04T17:15:37.357932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.358223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.366292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0009d8/r3tmp/tmpKIV3bo/pdisk_1.dat 2025-04-04T17:15:37.701696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.769685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.812372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.812523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.826156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.920539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:38.005477Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:38.005800Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.049338Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.049482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.051283Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.051371Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.051435Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.051894Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.052021Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.052098Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.066642Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.099174Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.099385Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.099540Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.099596Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.099631Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.099668Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.100105Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.100202Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.100323Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.100379Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.100424Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.100490Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.100561Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.100978Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.101247Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.101374Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.103269Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.117636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.117765Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.280144Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.284690Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.284783Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.285586Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.285638Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.285707Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.285975Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.286169Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.286818Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.286915Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.288835Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.289285Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:38.291054Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.291107Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.292226Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.292313Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.293579Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.293919Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.293953Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.294016Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.294077Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.294129Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.294316Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.299146Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.299328Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.299394Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.309287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.309404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.309474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.314814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.320543Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.483584Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.486309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.561872Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.209227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3y3ashbkg90280bw4h9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk4YTU1NTgtYjI4MTAxNmItZmVmMTAzNTUtOTBjMTQ0M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.216968Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.218288Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.233882Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 24037893 2025-04-04T17:15:44.785771Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1246:3029], serverId# [2:1247:3030], sessionId# [0:0:0] 2025-04-04T17:15:44.785927Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1246:3029], serverId# [2:1247:3030], sessionId# [0:0:0] 2025-04-04T17:15:44.787203Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1251:3034], serverId# [2:1252:3035], sessionId# [0:0:0] 2025-04-04T17:15:44.787355Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1251:3034], serverId# [2:1252:3035], sessionId# [0:0:0] 2025-04-04T17:15:44.788440Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1256:3039], serverId# [2:1257:3040], sessionId# [0:0:0] 2025-04-04T17:15:44.788619Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1256:3039], serverId# [2:1257:3040], sessionId# [0:0:0] 2025-04-04T17:15:44.790510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:15:44.793778Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-04T17:15:44.793862Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:15:44.793938Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T17:15:44.794373Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-04T17:15:44.794459Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-04T17:15:44.794505Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 
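The NOT_FOUND warnings above are the workload service discovering that the `default` resource pool does not exist yet; it then creates it on the fly (ESchemeOpCreateResourcePool, opId 281474976715658:3), and the subsequent "path exist, request accepts it" error from TX_PROXY is the expected benign race when two actors attempt the creation concurrently. Such a pool can also be declared explicitly; a sketch assuming the workload-manager DDL described in the YDB documentation (pool name and limit values are illustrative):

    CREATE RESOURCE POOL sample_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );
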
2025-04-04T17:15:44.832991Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1278:3058] 2025-04-04T17:15:44.833193Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:44.843041Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:44.843153Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:44.844385Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-04-04T17:15:44.844464Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-04-04T17:15:44.844518Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-04-04T17:15:44.844787Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:44.844898Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:44.844965Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [2:1295:3058] in generation 1 2025-04-04T17:15:44.865997Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:44.866073Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2025-04-04T17:15:44.866153Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:44.866214Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1297:3069] 2025-04-04T17:15:44.866281Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-04T17:15:44.866310Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-04-04T17:15:44.866353Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:15:44.866724Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-04-04T17:15:44.866807Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-04-04T17:15:44.866851Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-04T17:15:44.866877Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:44.866906Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-04-04T17:15:44.866932Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-04T17:15:44.867255Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1279:3059], serverId# [2:1286:3063], sessionId# [0:0:0] 2025-04-04T17:15:44.867348Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:15:44.867513Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-04-04T17:15:44.867596Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-04-04T17:15:44.868038Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-04T17:15:44.881024Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:15:44.881120Z node 2 :TX_DATASHARD DEBUG: 
72075186224037894 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:45.015933Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1307:3079], serverId# [2:1309:3081], sessionId# [0:0:0] 2025-04-04T17:15:45.016762Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-04-04T17:15:45.016796Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:15:45.017306Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-04T17:15:45.017340Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:45.017384Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2025-04-04T17:15:45.017584Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys extracted: 0 2025-04-04T17:15:45.017671Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:45.017902Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-04-04T17:15:45.017971Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-04-04T17:15:45.018338Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:45.018686Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:45.019867Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-04-04T17:15:45.019922Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:15:45.020515Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-04-04T17:15:45.020557Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-04T17:15:45.021354Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-04-04T17:15:45.021433Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:15:45.021511Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-04-04T17:15:45.021675Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T17:15:45.021816Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-04-04T17:15:45.021854Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-04-04T17:15:45.021886Z node 
2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:45.021944Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-04-04T17:15:45.021967Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-04-04T17:15:45.021998Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2025-04-04T17:15:45.022069Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:417:2411], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:45.022119Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-04-04T17:15:45.022220Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:15:45.023452Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2025-04-04T17:15:45.024363Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-04-04T17:15:45.024408Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-04T17:15:45.028180Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1336:3102], serverId# [2:1337:3103], sessionId# [0:0:0] 2025-04-04T17:15:45.028354Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1336:3102], serverId# [2:1337:3103], sessionId# [0:0:0] 2025-04-04T17:15:45.029573Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1341:3107], serverId# [2:1342:3108], sessionId# [0:0:0] 2025-04-04T17:15:45.029765Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1341:3107], serverId# [2:1342:3108], sessionId# [0:0:0] 2025-04-04T17:15:45.031419Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1346:3112], serverId# [2:1347:3113], sessionId# [0:0:0] 2025-04-04T17:15:45.031647Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1346:3112], serverId# [2:1347:3113], sessionId# [0:0:0] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersQueueTest::AllEqual [GOOD] >> TPersQueueTest::BadSids >> GroupWriteTest::Simple [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 8659466132292831351 2025-04-04T17:15:30.497447Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-04T17:15:30.526146Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-04T17:15:30.526220Z 1 00h01m00.010512s 
:BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-04T17:15:30.529398Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-04T17:15:30.545994Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:30.549085Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-04T17:15:46.490654Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:15:46.490778Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:46.490837Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:15:46.490875Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:46.554113Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-04-04T17:15:46.554212Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2025-04-04T17:15:04.520197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507727750429197:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:04.520246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00179e/r3tmp/tmpyLcwjX/pdisk_1.dat 2025-04-04T17:15:05.011665Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:05.038692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:05.038819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28213, node 1 2025-04-04T17:15:05.049111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:05.186559Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:05.186617Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-04T17:15:05.186628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:05.186772Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:05.513520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:07.710784Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:07.710812Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2025-04-04T17:15:07.713439Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU= 2025-04-04T17:15:07.713792Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU=, ActorId: [1:7489507740635331724:2327], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:07.744756Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw 2025-04-04T17:15:07.744864Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:07.745181Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ReadyState, TraceId: 01jr0ws63185y241d0pmceja69, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7489507740635331725:2297] database: Root databaseId: /Root pool id: 2025-04-04T17:15:07.745286Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, Sending CompileQuery request 2025-04-04T17:15:07.980025Z node 1 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, ExecutePhyTx, tx: 0x000050C00022BF58 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:07.980060Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, Sending to Executer TraceId: 0 8 2025-04-04T17:15:07.980148Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, Created new KQP executer: [1:7489507740635331731:2328] isRollback: 0 2025-04-04T17:15:08.004406Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, Forwarded TEvStreamData to [1:7489507740635331725:2297] 2025-04-04T17:15:08.008549Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-04T17:15:08.008719Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, txInfo Status: Committed Kind: Pure TotalDuration: 28.776 ServerDuration: 28.726 QueriesCount: 2 2025-04-04T17:15:08.008801Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:15:08.008958Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:08.008978Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, EndCleanup, isFinal: 1 2025-04-04T17:15:08.009021Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: ExecuteState, TraceId: 01jr0ws63185y241d0pmceja69, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7489507727750429448:2277] 2025-04-04T17:15:08.009046Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: unknown state, TraceId: 01jr0ws63185y241d0pmceja69, Cleanup temp tables: 0 2025-04-04T17:15:08.009422Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjMzZTkwLTc3NzU2YThiLWViMGE4NmJhLWVjYzBjNGYw, ActorId: [1:7489507740635331726:2328], ActorState: unknown state, TraceId: 
01jr0ws63185y241d0pmceja69, Session actor destroyed 2025-04-04T17:15:08.024185Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU=, ActorId: [1:7489507740635331724:2327], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:08.024225Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU=, ActorId: [1:7489507740635331724:2327], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:08.024250Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU=, ActorId: [1:7489507740635331724:2327], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:08.024287Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU=, ActorId: [1:7489507740635331724:2327], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:08.024341Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=M2QxMWIwZjItZTJiMDhjMzAtYzhiNjBiNzktNTkzMmJlNmU=, ActorId: [1:7489507740635331724:2327], ActorState: unknown state, Session actor destroyed 2025-04-04T17:15:08.992465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507742914658036:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:08.992521Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00179e/r3tmp/tmpMBNr0d/pdisk_1.dat 2025-04-04T17:15:09.198125Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10568, node 2 2025-04-04T17:15:09.301386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:09.301502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:09.303939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:09.306825Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:09.306851Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:09.306857Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:09.306972Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23630 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 ... 
8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] RunDataQuery: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2025-04-04T17:15:42.782697Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-04T17:15:42.782798Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ReadyState, TraceId: 01jr0wt89y2ardfj0encxm3nae, received request, proxyRequestId: 6 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpcActor: [8:7489507888964254930:2360] database: /Root databaseId: /Root pool id: default 2025-04-04T17:15:42.782815Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489507888964254932:2361], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:15:42.782890Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [8:7489507888964254929:2359], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw== 2025-04-04T17:15:42.782992Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7489507888964254933:2362], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, Start pool fetching 2025-04-04T17:15:42.783048Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489507888964254934:2363], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:15:42.783342Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489507888964254932:2361], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-04T17:15:42.783406Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-04-04T17:15:42.783406Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489507888964254934:2363], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-04-04T17:15:42.783453Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7489507888964254933:2362], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, Pool info successfully resolved 2025-04-04T17:15:42.783481Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw== 2025-04-04T17:15:42.783527Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw== 2025-04-04T17:15:42.783542Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7489507888964254894:2349], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7489507888964254929:2359], session id: 
ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw== 2025-04-04T17:15:42.783581Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7489507888964254894:2349], DatabaseId: /Root, PoolId: default, Reply continue success to [8:7489507888964254929:2359], session id: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, local in flight: 1 2025-04-04T17:15:42.783584Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, continue request, pool id: default 2025-04-04T17:15:42.783850Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2025-04-04T17:15:43.203809Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, ExecutePhyTx, tx: 0x000050C000549358 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:43.203874Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, Sending to Executer TraceId: 0 8 2025-04-04T17:15:43.203958Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, Created new KQP executer: [8:7489507893259222250:2359] isRollback: 0 2025-04-04T17:15:43.210305Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-04T17:15:43.210415Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, ExecutePhyTx, tx: 0x000050C000549E98 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:43.211093Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:15:43.211246Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, txInfo Status: Committed Kind: ReadOnly TotalDuration: 7.528 ServerDuration: 7.464 QueriesCount: 2 2025-04-04T17:15:43.211340Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:15:43.211411Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: 
[8:7489507888964254929:2359], ActorState: ExecuteState, TraceId: 01jr0wt89y2ardfj0encxm3nae, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-04T17:15:43.211444Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7489507888964254894:2349], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7489507888964254929:2359], session id: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, duration: 0.428445s, cpu consumed: 0.001837s 2025-04-04T17:15:43.211483Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7489507888964254894:2349], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7489507888964254929:2359], session id: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, local in flight: 0 2025-04-04T17:15:43.211557Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 0.428445s, CpuConsumed: 0.001837s, AdjustCpuQuota: 0 2025-04-04T17:15:43.211717Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: CleanupState, TraceId: 01jr0wt89y2ardfj0encxm3nae, EndCleanup, isFinal: 0 2025-04-04T17:15:43.211788Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: CleanupState, TraceId: 01jr0wt89y2ardfj0encxm3nae, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7489507858899482964:2279] 2025-04-04T17:15:43.212102Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, TxId: 2025-04-04T17:15:43.212210Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, TxId: 2025-04-04T17:15:43.212426Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:43.212468Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:43.212491Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:43.212509Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:43.212619Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDZjY2IwNWMtMzVjZTY3YTEtNGU2ZTRkOTMtMzk1Yjc0Yw==, ActorId: [8:7489507888964254929:2359], ActorState: unknown state, Session actor destroyed 2025-04-04T17:15:43.221899Z node 8 :KQP_SESSION INFO: 
SessionId: ydb://session/3?node_id=8&id=ODg4YmIzZTAtYjU3YzdjZjQtMjkxNmEwMzgtYmQ3OThlYzU=, ActorId: [8:7489507884669287428:2336], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:43.221955Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ODg4YmIzZTAtYjU3YzdjZjQtMjkxNmEwMzgtYmQ3OThlYzU=, ActorId: [8:7489507884669287428:2336], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:43.221987Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ODg4YmIzZTAtYjU3YzdjZjQtMjkxNmEwMzgtYmQ3OThlYzU=, ActorId: [8:7489507884669287428:2336], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:43.222017Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ODg4YmIzZTAtYjU3YzdjZjQtMjkxNmEwMzgtYmQ3OThlYzU=, ActorId: [8:7489507884669287428:2336], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:43.222108Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ODg4YmIzZTAtYjU3YzdjZjQtMjkxNmEwMzgtYmQ3OThlYzU=, ActorId: [8:7489507884669287428:2336], ActorState: unknown state, Session actor destroyed >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> TPersQueueTest::DirectReadRestartTablet [GOOD] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAutoKind ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2025-04-04T17:14:49.863815Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507662242739137:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:49.863939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017e3/r3tmp/tmp0thkVD/pdisk_1.dat 2025-04-04T17:14:50.377935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:50.378063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:50.384893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:50.422945Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22861, node 1 2025-04-04T17:14:50.682578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-04-04T17:14:50.682608Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:50.682614Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:50.682730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:51.373402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:52.956999Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:14:52.957093Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675127641529:2326], Start check tables existence, number paths: 2 2025-04-04T17:14:52.957426Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:14:52.957444Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:14:52.964267Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-04T17:14:52.964347Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675127641529:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:14:52.964399Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675127641529:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:14:52.964438Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675127641529:2326], Successfully finished 2025-04-04T17:14:52.968281Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:14:53.007214Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzBlN2Y3ZmItOGEwZmU3ZWItOWVlODI4YWEtMjQ5MDk3NTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzBlN2Y3ZmItOGEwZmU3ZWItOWVlODI4YWEtMjQ5MDk3NTc= 2025-04-04T17:14:53.010354Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzBlN2Y3ZmItOGEwZmU3ZWItOWVlODI4YWEtMjQ5MDk3NTc=, ActorId: [1:7489507679422608853:2330], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:14:53.048921Z node 1 :KQP_WORKLOAD_SERVICE 
DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679422608855:2305], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:53.055439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:14:53.059349Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679422608855:2305], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-04T17:14:53.061514Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679422608855:2305], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:14:53.068573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679422608855:2305], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:14:53.162363Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679422608855:2305], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:53.166029Z node 1 :TX_PROXY ERROR: Actor# [1:7489507679422608906:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:53.166151Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679422608855:2305], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:14:53.169957Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA= 2025-04-04T17:14:53.170236Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA=, ActorId: [1:7489507679422608914:2331], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:14:53.170956Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:14:53.170972Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:14:53.171125Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA=, ActorId: [1:7489507679422608914:2331], ActorState: ReadyState, TraceId: 01jr0wrqvk450bx16mq24b2zx9, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7489507679422608913:2343] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-04T17:14:53.171194Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7489507679422608914:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA= 2025-04-04T17:14:53.171259Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507679422608916:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:14:53.171316Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507679422608917:2333], Database: /Root, Start database fetching 2025-04-04T17:14:53.172543Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507679422608917:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-04T17:14:53.172656Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507679422608916:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:14:53.172689Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-04T17:14:53.172714Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
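For reference, the TCpuLoadFetcherActor block earlier in this section shows the exact system query the workload service runs to estimate cluster load. Reproduced standalone below (verbatim from the log above), it should run against any YDB database that exposes the `.sys` virtual tables:

    -- TCpuLoadFetcherActor::OnRunQuery
    SELECT
        SUM(CpuThreads) AS ThreadsCount,
        SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad
    FROM `.sys/nodes`;
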
2025-04-04T17:14:53.172734Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-04T17:14:53.173143Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507679422608927:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA=, Start pool fetching 2025-04-04T17:14:53.173175Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507679422608928:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:14:53.173420Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507679422608928:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:14:53.173482Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507679422608927:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA=, Pool info successfully resolved 2025-04-04T17:14:53.173778Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA= 2025-04-04T17:14:53.173866Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507679422608930:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-04T17:14:53.173917Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507679422608930:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7489507679422608914:2331], session id: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA= 2025-04-04T17:14:53.173982Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZjIwNjRlMDYtMzM4YjYzZWEtZDIzOTg3NGQtYjI1MjMyODA= 2025-04-04T17:14:53.174015Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:14:53.174032Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService ... 
essionId: ydb://session/3?node_id=6&id=MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU= 2025-04-04T17:15:46.977008Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:46.977047Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:15:46.977089Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-04T17:15:46.982950Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7489507904612245918:2328], Start check tables existence, number paths: 2 2025-04-04T17:15:46.983072Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU=, ActorId: [6:7489507904612245920:2329], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:46.985662Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7489507904612245918:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:15:46.985723Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7489507904612245918:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:15:46.985760Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7489507904612245918:2328], Successfully finished 2025-04-04T17:15:46.985821Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:15:46.990816Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489507904612245946:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:46.999474Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:47.002584Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489507904612245946:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-04T17:15:47.005264Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489507904612245946:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:15:47.016170Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489507904612245946:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:47.108823Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489507904612245946:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:47.111833Z node 6 :TX_PROXY ERROR: Actor# [6:7489507908907213293:2332] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:47.111958Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489507904612245946:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:15:47.116691Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U= 2025-04-04T17:15:47.116976Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:15:47.116990Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:15:47.117050Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:47.117176Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: ReadyState, TraceId: 01jr0wtchd03h7h0pzmzv26860, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7489507908907213299:2337] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-04T17:15:47.117221Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [6:7489507908907213300:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U= 2025-04-04T17:15:47.117272Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489507908907213302:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:15:47.117349Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7489507908907213303:2333], Database: /Root, Start database fetching 2025-04-04T17:15:47.118976Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7489507908907213303:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-04T17:15:47.119030Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-04T17:15:47.119060Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489507908907213302:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:15:47.119075Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7489507908907213313:2334], DatabaseId: /Root, 
PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, Start pool fetching 2025-04-04T17:15:47.119108Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-04T17:15:47.119117Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489507908907213314:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:15:47.119125Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-04T17:15:47.119327Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489507908907213315:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-04T17:15:47.120683Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489507908907213314:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:15:47.120740Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489507908907213315:2336], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-04-04T17:15:47.120856Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7489507908907213313:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, Pool info successfully resolved 2025-04-04T17:15:47.120929Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U= 2025-04-04T17:15:47.121015Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U= 2025-04-04T17:15:47.121088Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: ExecuteState, TraceId: 01jr0wtchd03h7h0pzmzv26860, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2025-04-04T17:15:47.121194Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: ExecuteState, TraceId: 01jr0wtchd03h7h0pzmzv26860, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-04-04T17:15:47.121360Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7489507908907213300:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U= 2025-04-04T17:15:47.121427Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: CleanupState, TraceId: 01jr0wtchd03h7h0pzmzv26860, EndCleanup, isFinal: 1 2025-04-04T17:15:47.121524Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: CleanupState, TraceId: 01jr0wtchd03h7h0pzmzv26860, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7489507891727343635:2276] 2025-04-04T17:15:47.121552Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: unknown state, TraceId: 01jr0wtchd03h7h0pzmzv26860, Cleanup temp tables: 0 2025-04-04T17:15:47.121669Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Yjg4MTNkMDgtMWQ4NWE5NzctY2M5YjExMzEtZGJiZjZiY2U=, ActorId: [6:7489507908907213300:2331], ActorState: unknown state, TraceId: 01jr0wtchd03h7h0pzmzv26860, Session actor destroyed 2025-04-04T17:15:47.142583Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU=, ActorId: [6:7489507904612245920:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:47.142649Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU=, ActorId: [6:7489507904612245920:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:47.142677Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU=, ActorId: [6:7489507904612245920:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:47.142703Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU=, ActorId: [6:7489507904612245920:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:47.142782Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjU4Njk3OTQtZmQ5N2ZmZTMtNTgxOWYxZTQtMTZkNzExNDU=, ActorId: [6:7489507904612245920:2329], ActorState: unknown state, Session actor destroyed >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2025-04-04T17:15:37.250964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.251413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.251469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001279/r3tmp/tmpcDaZIs/pdisk_1.dat 2025-04-04T17:15:37.697268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.760997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.806936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.807754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.820193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.922288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:38.001674Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:38.001972Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.050652Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.050777Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.052510Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.052599Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.052676Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.053051Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.053175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.053250Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.065013Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.104207Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.104378Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.104542Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.104608Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.104652Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.104692Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.105211Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.105329Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.105460Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.105515Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.105558Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.105623Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.105705Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.106105Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.106422Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.106520Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.108460Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.120178Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.120294Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.280209Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.284395Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.284466Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.285157Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.285206Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.285257Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.285487Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.285632Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.286148Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.286256Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.288094Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.288549Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:38.290288Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.290342Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.291370Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.291469Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.292657Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.292978Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.293017Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.293087Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.293154Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.293198Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.293281Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.297903Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.298099Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.298152Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.307178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.307286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.307353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.312377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.317658Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.486697Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.489592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.568044Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.209085Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3y1f2whkkw6w2h8qj85, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgxOTIzMTEtODMwN2YxMjUtODU3ZjNmM2QtZGIxNGE4MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.216749Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.217738Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.233971Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 86224037888 2025-04-04T17:15:47.627115Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:47.627156Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:47.627202Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:47.627612Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:672:2573], sessionId# [0:0:0] 2025-04-04T17:15:47.627758Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:47.627983Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:47.628069Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:47.629827Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:47.642890Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:47.643013Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:47.799125Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:47.800628Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:47.800690Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:47.801247Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:47.801297Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:47.801342Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:47.801629Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:47.801780Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:47.802070Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:47.802131Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:47.802627Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:47.803089Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:47.804973Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:47.805025Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:47.805756Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:47.805841Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:47.807172Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:47.807228Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:47.807278Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:47.807341Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:47.807390Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:47.807474Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:47.808119Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:47.810850Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:47.810915Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:47.811341Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:47.820144Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:47.820251Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:746:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:47.820362Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:47.826984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:47.834752Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:47.997660Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.000772Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:48.045650Z node 3 :TX_PROXY ERROR: Actor# [3:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:48.260374Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wtd7a45wkjyy72eep1k7h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWUxNThlYjQtNDUzOWM3Zi0zMzk5ODIwMy02YTIxOWVhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:48.260906Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:851:2687], serverId# [3:852:2688], sessionId# [0:0:0] 2025-04-04T17:15:48.261139Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:48.273539Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:48.273702Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.277733Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:48.278994Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:48.292959Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:48.293046Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.293420Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:48.293477Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-04T17:15:48.293638Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:48.293745Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.293792Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.293833Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:48.293894Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.294827Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:48.295195Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:48.295369Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.295414Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.295456Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:48.295687Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.295753Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-04T17:15:48.296338Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:15:48.296697Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:48.296831Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-04T17:15:48.296877Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-04T17:15:48.298351Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:48.298391Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-04T17:15:48.298724Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.298759Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.298796Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:48.298927Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.298978Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.299019Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2025-04-04T17:15:37.335041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.335546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.335599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000801/r3tmp/tmpwcLIcF/pdisk_1.dat 2025-04-04T17:15:37.697549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.756639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.806690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.807375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.820193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.920903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.979048Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:37.979342Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.021396Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.021494Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.025699Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.025795Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.025843Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.031094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.031310Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.031407Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.042136Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.071427Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.071608Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.071722Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.071762Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.071795Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.071831Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.072296Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.072414Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.072550Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.072615Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.072657Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.072712Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.072813Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.073240Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.073471Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.073560Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.075297Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.087912Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.088045Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.240061Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.244669Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.244737Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.245580Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.245645Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.245708Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.246001Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.246188Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.246857Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.246952Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.250055Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.250651Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:38.252380Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.252440Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.259397Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.259494Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.260791Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.261204Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.261247Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.261316Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.261377Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.261426Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.261514Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.272326Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.272515Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.272575Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.296259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.296362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.296436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.307125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.312969Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.468956Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.471633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.551364Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.208862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3xnf7tr2gjq2g115s2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E2YjBmMTItMWVkNzY3MTYtYTM0MTZhYzQtNDhiNzJmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.216666Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.217729Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.233865Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 86224037888 2025-04-04T17:15:47.883860Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:47.883903Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:47.883953Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:47.884342Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:672:2573], sessionId# [0:0:0] 2025-04-04T17:15:47.884521Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:47.884715Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:47.884799Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:47.886517Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:47.902124Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:47.902265Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:48.064847Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:48.066854Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:48.066940Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.067614Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.067669Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:48.067719Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:48.067981Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:48.068135Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:48.068455Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.068522Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:48.069002Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:48.069419Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.071388Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:48.071439Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.072238Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:48.072329Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.073647Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.073693Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:48.073742Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:48.073807Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:48.073862Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:48.073950Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.076087Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.078741Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:48.078815Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:48.079265Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:48.089174Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:48.089290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:746:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:48.089375Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:48.108541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:48.123068Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.303017Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.306229Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:48.344883Z node 3 :TX_PROXY ERROR: Actor# [3:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:48.442339Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wtdfq44wtegy7jdgt7549, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2Y5MzcxNGQtNWE5Yzk5ZjEtODgxMTU3NGEtNzRhYjJjM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:48.442909Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:851:2687], serverId# [3:852:2688], sessionId# [0:0:0] 2025-04-04T17:15:48.443142Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:48.455509Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:48.455637Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.458945Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:48.459972Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:48.471299Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:48.471380Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.471794Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:48.471845Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-04T17:15:48.472043Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:48.472145Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.472193Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.472243Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:48.472319Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.473252Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:48.473616Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:48.473809Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.473851Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.473917Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:48.474126Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.474184Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-04T17:15:48.474903Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:15:48.475161Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:48.475315Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-04T17:15:48.475369Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-04T17:15:48.515312Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:48.515380Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-04T17:15:48.515755Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.515789Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.515819Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:48.515953Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.516015Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.516054Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAllowedScopes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: 2025-04-04T17:15:37.285720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.286220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.286297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000922/r3tmp/tmp6tLxbB/pdisk_1.dat 2025-04-04T17:15:37.695921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.765092Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.816158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.816337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.827950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.930717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:38.011883Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:38.012133Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.068205Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.068333Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.070013Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.070092Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.070153Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.070574Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.070716Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.070786Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:38.081562Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.108944Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.109123Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.109262Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:38.109308Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.109341Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.109378Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.109822Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.109952Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.110087Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.110141Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.110186Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.110268Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.110343Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:38.110785Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.111092Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.111189Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.112991Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.126808Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.126948Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.271954Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.282710Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:38.282787Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.283589Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.283645Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:38.283718Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:38.284012Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:38.284188Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:38.284727Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.284833Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:38.292897Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:38.295019Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:38.296815Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:38.296863Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.297930Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:38.298024Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.299728Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.300049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.300092Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.300165Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:38.300236Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:38.300283Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:38.300370Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.309512Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:38.309716Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:38.309771Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:38.318091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.318160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.318208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:38.323550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:38.329676Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.486009Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.488679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:38.563736Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:39.209190Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt3yc2ex7mc2dan7gj82s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZjMmFjMTQtN2U3ZmRkYjgtMTM3NDFlYTItZjVlODc0YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:39.216573Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:39.217726Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:39.234192Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 86224037888 2025-04-04T17:15:48.328695Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.328732Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:48.328774Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.329185Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:672:2573], sessionId# [0:0:0] 2025-04-04T17:15:48.329373Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:48.329601Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:48.329692Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:48.331510Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.346844Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:48.346973Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:48.525941Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:48.527490Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:48.527551Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.528133Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.528184Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:48.528251Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:48.529269Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:48.529438Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:48.529756Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.529814Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:48.530253Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:48.530704Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.532343Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:48.532406Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.533158Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:48.533252Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.534625Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.534670Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:48.534721Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:48.534783Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:48.534841Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:48.534939Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.535561Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.537991Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:48.538053Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:48.538537Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:48.546853Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:48.546948Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:746:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:48.547048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:48.555353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:48.563217Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.726168Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:48.729406Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:48.765953Z node 3 :TX_PROXY ERROR: Actor# [3:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:48.921589Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wtdy15x8dwjep894bxhbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Yjg4NTE0ZTktMmMwN2E5ZDEtYWU1YzY4MDgtZTA2MDU0MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:48.922149Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:851:2687], serverId# [3:852:2688], sessionId# [0:0:0] 2025-04-04T17:15:48.922389Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:48.934853Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:48.935003Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.940446Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:48.941722Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:48.953192Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:48.953278Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:48.953688Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:48.953764Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-04T17:15:48.953924Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:48.954034Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.954083Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.954134Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:48.954203Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.955205Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:48.955555Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:48.955742Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.955785Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.955827Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:48.956080Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.956150Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-04T17:15:48.956810Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:15:48.957144Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:48.957271Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-04T17:15:48.957320Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-04T17:15:48.958933Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:48.958983Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-04T17:15:48.959381Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:48.959418Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:48.959453Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:48.959573Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:48.959623Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:48.959667Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 6081, msgbus: 26237 2025-04-04T17:12:58.604211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507185974243244:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:58.604321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000feb/r3tmp/tmpakR7Zh/pdisk_1.dat 2025-04-04T17:12:59.087040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:59.096475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:59.096626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6081, node 1 2025-04-04T17:12:59.104795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:59.114962Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.116061Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.261429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:59.261453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:59.261457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T17:12:59.261593Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26237 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:59.619138Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:59.619201Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211187:2455] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:59.619565Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211187:2455] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.660348Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211187:2455] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:12:59.670146Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211187:2455] Handle TEvDescribeSchemeResult Forward to# [1:7489507190269211186:2454] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:12:59.686724Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.686756Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:59.689929Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507190269211194:2461] 2025-04-04T17:12:59.755911Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.756899Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.756944Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.757622Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.757971Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.758266Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:12:59.758501Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:12:59.758650Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:12:59.763856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.769435Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:12:59.769486Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211194:2461] txid# 281474976710657 SEND to# [1:7489507190269211193:2460] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T17:12:59.787905Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.787941Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:12:59.787973Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507190269211236:2499] 2025-04-04T17:12:59.790121Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.790196Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.790214Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.790358Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.790603Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.794355Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.794442Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:12:59.794629Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:12:59.795154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.797042Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:12:59.797102Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211236:2499] txid# 281474976710658 SEND to# [1:7489507190269211235:2498] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:12:59.845307Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.845336Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:12:59.845398Z node 1 :TX_PROXY DEBUG: actor# [1:7489507185974243360:2114] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489507190269211254:2509] 2025-04-04T17:12:59.847778Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507190269211254:2509] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\0 ... LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:48.085863Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292740:2520] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T17:15:48.086007Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292740:2520] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T17:15:48.087522Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T17:15:48.093107Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292740:2520] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-04T17:15:48.093172Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292740:2520] txid# 281474976710660 SEND to# [59:7489507914340292739:2342] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-04T17:15:48.129006Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7489507914340292739:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:15:48.209681Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] Handle TEvProposeTransaction 2025-04-04T17:15:48.209715Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T17:15:48.209766Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7489507914340292806:2566] 2025-04-04T17:15:48.212634Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\377\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-04T17:15:48.212687Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:15:48.212709Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T17:15:48.213445Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:15:48.213551Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:48.213760Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:48.213908Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:48.213963Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T17:15:48.214112Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T17:15:48.219715Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 Status StatusAlreadyExists HANDLE 
{TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T17:15:48.219858Z node 59 :TX_PROXY ERROR: Actor# [59:7489507914340292806:2566] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:48.219890Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292806:2566] txid# 281474976710661 SEND to# [59:7489507914340292739:2342] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T17:15:48.241343Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] Handle TEvProposeTransaction 2025-04-04T17:15:48.241373Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T17:15:48.241410Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7489507914340292829:2577] 2025-04-04T17:15:48.243906Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43144" 2025-04-04T17:15:48.243977Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:15:48.243997Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:48.244052Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:48.244367Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:48.244469Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:48.244525Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T17:15:48.244664Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T17:15:48.251239Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 
2025-04-04T17:15:48.251296Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292829:2577] txid# 281474976710662 SEND to# [59:7489507914340292828:2335] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T17:15:48.318317Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] Handle TEvProposeTransaction 2025-04-04T17:15:48.318351Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T17:15:48.318391Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7489507914340292862:2591] 2025-04-04T17:15:48.320712Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292862:2591] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43174" 2025-04-04T17:15:48.320768Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292862:2591] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:15:48.320789Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292862:2591] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-04-04T17:15:48.320835Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292862:2591] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:48.321104Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292862:2591] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:48.321150Z node 59 :TX_PROXY ERROR: Actor# [59:7489507914340292862:2591] txid# 281474976710663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-04-04T17:15:48.321240Z node 59 :TX_PROXY ERROR: Actor# [59:7489507914340292862:2591] txid# 281474976710663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-04T17:15:48.321265Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507914340292862:2591] txid# 281474976710663 SEND to# [59:7489507914340292861:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:15:48.321536Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZjIxNmIwNDYtMmYxODcxNmItMjQyYzE3MmItNmIzZDU4MTU=, ActorId: [59:7489507914340292847:2352], ActorState: ExecuteState, TraceId: 01jr0wtdpf2y1t27a5rx1p6606, Create QueryResponse for error on request, msg: 2025-04-04T17:15:48.321998Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] Handle TEvExecuteKqpTransaction 2025-04-04T17:15:48.322024Z node 59 :TX_PROXY DEBUG: actor# [59:7489507892865455497:2111] TxId# 281474976710664 ProcessProposeKqpTransaction 2025-04-04T17:15:48.401240Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489507892865455351:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:48.401314Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] 
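For orientation: the two ESchemeOpAlterLogin transactions above (txid# 281474976710662 and txid# 281474976710663) correspond to standard YQL user-management statements. This is a sketch only — the test drives these operations through the scheme API rather than through YQL text; the user name and password are copied verbatim from the records, not from the test source:

    -- txid# 281474976710662, proposed by root@builtin, finished with StatusSuccess
    CREATE USER targetuser PASSWORD 'passwd';

    -- txid# 281474976710663, proposed by cluster_admin@builtin and rejected with
    -- "Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema"
    DROP USER targetuser;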
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestAffectedConfigs |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2025-04-04T17:14:49.849600Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507662706468879:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:49.850751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017d4/r3tmp/tmpWPY8Ow/pdisk_1.dat 2025-04-04T17:14:50.360350Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:50.388778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:50.388874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:50.415482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9090, node 1 2025-04-04T17:14:50.683769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:50.683794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:50.683806Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:50.683901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29964 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:51.368147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:52.970620Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:14:52.970726Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675591371422:2329], Start check tables existence, number paths: 2 2025-04-04T17:14:52.983864Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-04T17:14:52.983907Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:14:52.983925Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:14:52.983992Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675591371422:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:14:52.984055Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675591371422:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:14:52.984081Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507675591371422:2329], Successfully finished 2025-04-04T17:14:52.984163Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:14:53.006654Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA= 2025-04-04T17:14:53.007227Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA=, ActorId: [1:7489507679886338734:2330], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:14:53.053079Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679886338736:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:53.056394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 
2025-04-04T17:14:53.057869Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679886338736:2304], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-04T17:14:53.060099Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679886338736:2304], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:14:53.075906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679886338736:2304], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:14:53.134379Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679886338736:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:53.140514Z node 1 :TX_PROXY ERROR: Actor# [1:7489507679886338787:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:53.140640Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507679886338736:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:14:53.141102Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507679886338794:2342], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:14:53.141966Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507679886338794:2342], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:14:53.158789Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA=, ActorId: [1:7489507679886338734:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:14:53.158838Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA=, ActorId: [1:7489507679886338734:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:14:53.162354Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA=, ActorId: [1:7489507679886338734:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:14:53.162387Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA=, ActorId: [1:7489507679886338734:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:14:53.162493Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2U1ZGQ5NTItNDk0NTViNjItODM5MWExNTYtZjVlNGY4OTA=, ActorId: [1:7489507679886338734:2330], ActorState: unknown state, Session actor destroyed 2025-04-04T17:14:53.806661Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507680103565640:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:53.817444Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017d4/r3tmp/tmpItrdjY/pdisk_1.dat 2025-04-04T17:14:53.892916Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31818, node 2 2025-04-04T17:14:53.929080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:53.929188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:53.941466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-04-04T17:14:54.086672Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:54.086694Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:54.086701Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:54.086794Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:54.325774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:54.329992Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:14:56.610490Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:14:56.612079Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=ZjhiNGVlMzYtYjNiYzExODYtYzhkYTc2NjAtZWQ0MGE5Mw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjhiNGVlMzYtYjNiYzExODYtYzhkYTc2NjAtZWQ0MGE5Mw== 2025-04-04T17:14:56.613139Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [S ... 
2055389570:2333], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:48.414461Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODkwODJmNWMtNjEwYTFjY2QtMzBjZGVkMDAtOWVkMThlMjE=, ActorId: [6:7489507772055389570:2333], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:48.414567Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODkwODJmNWMtNjEwYTFjY2QtMzBjZGVkMDAtOWVkMThlMjE=, ActorId: [6:7489507772055389570:2333], ActorState: unknown state, Session actor destroyed 2025-04-04T17:15:48.428009Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdsabbw7fvk5ke0cawab, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:15:48.428300Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdsabbw7fvk5ke0cawab, txInfo Status: Committed Kind: ReadWrite TotalDuration: 33.025 ServerDuration: 32.883 QueriesCount: 2 2025-04-04T17:15:48.428465Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdsabbw7fvk5ke0cawab, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:15:48.428585Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdsabbw7fvk5ke0cawab, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:48.428633Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdsabbw7fvk5ke0cawab, EndCleanup, isFinal: 0 2025-04-04T17:15:48.428726Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdsabbw7fvk5ke0cawab, Sent query response back to proxy, proxyRequestId: 525, proxyId: [6:7489507754875519693:2247] 2025-04-04T17:15:48.429689Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, TxId: 2025-04-04T17:15:48.429867Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 
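For readability, the pool-state refresh query from the RunDataQuery entry above (echoed again in the received-request entry that follows) is reproduced here as a standalone YQL script. The text is verbatim from the log with only whitespace added; the $database_id and $pool_id parameters are bound by the workload service at run time:

-- TRefreshPoolStateQuery::OnLeaseUpdated
DECLARE $database_id AS Text;
DECLARE $pool_id AS Text;

SELECT COUNT(*) AS delayed_requests
FROM `.metadata/workload_manager/delayed_requests`
WHERE database = $database_id
  AND pool_id = $pool_id
  AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp())
  AND lease_deadline >= CurrentUtcTimestamp();

SELECT COUNT(*) AS running_requests
FROM `.metadata/workload_manager/running_requests`
WHERE database = $database_id
  AND pool_id = $pool_id
  AND lease_deadline >= CurrentUtcTimestamp();

The two counts give the pool handler the number of queued requests (with unexpired wait and lease deadlines) and of running requests (with unexpired leases), which is what the "refreshed pool state, in flight: 0, delayed: 0" trace entry below reports.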
2025-04-04T17:15:48.430470Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ReadyState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, received request, proxyRequestId: 526 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [6:7489507913789316995:4429] database: /Root databaseId: /Root pool id: default 2025-04-04T17:15:48.430511Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ReadyState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, request placed into pool from cache: default 2025-04-04T17:15:48.431654Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, ExecutePhyTx, tx: 0x000050C00034F558 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:48.431716Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, Sending to Executer TraceId: 0 8 2025-04-04T17:15:48.431777Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, Created new KQP executer: [6:7489507913789316998:4423] isRollback: 0 2025-04-04T17:15:48.443968Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-04T17:15:48.444107Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, ExecutePhyTx, tx: 0x000050C00034F6D8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:48.448494Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:15:48.448652Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, txInfo Status: Committed Kind: ReadOnly TotalDuration: 17.125 ServerDuration: 17.01 QueriesCount: 2 2025-04-04T17:15:48.448756Z node 6 :KQP_SESSION 
DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:15:48.448810Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:48.448839Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, EndCleanup, isFinal: 0 2025-04-04T17:15:48.448886Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ExecuteState, TraceId: 01jr0wtdte9j64t6m9mac5n52h, Sent query response back to proxy, proxyRequestId: 526, proxyId: [6:7489507754875519693:2247] 2025-04-04T17:15:48.449771Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, TxId: 2025-04-04T17:15:48.449844Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, TxId: 2025-04-04T17:15:48.449990Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489507776350357204:2345], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-04-04T17:15:48.450067Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:48.450099Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:48.450129Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:15:48.450159Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:48.450221Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZjQ1ZmExNDEtZDA1M2VkMGQtZGJmNDFiNS05ZWE3MzQ3Mg==, ActorId: [6:7489507913789316969:4423], ActorState: unknown state, Session actor destroyed 2025-04-04T17:15:48.635536Z node 7 :BS_PROXY_PUT ERROR: [3f91ee9b070fc0ea] Result# TEvPutResult {Id# [72075186224037889:1:771:0:0:42:0] Status# 
ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:771:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:15:48.636494Z node 8 :BS_PROXY_PUT ERROR: [cb87d0c6c38f7439] Result# TEvPutResult {Id# [72075186224037888:1:863:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:863:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: 2025-04-04T17:15:37.247188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:37.247667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:37.247723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007ba/r3tmp/tmpd02UjD/pdisk_1.dat 2025-04-04T17:15:37.698104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.771743Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:37.816006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:37.816184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:37.831411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:37.920559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:37.978704Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2585] 2025-04-04T17:15:37.978924Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.027723Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.027843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.029507Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:38.029596Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:38.029663Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:38.030056Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.030420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.030491Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:712:2585] in generation 1 2025-04-04T17:15:38.032421Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2587] 2025-04-04T17:15:38.032632Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.042633Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.043019Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2589] 2025-04-04T17:15:38.043222Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:38.051613Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.052901Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:15:38.052989Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T17:15:38.053034Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:15:38.053315Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.054178Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.054258Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:737:2587] in generation 1 2025-04-04T17:15:38.054740Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:38.054802Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:38.056032Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-04T17:15:38.056084Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-04T17:15:38.056126Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-04T17:15:38.056368Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:38.056455Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:38.056519Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:738:2589] in generation 1 2025-04-04T17:15:38.068387Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.116211Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:38.116391Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.116520Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:742:2616] 2025-04-04T17:15:38.116567Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:38.116608Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:38.116642Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:38.116960Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.117001Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:15:38.117054Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.117128Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-04-04T17:15:38.117161Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:15:38.117186Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:15:38.117206Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:15:38.117509Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:38.117552Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-04T17:15:38.117597Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:38.117671Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-04-04T17:15:38.117693Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T17:15:38.117713Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-04T17:15:38.117731Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:15:38.117877Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:38.117987Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:38.118203Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:38.118291Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.118338Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:38.118383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:38.118437Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:15:38.118492Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:15:38.118694Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:677:2580], serverId# [1:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:38.118752Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:15:38.118778Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.118800Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T17:15:38.118827Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:15:38.118859Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-04T17:15:38.118904Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-04T17:15:38.119434Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:38.119693Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:38.119789Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:38.120255Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:678:2581], serverId# [1:710:2599], sessionId# [0:0:0] 2025-04-04T17:15:38.120320Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:15:38.120361Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:38.120389Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-04T17:15:38.120425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:15:38.120602Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:15:38.120754Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T17:15:38.120814Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T17:15:38.122835Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:38.122927Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:15:38.134968Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:38.135086Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.135226Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:15:38.135265Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:38.182935Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:679:2582], serverId# [1:761:2628], sessionId# [0:0:0] 2025-04-04T17:15:38.183104Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T17:15:38.183275Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... egularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:15:50.968613Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] Handle TEvDataShard::TEvEraseRowsRequest 2025-04-04T17:15:50.968767Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] Propose tx: txId# 281474976715663, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2025-04-04T17:15:50.968876Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] Propose tx: txId# 281474976715663, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2025-04-04T17:15:50.968949Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] Propose tx: txId# 281474976715663, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2025-04-04T17:15:50.969192Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T17:15:50.969351Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037890 2025-04-04T17:15:50.969754Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:50.969846Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037888 2025-04-04T17:15:50.970043Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:15:50.970134Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037889 2025-04-04T17:15:50.981535Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-04T17:15:50.981630Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:50.981767Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-04-04T17:15:50.981826Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 
72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-04-04T17:15:50.981903Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 1 2025-04-04T17:15:50.982028Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:15:50.982100Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:15:50.982127Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-04T17:15:50.982176Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037889, status# 1 2025-04-04T17:15:50.982224Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-04-04T17:15:50.982278Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-04-04T17:15:50.982339Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 1 2025-04-04T17:15:50.982371Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] Register plan: txId# 281474976715663, minStep# 1539, maxStep# 31539 2025-04-04T17:15:50.995160Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-04-04T17:15:50.995965Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-04T17:15:50.998817Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:1101:2846] Reply: txId# 281474976715663, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715663, shard# 72075186224037888 2025-04-04T17:15:50.998995Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-04-04T17:15:50.999042Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-04-04T17:15:50.999779Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-04T17:15:50.999826Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-04T17:15:51.000181Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1096:2842], serverId# [3:1097:2843], sessionId# [0:0:0] 2025-04-04T17:15:51.000293Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:15:51.000359Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:51.000410Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-04-04T17:15:51.000461Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:15:51.021755Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1113:2857] 2025-04-04T17:15:51.022054Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:51.027217Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:51.028462Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:51.030793Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: 
QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:51.030876Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:51.030935Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:51.031378Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:51.031628Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:51.031699Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:1128:2857] in generation 2 2025-04-04T17:15:51.045487Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:51.045606Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-04T17:15:51.045746Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:51.046070Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:1131:2865] 2025-04-04T17:15:51.046113Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:51.046164Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:51.046207Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:51.046510Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-04T17:15:51.046740Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-04T17:15:51.047821Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:51.047925Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:51.048115Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:51.048225Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1538 2025-04-04T17:15:51.048271Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:51.048343Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:51.048392Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:51.048434Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2025-04-04T17:15:51.048482Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:51.048746Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-04T17:15:51.048790Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2025-04-04T17:15:51.048833Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2025-04-04T17:15:51.049024Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-04-04T17:15:51.049049Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715662 2025-04-04T17:15:51.049073Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 
72075186224037888 to 72075186224037890 txId 281474976715662 2025-04-04T17:15:51.049155Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1500 next step 1538 2025-04-04T17:15:51.049229Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715662 2025-04-04T17:15:51.049303Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1538 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:15:51.049357Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1538:281474976715662 at 72075186224037889 2025-04-04T17:15:51.049416Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-04T17:15:51.049474Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1538 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:15:51.049602Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715662 2025-04-04T17:15:51.049657Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715662 2025-04-04T17:15:51.049717Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1538 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-04-04T17:15:51.049748Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1538:281474976715662 at 72075186224037890 2025-04-04T17:15:51.049777Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:15:51.049808Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1538 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-04-04T17:15:51.049880Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 63987, msgbus: 6504 2025-04-04T17:12:58.625407Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507182875166650:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:58.627818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fad/r3tmp/tmpFQNOjQ/pdisk_1.dat 2025-04-04T17:12:59.083997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:12:59.085823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:59.085917Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:59.104414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63987, node 1 2025-04-04T17:12:59.113557Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.113579Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.262481Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:59.262522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:59.262531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:59.262673Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6504 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:59.626419Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:59.626492Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134687:2447] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:59.627632Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134687:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.673118Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134687:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:12:59.688713Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134687:2447] Handle TEvDescribeSchemeResult Forward to# [1:7489507187170134686:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:59.719340Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.719406Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:59.719731Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507187170134712:2458] 2025-04-04T17:12:59.811189Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.811281Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.811305Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.811444Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.811877Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.812091Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:12:59.812238Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:12:59.812471Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:12:59.813329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.821878Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:12:59.821953Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134712:2458] txid# 281474976710657 SEND to# [1:7489507187170134711:2457] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T17:12:59.840980Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.841018Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:12:59.841050Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507187170134753:2492] 2025-04-04T17:12:59.843189Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.843238Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.843269Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.843331Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.843605Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.843670Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.843729Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:12:59.843888Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:12:59.844305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.846769Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:12:59.846832Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134753:2492] txid# 281474976710658 SEND to# [1:7489507187170134752:2491] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:12:59.878658Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.878692Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:12:59.878760Z node 1 :TX_PROXY DEBUG: actor# [1:7489507182875166882:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489507187170134771:2502] 2025-04-04T17:12:59.881041Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187170134771:2502] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\0 ... ltin 2025-04-04T17:15:50.063955Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:15:50.064025Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:50.064162Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:50.064306Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:50.064362Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T17:15:50.064461Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T17:15:50.067834Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T17:15:50.067940Z node 59 :TX_PROXY ERROR: Actor# [59:7489507923204507063:2590] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:50.067961Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507063:2590] txid# 281474976710661 SEND to# [59:7489507918909539691:2342] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T17:15:50.081842Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] Handle TEvProposeTransaction 2025-04-04T17:15:50.081870Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T17:15:50.081901Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7489507923204507087:2602] 2025-04-04T17:15:50.084387Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37864" 2025-04-04T17:15:50.084458Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:50.084479Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:50.084527Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:50.084817Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:50.084924Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:50.084988Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T17:15:50.085125Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T17:15:50.093397Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T17:15:50.093454Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507087:2602] txid# 281474976710662 SEND to# [59:7489507923204507086:2334] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T17:15:50.104949Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] Handle TEvProposeTransaction 2025-04-04T17:15:50.104987Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T17:15:50.105041Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7489507923204507100:2611] 2025-04-04T17:15:50.107578Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37872" 2025-04-04T17:15:50.107653Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:50.107674Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:50.107724Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 
2025-04-04T17:15:50.108113Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:50.108257Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:50.108333Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-04T17:15:50.108528Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 HANDLE EvClientConnected 2025-04-04T17:15:50.108956Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:15:50.116096Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-04T17:15:50.116154Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507100:2611] txid# 281474976710663 SEND to# [59:7489507923204507099:2347] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-04-04T17:15:50.173376Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] Handle TEvProposeTransaction 2025-04-04T17:15:50.173413Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] TxId# 281474976710664 ProcessProposeTransaction 2025-04-04T17:15:50.173474Z node 59 :TX_PROXY DEBUG: actor# [59:7489507901729669796:2112] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7489507923204507132:2625] 2025-04-04T17:15:50.176019Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37914" 2025-04-04T17:15:50.176086Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:50.176107Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-04-04T17:15:50.176241Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:50.176282Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-04-04T17:15:50.176323Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 TEvNavigateKeySet requested from 
SchemeCache
2025-04-04T17:15:50.176547Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0
2025-04-04T17:15:50.176642Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true
2025-04-04T17:15:50.176696Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480}
2025-04-04T17:15:50.176812Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 HANDLE EvClientConnected
2025-04-04T17:15:50.179923Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664}
2025-04-04T17:15:50.179983Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923204507132:2625] txid# 281474976710664 SEND to# [59:7489507923204507131:2353] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48}
2025-04-04T17:15:50.238976Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489507901729669583:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:15:50.239054Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest
>> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD]
Test command err:
2025-04-04T17:14:53.061000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507677197773921:2197];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:14:53.061511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017c5/r3tmp/tmptk6usT/pdisk_1.dat
2025-04-04T17:14:53.435138Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:14:53.440258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:14:53.440339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:14:53.442622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10114, node 1
2025-04-04T17:14:53.594833Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:14:53.594863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:14:53.594880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:14:53.595004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:25965
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:53.873374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:53.893937Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:14:56.014785Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:14:56.023487Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlYzhmMi01ZDI4MTg4LWRlMzJjOTMwLWFkM2YwZmNj, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTdlYzhmMi01ZDI4MTg4LWRlMzJjOTMwLWFkM2YwZmNj 2025-04-04T17:14:56.024258Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-04T17:14:56.024271Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:14:56.024282Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:14:56.032545Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507690082676306:2326], Start check tables existence, number paths: 2 2025-04-04T17:14:56.032635Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdlYzhmMi01ZDI4MTg4LWRlMzJjOTMwLWFkM2YwZmNj, ActorId: [1:7489507690082676307:2327], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:14:56.034015Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507690082676306:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:14:56.034078Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507690082676306:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:14:56.034103Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507690082676306:2326], Successfully finished 2025-04-04T17:14:56.034173Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:14:56.044309Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507690082676334:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:56.047125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-04T17:14:56.048029Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507690082676334:2299], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658
2025-04-04T17:14:56.048171Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507690082676334:2299], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected
2025-04-04T17:14:56.062410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507690082676334:2299], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:14:56.143073Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507690082676334:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:56.147001Z node 1 :TX_PROXY ERROR: Actor# [1:7489507690082676387:2332] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:56.147122Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507690082676334:2299], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:14:56.149285Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU= 2025-04-04T17:14:56.149521Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU=, ActorId: [1:7489507690082676394:2331], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:14:56.149656Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU=, ActorId: [1:7489507690082676394:2331], ActorState: ReadyState, TraceId: 01jr0wrtrna4f0df89hvr3eryr, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7489507690082676393:2337] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-04T17:14:56.149703Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:14:56.149712Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:14:56.149754Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7489507690082676394:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU= 2025-04-04T17:14:56.149796Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507690082676396:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:14:56.149851Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507690082676397:2333], Database: /Root, Start database fetching 2025-04-04T17:14:56.151169Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507690082676397:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-04T17:14:56.151259Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507690082676396:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:14:56.151288Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-04T17:14:56.151328Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
2025-04-04T17:14:56.151340Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-04T17:14:56.151510Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507690082676408:2335], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-04T17:14:56.151556Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507690082676407:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU=, Start pool fetching 2025-04-04T17:14:56.151581Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507690082676409:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:14:56.152554Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507690082676409:2336], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:14:56.152582Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507690082676408:2335], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-04-04T17:14:56.152649Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507690082676407:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU=, Pool info successfully resolved 2025-04-04T17:14:56.152699Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU= 2025-04-04T17:14:56.152765Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507690082676408:2335], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7489507690082676394:2331], session id: ydb://session/3?node_id=1&id=NmQ1OTU0N2QtYzhiOWQ3NDAtMjc0NmMwNjAtMmRkN2ZhNTU= 2025-04-04T17:14:56.152799Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:74895076900826 ... 
tODRhYmIxNDA=, ActorId: [7:7489507921882840979:2720], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:50.574719Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YzIyMmQxNWYtZjA0ZTgyMzgtN2MwMWEzNjItODRhYmIxNDA=, ActorId: [7:7489507921882840979:2720], ActorState: CleanupState, EndCleanup, isFinal: 1 2025-04-04T17:15:50.574735Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YzIyMmQxNWYtZjA0ZTgyMzgtN2MwMWEzNjItODRhYmIxNDA=, ActorId: [7:7489507921882840979:2720], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:15:50.574791Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YzIyMmQxNWYtZjA0ZTgyMzgtN2MwMWEzNjItODRhYmIxNDA=, ActorId: [7:7489507921882840979:2720], ActorState: unknown state, Session actor destroyed 2025-04-04T17:15:50.579994Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7489507917587873617:2703], DatabaseId: /Root, PoolId: my_pool, Got delete notification 2025-04-04T17:15:50.580070Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-04T17:15:50.580115Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489507921882841056:2738], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-04T17:15:50.580952Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489507921882841056:2738], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-04T17:15:50.581068Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-04T17:15:50.583279Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: ExecuteState, TraceId: 01jr0wtfx6djh2qb4haevj3d5v, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [7:7489507921882841030:2331] WorkloadServiceCleanup: 0 2025-04-04T17:15:50.584876Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: CleanupState, TraceId: 01jr0wtfx6djh2qb4haevj3d5v, EndCleanup, isFinal: 0 2025-04-04T17:15:50.584928Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: CleanupState, TraceId: 01jr0wtfx6djh2qb4haevj3d5v, Sent query response back to proxy, proxyRequestId: 68, proxyId: [7:7489507866048263966:2071] 2025-04-04T17:15:50.589649Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0 2025-04-04T17:15:50.589758Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:50.589940Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-04-04T17:15:50.589993Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489507921882841066:2740], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-04-04T17:15:50.590059Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ReadyState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, received request, proxyRequestId: 69 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [7:7489507921882841064:3075] database: Root databaseId: /Root pool id: default 2025-04-04T17:15:50.590092Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ReadyState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, request placed into pool from cache: default 2025-04-04T17:15:50.590183Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Sending CompileQuery request 2025-04-04T17:15:50.590360Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489507921882841066:2740], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions }
2025-04-04T17:15:50.590430Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-04-04T17:15:50.650267Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, ExecutePhyTx, tx: 0x000050C000355B58 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:15:50.650324Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Sending to Executer TraceId: 0 8 2025-04-04T17:15:50.650402Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Created new KQP executer: [7:7489507921882841071:2739] isRollback: 0 2025-04-04T17:15:50.651673Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Forwarded TEvStreamData to [7:7489507921882841064:3075] 2025-04-04T17:15:50.652844Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-04T17:15:50.652949Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, txInfo Status: Committed Kind: Pure TotalDuration: 2.764 ServerDuration: 2.722 QueriesCount: 2 2025-04-04T17:15:50.653004Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:15:50.653162Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:50.653187Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, EndCleanup, isFinal: 1 2025-04-04T17:15:50.653230Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: ExecuteState, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Sent query response back to proxy, proxyRequestId: 69, proxyId: [7:7489507866048263966:2071] 2025-04-04T17:15:50.653256Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: unknown state, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Cleanup temp tables: 0 2025-04-04T17:15:50.653532Z node 7 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=7&id=NzEwMTQ3OC0zZjRiMjY2Ny0xMDMwMmM5LWU0ODcyMDI0, ActorId: [7:7489507921882841065:2739], ActorState: unknown state, TraceId: 01jr0wtfxyfe0c7vk9x142pdd2, Session actor destroyed
2025-04-04T17:15:50.661231Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: ReadyState, Session closed due to explicit close event
2025-04-04T17:15:50.661276Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0
2025-04-04T17:15:50.661301Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: ReadyState, EndCleanup, isFinal: 1
2025-04-04T17:15:50.661325Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: unknown state, Cleanup temp tables: 0
2025-04-04T17:15:50.661394Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZTlkYzllZjYtMTkzZmYzMi1iZGE3Nzk3NC00YTRhNzBlOQ==, ActorId: [7:7489507887523101089:2331], ActorState: unknown state, Session actor destroyed
2025-04-04T17:15:50.728851Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzcxMTlkN2EtNDRhNDA4MGMtYmUxYThjY2UtNzlhNGVkMWY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzcxMTlkN2EtNDRhNDA4MGMtYmUxYThjY2UtNzlhNGVkMWY=
2025-04-04T17:15:50.729097Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzcxMTlkN2EtNDRhNDA4MGMtYmUxYThjY2UtNzlhNGVkMWY=, ActorId: [7:7489507921882841081:2744], ActorState: unknown state, session actor bootstrapped
2025-04-04T17:15:50.729545Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzcxMTlkN2EtNDRhNDA4MGMtYmUxYThjY2UtNzlhNGVkMWY=, ActorId: [7:7489507921882841081:2744], ActorState: ReadyState, TraceId: 01jr0wtg290rh0tk3x58v49jmj, received request, proxyRequestId: 71 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/.metadata/initialization/migrations`; rpcActor: [7:7489507921882841082:2745] database: /Root databaseId: /Root pool id: default
2025-04-04T17:15:50.729574Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzcxMTlkN2EtNDRhNDA4MGMtYmUxYThjY2UtNzlhNGVkMWY=, ActorId: [7:7489507921882841081:2744], ActorState: ReadyState, TraceId: 01jr0wtg290rh0tk3x58v49jmj, request placed into pool from cache: default
2025-04-04T17:15:50.729660Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzcxMTlkN2EtNDRhNDA4MGMtYmUxYThjY2UtNzlhNGVkMWY=, ActorId: [7:7489507921882841081:2744], ActorState: ExecuteState, TraceId: 01jr0wtg290rh0tk3x58v49jmj, Sending CompileQuery request
>> TConsoleConfigTests::TestAffectedConfigs [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD]
Test command err:
Starting YDB, grpc: 32143, msgbus: 10877
2025-04-04T17:12:58.664819Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507184609427816:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:58.666281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ffb/r3tmp/tmpNw0YBS/pdisk_1.dat 2025-04-04T17:12:59.045980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:59.046066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:59.051074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:59.073465Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32143, node 1 2025-04-04T17:12:59.115707Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.115735Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.261799Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:59.261815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:59.261819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:59.270730Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10877 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:59.627355Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:59.627401Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395833:2443] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:59.628380Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395833:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.675572Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395833:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:12:59.689211Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395833:2443] Handle TEvDescribeSchemeResult Forward to# [1:7489507188904395832:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:59.713482Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.713523Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:59.713598Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507188904395858:2454] 2025-04-04T17:12:59.817288Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.817372Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.817394Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.817461Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.817779Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.817922Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:12:59.817996Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:12:59.818149Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:12:59.819072Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.824059Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:12:59.824141Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395858:2454] txid# 281474976710657 SEND to# [1:7489507188904395857:2453] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-04-04T17:12:59.836012Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.836032Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:12:59.836056Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507188904395896:2488] 2025-04-04T17:12:59.838154Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.838203Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.838217Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.838298Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.838522Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.838591Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.838683Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:12:59.838798Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:12:59.839132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.840652Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:12:59.840718Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395896:2488] txid# 281474976710658 
SEND to# [1:7489507188904395895:2487] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:12:59.872765Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.872793Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:12:59.872830Z node 1 :TX_PROXY DEBUG: actor# [1:7489507184609428030:2114] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489507188904395917:2498] 2025-04-04T17:12:59.874813Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507188904395917:2498] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known ... Actor# [59:7489507923858616826:2527] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-04T17:15:50.173410Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616826:2527] txid# 281474976710660 SEND to# [59:7489507923858616825:2342] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-04T17:15:50.196285Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7489507923858616825:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:15:50.290662Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] Handle TEvProposeTransaction 2025-04-04T17:15:50.290706Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T17:15:50.290759Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7489507923858616905:2586] 2025-04-04T17:15:50.293302Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-04T17:15:50.293374Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:50.293394Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T17:15:50.293537Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:50.293570Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:50.294798Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:15:50.294905Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:50.295144Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:50.295302Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:50.295364Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T17:15:50.295509Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T17:15:50.298747Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T17:15:50.298882Z node 59 :TX_PROXY ERROR: Actor# [59:7489507923858616905:2586] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:50.298920Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616905:2586] txid# 281474976710661 SEND to# [59:7489507923858616825:2342] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T17:15:50.318915Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] Handle TEvProposeTransaction 2025-04-04T17:15:50.318952Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T17:15:50.318999Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7489507923858616929:2598] 2025-04-04T17:15:50.321413Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45648" 2025-04-04T17:15:50.321488Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:50.321508Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:50.321556Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:50.321838Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:50.321934Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:50.321992Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 
TabletId# 72057594046644480} 2025-04-04T17:15:50.322130Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T17:15:50.332673Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T17:15:50.332742Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616929:2598] txid# 281474976710662 SEND to# [59:7489507923858616928:2335] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T17:15:50.386780Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] Handle TEvProposeTransaction 2025-04-04T17:15:50.386823Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T17:15:50.386882Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7489507923858616962:2612] 2025-04-04T17:15:50.389577Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45680" 2025-04-04T17:15:50.389655Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:50.389679Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-04T17:15:50.389864Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:50.389915Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:50.389969Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:50.390294Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:50.390336Z node 59 :TX_PROXY ERROR: Actor# [59:7489507923858616962:2612] txid# 281474976710663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-04-04T17:15:50.390449Z node 59 :TX_PROXY ERROR: Actor# [59:7489507923858616962:2612] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-04T17:15:50.390477Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507923858616962:2612] txid# 281474976710663 SEND to# [59:7489507923858616961:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:15:50.390794Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZjQ3ZmJmZWUtYmM4Zjc1NmItYmFkYWE4OWMtN2UzMDExMzY=, ActorId: [59:7489507923858616947:2352], ActorState: ExecuteState, 
TraceId: 01jr0wtfq330vsy4qxy13rjxts, Create QueryResponse for error on request, msg:
2025-04-04T17:15:50.391034Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] Handle TEvExecuteKqpTransaction
2025-04-04T17:15:50.391057Z node 59 :TX_PROXY DEBUG: actor# [59:7489507902383779454:2112] TxId# 281474976710664 ProcessProposeKqpTransaction
2025-04-04T17:15:50.706022Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489507902383779427:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:15:50.706120Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
|89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
|89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
>> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink
>> KqpQueryService::DdlExecuteScript [GOOD]
>> KqpUserConstraint::KqpReadNull+UploadNull [GOOD]
>> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink
|89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
>> KqpDataIntegrityTrails::UpsertViaLegacyScripting+UseSink-Streaming
>> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink
>> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink
>> KqpDataIntegrityTrails::UpsertViaLegacyScripting+UseSink+Streaming
>> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD]
>> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault
|89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD]
Test command err:
2025-04-04T17:15:41.223237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:41.223722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:41.223780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006fa/r3tmp/tmpKChmLH/pdisk_1.dat 2025-04-04T17:15:41.592741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:41.640231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:41.682998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:41.683155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:41.694674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:41.776518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:41.839926Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:15:41.840166Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:41.878335Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:41.878486Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:41.880298Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:41.880379Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:41.880436Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:41.880876Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:41.881021Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:41.881103Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:15:41.891872Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:41.914480Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:41.914688Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:41.914819Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:15:41.914861Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:41.914891Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:41.914927Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:41.915350Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:41.915454Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:41.915611Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:41.915656Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:41.915700Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:41.915764Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:41.915834Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:15:41.916240Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:41.916479Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:41.916568Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:41.918285Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:41.929044Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:41.929160Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:42.068897Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:15:42.074481Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:42.074562Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:42.075353Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:42.075404Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:42.075487Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:42.075767Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:42.075929Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:42.076523Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:42.076598Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:42.078639Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:42.079090Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:15:42.080781Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:42.080825Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:42.081815Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:42.081897Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:42.083727Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:42.084075Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:42.084130Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:42.084186Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:42.084242Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:42.084290Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:42.084369Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:42.089036Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:42.089210Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:42.089257Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:42.098604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:42.098718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:42.098789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:42.104010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:42.109691Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:42.260903Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:42.263668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:42.328095Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:42.677311Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wt7mg4xv024emvvk1y4ja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUzNzYyMDMtNjUyZGZmZGEtNzA5NzBkMmMtOWZiZGEyZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:42.704849Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:15:42.705098Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:42.718163Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 86224037888 2025-04-04T17:15:51.683255Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:51.683296Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:51.683347Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:51.683788Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:672:2573], sessionId# [0:0:0] 2025-04-04T17:15:51.683950Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:51.684185Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:51.684290Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:51.686456Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:51.698871Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:51.699000Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:51.863727Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:51.865107Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:15:51.865168Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:51.865831Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:51.865892Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:15:51.865952Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:15:51.866195Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:15:51.870698Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:51.871155Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:51.871233Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:15:51.871765Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:15:51.872206Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:51.873908Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:15:51.873970Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:51.887187Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:15:51.887310Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:51.888764Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:51.888835Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:51.888900Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:15:51.888989Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:15:51.889045Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:15:51.889143Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:51.889817Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:51.892282Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:15:51.892355Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:15:51.892831Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:15:51.900975Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:736:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:51.901086Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:746:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:51.901190Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:51.906562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:51.913374Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:52.075204Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:52.078832Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:750:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:52.116605Z node 3 :TX_PROXY ERROR: Actor# [3:824:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:52.266647Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wth6v44816r716t50jjpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmQyNDI2ZGMtNTQ3NDEwNjEtZTE2NGY4MTMtYWY2MDNjZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:52.267140Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:851:2687], serverId# [3:852:2688], sessionId# [0:0:0] 2025-04-04T17:15:52.267355Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:52.282878Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:52.283026Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:52.286854Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:52.287829Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-04-04T17:15:52.302940Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-04-04T17:15:52.303026Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:52.303405Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:52.303447Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-04-04T17:15:52.303584Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:859:2694], serverId# [3:860:2695], sessionId# [0:0:0] 2025-04-04T17:15:52.303692Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:52.303751Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:52.303803Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:52.303864Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:52.304747Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:52.305201Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:52.305369Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:52.305414Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:52.305457Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:52.305682Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:52.305747Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-04-04T17:15:52.306326Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:15:52.306575Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:52.306706Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-04-04T17:15:52.306748Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-04-04T17:15:52.308368Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:52.308414Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-04-04T17:15:52.308735Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:52.308766Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:52.308797Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:52.308934Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:52.308988Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:52.309033Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2025-04-04T17:10:16.312922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:16.312997Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:16.383029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:17.535345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:17.535414Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:17.573370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:18.641559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:18.641628Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:18.684953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:19.753536Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:19.753600Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:19.798626Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:20.834942Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:20.835015Z node 5 :IMPORT WARN: 
Table profiles were not loaded 2025-04-04T17:10:20.874824Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:22.121936Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:22.122006Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:22.172632Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:23.271737Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:23.271815Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:23.313364Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:24.456382Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:24.456460Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:24.505489Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:26.099499Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:26.099574Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:26.151344Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:27.887137Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:27.887212Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:27.932624Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:29.672700Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:29.672768Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:29.718263Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:31.477243Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:31.477332Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:31.535689Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:33.466372Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:33.466435Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:33.509307Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:35.418394Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:35.418483Z node 14 :IMPORT WARN: Table 
profiles were not loaded 2025-04-04T17:10:35.467703Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:37.165329Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:37.165408Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:37.213486Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:39.038574Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:39.038664Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:39.081647Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:41.209320Z node 17 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:41.209432Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:41.311688Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:41.844644Z node 17 :CMS_CONFIGS ERROR: Unexpected config sender died for subscription id=1 2025-04-04T17:10:42.577740Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:42.577824Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:42.624088Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:43.469146Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:10:43.469246Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:43.582795Z node 18 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-04-04T17:10:44.184207Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:44.184295Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:44.238361Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:45.060451Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:10:45.060573Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:45.165119Z node 19 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[19:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-04-04T17:10:45.783853Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:45.783946Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:45.859964Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 
2025-04-04T17:10:48.824949Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:48.825039Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:48.882261Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:52.149361Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:52.149461Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:52.201092Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:53.506022Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:53.506113Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:53.562988Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:54.664885Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:10:54.664973Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:10:54.708681Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:10:59.842865Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:10:59.842981Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:09.282136Z node 24 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2025-04-04T17:14:10.242514Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:14:10.242621Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:10.295836Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:14:15.626681Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:14:15.626784Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:48.187429Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:48.187528Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:48.233866Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:15:49.646853Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:49.646935Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:49.695817Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:15:50.822027Z node 28 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:50.822150Z node 28 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:15:50.868212Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:15:52.309067Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:52.309164Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:52.362699Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-UseSink+Streaming ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2025-04-04T17:15:50.147041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:50.147676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:50.147753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000869/r3tmp/tmppH37UW/pdisk_1.dat 2025-04-04T17:15:50.641456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:50.691987Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:50.747822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:50.750778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:50.767294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:50.863560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:51.376818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:51.376950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:876:2717], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:51.378138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:51.395342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:51.561971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2720], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:15:51.647228Z node 1 :TX_PROXY ERROR: Actor# [1:961:2771] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:52.851427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wtgpe88p67db10ywyggrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM5M2YxZDItMTc4MDQwMC1jMmYzYmNhLWQ5N2IzNzY1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:15:52.930525Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2792], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wtgpe88p67db10ywyggrk. SessionId : ydb://session/3?node_id=1&id=ZWM5M2YxZDItMTc4MDQwMC1jMmYzYmNhLWQ5N2IzNzY1. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-04-04T17:15:52.933374Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2792], TxId: 281474976715660, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wtgpe88p67db10ywyggrk. SessionId : ydb://session/3?node_id=1&id=ZWM5M2YxZDItMTc4MDQwMC1jMmYzYmNhLWQ5N2IzNzY1. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-04-04T17:15:52.942447Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:993:2793], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01jr0wtgpe88p67db10ywyggrk. SessionId : ydb://session/3?node_id=1&id=ZWM5M2YxZDItMTc4MDQwMC1jMmYzYmNhLWQ5N2IzNzY1. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-04-04T17:15:52.954079Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWM5M2YxZDItMTc4MDQwMC1jMmYzYmNhLWQ5N2IzNzY1, ActorId: [1:863:2710], ActorState: ExecuteState, TraceId: 01jr0wtgpe88p67db10ywyggrk, Create QueryResponse for error on request, msg: 2025-04-04T17:15:52.957004Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wtgpe88p67db10ywyggrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM5M2YxZDItMTc4MDQwMC1jMmYzYmNhLWQ5N2IzNzY1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.8%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.8%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.8%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 62806, MsgBus: 17258 2025-04-04T17:13:50.893955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507409809586065:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:50.894911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188f/r3tmp/tmpZZF6Tm/pdisk_1.dat 2025-04-04T17:13:51.342733Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:51.344115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:51.344203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:51.346129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62806, node 1 2025-04-04T17:13:51.426675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:51.426690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:51.426695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:51.426780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17258 TClient is connected to server localhost:17258 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:51.947306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.978774Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:51.988077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.153290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.313975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:52.398182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:54.071429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426989456886:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.071548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.358842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.391490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.426472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.463136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.504243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.546523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:54.644219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426989457398:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.644314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.645393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507426989457403:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:54.649390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:54.658091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507426989457405:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:54.736663Z node 1 :TX_PROXY ERROR: Actor# [1:7489507426989457460:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:55.695186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:13:55.826033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:55.826158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489507431284425158:2500];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:13:55.826175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:55.826221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489507431284425158:2500];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:13:55.826505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489507431284425158:2500];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:55.826508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:13:55.826633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:55.826649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489507431284425158:2500];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:13:55.826758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:55.826782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489507431284425158:2500];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:13:55.826880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-04T17:13:55.826884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489507431284425158:2500];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:13:55.826993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:55.827118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7489507431284425159:2501];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:13:55.827119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489507431284425158:2500];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:13:55.827238Z node 1 ... 080Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859489:3816] txid# 281474976715693, issues: { message: "Member account not found" severity: 1 } 2025-04-04T17:15:44.183280Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjdkM2M1NjEtYmM1NTgyMTAtY2M4ZGJiNmUtOTkzNzEwYjc=, ActorId: [2:7489507897873859471:2543], ActorState: ExecuteState, TraceId: 01jr0wt9n24tcmdkr63nzc0tkg, Create QueryResponse for error on request, msg: 2025-04-04T17:15:44.209407Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859520:3838] txid# 281474976715695, issues: { message: "Role \"user1\" is already a member of role \"group1\"" issue_code: 2 severity: 3 } 2025-04-04T17:15:44.267444Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859562:3861] txid# 281474976715698, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-04T17:15:44.293761Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859585:3874] txid# 281474976715700, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-04T17:15:44.322143Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859599:3881] txid# 281474976715701, issues: { message: "Role \"user2\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-04-04T17:15:44.381440Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859655:3918] txid# 281474976715704, issues: { message: "Group already exists" severity: 1 } 2025-04-04T17:15:44.381684Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzk0Y2I1MTUtMmQ5NDA3OGYtMmZiMmRmNGQtZmRhMjdiMzE=, ActorId: [2:7489507897873859649:2567], ActorState: ExecuteState, TraceId: 01jr0wt9ve1pyy9kb0w95yewpm, Create QueryResponse for error on request, msg: 2025-04-04T17:15:44.412467Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859675:3928] txid# 281474976715706, issues: { message: "Role \"user1\" is already a member of role \"group3\"" issue_code: 2 severity: 3 } 2025-04-04T17:15:44.527025Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859737:3968] txid# 281474976715710, issues: { message: "Member account not found" severity: 1 } 2025-04-04T17:15:44.527347Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODBkYzE3ODUtNjY1ZjFkNDktOTkxYWEyNi1kNTdmZWNm, ActorId: [2:7489507897873859697:2577], ActorState: ExecuteState, TraceId: 01jr0wt9y47qq98a59hkvx7aea, Create QueryResponse for error on request, 
msg: 2025-04-04T17:15:44.556220Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859754:3976] txid# 281474976715712, issues: { message: "Group already exists" severity: 1 } 2025-04-04T17:15:44.556639Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU2YzQ4NzctNWM2YTU4MjEtNzBkZjVkZmYtYzkzYzY5Yg==, ActorId: [2:7489507897873859748:2581], ActorState: ExecuteState, TraceId: 01jr0wta0vfsa3nbq8scsm2fnj, Create QueryResponse for error on request, msg: 2025-04-04T17:15:44.589357Z node 2 :TX_PROXY ERROR: Actor# [2:7489507897873859772:3985] txid# 281474976715714, issues: { message: "Role \"user1\" is already a member of role \"group4\"" issue_code: 2 severity: 3 } Trying to start YDB, gRPC: 20172, MsgBus: 29107 2025-04-04T17:15:45.553321Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507902310159827:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:45.554131Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00188f/r3tmp/tmpGTlkpE/pdisk_1.dat 2025-04-04T17:15:45.675422Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:45.691429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:45.691509Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:45.692819Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20172, node 3 2025-04-04T17:15:45.758798Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:45.758820Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:45.758829Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:45.758949Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29107 TClient is connected to server localhost:29107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:46.212425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:15:46.232668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:46.315432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:46.505559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:46.609057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:15:49.231154Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507919490030737:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:49.231246Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:49.262447Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:15:49.304196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:15:49.345475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:15:49.383298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:15:49.426765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:15:49.504124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:15:49.569856Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507919490031253:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:49.569968Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:49.570377Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489507919490031258:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:49.575310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:15:49.590884Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489507919490031260:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:15:49.657026Z node 3 :TX_PROXY ERROR: Actor# [3:7489507919490031315:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:50.553592Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489507902310159827:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:50.553667Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:50.702228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:15:50.704889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:15:50.707508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:15:51.136248Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 22515, msgbus: 15814 2025-04-04T17:13:00.328804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507194408935656:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:00.328895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f73/r3tmp/tmpiKOd6e/pdisk_1.dat 2025-04-04T17:13:00.717813Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22515, node 1 2025-04-04T17:13:00.739498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:00.739738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:00.748358Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:13:00.754762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:00.764545Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:13:00.798892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:00.798914Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-04-04T17:13:00.798921Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:00.799027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15814 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:13:01.013572Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] Handle TEvNavigate describe path dc-1 2025-04-04T17:13:01.013635Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903680:2440] HANDLE EvNavigateScheme dc-1 2025-04-04T17:13:01.015037Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903680:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.045755Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903680:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:13:01.056573Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903680:2440] Handle TEvDescribeSchemeResult Forward to# [1:7489507198703903679:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:13:01.082456Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] Handle TEvProposeTransaction 2025-04-04T17:13:01.082491Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:13:01.082627Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507198703903693:2446] 2025-04-04T17:13:01.185941Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:13:01.186038Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:13:01.186056Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:13:01.186129Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:01.186495Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.186728Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:13:01.186862Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:13:01.187130Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:13:01.187805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:01.190346Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:13:01.190424Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903693:2446] txid# 281474976710657 SEND to# [1:7489507198703903692:2445] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T17:13:01.217174Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] Handle TEvProposeTransaction 2025-04-04T17:13:01.217213Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:13:01.217273Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507198703903733:2482] 2025-04-04T17:13:01.219578Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:13:01.219630Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:13:01.219645Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:13:01.219686Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:01.220031Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.220169Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:13:01.220220Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:13:01.220356Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:13:01.220711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:01.222470Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:13:01.222507Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903733:2482] txid# 281474976710658 SEND to# [1:7489507198703903732:2481] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:13:01.263558Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] Handle TEvProposeTransaction 2025-04-04T17:13:01.263586Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:13:01.263621Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194408935881:2114] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489507198703903751:2492] 2025-04-04T17:13:01.265874Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198703903751:2492] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known ... Actor# [59:7489507933785664451:2533] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-04-04T17:15:52.297016Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664451:2533] txid# 281474976710660 SEND to# [59:7489507933785664450:2342] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-04-04T17:15:52.313281Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7489507933785664450:2342], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:15:52.413586Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] Handle TEvProposeTransaction 2025-04-04T17:15:52.413623Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T17:15:52.413676Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7489507933785664530:2592] 2025-04-04T17:15:52.416475Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-04-04T17:15:52.416531Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:52.416552Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T17:15:52.416686Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:52.416714Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:52.417353Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:15:52.417444Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:52.417636Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:52.417775Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:52.417824Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T17:15:52.417941Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T17:15:52.423407Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T17:15:52.423547Z node 59 :TX_PROXY ERROR: Actor# [59:7489507933785664530:2592] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:52.423580Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664530:2592] txid# 281474976710661 SEND to# [59:7489507933785664450:2342] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T17:15:52.441738Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] Handle TEvProposeTransaction 2025-04-04T17:15:52.441786Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T17:15:52.441833Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7489507933785664554:2604] 2025-04-04T17:15:52.444432Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50762" 2025-04-04T17:15:52.444501Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:52.444523Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:52.444572Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:52.444877Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:52.444976Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:52.445028Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 
TabletId# 72057594046644480} 2025-04-04T17:15:52.445156Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T17:15:52.458359Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T17:15:52.458417Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664554:2604] txid# 281474976710662 SEND to# [59:7489507933785664553:2335] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T17:15:52.536780Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] Handle TEvProposeTransaction 2025-04-04T17:15:52.536816Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T17:15:52.536852Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7489507933785664587:2618] 2025-04-04T17:15:52.539353Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50792" 2025-04-04T17:15:52.539420Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:52.539441Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-04-04T17:15:52.539566Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:52.539605Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:52.539646Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:52.539884Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:52.539910Z node 59 :TX_PROXY ERROR: Actor# [59:7489507933785664587:2618] txid# 281474976710663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-04-04T17:15:52.539998Z node 59 :TX_PROXY ERROR: Actor# [59:7489507933785664587:2618] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-04-04T17:15:52.540026Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507933785664587:2618] txid# 281474976710663 SEND to# [59:7489507933785664586:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:15:52.540501Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NjYxZmFmMGUtYWI5MjE3ZmUtZmE5NzBmNmMtZGRmYjgzY2E=, ActorId: [59:7489507933785664572:2352], ActorState: ExecuteState, 
TraceId: 01jr0wtht2crrvq6c71ecjghwc, Create QueryResponse for error on request, msg: 2025-04-04T17:15:52.540970Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] Handle TEvExecuteKqpTransaction 2025-04-04T17:15:52.540991Z node 59 :TX_PROXY DEBUG: actor# [59:7489507912310827056:2109] TxId# 281474976710664 ProcessProposeKqpTransaction 2025-04-04T17:15:52.738985Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489507912310827040:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:52.739070Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-UseSink-Streaming >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession |89.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 21267, msgbus: 19652 2025-04-04T17:12:58.605227Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507183203844483:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:58.612997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f9a/r3tmp/tmpPH9wag/pdisk_1.dat 2025-04-04T17:12:59.024862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:59.026352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:59.039594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:59.068277Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21267, node 1 2025-04-04T17:12:59.114507Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.114525Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 
Strong=0 2025-04-04T17:12:59.262887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:59.262912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:59.262922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:59.263015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19652 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:59.630081Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:59.630124Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812539:2451] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:59.630444Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812539:2451] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.678352Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812539:2451] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:12:59.689058Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812539:2451] Handle TEvDescribeSchemeResult Forward to# [1:7489507187498812538:2450] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:12:59.710829Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.710873Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:59.711039Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507187498812547:2458] 2025-04-04T17:12:59.790321Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.790444Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:12:59.790465Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.790529Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.790926Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.791099Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:12:59.791178Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:12:59.791401Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:12:59.792226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.794952Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:12:59.795044Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812547:2458] txid# 281474976710657 SEND to# [1:7489507187498812546:2457] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T17:12:59.814904Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] Handle TEvProposeTransaction 2025-04-04T17:12:59.814932Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:12:59.814980Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507187498812585:2492] 2025-04-04T17:12:59.817704Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.817747Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:12:59.817760Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.817808Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.818084Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.818167Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.818216Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:12:59.818338Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:12:59.818804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.822903Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:12:59.822938Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187498812585:2492] txid# 281474976710658 SEND to# [1:7489507187498812584:2491] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:13:01.356627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507196088747270:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:01.356647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507196088747259:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:01.356735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:01.357171Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183203844711:2114] H ... strator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:53.966175Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:53.966226Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:53.966549Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:53.966666Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:53.966720Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T17:15:53.966853Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T17:15:53.976932Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T17:15:53.976991Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507934646736961:2583] txid# 281474976710661 SEND to# [59:7489507934646736960:2333] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T17:15:54.229388Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489507917466866906:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:54.229459Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:54.253704Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] Handle TEvProposeTransaction 2025-04-04T17:15:54.253734Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T17:15:54.253787Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7489507938941704279:2599] 2025-04-04T17:15:54.256380Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53110" 2025-04-04T17:15:54.256469Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2025-04-04T17:15:54.256492Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:54.256552Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:54.256936Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:54.257041Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:54.257103Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T17:15:54.257247Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T17:15:54.257797Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:15:54.261310Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T17:15:54.261369Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704279:2599] txid# 281474976710662 SEND to# [59:7489507938941704278:2347] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T17:15:54.342712Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] Handle TEvProposeTransaction 2025-04-04T17:15:54.342758Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T17:15:54.342814Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7489507938941704314:2620] 2025-04-04T17:15:54.345381Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53140" 2025-04-04T17:15:54.345446Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:54.345469Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:54.345531Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:54.345865Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:54.345972Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:54.346033Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-04T17:15:54.346175Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 HANDLE EvClientConnected 2025-04-04T17:15:54.355533Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-04T17:15:54.355594Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704314:2620] txid# 281474976710663 SEND to# [59:7489507938941704313:2349] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-04-04T17:15:54.434344Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] Handle TEvProposeTransaction 2025-04-04T17:15:54.434387Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] TxId# 281474976710664 ProcessProposeTransaction 2025-04-04T17:15:54.434437Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7489507938941704344:2633] 2025-04-04T17:15:54.436972Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NCwiaWF0IjoxNzQzNzg2OTU0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.VzqkMTv0nXAX0-jHhYWXuP3vAPqUZhwCq42im3gQHJ1FqHoSQO7omiu4vnldauUR_QHUaumSinnaJr2StvQ1CYIm81X7jVhoKdMEo_-rAz6d1TNtEYVmS1zEh3x9cTYLPpk941K06spXgAjDa94s63NCIHqpZUY-99YkncJQdQtUz6XYwRU3deQh5XM6Ixq9BkA0dogMEhyFGI0SeafJqKoZTnd26-gRV6S9OpOIaiRm3W9EFvRe_fGvwP4HEEnxQFaA6kDqJFrH6dUpdVqZ3THBEcrHnQ7PY1JVxVBtG2CmvueS_oUhgJT650UZsmDR9NJJ7v-lHl-1qEwwJg1XVw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NCwiaWF0IjoxNzQzNzg2OTU0LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53158" 2025-04-04T17:15:54.437046Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:54.437067Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T17:15:54.437223Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:54.437268Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:54.437307Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:54.437558Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:54.437585Z node 59 :TX_PROXY ERROR: Actor# [59:7489507938941704344:2633] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-04-04T17:15:54.437672Z node 59 :TX_PROXY ERROR: Actor# [59:7489507938941704344:2633] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T17:15:54.437699Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507938941704344:2633] txid# 281474976710664 SEND to# [59:7489507938941704342:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:15:54.438006Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=YWYwMWI2ODgtYjZmNGQ1MzUtMmRmMTBlZGItOWQ4YzBlZjI=, ActorId: [59:7489507938941704332:2361], ActorState: ExecuteState, TraceId: 01jr0wtknf8qam2gpyjncvgwjr, Create QueryResponse for error on request, msg: 2025-04-04T17:15:54.438348Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] Handle TEvExecuteKqpTransaction 2025-04-04T17:15:54.438393Z node 59 :TX_PROXY DEBUG: actor# [59:7489507917466866895:2111] TxId# 281474976710665 ProcessProposeKqpTransaction >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] Test command err: Starting YDB, grpc: 25002, msgbus: 28092 2025-04-04T17:12:58.636565Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507183531141392:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:58.637907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fd6/r3tmp/tmpQbu83L/pdisk_1.dat 2025-04-04T17:12:59.050371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:59.050482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:59.059953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:59.069424Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25002, node 1 2025-04-04T17:12:59.260708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:59.260736Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2025-04-04T17:12:59.260747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:59.260901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28092 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:59.613558Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:59.613625Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109325:2452] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:59.615909Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109325:2452] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.655406Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109325:2452] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:12:59.665642Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109325:2452] Handle TEvDescribeSchemeResult Forward to# [1:7489507187826109324:2451] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:12:59.682548Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.682580Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:59.689925Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507187826109348:2461] 2025-04-04T17:12:59.802762Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.802865Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.802897Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.802966Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.803538Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.803760Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:12:59.804823Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:12:59.805133Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:12:59.806171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.809229Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:12:59.809334Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109348:2461] txid# 281474976710657 SEND to# [1:7489507187826109347:2460] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
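Note on the fixture these AlterLoginProtect traces share: root@builtin bootstraps the /dc-1 subdomain (the ESchemeOpAlterSubDomain record above), then creates the acting and target users via ESchemeOpAlterLogin and widens the ACL via ESchemeOpModifyACL before the protected operation is attempted. A minimal YQL sketch of that setup, assuming plain CREATE USER statements (inferred from the CreateUser records in the traces, not taken from the test source):

    -- run as root@builtin, the owner of /dc-1; names come from the log
    CREATE USER ordinaryuser PASSWORD 'passwd';  -- non-admin actor issuing the later request
    CREATE USER targetuser PASSWORD 'passwd';    -- account the actor attempts to manage

The subsequent ModifyACL/DiffACL records grant ordinaryuser access to the database root, so the rejection seen later in each trace is an authorization decision about user management specifically, not a missing connect permission.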
2025-04-04T17:12:59.822585Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.822645Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:12:59.822695Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507187826109390:2499] 2025-04-04T17:12:59.824991Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.825034Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.825047Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.825099Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.825378Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.825480Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.825522Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:12:59.825660Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:12:59.826068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.827888Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:12:59.827928Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109390:2499] txid# 281474976710658 SEND to# [1:7489507187826109389:2498] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:12:59.856884Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.856913Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:12:59.856963Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183531141514:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489507187826109411:2509] 2025-04-04T17:12:59.859347Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187826109411:2509] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44906" 2025-04-04T17:12:59.859404Z node 1 :TX_PROXY DEBUG: ... ministrator: 1 2025-04-04T17:15:55.204723Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:55.204783Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:55.205105Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:55.205210Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:55.205270Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-04T17:15:55.205414Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] txid# 281474976715661 HANDLE EvClientConnected 2025-04-04T17:15:55.224015Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-04T17:15:55.224081Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674376:2583] txid# 281474976715661 SEND to# [59:7489507946365674375:2333] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-04T17:15:55.326077Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489507924890836873:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:55.326157Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:55.552538Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] Handle TEvProposeTransaction 2025-04-04T17:15:55.552592Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] TxId# 281474976715662 ProcessProposeTransaction 2025-04-04T17:15:55.552661Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7489507946365674405:2602] 2025-04-04T17:15:55.555506Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } 
UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51372" 2025-04-04T17:15:55.555582Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:55.555604Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:55.555660Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:55.556010Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:55.556118Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:55.556183Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-04T17:15:55.556337Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 HANDLE EvClientConnected 2025-04-04T17:15:55.556908Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:15:55.561116Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-04T17:15:55.561173Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674405:2602] txid# 281474976715662 SEND to# [59:7489507946365674404:2348] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-04T17:15:55.626378Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] Handle TEvProposeTransaction 2025-04-04T17:15:55.626418Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] TxId# 281474976715663 ProcessProposeTransaction 2025-04-04T17:15:55.626462Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7489507946365674443:2623] 2025-04-04T17:15:55.628782Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51396" 2025-04-04T17:15:55.628842Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:55.628861Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7489507946365674443:2623] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:55.628907Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:55.629178Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:55.629277Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:55.629334Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-04T17:15:55.629472Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 HANDLE EvClientConnected 2025-04-04T17:15:55.639639Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-04T17:15:55.639704Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674443:2623] txid# 281474976715663 SEND to# [59:7489507946365674442:2350] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-04T17:15:55.736970Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] Handle TEvProposeTransaction 2025-04-04T17:15:55.737010Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] TxId# 281474976715664 ProcessProposeTransaction 2025-04-04T17:15:55.737073Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7489507946365674471:2635] 2025-04-04T17:15:55.739017Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NSwiaWF0IjoxNzQzNzg2OTU1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.Jz2ZX7IuO_4DkFVGjI0ulN47D-__yZf1RVoRpVACtuWvIqIdcDnJiXseCo3MM2oouRJy-KXL2RMu5Xjfs5F3_yUCMgVtgroxy-OcdO5E2CrB-fh76KcrQZih00usiJnhN_BCYyWzL04x6fMSDxbguj-wtxCfiCPxcWNw-jZHmaK5LGSJHF-mC5_mgM95sNXJxBIvDGYsQpCjzQk--4Ulc7N60RHMIsaUNZ62NoviRRXDyo5ZXfozZxbW2Q2dYw9VyF2NcTfiTZkjW1kYsuQfkt0ayK_AsD5oT-JRz0W7UsROALFinuozu-Sb64ogLmJGuXWh35R0njmp2ZGthABcfQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NSwiaWF0IjoxNzQzNzg2OTU1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51414" 2025-04-04T17:15:55.739095Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:55.739112Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser 
IsClusterAdministrator: 0 2025-04-04T17:15:55.739238Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:55.739272Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:55.739305Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:55.739482Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:55.739500Z node 59 :TX_PROXY ERROR: Actor# [59:7489507946365674471:2635] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-04-04T17:15:55.740325Z node 59 :TX_PROXY ERROR: Actor# [59:7489507946365674471:2635] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T17:15:55.740356Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507946365674471:2635] txid# 281474976715664 SEND to# [59:7489507946365674470:2362] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:15:55.740810Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=OTNiZDI2NmYtZmUwZjQyODYtNDRhZTY4ODAtYTBhZGYyODU=, ActorId: [59:7489507946365674461:2362], ActorState: ExecuteState, TraceId: 01jr0wtmy906c69tp1f51fmtwe, Create QueryResponse for error on request, msg: 2025-04-04T17:15:55.741031Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] Handle TEvExecuteKqpTransaction 2025-04-04T17:15:55.741045Z node 59 :TX_PROXY DEBUG: actor# [59:7489507924890836887:2097] TxId# 281474976715665 ProcessProposeKqpTransaction >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 28512, msgbus: 6298 2025-04-04T17:13:01.017419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507196717660097:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:01.017495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f64/r3tmp/tmp1VCgqL/pdisk_1.dat 2025-04-04T17:13:01.367790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:01.394745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:01.394823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28512, node 1 2025-04-04T17:13:01.417284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:01.447745Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-04T17:13:01.447779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:01.447787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:01.447933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6298 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:13:01.677844Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:13:01.677939Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660846:2447] HANDLE EvNavigateScheme dc-1 2025-04-04T17:13:01.679026Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660846:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.721067Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660846:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:13:01.736286Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660846:2447] Handle TEvDescribeSchemeResult Forward to# [1:7489507196717660845:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:13:01.758343Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] Handle TEvProposeTransaction 2025-04-04T17:13:01.758372Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:13:01.758496Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507196717660861:2455] 2025-04-04T17:13:01.857267Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:13:01.857351Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T17:13:01.857370Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:13:01.857447Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:01.857771Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.857913Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:13:01.858005Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:13:01.858153Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:13:01.858974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:01.861485Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:13:01.861544Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660861:2455] txid# 281474976710657 SEND to# [1:7489507196717660860:2454] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T17:13:01.880330Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] Handle TEvProposeTransaction 2025-04-04T17:13:01.880357Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:13:01.880398Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507196717660903:2493] 2025-04-04T17:13:01.882813Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:13:01.882873Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T17:13:01.882891Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:13:01.882946Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:01.883211Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.883316Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:13:01.883363Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:13:01.883533Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:13:01.883924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:01.885983Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:13:01.886028Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660903:2493] txid# 281474976710658 SEND to# [1:7489507196717660902:2492] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:13:01.933351Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] Handle TEvProposeTransaction 2025-04-04T17:13:01.933392Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T17:13:01.933460Z node 1 :TX_PROXY DEBUG: actor# [1:7489507196717660336:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7489507196717660921:2503] 2025-04-04T17:13:01.935824Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507196717660921:2503] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000\n!\010\000\022\035\010\001\020\200\010\032\024ordinaryuser@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56208" 2025-04-04T17:13:01.935914Z node 1 :TX_PROXY DEBUG: Acto ... LE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:56.423313Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130549:2570] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-04-04T17:15:56.423454Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130549:2570] txid# 281474976715660 HANDLE EvClientConnected 2025-04-04T17:15:56.426612Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130549:2570] txid# 281474976715660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T17:15:56.426742Z node 59 :TX_PROXY ERROR: Actor# [59:7489507949422130549:2570] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:56.426775Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130549:2570] txid# 281474976715660 SEND to# [59:7489507949422130475:2341] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-04-04T17:15:56.443917Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] Handle TEvProposeTransaction 2025-04-04T17:15:56.443952Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] TxId# 281474976715661 ProcessProposeTransaction 2025-04-04T17:15:56.443999Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7489507949422130573:2582] 2025-04-04T17:15:56.446754Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57926" 2025-04-04T17:15:56.446816Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:56.446837Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7489507949422130573:2582] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:56.446881Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:56.447160Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:56.447252Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:56.447304Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-04T17:15:56.447431Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 HANDLE EvClientConnected 2025-04-04T17:15:56.455696Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-04T17:15:56.455750Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130573:2582] txid# 281474976715661 SEND to# [59:7489507949422130572:2333] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-04T17:15:56.527881Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] Handle TEvProposeTransaction 2025-04-04T17:15:56.527921Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] TxId# 281474976715662 ProcessProposeTransaction 2025-04-04T17:15:56.527977Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7489507949422130593:2596] 2025-04-04T17:15:56.530738Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57942" 2025-04-04T17:15:56.530810Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:56.530832Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:56.530889Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:56.531196Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:56.531285Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 
DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:56.531341Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-04T17:15:56.531473Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 HANDLE EvClientConnected 2025-04-04T17:15:56.531975Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:15:56.537743Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-04T17:15:56.537799Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130593:2596] txid# 281474976715662 SEND to# [59:7489507949422130592:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-04T17:15:56.543414Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7489507927947293085:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:56.543687Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:56.626702Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] Handle TEvProposeTransaction 2025-04-04T17:15:56.626739Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] TxId# 281474976715663 ProcessProposeTransaction 2025-04-04T17:15:56.626793Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7489507949422130633:2617] 2025-04-04T17:15:56.629424Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NiwiaWF0IjoxNzQzNzg2OTU2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.Rc3CYeT07LvSf6chQup8TDgKMEOXOwiYIlzgZpT5XocplRUpVDVAfqJ3LT4AgnqbnbIHcxCFSAzfq7HkPJ1W4DRHj8yDHwnvP8QMAzLLJFBQKDRFcp4fdLIgWAgKT6V2ApmtaoHZeaoHAzOeVdsTsJhJ7hewqrKrraW-9mIk3Y8SaXTmuAKz2pOzq248I8rw1kOJuQ4tp7rkMKLMy-L37SaxJ3J5iXmOUkghDX0SkoUPrJYg6nhBg5Ex0YqvvjW7cEHfEUJ0R3hjw0J6SKsUvuOnMLie3sOgm4pkXEB7RweWYIH3N7t5aiUa3tHAKHdPE5taNr-6QeQqHBdJWfCmzQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NiwiaWF0IjoxNzQzNzg2OTU2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57974" 2025-04-04T17:15:56.629492Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:56.629513Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser 
IsClusterAdministrator: 0 2025-04-04T17:15:56.629722Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:56.629787Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:56.629852Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:56.630183Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:56.630225Z node 59 :TX_PROXY ERROR: Actor# [59:7489507949422130633:2617] txid# 281474976715663, Access denied for ordinaryuser, attempt to manage user 2025-04-04T17:15:56.630370Z node 59 :TX_PROXY ERROR: Actor# [59:7489507949422130633:2617] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T17:15:56.630405Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949422130633:2617] txid# 281474976715663 SEND to# [59:7489507949422130632:2354] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:15:56.630786Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NmYzNjA2YjgtNGM3MDFhNmMtYTE5OTc5NTQtNjRjMmFkZGE=, ActorId: [59:7489507949422130618:2354], ActorState: ExecuteState, TraceId: 01jr0wtnszfs185ga0gte34h5f, Create QueryResponse for error on request, msg: 2025-04-04T17:15:56.631226Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] Handle TEvExecuteKqpTransaction 2025-04-04T17:15:56.631255Z node 59 :TX_PROXY DEBUG: actor# [59:7489507927947293297:2109] TxId# 281474976715664 ProcessProposeKqpTransaction >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+UseSink+Streaming [GOOD] >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+UseSink-Streaming [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 21796, msgbus: 19466 2025-04-04T17:12:58.645709Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507183188599494:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:12:58.646928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fc3/r3tmp/tmp3IlbzL/pdisk_1.dat 2025-04-04T17:12:59.048915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:12:59.049002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:12:59.060013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:12:59.094880Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21796, node 1 2025-04-04T17:12:59.119754Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 
2025-04-04T17:12:59.120169Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:12:59.262776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:12:59.262816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:12:59.262824Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:12:59.262934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19466 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:12:59.630853Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:12:59.630891Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567372:2439] HANDLE EvNavigateScheme dc-1 2025-04-04T17:12:59.631932Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567372:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.666185Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567372:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:12:59.676539Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567372:2439] Handle TEvDescribeSchemeResult Forward to# [1:7489507187483567371:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 
ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:12:59.690055Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.690081Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:12:59.690181Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507187483567395:2448] 2025-04-04T17:12:59.773142Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.773239Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.773265Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.773337Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.773675Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.773830Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:12:59.773926Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:12:59.774072Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:12:59.774975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.782340Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:12:59.782424Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567395:2448] txid# 281474976710657 SEND to# [1:7489507187483567394:2447] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T17:12:59.799839Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] Handle TEvProposeTransaction 2025-04-04T17:12:59.799873Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:12:59.799908Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507187483567435:2484] 2025-04-04T17:12:59.802070Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:12:59.802118Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:12:59.802133Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:12:59.802201Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:12:59.802429Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:12:59.802511Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:12:59.802554Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:12:59.802673Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:12:59.803140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:12:59.805212Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:12:59.805253Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507187483567435:2484] txid# 281474976710658 SEND to# [1:7489507187483567434:2483] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:13:01.550270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507196073502109:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:01.550273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507196073502121:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:01.550362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:01.550692Z node 1 :TX_PROXY DEBUG: actor# [1:7489507183188599575:2115] H ... wd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60516" 2025-04-04T17:15:56.444483Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:56.444506Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:56.444557Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:56.444947Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:56.445048Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:56.445110Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-04-04T17:15:56.445254Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 HANDLE EvClientConnected 2025-04-04T17:15:56.455286Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-04-04T17:15:56.455347Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900313:2586] txid# 281474976715661 SEND to# [59:7489507949853900312:2334] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-04-04T17:15:56.628475Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] Handle TEvProposeTransaction 2025-04-04T17:15:56.628508Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] TxId# 281474976715662 ProcessProposeTransaction 2025-04-04T17:15:56.628553Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7489507949853900336:2600] 2025-04-04T17:15:56.631120Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60528" 2025-04-04T17:15:56.631197Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
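=== Note: the ESchemeOpAlterLogin { CreateUser } and ESchemeOpModifyACL records in this trace are the schemeshard-level form of ordinary YQL user management. A minimal sketch of the statements that produce this kind of operation (the user names mirror the log; the exact permission in the GRANT is an assumption, not taken from the test source):
CREATE USER targetuser PASSWORD 'passwd';
-- Emits an ESchemeOpModifyACL diff comparable to the one logged for ordinaryuser
-- (CONNECT is an assumed permission name; YDB also accepts dotted forms):
GRANT CONNECT ON `/dc-1` TO ordinaryuser;
-- Issued by a non-administrator, user management is rejected, which is the
-- "Access denied for ordinaryuser, attempt to manage user" error seen below:
DROP USER targetuser;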
2025-04-04T17:15:56.631218Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:56.631268Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:56.631556Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:56.631660Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:56.631719Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-04-04T17:15:56.631869Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 HANDLE EvClientConnected 2025-04-04T17:15:56.632446Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:15:56.635396Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-04-04T17:15:56.635443Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900336:2600] txid# 281474976715662 SEND to# [59:7489507949853900335:2348] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-04-04T17:15:56.678538Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] Handle TEvProposeTransaction 2025-04-04T17:15:56.678567Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] TxId# 281474976715663 ProcessProposeTransaction 2025-04-04T17:15:56.678624Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7489507949853900370:2619] 2025-04-04T17:15:56.680185Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60548" 2025-04-04T17:15:56.680226Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:56.680240Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:15:56.680275Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:56.680491Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:56.680554Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:15:56.680590Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-04-04T17:15:56.680685Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 HANDLE EvClientConnected 2025-04-04T17:15:56.688614Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-04-04T17:15:56.688673Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900370:2619] txid# 281474976715663 SEND to# [59:7489507949853900369:2350] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-04-04T17:15:56.758447Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] Handle TEvProposeTransaction 2025-04-04T17:15:56.758495Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] TxId# 281474976715664 ProcessProposeTransaction 2025-04-04T17:15:56.758551Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7489507949853900402:2633] 2025-04-04T17:15:56.761126Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NiwiaWF0IjoxNzQzNzg2OTU2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.CGc7JvDZqmqMVdN0DWOrWPTTqTg0g20iYe7yxTH0hk-gLgWumKNp1Hv3pMHcaA71nmZ3QXfK8w68whhFy77nyib1IU-AXcDveHeSZmUrmAVY0sZuR96WYlYwO-FGuKUyKyUhLG0JGdqKFrReB3UM5Bhxgr2uH1EF3PP7B2zNMLwTETgEFQVdO8QHgEncOgVGJJfxzjg-dRNmM12yhfX7iSyik35ZXMeO6pe6DCzWzNLAlrO7jdKqRR--5unw21jiKpcpI6X82EbS0KXFIrfFN92NN6ax1UMIW37NOlhr-JWFInBkPTE4hIMW9qwz8KIbyRTnMm6mLRpf_DhyKdl2Dw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMDE1NiwiaWF0IjoxNzQzNzg2OTU2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60576" 2025-04-04T17:15:56.761217Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T17:15:56.761245Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T17:15:56.761409Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T17:15:56.761470Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T17:15:56.761520Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:15:56.761743Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:15:56.761786Z node 59 :TX_PROXY ERROR: Actor# [59:7489507949853900402:2633] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-04-04T17:15:56.761880Z node 59 :TX_PROXY ERROR: Actor# [59:7489507949853900402:2633] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T17:15:56.761914Z node 59 :TX_PROXY DEBUG: Actor# [59:7489507949853900402:2633] txid# 281474976715664 SEND to# [59:7489507949853900401:2362] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T17:15:56.762288Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZTg1Yjk5MTUtNTY1NWEyNWItNzk4MTY3MTctZmFhMjk1ZDI=, ActorId: [59:7489507949853900390:2362], ActorState: ExecuteState, TraceId: 01jr0wtny39yvkk2b0fhx6fmvw, Create QueryResponse for error on request, msg: 2025-04-04T17:15:56.762729Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] Handle TEvExecuteKqpTransaction 2025-04-04T17:15:56.762760Z node 59 :TX_PROXY DEBUG: actor# [59:7489507928379063044:2112] TxId# 281474976715665 ProcessProposeKqpTransaction >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 29913, MsgBus: 25227 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ab5/r3tmp/tmpNBdHi6/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29913, node 1 TClient is connected to server localhost:25227 TClient is connected to server localhost:25227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: 2025-04-04T17:15:39.620877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:15:39.621352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:15:39.621404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000770/r3tmp/tmpEZf1yK/pdisk_1.dat 2025-04-04T17:15:39.987611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:15:40.035070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:40.074564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:40.074720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:40.087467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:40.189027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:15:40.257039Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2585] 2025-04-04T17:15:40.257266Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:40.315361Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:40.315473Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:40.317057Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:15:40.317133Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:15:40.317191Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:15:40.317539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:40.317830Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:40.317896Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:712:2585] in generation 1 2025-04-04T17:15:40.319671Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2587] 2025-04-04T17:15:40.319845Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:40.329186Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:40.329552Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2589] 2025-04-04T17:15:40.329735Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:40.337585Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:40.338929Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:15:40.339006Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T17:15:40.339046Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:15:40.339322Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:40.340160Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:40.340217Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:737:2587] in generation 1 2025-04-04T17:15:40.340663Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:40.340722Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:15:40.341851Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-04T17:15:40.341902Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-04T17:15:40.341951Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-04T17:15:40.343830Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:15:40.343953Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:15:40.344036Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:738:2589] in generation 1 2025-04-04T17:15:40.357417Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:40.381211Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:15:40.381400Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:40.381522Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:742:2616] 2025-04-04T17:15:40.381570Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:15:40.381607Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:15:40.381643Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:40.381943Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:40.381973Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:15:40.382045Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:40.382099Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:743:2617] 2025-04-04T17:15:40.382120Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:15:40.382138Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:15:40.382158Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:15:40.382582Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:15:40.382636Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-04T17:15:40.382681Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:15:40.382749Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:744:2618] 2025-04-04T17:15:40.382769Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T17:15:40.382788Z node 1 :TX_DATASHARD INFO: 
Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-04T17:15:40.382806Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:15:40.382951Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:15:40.383051Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:15:40.383248Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:40.383301Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:40.383345Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:15:40.383381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:40.383431Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:15:40.383498Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:15:40.383660Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:677:2580], serverId# [1:704:2594], sessionId# [0:0:0] 2025-04-04T17:15:40.383703Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:15:40.383724Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:40.383745Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T17:15:40.383769Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:15:40.383807Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-04-04T17:15:40.383854Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-04-04T17:15:40.384311Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:40.384536Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:15:40.384618Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:15:40.385039Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:678:2581], serverId# [1:710:2599], sessionId# [0:0:0] 2025-04-04T17:15:40.385099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:15:40.385126Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:40.385149Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-04-04T17:15:40.385179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:15:40.385349Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:15:40.385505Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T17:15:40.385557Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T17:15:40.387577Z node 1 :TX_DATASHARD DEBUG: Discovered 
subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:15:40.387655Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:15:40.400904Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:40.401029Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:40.401191Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:15:40.401235Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T17:15:40.445232Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:679:2582], serverId# [1:761:2628], sessionId# [0:0:0] 2025-04-04T17:15:40.445408Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T17:15:40.445581Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 2 ... [2000:281474976715663] at 72075186224037888 for LoadAndWaitInRS 2025-04-04T17:15:58.191049Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:58.191219Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715663 2025-04-04T17:15:58.191276Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0} 2025-04-04T17:15:58.191336Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:15:58.191633Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:15:58.191668Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:15:58.191699Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715663] at 72075186224037890 for LoadAndWaitInRS 2025-04-04T17:15:58.191955Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:58.203355Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:58.203458Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [3:1101:2846], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:15:58.203558Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5} 2025-04-04T17:15:58.203619Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:58.203748Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-04-04T17:15:58.203801Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 
2025-04-04T17:15:58.203842Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1101:2846], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:15:58.203884Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2025-04-04T17:15:58.203909Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:15:58.203991Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 2 2025-04-04T17:15:58.204053Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715663 2025-04-04T17:15:58.204087Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 2 2025-04-04T17:15:58.204131Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1101:2846] Reply: txId# 281474976715663, status# OK, error# 2025-04-04T17:15:58.204461Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-04T17:15:58.204507Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-04-04T17:15:58.204883Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:15:58.204938Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:58.204979Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T17:15:58.205051Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:15:58.205156Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1096:2842], serverId# [3:1097:2843], sessionId# [0:0:0] 2025-04-04T17:15:58.206511Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:15:58.206968Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:15:58.207170Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:15:58.207239Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.207310Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for WaitForStreamClearance 2025-04-04T17:15:58.207573Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.207642Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:15:58.208274Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 1 2025-04-04T17:15:58.208536Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715666, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:58.208700Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715666, PendingAcks: 0 
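=== Note: DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows exercises the datashard conditional-erase path traced in these records (DistEraser, WaitForStreamClearance, ReadTableScan). In user terms this machinery is driven by a table TTL; a minimal sketch with a hypothetical table, not the test's actual schema:
CREATE TABLE `/Root/ttl_demo` (
    id Uint64,
    created_at Timestamp,
    PRIMARY KEY (id)
);
-- Rows whose created_at lies in the past become eligible for the background
-- conditional-erase scan/erase flow shown in the surrounding records:
ALTER TABLE `/Root/ttl_demo` SET (TTL = Interval("PT0S") ON created_at);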
2025-04-04T17:15:58.208748Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 0 2025-04-04T17:15:58.210865Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-04-04T17:15:58.210920Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037889 2025-04-04T17:15:58.211087Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:15:58.211123Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.211161Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for ReadTableScan 2025-04-04T17:15:58.211291Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:58.211344Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:15:58.211392Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:15:58.239527Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:15:58.239962Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:15:58.240183Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:58.240236Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.240296Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:15:58.240561Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.240633Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:58.241347Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 1 2025-04-04T17:15:58.241601Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715667, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:58.241758Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715667, PendingAcks: 0 2025-04-04T17:15:58.241809Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 0 2025-04-04T17:15:58.243375Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:15:58.243436Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037888 2025-04-04T17:15:58.243866Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:15:58.243914Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.243954Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for ReadTableScan 2025-04-04T17:15:58.244095Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:58.244156Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:15:58.244209Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:15:58.289246Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-04-04T17:15:58.289731Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-04-04T17:15:58.289982Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:15:58.290041Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.290101Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-04-04T17:15:58.290402Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.290500Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:15:58.291307Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-04-04T17:15:58.291573Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:15:58.291726Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-04-04T17:15:58.291787Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-04-04T17:15:58.293460Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-04T17:15:58.293514Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-04-04T17:15:58.293957Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:15:58.294004Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:15:58.294048Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for ReadTableScan 2025-04-04T17:15:58.294191Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:15:58.294271Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:15:58.294329Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-UseSink+Streaming [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+UseSink+Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 63735, MsgBus: 2142 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a83/r3tmp/tmpx4bkEy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63735, node 1 TClient is connected to server localhost:2142 TClient is connected to server localhost:2142 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15373, MsgBus: 25433 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a98/r3tmp/tmpHwKwYp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15373, node 1 TClient is connected to server localhost:25433 TClient is connected to server localhost:25433 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting+UseSink-Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 27682, MsgBus: 26850 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a91/r3tmp/tmpxH4HXI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27682, node 1 TClient is connected to server localhost:26850 TClient is connected to server localhost:26850 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::BanUnbanUser >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> TSchemeShardLoginTest::UserLogin >> TWebLoginService::AuditLogLoginSuccess >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false |89.9%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12610, MsgBus: 19634 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a82/r3tmp/tmpNviOmM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12610, node 1 TClient is connected to server localhost:19634 TClient is connected to server localhost:19634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |89.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-UseSink-Streaming [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-UseSink+Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 17443, MsgBus: 18606 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a76/r3tmp/tmpqLhccM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17443, node 1 TClient is connected to server localhost:18606 TClient is connected to server localhost:18606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::Select [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] >> GroupWriteTest::ByTableName [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertViaLegacyScripting-UseSink-Streaming [GOOD] Test command err: Trying to start YDB, gRPC: 3818, MsgBus: 7392 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a5d/r3tmp/tmpu5930R/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3818, node 1 TClient is connected to server localhost:7392 TClient is connected to server localhost:7392 WaitRootIsUp 'Root'... 
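=== Note: the KqpDataIntegrityTrails::Upsert* variants above appear to push the same blind-write shape through different execution paths (legacy scripting vs. query service, with and without the sink writers) and check the resulting integrity trails. A minimal sketch of the statement shape under test, against a hypothetical table:
UPSERT INTO `/Root/kv_demo` (key, value) VALUES
    (1u, "one"),
    (2u, "two");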
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> TSchemeShardLoginTest::BanUnbanUser [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::TestExternalLogin >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] Test command err: 2025-04-04T17:13:06.713989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507219467234844:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.714047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.770512Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507218912907796:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.770797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.981342Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:13:06.999415Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e5f/r3tmp/tmp3DTmiR/pdisk_1.dat 2025-04-04T17:13:07.343858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.343993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.344848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.344908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-04T17:13:07.348186Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:13:07.348306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:07.350676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14304, node 1 2025-04-04T17:13:07.432757Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:13:07.435658Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:13:07.490836Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:07.509281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e5f/r3tmp/yandexGlMm41.tmp 2025-04-04T17:13:07.509307Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e5f/r3tmp/yandexGlMm41.tmp 2025-04-04T17:13:07.509457Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e5f/r3tmp/yandexGlMm41.tmp 2025-04-04T17:13:07.509606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:07.579711Z INFO: TTestServer started on Port 28766 GrpcPort 14304 TClient is connected to server localhost:28766 PQClient connected to localhost:14304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:08.183275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:08.252153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:13:10.563007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507236647104887:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.563132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.563456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507236647104913:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.567705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:13:10.593406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507236647104916:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:13:10.833757Z node 1 :TX_PROXY ERROR: Actor# [1:7489507236647105010:2760] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:10.860698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.866853Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507236647105029:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:10.867081Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2QzY2E4MTQtMjhlNGE3ZjMtNGIxM2VlY2QtNzUxYTNjM2I=, ActorId: [1:7489507236647104884:2337], ActorState: ExecuteState, TraceId: 01jr0wnkmadafqv4ykwpw3xmh0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:10.868504Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:10.939600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:11.073089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:13:11.280224Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0wnm6z7rtsmwqqxjsggmr5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBjZDE4NGEtNTY4ODQ4NDUtMzUwOGFlODMtODY4OGU3ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489507240942072765:3100] 2025-04-04T17:13:11.718420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507219467234844:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:11.718558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:11.753567Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507218912907796:2187];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:11.753656Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:13:17.089701Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzU0MDFlYWYtNjc3MjcyMDctYjVjZjRmYjYtZTJiNWJjNmQ=, ActorId: [1:7489507262416909641:2452], ActorState: unknown state, TraceId: 01jr0wnssv98fhjfqrb8yes3sa, Session actor destroyed 2025-04-04T17:13:17.091534Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489507219467234879:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:13:17.091801Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489507219467234879:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-04T17:13:17.092441Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489507219467234879:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489507223762202664:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743786788254 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:13:17.092552Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489507219467234879:2128], cacheItem# { Subscriber: { Subscriber: [1:7489507223762202664:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743786788254 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId ... 
" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-04T17:15:58.525946Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:15:58.526093Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976710676 2025-04-04T17:15:58.526118Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState EXECUTED 2025-04-04T17:15:58.526143Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTING to EXECUTED 2025-04-04T17:15:58.526674Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] save tx TxId: 281474976710676 State: EXECUTED MinStep: 1743786958291 MaxStep: 18446744073709551615 Step: 1743786958529 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 1 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7489507907242481096 RawX2: 107374184607 } Partitions { Partition { PartitionId: 1 } } 2025-04-04T17:15:58.527044Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest 
(WRITE_TX_COOKIE) 2025-04-04T17:15:58.531618Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:15:58.531655Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state EXECUTED 2025-04-04T17:15:58.531676Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, State EXECUTED 2025-04-04T17:15:58.531701Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-04-04T17:15:58.531721Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:15:58.531742Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-04-04T17:15:58.531766Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:15:58.531797Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-04T17:15:58.531807Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:15:58.531823Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-04T17:15:58.531841Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976710676 to the list for deletion 2025-04-04T17:15:58.531868Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, NewState DELETING 2025-04-04T17:15:58.531903Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976710676 2025-04-04T17:15:58.531965Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:15:58.539355Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:15:58.539394Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-04-04T17:15:58.539413Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, State DELETING 2025-04-04T17:15:58.539439Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976710676 2025-04-04T17:15:58.540210Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:15:58.540248Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-04T17:15:58.540265Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTED 2025-04-04T17:15:58.540291Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-04-04T17:15:58.540312Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:15:58.540334Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-04-04T17:15:58.540350Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:15:58.540379Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-04T17:15:58.540391Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:15:58.540407Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-04-04T17:15:58.540425Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710676 to the list for deletion 2025-04-04T17:15:58.540448Z node 
26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState DELETING 2025-04-04T17:15:58.540474Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710676 2025-04-04T17:15:58.540533Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:15:58.547559Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:15:58.547596Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-04T17:15:58.547616Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State DELETING 2025-04-04T17:15:58.547638Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710676 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1743786958529 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 10... (TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 2025-04-04T17:15:59.177949Z node 25 :KQP_EXECUTER ERROR: ActorId: 
[25:7489507963077058004:2491] TxId: 281474976710678. Ctx: { TraceId: 01jr0wtra7enxxb7rz22hbkw1v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=YTA4MDIzYS1kMDU5OGE2YS0zMzc2ZjY2Ny0zNmJmZjIwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-04-04T17:15:59.185512Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7489507963077058008:2491], TxId: 281474976710678, task: 2. Ctx: { TraceId : 01jr0wtra7enxxb7rz22hbkw1v. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=25&id=YTA4MDIzYS1kMDU5OGE2YS0zMzc2ZjY2Ny0zNmJmZjIwZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [25:7489507963077058004:2491], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 13401713531549665116 2025-04-04T17:15:31.426090Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-04T17:15:31.456485Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-04T17:15:31.456537Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-04T17:15:31.458889Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-04T17:15:31.472487Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:31.475045Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-04T17:15:56.205907Z 7 00h01m24.010512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:6:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 2361 2025-04-04T17:16:01.999845Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:16:01.999951Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:16:02.000011Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:16:02.000057Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:16:02.063157Z 1 00h01m30.010512s 
:BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-04-04T17:16:02.063284Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23016, MsgBus: 29494 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a4f/r3tmp/tmpQCQvWl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23016, node 1 TClient is connected to server localhost:29494 TClient is connected to server localhost:29494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 9714, MsgBus: 5153 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a51/r3tmp/tmpDWmqdt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9714, node 1 TClient is connected to server localhost:5153 TClient is connected to server localhost:5153 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 5115, MsgBus: 26964 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a93/r3tmp/tmprv0zqe/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5115, node 1 TClient is connected to server localhost:26964 TClient is connected to server localhost:26964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4417, MsgBus: 11624 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a49/r3tmp/tmpRXX4ma/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4417, node 1 TClient is connected to server localhost:11624 TClient is connected to server localhost:11624 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> DataShardReadIterator::ShouldHandleReadAck >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 28896, MsgBus: 6919 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a89/r3tmp/tmpBUEMEK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28896, node 1 TClient is connected to server localhost:6919 TClient is connected to server localhost:6919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
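The PERSQUEUE traces above walk TxId 281474976710676 through a fixed lifecycle on each tablet: the tx reaches EXECUTED, read-set acks are sent, it moves to WAIT_RS_ACKS, and once the "PredicateAcks" count is complete and all supportive partitions are deleted it is queued for DELETING and its key is erased. A minimal sketch of that progression, using simplified, hypothetical names rather than the actual YDB types:

#include <cstdint>
#include <iostream>

// Simplified tx states, mirroring the "NewState ..." transitions in the log.
enum class ETxState { Executed, WaitRsAcks, Deleting };

struct TTxRecord {
    uint64_t TxId;
    ETxState State = ETxState::Executed;
    size_t ExpectedAcks = 0;                  // "PredicateAcks: 0/0" -> none expected
    size_t ReceivedAcks = 0;
    bool SupportivePartitionsDeleted = true;  // "AllSupportivePartitionsHaveBeenDeleted 1"
};

// One step of the "Try execute txs with state ..." loop seen in the trace.
bool TryAdvance(TTxRecord& tx) {
    switch (tx.State) {
    case ETxState::Executed:
        // After "SendEvReadSetAckToSenders" the tx parks until acks are counted.
        tx.State = ETxState::WaitRsAcks;
        return true;
    case ETxState::WaitRsAcks:
        if (tx.ReceivedAcks == tx.ExpectedAcks && tx.SupportivePartitionsDeleted) {
            tx.State = ETxState::Deleting;  // "add ... TxId ... to the list for deletion"
            return true;
        }
        return false;
    case ETxState::Deleting:
        return false;  // "delete key for TxId" / "delete TxId" finish the lifecycle
    }
    return false;
}

int main() {
    TTxRecord tx{281474976710676ULL};
    while (TryAdvance(tx)) {
    }
    std::cout << "TxId " << tx.TxId << " ready for deletion\n";
}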
>> DataShardReadIterator::ShouldReadKeyCellVec >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> DataShardReadIteratorBatchMode::RangeFull >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec >> DataShardReadIterator::ShouldReceiveErrorAfterSplit >> DataShardReadIteratorSysTables::ShouldRead >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive >> DataShardReadIterator::ShouldReadRangeCellVec >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> DataShardReadIterator::ShouldReverseReadMultipleKeys >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLogout >> TPersQueueTest::BadSids [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 24614, MsgBus: 26093 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a3b/r3tmp/tmpwt9gHw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24614, node 1 TClient is connected to server localhost:26093 TClient is connected to server localhost:26093 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TWebLoginService::AuditLogLogout [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-04-04T17:16:01.513543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.513669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.513712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.513749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.516543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.603889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.603951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.619387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.619601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.619732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.639628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.639904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.640525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.642569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.647564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.648978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:16:01.649034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.649205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.649259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.649309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.649433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.661734Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.781062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.781288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.781494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.781711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.781759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.784150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.784339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.784560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.784658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.784712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.784746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.787019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.787066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.787108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.788914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.788977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.789018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.789069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.792913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.794604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.794756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.795676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.796099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.796167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.796336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.796419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.798100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.798144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.798344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.798388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.798742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.798807Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.798897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.798929Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.798966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.798997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.799030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.799068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.799114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.799142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.799195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.799237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.799272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.806503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.806660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.806709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:03.830323Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:03.830435Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:03.830479Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:03.830518Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:03.830679Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:03.838357Z node 4 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [4:125:2151] sender: [4:239:2058] recipient: [4:15:2062] 2025-04-04T17:16:03.847235Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:03.847400Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:03.847550Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:03.847699Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:03.847743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:03.849322Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:03.849454Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:03.849632Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:03.849678Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:03.849717Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:03.849747Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:03.851181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:03.851235Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:03.851267Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:03.853532Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
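The FLAT_TX_SCHEMESHARD records around this point repeat one suboperation progression for txId 1: TCreateParts (state 2 -> 3), NSubDomainState::TConfigureParts (3 -> 128), TPropose plus the coordinator's TEvOperationPlan (128 -> 240), then TDone publishes the path to the scheme board and waits for TEvUpdateAck. A compact sketch of that numeric progression — the state codes come from the log itself, everything else is illustrative:

#include <cstdio>

// State codes as they appear in "Change state for txid 1:0 X -> Y".
enum ESubOpState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

ESubOpState Next(ESubOpState s) {
    switch (s) {
    case CreateParts:    return ConfigureParts; // "no shards to create, do next state"
    case ConfigureParts: return Propose;        // parts configured, ready to propose
    case Propose:        return Done;           // coordinator's TEvOperationPlan arrives
    case Done:           return Done;           // publish to scheme board, then RemoveTx
    }
    return Done;
}

int main() {
    for (ESubOpState s = CreateParts; s != Done;) {
        ESubOpState n = Next(s);
        std::printf("Change state for txid 1:0 %d -> %d\n", s, n);
        s = n;
    }
    return 0;
}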
2025-04-04T17:16:03.853576Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:03.853628Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:03.853679Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:03.853808Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:03.855188Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:03.855403Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:03.856164Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:03.856272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 17179871339 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:03.856314Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:03.856527Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:03.856580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:03.856740Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:03.856808Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:03.859143Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:03.859196Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:03.859361Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:03.859408Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:03.859480Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:03.859523Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:03.859623Z node 4 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:03.859666Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:03.859709Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:03.859739Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:03.859777Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:03.859814Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:03.859855Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:03.859888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:03.859953Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:03.859992Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:03.860024Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:03.860938Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:03.861025Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:03.861064Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:16:03.861101Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:16:03.861141Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:03.861224Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:16:03.867406Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:16:03.867862Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:03.868972Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Bootstrap 2025-04-04T17:16:03.887780Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Become StateWork (SchemeCache [4:277:2268]) 2025-04-04T17:16:03.888053Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-04T17:16:03.888382Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:22862, port: 22862 2025-04-04T17:16:03.888487Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-04-04T17:16:03.891982Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:22862. 
Invalid credentials 2025-04-04T17:16:03.892435Z node 4 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2025-04-04T17:16:03.893102Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:16:03.895223Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-04-04T17:16:03.849415Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T17:16:03.892262Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:22862. Invalid credentials, login_user=user1@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-04-04T17:16:03.892262Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:22862. Invalid credentials, login_user=user1@ldap, sanitized_token={none} >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] |90.0%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-04-04T17:16:01.513654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.513789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.513842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.513878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-04T17:16:01.515345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.516599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.603950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.604011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.616036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.616263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.616426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.626520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.626825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.627510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.628342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.637220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.641858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.641903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.642026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.649495Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:238:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.771496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.771695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.771849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.772602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.772667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.775886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.775991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.776023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.777574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.777622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.777656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.779177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.779230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.779274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.779325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.784010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.785629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.785806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.786790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786937Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.788240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-04T17:16:01.788311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.788492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.788562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.790326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.790366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.790533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.790574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.790903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.790958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.791041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.791072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.791134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.791198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.791270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.791326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.791358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.791386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.792974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.793082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.793118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:04.197361Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:16:04.197393Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:16:04.197430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.197513Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:16:04.200308Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:16:04.200871Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:16:04.201465Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Bootstrap 2025-04-04T17:16:04.219842Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] Become StateWork (SchemeCache [4:275:2266]) 2025-04-04T17:16:04.223077Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:04.228378Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:04.228535Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:16:04.228579Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.228626Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:16:04.228656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.228720Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:04.228768Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:16:04.228821Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.228860Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:16:04.228898Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-04T17:16:04.228934Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-04T17:16:04.230164Z node 4 :TX_PROXY DEBUG: actor# [4:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:16:04.232884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.232980Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, 
path: /MyRoot 2025-04-04T17:16:04.233143Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.233177Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.233321Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.233360Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:16:04.233885Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:16:04.234009Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:16:04.234090Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:16:04.234134Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:16:04.234181Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T17:16:04.234227Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.234357Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:16:04.236012Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-04T17:16:04.236442Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-04T17:16:04.238424Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.238481Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-04T17:16:04.260684Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.52AoXEyrSsxDb0Ot6cEubFP_aGW9nupK3gJKUV5g9tlX7WIyMUJe13gdLQkoMF0FZoOe6yWz58-uiYKS1-aoXCpyKhorJLwyQd4XFa3kl5oX6oJyZm73oii35Xvsnw6VnLXLsEZkd87w-VkAM2qoSVgXcuxcLjxbR7bNcAu_xQBE8kTgXbPktpfrpXmqCdJ0oMbqXjFCSZR_T4nQ99gifj_cQtETvnWyWk5G7_wvtJWrGph1K9HuPjmJD4zsgJhulv3nmLL-a74eHwN8pvitQtxAdNIPfcTxaIaowLDF1CRFHyKBnnkj_IkslOONHSLOU3SSGpUrxw9Y2Q68bA70Sg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-04T17:16:04.261077Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.261130Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.261316Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.261368Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-04T17:16:04.262646Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-04-04T17:16:04.263409Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:04.263653Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 269us result status StatusSuccess 2025-04-04T17:16:04.264130Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8pYf12XI/FMY8xIu22SG\nMwKspUKGV7nclEPvNl5BgJu2SvFOPR0ozVM2GwMj7XN4o4deT1b8H4d/TfpQptIt\noxmkk2IcQJRfh7pLYVTUraD6qUadz0fQzwe38OXA3K7N34vswJ0Tb7oPaMMvkH0V\nyyDGFA7f0FguBbI0HZVDUEWYX4tYgVw82GF4+A7d5xnEYsHe92g7ufsqqbNur8FO\nNYJLzD5cZ0BzA7qzZcF/j+Ytiv4bczwVjJdihUyAI6uFV7TJcfS3KzCjv0NPAije\nfCDvxTItZ0nd0iTEfpm3gQ63L0H5xK+UFPRmsZjva1vmGSv85TWSOHTFvL4V7GOP\noQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743873364250 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.264593Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-04-04T17:16:04.264657Z node 4 :HTTP ERROR: Logout: No ydb_session_id cookie 2025-04-04T17:16:04.266122Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-04-04T17:16:04.276144Z node 4 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2025-04-04T17:16:04.276321Z node 4 :HTTP ERROR: Logout: Token is not in correct format 2025-04-04T17:16:04.276879Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2025-04-04T17:16:04.186439Z: component=schemeshard, 
tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T17:16:04.228243Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-04-04T17:16:04.260888Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2025-04-04T17:16:04.278225Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2025-04-04T17:16:04.278225Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:01.513614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.513762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.513834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.513873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.516808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.596833Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.597093Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.613204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.613473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.613636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.623088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.623525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.626500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.627426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.635045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.643429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.643503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.643695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.643761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.643825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.643953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.650331Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.791529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.791755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.791960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.792194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.792248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.794464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.794618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-04-04T17:16:01.794783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.794863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.794960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.795013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.796832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.796901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.796952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.798516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.798559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.798619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.798670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.802004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.804133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.804302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.805285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.805405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.805463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.805728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.805793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.805972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.806058Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.808248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.808294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.808509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.808557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.808904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.808954Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.809076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.809134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.809176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.809208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.809243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.809295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.809340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.809386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.809448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.809482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.809513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.811281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.811381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.811420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:04.573146Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.573268Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.573306Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-04T17:16:04.573346Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-04T17:16:04.573845Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.573928Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.573955Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:16:04.573987Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:16:04.574018Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:16:04.574360Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.574407Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.574425Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:16:04.574441Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-04T17:16:04.574460Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:04.574502Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-04T17:16:04.576155Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:16:04.576423Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-04-04T17:16:04.576748Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
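
Aside: the TTxLogin Complete record earlier in this output reports both a full JWT ("Token: ...") and a masked form ("SanitizedToken: ..."), where the header and payload segments are kept and the signature is replaced by "**". Below is a minimal sketch of that masking rule as inferred from this log alone; the fallback branch for non-JWT-shaped input is an assumption, not observed behavior:

def sanitize_token(token: str) -> str:
    """Keep the JWT header and payload segments, mask the signature with '**'."""
    parts = token.split(".")
    if len(parts) != 3:
        # Assumption: tokens that are not JWT-shaped get masked entirely.
        return "**"
    header, payload, _signature = parts
    return f"{header}.{payload}.**"

assert sanitize_token("aaa.bbb.ccc") == "aaa.bbb.**"
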
2025-04-04T17:16:04.576864Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 131us result status StatusSuccess 2025-04-04T17:16:04.577070Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-04T17:16:04.579106Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:04.579249Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.579277Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.579309Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.579331Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:04.579490Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:04.579563Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:16:04.579593Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.579629Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:16:04.579660Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.579709Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.579751Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-04T17:16:04.579810Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.579855Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T17:16:04.579900Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-04T17:16:04.579931Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-04T17:16:04.581465Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.581546Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-04-04T17:16:04.581676Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.581715Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.581835Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.581870Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-04T17:16:04.582296Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:16:04.582370Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:16:04.582401Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:16:04.582435Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T17:16:04.582479Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:04.582549Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-04T17:16:04.584328Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-04T17:16:04.584913Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:04.585077Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 188us result status StatusSuccess 2025-04-04T17:16:04.585459Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::BadSids [GOOD] Test command err: 2025-04-04T17:13:06.780854Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:13:06.780960Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:13:07.198628Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:13:07.198706Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST === Server->StartServer(false); 2025-04-04T17:13:07.650952Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507222365786391:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:07.651094Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:07.779339Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489507224645336187:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:08.006509Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 
2025-04-04T17:13:08.033940Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:08.034209Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e50/r3tmp/tmpB2dqjq/pdisk_1.dat 2025-04-04T17:13:08.480513Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:08.480645Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:08.482297Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:08.482391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:08.488233Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:08.488360Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T17:13:08.488495Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:08.491420Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30493, node 3 2025-04-04T17:13:08.745197Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e50/r3tmp/yandex5b5mrd.tmp 2025-04-04T17:13:08.745225Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e50/r3tmp/yandex5b5mrd.tmp 2025-04-04T17:13:08.745385Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e50/r3tmp/yandex5b5mrd.tmp 2025-04-04T17:13:08.745518Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:08.782506Z INFO: TTestServer started on Port 24907 GrpcPort 30493 TClient is connected to server localhost:24907 PQClient connected to localhost:30493 === TenantModeEnabled() = 0 === Init PQ - start server on port 30493 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:13:09.188188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:13:09.188422Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.188624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:13:09.188852Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:13:09.188889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.192021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:09.198422Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:13:09.198592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.198660Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:13:09.198675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:13:09.198685Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-04T17:13:09.206864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.206915Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:13:09.206932Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:13:09.207885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:09.207902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:13:09.207921Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:09.209056Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.209079Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:09.209100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:09.209124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:09.232580Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:13:09.234387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:13:09.234521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:13:09.236956Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786789283, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:09.237061Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786789283 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:13:09.237088Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:09.237329Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:13:09.237356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:09.237616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:09.237671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:13:09.239373Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:13:09.239407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:13:09.239539Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:13:09.239557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7489507226660754204:2400], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:13:09.239590Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17 ... 07972486401915:2548] (SourceId=base64:aa, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T17:16:01.625755Z node 22 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][rt3.dc1--topic1] pipe [21:7489507972486401934:2548] connected; active server actors: 1 2025-04-04T17:16:01.626397Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7489507972486401915:2548] (SourceId=base64:aa, PreferedPartition=(NULL)) Received partition 7 from PQRB for SourceId=base64:aa 2025-04-04T17:16:01.626442Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7489507972486401915:2548] (SourceId=base64:aa, PreferedPartition=(NULL)) Update the table 2025-04-04T17:16:01.627812Z node 22 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][rt3.dc1--topic1] pipe [21:7489507972486401934:2548] disconnected; active server actors: 1 2025-04-04T17:16:01.627856Z node 22 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][rt3.dc1--topic1] pipe [21:7489507972486401934:2548] disconnected no session 2025-04-04T17:16:01.636117Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7489507972486401915:2548] (SourceId=base64:aa, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T17:16:01.636177Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7489507972486401915:2548] (SourceId=base64:aa, PreferedPartition=(NULL)) ReplyResult: Partition=7, SeqNo=(NULL) 2025-04-04T17:16:01.636202Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7489507972486401915:2548] (SourceId=base64:aa, PreferedPartition=(NULL)) Start idle 2025-04-04T17:16:01.636250Z node 21 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 5 sessionId: partition: 7 expectedGeneration: (NULL) 2025-04-04T17:16:01.636806Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=7) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 21, Generation: 1 2025-04-04T17:16:01.636863Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [21:7489507972486401943:2548], now have 1 active actors on pipe 2025-04-04T17:16:01.637038Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:16:01.637067Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'rt3.dc1--topic1' partition 7 2025-04-04T17:16:01.637212Z node 21 :PERSQUEUE INFO: new Cookie base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0 generated for partition 7 topic 'rt3.dc1--topic1' owner base64:aa 2025-04-04T17:16:01.637380Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 7 2025-04-04T17:16:01.637480Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-04T17:16:01.637931Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:16:01.637963Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'rt3.dc1--topic1' partition 7 2025-04-04T17:16:01.638093Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-04T17:16:01.638282Z node 21 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0 2025-04-04T17:16:01.639115Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743786961639 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:16:01.639267Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Write session established. Init response: session_id: "base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0" topic: "topic1" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:16:01.639907Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write 1 messages with Id from 1 to 1 2025-04-04T17:16:01.640043Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session: close. Timeout = 18446744073709551 ms 2025-04-04T17:16:01.640522Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session: try to update token 2025-04-04T17:16:01.640596Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-04T17:16:01.641212Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:16:01.641545Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T17:16:01.641760Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:16:01.641790Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'rt3.dc1--topic1' partition 7 2025-04-04T17:16:01.641877Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 7 messageNo: 0 requestId: cookie: 1 2025-04-04T17:16:01.641934Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:16:01.642090Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:16:01.642108Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'rt3.dc1--topic1' partition 7 2025-04-04T17:16:01.642613Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: rt3.dc1--topic1 partition: 7 SourceId: '\0base64:aa' SeqNo: 1 partNo : 0 messageNo: 1 size 92 offset: -1 2025-04-04T17:16:01.642895Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] Topic 'rt3.dc1--topic1' partition 7 part blob processing 
sourceId '\0base64:aa' seqNo 1 partNo 0 2025-04-04T17:16:01.730723Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] Topic 'rt3.dc1--topic1' partition 7 part blob complete sourceId '\0base64:aa' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 169 count 1 nextOffset 1 batches 1 2025-04-04T17:16:01.731576Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--topic1' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 157 WTime 1743786961731 2025-04-04T17:16:01.731774Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:16:01.731894Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 7 offset 0 partNo 0 count 1 size 157 2025-04-04T17:16:01.734340Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 7 offset 0 count 1 size 157 actorID [21:7489507963896466880:2477] 2025-04-04T17:16:01.734443Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:16:01.734489Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] TPartition::ReplyWrite. Partition: 7 2025-04-04T17:16:01.735363Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 3 queued_in_partition_duration_ms: 88 } 2025-04-04T17:16:01.735426Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session: acknoledged message 1 2025-04-04T17:16:01.734549Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] Answering for message sourceid: '\0base64:aa', Topic: 'rt3.dc1--topic1', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T17:16:01.734574Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 7 messageNo: 1 requestId: cookie: 1 2025-04-04T17:16:01.734692Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:16:01.734751Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] Topic 'rt3.dc1--topic1' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:16:01.734775Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] Topic 'rt3.dc1--topic1' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-04T17:16:01.734879Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--topic1' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-04T17:16:01.734908Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-04T17:16:01.734947Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-04T17:16:01.734961Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:16:01.735024Z node 21 :PERSQUEUE DEBUG: Topic 'rt3.dc1--topic1' partition 7 user user readTimeStamp done, result 1743786961642 queuesize 0 startOffset 0 2025-04-04T17:16:01.735139Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037893' partition 7 offset 0 partno 0 count 1 parts 0 size 157 2025-04-04T17:16:01.740208Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session will now close 2025-04-04T17:16:01.740286Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session: aborting 2025-04-04T17:16:01.740916Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:16:01.740980Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0] Write session: destroy 2025-04-04T17:16:01.742138Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0 grpc read done: success: 0 data: 2025-04-04T17:16:01.742175Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0 grpc read failed 2025-04-04T17:16:01.742259Z node 21 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0 2025-04-04T17:16:01.742293Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|93975b0d-effa114a-7ae1e86c-a0ada697_0 is DEAD 2025-04-04T17:16:01.742785Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=7) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:16:01.743057Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [21:7489507972486401943:2548] destroyed 2025-04-04T17:16:01.743100Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 7, State: StateIdle] TPartition::DropOwner. 
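The trace above walks a PQ write session end to end: the partition chooser asks the read balancer (PQRB) for a partition, persists the SourceId mapping, the proxy registers an owner cookie on the tablet, the message is written and acknowledged, and the session is torn down when the gRPC read fails. A minimal sketch of that lifecycle, assuming a simplified model reconstructed purely from the log records above (the class, method names, and the shortened cookie value are illustrative assumptions, not YDB's actual C++ implementation):

from dataclasses import dataclass, field
from typing import Optional

@dataclass
class WriteSession:
    source_id: str
    partition: Optional[int] = None
    owner_cookie: Optional[str] = None
    next_seq_no: int = 1                       # trace starts at SeqNo 1
    acked: list = field(default_factory=list)

    def choose_partition(self, balancer_choice: int) -> None:
        # Mirrors TPartitionChooser: PQRB returns a partition for the
        # SourceId, then the chooser persists it ("Update the table").
        self.partition = balancer_choice

    def register_owner(self, cookie: str) -> None:
        # Mirrors "new Cookie ... generated for partition N ... owner <id>".
        self.owner_cookie = cookie

    def write(self, payload: bytes) -> int:
        # Each message takes the next sequence number; the tablet replies
        # with the assigned offset once the blob is persisted and cached.
        assert self.partition is not None and self.owner_cookie is not None
        seq_no = self.next_seq_no
        self.next_seq_no += 1
        return seq_no

    def ack(self, seq_no: int, offset: int) -> None:
        self.acked.append((seq_no, offset))

session = WriteSession(source_id="base64:aa")
session.choose_partition(7)                    # PQRB picked partition 7 above
session.register_owner("base64:aa|93975b0d-...")  # hypothetical cookie value
seq = session.write(b"payload")
session.ack(seq, offset=0)                     # trace: SeqNo 1 stored at Offset 0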
2025-04-04T17:16:02.644644Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:16:02.644680Z node 21 :IMPORT WARN: Table profiles were not loaded
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:01.513561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.513700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.513751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.513798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.516518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.603287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.603355Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.615723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.615958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.616122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.628940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.629465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.630426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.631361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.638046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641658Z node 1 :FLAT_TX_SCHEMESHARD INFO:
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.641921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.641969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.642094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.648687Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.771221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.771452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.771623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.772591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.772661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.775856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.776027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.776081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.777855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.777919Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.777963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.779649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.779691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
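The "Change state for txid" lines in this trace show the fixed progression a suboperation moves through: 2 -> 3 -> 128 -> 240, matching the TCreateParts, TConfigureParts, TPropose, and TDone phases named alongside them. A tiny sketch of that state table, assuming only what the log records themselves show (the numeric codes and ordering are taken verbatim from the trace; this is not YDB source code):

# Suboperation state progression as observed in the trace above.
TRANSITIONS = {
    2: 3,      # TCreateParts: "no shards to create, do next state"
    3: 128,    # NSubDomainState::TConfigureParts -> TPropose
    128: 240,  # TPropose: coordinator planned the step -> TDone
}

def advance(state: int) -> int:
    """Return the next observed state for a suboperation."""
    return TRANSITIONS[state]

state = 2
while state != 240:
    nxt = advance(state)
    print(f"Change state {state} -> {nxt}")
    state = nxt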
2025-04-04T17:16:01.779753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.779810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.789547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.791427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.791598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.792589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.792711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.792759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.793036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.793096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.793313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.793400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.795425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.795645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.796051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.796097Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.796214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.796251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-04-04T17:16:01.796304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.796336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.796372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.796414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.796453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.796500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.796577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.796620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.796652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.798453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.798567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.798618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:04.861120Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:16:04.861163Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:16:04.861211Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:16:04.861249Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:16:04.861316Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:16:04.861417Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:16:04.861468Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-04-04T17:16:04.861517Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:16:04.861559Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:16:04.861601Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:16:04.861642Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-04-04T17:16:04.861680Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 2], 7 
2025-04-04T17:16:04.861714Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-04-04T17:16:04.864177Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusSuccess TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.864358Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Dir1/DirSub1, set owner:user2 2025-04-04T17:16:04.864533Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.864579Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:16:04.864704Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:04.864802Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.864848Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-04T17:16:04.864896Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-04T17:16:04.865445Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.865544Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.865584Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:16:04.865631Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-04-04T17:16:04.865678Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:16:04.866054Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.866120Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.866147Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:16:04.866178Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-04-04T17:16:04.866204Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
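Just above, tx 105 declares two publications (LocalPathId 2 at version 7, LocalPathId 3 at version 5) and the in-flight count drops from 2 to 1 as each TEvUpdateAck arrives, after which the schemeshard logs "Publication complete, notify & remove". A sketch of that bookkeeping, assuming a simplified model built only from these log lines (class and method names are illustrative, not YDB's):

class PublicationTracker:
    def __init__(self):
        self.pending = {}  # tx_id -> set of (path_id, version) awaiting ack

    def publish(self, tx_id, details):
        # e.g. details = {(2, 7), (3, 5)} for tx 105 in the trace above
        self.pending[tx_id] = set(details)

    def on_update_ack(self, tx_id, path_id, version) -> bool:
        """Return True once every publication for tx_id is acked."""
        self.pending[tx_id].discard((path_id, version))
        if not self.pending[tx_id]:   # "Publication complete, notify & remove"
            del self.pending[tx_id]
            return True
        return False                  # "Publication in-flight, count: N"

t = PublicationTracker()
t.publish(105, {(2, 7), (3, 5)})
assert t.on_update_ack(105, 3, 5) is False   # count drops 2 -> 1
assert t.on_update_ack(105, 2, 7) is True    # complete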
2025-04-04T17:16:04.866282Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-04T17:16:04.868434Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:16:04.868845Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-04-04T17:16:04.871556Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "user1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:04.871950Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:04.872056Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:16:04.872095Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.872141Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:16:04.872186Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.872247Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.872305Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-04T17:16:04.872348Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.872387Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T17:16:04.872429Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-04T17:16:04.872471Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-04-04T17:16:04.874413Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.874509Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2025-04-04T17:16:04.874698Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.874747Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.874913Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.874961Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-04T17:16:04.875441Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 
2025-04-04T17:16:04.875542Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:16:04.875584Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:16:04.875630Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-04T17:16:04.875675Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:04.875759Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-04T17:16:04.877760Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-04T17:16:04.878261Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:04.878431Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 213us result status StatusSuccess 2025-04-04T17:16:04.878755Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user2" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.879222Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.879319Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient:
[1:107:2139] 2025-04-04T17:16:01.513941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.514092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.514146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.514190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.518457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.599341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.599407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.615796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.616048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.616211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.630127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.630420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.631073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.635793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.666090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.667576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.667645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.667847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.667904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.667957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.668085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
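The schemeshard bootstrap records above use a consistent "Key# value" layout (e.g. "BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, ..."). When triaging these logs it can help to pull those records into structured form; a sketch, assuming only the field layout visible in the NOTICE lines above (other record types may differ):

import re

# Matches "Key# value" pairs; keys may contain spaces ("compact single parted").
CONF_RE = re.compile(r"([A-Za-z ]+)#\s*([^,]+)")

def parse_configured(line: str) -> dict:
    """Parse a '<Queue> configured: Key# value, ...' record into a dict."""
    _, _, rest = line.partition(" configured: ")
    return {k.strip(): v.strip() for k, v in CONF_RE.findall(rest)}

rec = ("BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, "
       "WakeupInterval# 1.000000s, InflightLimit# 10")
print(parse_configured(rec))
# {'Timeout': '15.000000s', 'Rate': '0', 'WakeupInterval': '1.000000s', 'InflightLimit': '10'}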
2025-04-04T17:16:01.676054Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.826526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.826777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.826968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.827213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.827269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.831296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.831436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.831618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.831687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.831724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.831772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.833587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.833641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.833679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.835254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.835295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.835356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.835409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.845098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.847027Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.847219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.848253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.848375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.848419Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.848715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.848775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.848982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.849084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.851070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.851132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.851311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.851370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.851716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.851766Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.851876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.851912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.851956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.851988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.852033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.852076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.852113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.852157Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.852224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.852265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.852297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.854135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.854493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.854544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tablet# 72057594046678944 2025-04-04T17:16:04.738557Z node 5 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:04.738622Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:04.738793Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.738865Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:04.740352Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.740402Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.740613Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.740659Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:04.741011Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:04.741057Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:04.741165Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:04.741205Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:04.741244Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:04.741280Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:04.741320Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:04.741360Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:04.741398Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:04.741435Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for 
txid 1:0 2025-04-04T17:16:04.741497Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:04.741537Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:04.741574Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:04.742205Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:04.742316Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:04.742358Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:16:04.742405Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:16:04.742443Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.742527Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:16:04.744865Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:16:04.745306Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:04.746114Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:04.746274Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 178us result status StatusSuccess 2025-04-04T17:16:04.746863Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 
UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.746940Z node 5 :TX_PROXY DEBUG: actor# [5:266:2257] Bootstrap 2025-04-04T17:16:04.766124Z node 5 :TX_PROXY DEBUG: actor# [5:266:2257] Become StateWork (SchemeCache [5:271:2262]) 2025-04-04T17:16:04.766715Z node 5 :TX_PROXY DEBUG: actor# [5:266:2257] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:16:04.769083Z node 5 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944 2025-04-04T17:16:04.769769Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.769814Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-04T17:16:04.852893Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 2025-04-04T17:16:04.853031Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.853102Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.853281Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.853333Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-04T17:16:04.853810Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2025-04-04T17:16:04.854177Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:04.854370Z 
node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 219us result status StatusSuccess 2025-04-04T17:16:04.854790Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2hwUdw3cVfpST/N3E+0q\nlKv4ewHDXmObCeaKCKRRYvoSkqdl3UxGkOr/l+c/CvpTMZ1A9P6udpRdcX8b81BD\nRUptPjkKYLHw2xqleyzel+JqpSCj2UwJmYDWemLAFBEB4I2JBrDy1EgoKerOAofI\nZxgqBchgvC6xjq7Pc48x6nKZAu7lzSDafZbwaLtY2FI5U03to3P+wV/Ujmaup6g5\nq33NNszL6cn8WQYvp1hJAtNC4k9RU+pHu7JT5DlG4hE//gqs2QQTK/Enl84aPbTZ\nowd5YzUlNpQfIpZOC0/XPCOS2reyl3SiZhieyNLcIIBFzTYmG2Io4dbaDOf5K/aK\nWQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743873364850 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:01.513538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.513687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.513739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.513778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:16:01.515166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.516531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.604415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.604486Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.616362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.616605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.616747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.629442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.629737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.630455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.631318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.636629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.641994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.642040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.642166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.649066Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.782878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.783098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.783295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.783551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-04-04T17:16:01.783609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.788719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.788868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.789047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.789110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.789151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.789206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.794886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.794955Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.794997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.797686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.797743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.797817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.797884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.801408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.803686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.803867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.804850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.804971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.805011Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.806385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.806451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.806654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.806728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.808842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.808912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.809079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.809124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.809452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.809515Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.809615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.809648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.809684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.809712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.809747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.809780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.809812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.809853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.809931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.809970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.810001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.811822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.811937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.811977Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, co ... NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:04.681461Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.681530Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:04.683271Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.683321Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.683500Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.683547Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:04.683622Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:04.683668Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:04.683775Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:04.683817Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:04.683861Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:04.683899Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:04.683945Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:04.683983Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:04.684020Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:04.684054Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:04.684116Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:04.684155Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:04.684192Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:04.685325Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:04.685435Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:04.685479Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:16:04.685519Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], version: 3 2025-04-04T17:16:04.685564Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.685657Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-04-04T17:16:04.687955Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-04-04T17:16:04.688449Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-04-04T17:16:04.691351Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:04.696579Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:04.696721Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:16:04.696768Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.696810Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:16:04.696848Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.696906Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:04.696962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:16:04.697004Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.697043Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:16:04.697080Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-04T17:16:04.697117Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-04T17:16:04.697397Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Bootstrap 2025-04-04T17:16:04.715865Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Become StateWork (SchemeCache [4:273:2264]) 2025-04-04T17:16:04.717095Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:16:04.719975Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.720087Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-04T17:16:04.720348Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.720403Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.720635Z node 4 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.720695Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:16:04.721417Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:16:04.721513Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:16:04.721552Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:16:04.721597Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T17:16:04.721638Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.721741Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:16:04.722140Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:16:04.723361Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-04T17:16:04.723832Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-04-04T17:16:04.726174Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.726220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-04T17:16:04.831330Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.o3WTDqxFsu-3WLp7BKW_68lTxeyWggWWwGvu7cfM9rPhhi_JAb-qItoNfug94ZsOC5hA0rEOD6PgDKPqbTf0_jR2miTG05NrmB2wwdxvbQ3xgYvaL2XqGMayTCf6drA901aGqD9KF5pY370YdAvrgvZRgRH6kX9E7fpUBdL1nfD9tuzzShxoOSR_71IRd43MD-nwBtr-sXQQQi_OQsWEh7qlL6oveBHQQ6HKttfOInoNW7tdHeSmzWptOMMti_Jb1TpcrKdIS30sfdEiIZ4hR4VAvUBeIEa8GfJCWLLBfZxw-OG3mZzAjassCbOpfmJne3OAPv0pqY7B2OcwejJI7g" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-04T17:16:04.831768Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.831822Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.832014Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.832061Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-04T17:16:04.833249Z node 4 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2025-04-04T17:16:04.674497Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-04-04T17:16:04.696461Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-04-04T17:16:04.831534Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.**, login_user_level=admin AUDIT LOG checked line: 2025-04-04T17:16:04.831534Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY0LCJpYXQiOjE3NDM3ODY5NjQsInN1YiI6InVzZXIxIn0.**, login_user_level=admin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:01.513564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.513732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.513786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.513842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.516536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.605640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.605692Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-04-04T17:16:01.617802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.618069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.618260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.627302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.627587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.628202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.628977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.634971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.641958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.642005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.642127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.649009Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.778725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.778959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.779183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.779430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.779481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.781781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.781986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.782155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.782215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.782272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.782325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.784267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.784333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.784379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.786039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.786211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.789780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.792189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.792346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.793358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.793474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.793530Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.793777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.793844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.794042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.794150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.795900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.796092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.796131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.796452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.796494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.796598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.796628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.796660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.796688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.796717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.796752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.796785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.796826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.796891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.796928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.796958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.798572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.798684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.798717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:04.976494Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.976617Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.976662Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-04-04T17:16:04.976708Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-04-04T17:16:04.977292Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.977390Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.977429Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:16:04.977472Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:16:04.977512Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:16:04.977915Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.977978Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:04.978004Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:16:04.978030Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-04T17:16:04.978055Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:04.978112Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-04-04T17:16:04.981095Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:16:04.981499Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-04-04T17:16:04.982017Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-04-04T17:16:04.982191Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 204us result status StatusSuccess 2025-04-04T17:16:04.982559Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-04T17:16:04.985187Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:04.985368Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.985403Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.985447Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.985492Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:04.985684Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:04.985789Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:16:04.985827Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.985876Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:16:04.985913Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.985974Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.986030Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-04-04T17:16:04.986073Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:16:04.986115Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T17:16:04.986155Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-04-04T17:16:04.986190Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-04T17:16:04.988529Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.988651Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-04-04T17:16:04.988860Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.988927Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.989090Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.989147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-04-04T17:16:04.989642Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:16:04.989741Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:16:04.989778Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:16:04.989819Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T17:16:04.989861Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:04.989963Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-04T17:16:04.991724Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-04-04T17:16:04.992218Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:04.992398Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 209us result status StatusSuccess 2025-04-04T17:16:04.992758Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TBackupTests::BackupUuidColumn[Raw] >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> TBackupTests::BackupUuidColumn[Zstd] >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 17423711959139106699 2025-04-04T17:16:07.639878Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' 
StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.640048Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.640122Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.640191Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.640256Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.640321Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.640395Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.641337Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.641436Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.641491Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.641544Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.641605Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.641674Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.641725Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.641828Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.641887Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.641928Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.642009Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: 
Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.642053Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.642088Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.642129Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.643963Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.644054Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.644120Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.644213Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.644273Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.644321Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-04-04T17:16:07.644383Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 5954019919667561058 2025-04-04T17:16:07.609690Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.609846Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.609940Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.610021Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.610082Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-04-04T17:16:07.610154Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: 
Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.610266Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.610331Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.611287Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611368Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611416Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611462Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611535Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611588Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611634Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611685Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.611751Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.611868Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.611914Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.611946Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.611979Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.612029Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.612072Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.612110Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason'
2025-04-04T17:16:07.614089Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.614175Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.614256Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.614303Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.614347Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.614397Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.614463Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.614510Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03
2025-04-04T17:16:07.766886Z 1 00h01m30.011024s :BS_LOCALRECOVERY CRIT: VDISK[82000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip# 0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0
HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "Some error reason" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
>> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD]
>> TBackupTests::BackupUuidColumn[Zstd] [GOOD]
>> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD]
Test command err:
2025-04-04T17:15:04.344190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507727227346242:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:04.344289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00179f/r3tmp/tmpSMXuj8/pdisk_1.dat 2025-04-04T17:15:04.939974Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:04.944245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:04.944319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:04.955377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22597, node 1 2025-04-04T17:15:05.094439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:05.094464Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:05.094473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:05.094590Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:05.453601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-04T17:15:05.471030Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:07.502615Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:15:07.502710Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507740112248769:2329], Start check tables existence, number paths: 2 2025-04-04T17:15:07.506358Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmZmYzFlZTAtYWM3ZTA1ODMtNGVjYTczNmYtOTg1OTQyMzE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmZmYzFlZTAtYWM3ZTA1ODMtNGVjYTczNmYtOTg1OTQyMzE= 2025-04-04T17:15:07.506877Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmZmYzFlZTAtYWM3ZTA1ODMtNGVjYTczNmYtOTg1OTQyMzE=, ActorId: [1:7489507740112248785:2330], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:07.507435Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-04T17:15:07.507452Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:07.507464Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enabled 2025-04-04T17:15:07.507523Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507740112248769:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:15:07.507559Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507740112248769:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:15:07.507594Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507740112248769:2329], Successfully finished 2025-04-04T17:15:07.507641Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:15:07.518330Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507740112248795:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:07.521078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:07.523071Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507740112248795:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-04T17:15:07.524069Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507740112248795:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:15:07.529101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507740112248795:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:15:07.612577Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507740112248795:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:07.616414Z node 1 :TX_PROXY ERROR: Actor# [1:7489507740112248846:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:07.616539Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507740112248795:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:15:07.629349Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-04T17:15:07.629376Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:15:07.629540Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmZmYzFlZTAtYWM3ZTA1ODMtNGVjYTczNmYtOTg1OTQyMzE=, ActorId: [1:7489507740112248785:2330], ActorState: ReadyState, TraceId: 01jr0ws5zcdrgvfp4yksnmyv2b, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-04T17:15:07.630322Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507740112248855:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:15:07.635546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507740112248855:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:07.635628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:07.860547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:15:07.865405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:15:07.867177Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZmZmYzFlZTAtYWM3ZTA1ODMtNGVjYTczNmYtOTg1OTQyMzE=, ActorId: [1:7489507740112248785:2330], ActorState: ExecuteState, TraceId: 01jr0ws5zcdrgvfp4yksnmyv2b, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7489507740112248860:2330] WorkloadServiceCleanup: 0 2025-04-04T17:15:07.868563Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmZmYzFlZTAtYWM3ZTA1ODMtNGVjYTczNmYtOTg1OTQyMzE=, ActorId: [1:7489507740112248785:2330], ActorState: CleanupState, TraceId: 01jr0ws5zcdrgvfp4yksnmyv2b, EndCleanup, isFinal: 0 2025-04-04T17:15:07.868619Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmZmYzFlZTAtYWM3ZTA1ODMtNGVjYTczNmYtOTg1OTQyMzE=, ActorId: [1:7489507740112248785:2330], ActorState: CleanupState, TraceId: 01jr0ws5zcdrgvfp4yksnmyv2b, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7489507727227346485:2277] 2025-04-04T17:15:07.876953Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2JiOTc1YjUtZjcwMTI0NTAtZDE1MTY0NmItMTJmY2E5, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2JiOTc1YjUtZjcwMTI0NTAtZDE1MTY0NmItMTJmY2E5 2025-04-04T17:15:07.877095Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2JiOTc1YjUtZjcwMTI0NTAtZDE1MTY0NmItMTJmY2E5, ActorId: [1:7489507740112248895:2335], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:07.877203Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:15:07.877261Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2JiOTc1YjUtZjcwMTI0NTAtZDE1MTY0NmItMTJmY2E5, ActorId: [1:7489507740112248895:2335], ActorState: ReadyState, TraceId: 01jr0ws675a1t3f81j1zmrxe07, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7489507740112248894:2360] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-04T17:15:07.877275Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507740112248897:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:15:07.877323Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7489507740112248895:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=N2JiOTc1YjUtZjcwMTI0NTAtZDE1MTY0NmItMTJmY2E5 2025-04-04T17:15:07.877383Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507740112248898:2337], Database: /Root, Start database fetching 2025-04-04T17:15:07.877601Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherA ... 
essfully fetched pool my_pool, DatabaseId: /Root 2025-04-04T17:16:06.536388Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-04-04T17:16:06.563796Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, acquire mvcc snapshot 2025-04-04T17:16:06.564751Z node 5 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, read snapshot result: UNAVAILABLE, step: 1743786966579, tx id: 18446744073709551615 2025-04-04T17:16:06.564813Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, ExecutePhyTx, tx: 0x000050C00011D4D8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-04T17:16:06.564854Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, Sending to Executer TraceId: 0 8 2025-04-04T17:16:06.564922Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, Created new KQP executer: [5:7489507992411350283:2781] isRollback: 0 2025-04-04T17:16:06.568532Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-04T17:16:06.568665Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 4.87 QueriesCount: 2 2025-04-04T17:16:06.568775Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:16:06.569036Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:06.569068Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 01jr0wtzde0jc9dadncs3pd4wr, EndCleanup, isFinal: 0 2025-04-04T17:16:06.569129Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ExecuteState, TraceId: 
01jr0wtzde0jc9dadncs3pd4wr, Sent query response back to proxy, proxyRequestId: 73, proxyId: [5:7489507932281805889:2221] 2025-04-04T17:16:06.569475Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:16:06.569545Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2025-04-04T17:16:06.569609Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ReadyState, Created new KQP executer: [5:7489507992411350294:2781] isRollback: 1 2025-04-04T17:16:06.569649Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:06.569885Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: CleanupState, EndCleanup, isFinal: 1 2025-04-04T17:16:06.569914Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:16:06.569997Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=ZjY2ODMyOTctMTU3YzdlYWUtZmU4OThkNzYtMjk1ODdjZmE=, ActorId: [5:7489507992411350245:2781], ActorState: unknown state, Session actor destroyed 2025-04-04T17:16:06.962903Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ= 2025-04-04T17:16:06.963028Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:16:06.963315Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ReadyState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, received request, proxyRequestId: 74 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [5:7489507992411350311:3145] database: Root databaseId: /Root pool id: default 2025-04-04T17:16:06.963352Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ReadyState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, request placed into pool from cache: default 2025-04-04T17:16:06.963413Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Sending CompileQuery request 2025-04-04T17:16:07.013094Z node 5 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, ExecutePhyTx, tx: 0x000050C0003CF898 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:16:07.013138Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Sending to Executer TraceId: 0 8 2025-04-04T17:16:07.013187Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Created new KQP executer: [5:7489507996706317612:2797] isRollback: 0 2025-04-04T17:16:07.014701Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Forwarded TEvStreamData to [5:7489507992411350311:3145] 2025-04-04T17:16:07.016146Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-04-04T17:16:07.016290Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, txInfo Status: Committed Kind: Pure TotalDuration: 3.237 ServerDuration: 3.194 QueriesCount: 2 2025-04-04T17:16:07.016368Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:16:07.016530Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:07.016559Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, EndCleanup, isFinal: 1 2025-04-04T17:16:07.016606Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: ExecuteState, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Sent query response back to proxy, proxyRequestId: 74, proxyId: [5:7489507932281805889:2221] 2025-04-04T17:16:07.016638Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: [5:7489507992411350312:2797], ActorState: unknown state, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Cleanup temp tables: 0 2025-04-04T17:16:07.016876Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTE5MWIyOGItZTlmOGE3YzYtY2Y5MTdjMDAtNTRiYWU1ZDQ=, ActorId: 
[5:7489507992411350312:2797], ActorState: unknown state, TraceId: 01jr0wtzxk1jt7rrpntcef73ss, Session actor destroyed 2025-04-04T17:16:07.024107Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=MWY0MGYxNjYtNzRlYzRkNzQtYzMyMTczMjAtOWZmMGM5Mjk=, ActorId: [5:7489507949461675540:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:16:07.024160Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=MWY0MGYxNjYtNzRlYzRkNzQtYzMyMTczMjAtOWZmMGM5Mjk=, ActorId: [5:7489507949461675540:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:07.024193Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MWY0MGYxNjYtNzRlYzRkNzQtYzMyMTczMjAtOWZmMGM5Mjk=, ActorId: [5:7489507949461675540:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:16:07.024222Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MWY0MGYxNjYtNzRlYzRkNzQtYzMyMTczMjAtOWZmMGM5Mjk=, ActorId: [5:7489507949461675540:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:16:07.024298Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=MWY0MGYxNjYtNzRlYzRkNzQtYzMyMTczMjAtOWZmMGM5Mjk=, ActorId: [5:7489507949461675540:2329], ActorState: unknown state, Session actor destroyed
>> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD]
>> DataShardReadIteratorSysTables::ShouldRead [GOOD]
>> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD]
>> DataShardReadIterator::ShouldNotReadAfterCancel
>> DataShardReadIterator::ShouldHandleReadAck [GOOD]
>> DataShardReadIterator::ShouldHandleOutOfOrderReadAck
>> DataShardReadIteratorBatchMode::RangeFull [GOOD]
>> DataShardReadIteratorBatchMode::RangeToInclusive
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleRanges
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow
>> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD]
>> DataShardReadIterator::ShouldReadRangeCellVec [GOOD]
>> DataShardReadIterator::ShouldReadRangeArrow
>> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD]
>> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:07.374344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate#
1 2025-04-04T17:16:07.374488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.374528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:07.374580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:07.375775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:07.375823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:07.375898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.375973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:07.377691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.451307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:07.451346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.461404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:07.461620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:07.461740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:07.468252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:07.468446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:07.469481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.470977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:07.479110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.486017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:07.486062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.486104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:07.486215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.491863Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:07.603220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { 
WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:07.603421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.603644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:07.603881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:07.603935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.611342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.611499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:07.611655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.611721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:07.611788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:07.611823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:07.614389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.614440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:07.614476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:07.619114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.619162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.619202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.619235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.622185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:07.628430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:07.628631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:07.629481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.629598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:07.629653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.629886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:07.629943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.630104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:07.630174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:07.632140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.632181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.632345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.632402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:07.632740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.632778Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:07.632865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.632891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.632926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.632955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.632985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:07.633021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.633052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:07.633083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:07.633133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:07.633164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:07.633195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:07.634865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.635008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.635053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.003963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.004097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:08.005480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:16:08.005607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:16:08.006601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.006713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:08.006762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:16:08.006865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:16:08.006988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.024241Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:412:2383], attempt# 0 2025-04-04T17:16:08.048495Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:412:2383], sender# [1:411:2382] 2025-04-04T17:16:08.053766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:08.053832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 
2025-04-04T17:16:08.054095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:08.054148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:16:08.054636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.054693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.055385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.055486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.055525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:16:08.055550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:16:08.055580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:16:08.055637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:16:08.058598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:12735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F39CD46A-6554-4F69-A707-C9D8E628EEE9 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-04T17:16:08.075584Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:12735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C1D7AB7C-9457-43F2-BDE7-46E95683B381 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-04T17:16:08.084656Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-04T17:16:08.084757Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:411:2382] 2025-04-04T17:16:08.084975Z 
node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:412:2383], sender# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12735 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DC3C969C-7326-4F9F-8787-B8C6D0E06566 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-04-04T17:16:08.087914Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-04-04T17:16:08.087971Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:412:2383], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T17:16:08.088097Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T17:16:08.095614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.095666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:16:08.095778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.095874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.095945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096000Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:08.096196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.100271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.100439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.100504Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:16:08.100624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:08.100663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.100703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:08.100741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.100796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:16:08.100881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:16:08.100932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.100972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:08.101024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:08.101186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.103439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:08.103500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:07.374714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:07.374802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.374838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:07.374874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:07.375800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:07.375861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:07.375931Z node
1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.375993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:07.377673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.453521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:07.453574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.465923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:07.466112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:07.466233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:07.475340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:07.475567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:07.476122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.476786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:07.481285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:07.485999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.486040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:07.486151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.491642Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:07.624041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:07.624241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.624427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:07.624658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-04-04T17:16:07.624707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.626750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.626872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:07.627014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.627080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:07.627129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:07.627179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:07.628963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.629015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:07.629045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:07.630568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.630638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.630687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.630731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.633784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:07.635385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:07.635545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:07.636459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.636626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:07.636681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.636938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:07.636983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.637117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:07.637190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:07.638942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.638980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.639122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.639178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:07.639506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.639545Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:07.639632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.639660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.639693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.639720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.639753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:07.639786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.639815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:07.639841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:07.639893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:07.639925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:07.639954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:07.641477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.641574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.641624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.005438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.005570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:08.007073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:16:08.007186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:16:08.008023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.008103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:08.008141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:16:08.008226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:16:08.008327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.020341Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:412:2383], attempt# 0 2025-04-04T17:16:08.052724Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:412:2383], sender# [1:411:2382] 2025-04-04T17:16:08.058770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:08.058820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:08.059032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:08.059068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:16:08.059558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.059597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.060252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.060400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.060444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:16:08.060479Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:16:08.060514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:16:08.060586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:16:08.064865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:30382 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 76900BC4-B544-4C86-A717-1FC44C5154AD amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-04T17:16:08.074073Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:30382 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A1BBBC4B-AF99-4439-A8C4-A3723695631E amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-04T17:16:08.086105Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-04T17:16:08.086228Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:411:2382] 2025-04-04T17:16:08.086351Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:412:2383], sender# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:30382 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 254A48EB-AADD-4BAD-BA6E-CC40B45C04A5 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-04T17:16:08.088650Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-04T17:16:08.088694Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:412:2383], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T17:16:08.088859Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T17:16:08.095834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.095889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:16:08.096043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.096180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.096245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096293Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:08.096514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.098236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.098514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.098568Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:16:08.098696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:08.098733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.098765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-04-04T17:16:08.098794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.098844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:16:08.098902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:16:08.098939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.098981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:08.099025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:08.099121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.100685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:08.100731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:07.374350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:07.374456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.374495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:07.374532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:07.375783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:07.375841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:07.375960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.376035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:07.377677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.445937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:07.445992Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.458267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:07.458476Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:07.458702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:07.466616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:07.466825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:07.469489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.471227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:07.479544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.486079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:07.486127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.486176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:07.486329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.492806Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:07.639624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:07.639865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.640098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:07.640372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:07.640429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.642888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.643028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:07.643245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.643326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:07.643394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:07.643433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:07.645425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.645476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:07.645515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:07.647311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.647380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.647434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.647478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.651256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:07.653174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:07.653358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:07.654412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.654537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:07.654585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.654889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:07.654945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.655104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:07.655183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:07.657116Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.657160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.657358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.657400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:07.657750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.657795Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:07.657886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.657925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.657965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.657999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.658036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:07.658076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.658109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:07.658136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:07.658221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:07.658283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:07.658320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:07.660202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.660303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.660345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.003523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.003660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:08.005140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:16:08.005254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:16:08.006080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.006189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:08.006236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:16:08.006365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:16:08.006446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.020343Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:412:2383], attempt# 0 2025-04-04T17:16:08.052355Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:412:2383], sender# [1:411:2382] 2025-04-04T17:16:08.056567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:08.056639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:08.056909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:08.056960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:16:08.057485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.057540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.058326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.058435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.058479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:16:08.058521Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:16:08.058565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:16:08.058657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:16:08.061760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:13343 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D6F39816-1B17-467D-BA23-97E7C04F741C amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-04T17:16:08.077074Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:13343 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5D083537-270E-4FE1-9A00-45E0013A5E81 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-04T17:16:08.082647Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-04-04T17:16:08.082769Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:411:2382] 2025-04-04T17:16:08.082887Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:412:2383], sender# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:13343 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6E17866F-1A2B-438F-A4B9-B4A21E0B9F4B amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-04-04T17:16:08.086747Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-04-04T17:16:08.086811Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:412:2383], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T17:16:08.086952Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T17:16:08.094225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-04T17:16:08.094298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:16:08.094441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-04T17:16:08.094519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-04T17:16:08.094560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.094603Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.094626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:08.094651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:08.094768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.096420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.096611Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:16:08.096723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:08.096753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.096793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-04-04T17:16:08.096824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.096864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:16:08.096937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:16:08.096982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.097011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:08.097035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:08.097151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.098813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:08.098865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:07.374348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:07.374455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.374501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:07.374540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:07.375789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:07.375850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:07.375984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.376063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:07.377711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.459658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:07.459713Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.470875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:07.471131Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:07.471266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:07.481074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:07.481340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:07.482020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.482870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:07.487428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.488602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.488669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.488828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:07.488875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.488921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:07.489045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.495311Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:07.596790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:07.598857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.600782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:07.602898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:07.602971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.605810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.605985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:07.606263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.606353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:07.606460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:07.606497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:07.608403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.608465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:07.608508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:07.610130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.610186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.610234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.610301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.614380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:07.617508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:07.617662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:07.618469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.618572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:07.618625Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.620023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:07.620085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.620288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:07.620358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:07.622353Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.622425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.622614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.622659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:07.623016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.623060Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:07.623159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.623193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.623234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.623269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.623318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:07.623358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.623398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:07.623427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:07.623500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:07.623544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:07.623582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:07.625315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.625423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.625459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.003247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.003348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:08.004636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:16:08.004748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:16:08.005594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.005710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:08.005752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:16:08.005839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:16:08.005925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.024221Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:412:2383], attempt# 0 2025-04-04T17:16:08.050349Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:412:2383], sender# [1:411:2382] 2025-04-04T17:16:08.056360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:08.056418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:08.056651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:08.056715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:16:08.057228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.057280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:08.058077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.058176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.058204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:16:08.058232Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:16:08.058299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:16:08.058360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:16:08.061231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:6800 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6C3ABBFF-34CB-4AC3-B951-266A48712674 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-04T17:16:08.075962Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:6800 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8C37EAF2-B186-4937-9169-A7DFDF0D9D2E amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-04T17:16:08.081608Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-04-04T17:16:08.081711Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:411:2382] 2025-04-04T17:16:08.082602Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:412:2383], sender# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:6800 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 46FED341-1C17-4FDB-A838-CED851695149 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-04-04T17:16:08.085607Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] 
HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:412:2383], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-04-04T17:16:08.085661Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:412:2383], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T17:16:08.085790Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:411:2382], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T17:16:08.092813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-04T17:16:08.092874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:16:08.093041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-04T17:16:08.093175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-04-04T17:16:08.093247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.093292Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.093332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:08.093372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:08.093548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.095404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.095618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.095665Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:16:08.095781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:08.095818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.095876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-04-04T17:16:08.095910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.095960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:16:08.096062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:16:08.096115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.096154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:08.096185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:08.096310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:08.098293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:08.098358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:397:2369] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:07.374577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:07.374689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.374734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:07.374779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:07.375805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:07.375857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:07.375927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.376008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:07.377655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.458443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:07.458499Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.469395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-04-04T17:16:07.469617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:07.469748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:07.479636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:07.479825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:07.480297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.480875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:07.484833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:07.485941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.486038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:07.486134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.490903Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:07.623581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:07.623815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.624061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:07.624341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:07.624406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.626573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.626733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:07.626910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.626984Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:07.627035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:07.627077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:07.628915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.628973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:07.629013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:07.630667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.630728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.630776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.630819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.645908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:07.647859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:07.648030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:07.649105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.649229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:07.649284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.649571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:07.649630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.649794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:07.649899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:07.651872Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.651920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.652095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.652160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:07.652572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.652626Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:07.652741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.652774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.652816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.652851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.652887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:07.652926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.652964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:07.652995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:07.653056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:07.653094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:07.653128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:07.654960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.655078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.655131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
72057594046678944 2025-04-04T17:16:08.070762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.070851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:08.070886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:16:08.070922Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:16:08.070954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:16:08.071011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /data_01.csv HTTP/1.1 HEADERS: Host: localhost:62017 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9D3555F4-FDB4-474F-830B-06806A827514 amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:62017 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E724A9EB-F1E4-404F-AEF3-C0ED92641CB6 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:16:08.074790Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:476:2435], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-04-04T17:16:08.078535Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:480:2437], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-04-04T17:16:08.078578Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:480:2437], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T17:16:08.078756Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:479:2436], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:62017 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F6844486-F7AE-4DC4-A31E-E8F72C72F11B amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-04-04T17:16:08.085569Z node 1 
:DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:476:2435], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-04-04T17:16:08.085794Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:475:2433] 2025-04-04T17:16:08.085905Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:476:2435], sender# [1:475:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-04T17:16:08.088326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:62017 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 84564A05-8804-4E7B-AC6D-EFBB59D9CE18 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-04-04T17:16:08.089343Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:476:2435], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-04-04T17:16:08.089383Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:476:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T17:16:08.089524Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:475:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T17:16:08.111758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.111846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:16:08.111999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.112096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.112160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.112322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, 
LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.112797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.112833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:16:08.112932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.113022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-04-04T17:16:08.113060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.113088Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.113135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:08.113179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:16:08.113204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:08.113318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.117022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.117254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.117638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.117694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:16:08.117800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:08.117854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.117895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:08.117956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.118000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:16:08.118084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:369:2336] message: TxId: 102 2025-04-04T17:16:08.118134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:08.118176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:08.118209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:08.118369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:16:08.120321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:08.120381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:452:2412] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:01.513558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.513728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.513773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.513807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.516526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.601137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.601192Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.619653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.619896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-04-04T17:16:01.620044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.629201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.629444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.630060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.631071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.635706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.642081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.642133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.642178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.642329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.650803Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.768176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.769821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.770674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.772592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.772686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.775877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.776002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.776044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.778740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.778784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.778814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.780390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.780430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.780482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.780525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.784258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.785768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.785898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.786830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.788207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.788265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.788488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.788561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.790191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:16:01.790232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.790429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.790468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.790769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.790803Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.790906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.790940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.790973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.791003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.791069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.791149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.791205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.791240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.791268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.792945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.793048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.793087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
DbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:08.491561Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-04T17:16:08.491603Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:16:08.491627Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:16:08.491652Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-04-04T17:16:08.491679Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-04-04T17:16:08.493642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSuccess TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:08.493746Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-04-04T17:16:08.493907Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:08.493947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.494094Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:08.494134Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:354:2330], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-04T17:16:08.494511Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:16:08.494603Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:16:08.494663Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:16:08.494707Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-04-04T17:16:08.494744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:08.494841Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T17:16:08.496300Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:16:08.497321Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [4:308:2295] sender: [4:403:2058] recipient: [4:101:2136] Leader for TabletID 72057594046678944 is [4:308:2295] sender: [4:406:2058] recipient: [4:15:2062] Leader for TabletID 72057594046678944 is [4:308:2295] sender: [4:407:2058] recipient: [4:405:2376] Leader for TabletID 72057594046678944 is [4:408:2377] sender: [4:409:2058] recipient: 
[4:405:2376] 2025-04-04T17:16:08.530904Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:08.531011Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:08.531056Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:08.531096Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:08.531129Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:08.531164Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:08.531233Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:08.531296Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:08.531591Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:08.550044Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:08.551379Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:08.551549Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:08.551765Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:08.551799Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:08.551931Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:08.552607Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:08.552721Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.552789Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.553191Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.553274Z node 4 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:16:08.553467Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.553549Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.553621Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.553709Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.553791Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at 
schemeshard: 72057594046678944 2025-04-04T17:16:08.553904Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.554254Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.554365Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.554633Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.554782Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.554884Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.554940Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.554996Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.555168Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.555234Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.555333Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.555473Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.555596Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.555637Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.555680Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:08.561637Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:08.561709Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:08.562253Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:08.562317Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.562358Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:08.563070Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [4:408:2377] sender: [4:465:2058] recipient: [4:15:2062] 2025-04-04T17:16:08.597816Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:08.597886Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-04T17:16:08.637301Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY4LCJpYXQiOjE3NDM3ODY5NjgsInN1YiI6InVzZXIxIn0.rm6HlFQkmTudVMhImhPLhX9rOt2kgMgjl94HvEh44OSFlBX64nzOfmkMe-Q0jRzHdnFDWmMcBtWVlxrRPkj29tnTGotKyydthYduIi6LCkJXJSJS8ybD32fuE1_p_7ajJ6AW94BsqvRuZOvJERTjtEKc5oES4cCtMsGcYA89SBBGSKSvXpnI757O9iDtNz9-hyNQB4dmfjjR7M5SnaQ33wke66_rJTdVGnn3XKfSH32y4AMFUeZBeWtgd_RACiY-B6-Ji9sCb_EZhFghzPyLPCtr05T_EJet1D6_DVmicdAJ7f12C6CmZ_onBNBygc6teW58Iky7i3EdMJfJ0EElmA" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY4LCJpYXQiOjE3NDM3ODY5NjgsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-04T17:16:08.637391Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:08.637418Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:08.637555Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:08.637596Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:457:2415], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-04T17:16:08.638007Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:01.514040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.514150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.514265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.514303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.520059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.597549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.597589Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.611690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.611955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.612207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.623517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.623807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.626500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.627564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.635085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.641814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.641911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.642022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.651003Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.774698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.774917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.775105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.775323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.775368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.777094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.777180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.777294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.777335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.777361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.777395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.778760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.778812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.778880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.780286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.780341Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.780378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.780415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.783899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.785606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.785786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.786802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.786988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.788284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.788348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.788514Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.788583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.790437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.790480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.790646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.790687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.791002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.791048Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.791148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.791181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.791244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.791316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.791346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.791389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.791446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.791477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.791506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.793230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.793339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.793375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
04T17:16:04.611693Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:16:04.611742Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.611818Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:04.611895Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:16:04.611945Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:16:04.611993Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:16:04.612040Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-04-04T17:16:04.612087Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-04-04T17:16:04.614734Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:04.614880Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-04-04T17:16:04.615119Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.615178Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.615371Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.615423Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:16:04.615998Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:16:04.616118Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:16:04.616169Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:16:04.616219Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-04-04T17:16:04.616269Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:04.616388Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:16:04.618348Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-04-04T17:16:04.618783Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 
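The sequence that follows is the core of the AccountLockoutAndAutomaticallyUnlock scenario: user1 (created by the CREATE USER operation above, txId 101) submits four wrong passwords and gets "Invalid password" each time; the next two attempts are rejected with "User user1 is not permitted to log in", i.e. the account is now locked; and roughly four seconds later (17:16:04 → 17:16:08) a wrong attempt is again reported as "Invalid password" — the lock has expired and the failure counter reset — after which the correct password yields a token. A minimal sketch of that behaviour, assuming a threshold of four failed attempts and a ~4 s auto-unlock window (both inferred from the timestamps in this log, not read from the test's actual configuration):

```python
import time

# Minimal model of the lockout behaviour exercised in this test. The
# threshold (4) and the unlock window (~4 s) are inferred from this log only.
ATTEMPT_THRESHOLD = 4
UNLOCK_AFTER_SECONDS = 4.0

class Account:
    def __init__(self, password):
        self.password = password
        self.failed = 0
        self.locked_until = 0.0

    def login(self, password, now=None):
        now = time.monotonic() if now is None else now
        if self.locked_until:
            if now < self.locked_until:
                return "User is not permitted to log in"  # lockout in effect
            self.failed = 0        # auto-unlock resets the failure counter,
            self.locked_until = 0.0  # so one wrong attempt won't re-lock
        if password != self.password:
            self.failed += 1
            if self.failed >= ATTEMPT_THRESHOLD:
                self.locked_until = now + UNLOCK_AFTER_SECONDS
            return "Invalid password"
        self.failed = 0
        return "Token: <jwt>"  # a successful login also resets the counter
```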
2025-04-04T17:16:04.618830Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-04T17:16:04.755028Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-04T17:16:04.755180Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:04.755227Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:04.755421Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:04.755471Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:203:2205], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-04T17:16:04.755897Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-04-04T17:16:04.756137Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.763098Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-04T17:16:04.763358Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.769690Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-04T17:16:04.770038Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.783030Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-04T17:16:04.783545Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.783684Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-04-04T17:16:04.784102Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:04.784210Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-04-04T17:16:04.784682Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:04.784891Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 243us result status StatusSuccess 2025-04-04T17:16:04.785375Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false 
} DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm6DQrTJHm4JiiJearjgF\n2YMi8hhofFSi5RvvalbyKINFQUQow7HQB6Cm5OCisdwcMW1ur/JzZdj17cPOP7/s\nic2LO8KCN1kY5TKbZkcDyF/CnhWcq342VwvXaS2gANY3mIA3AXdnO6N7WyHruBzB\ngKr13fkhx94qMndmT6blAWvaXiHahF40iy3SCvX3dljdSBJll5f/btn1uLeNCuOZ\nIrUbk2ydyztSjpK/anZ27RJztf3UYnn+vUXnP8ee34oHYpm0M/U/y1/8rYf3JyOA\ne/WW+/SDlqwUzr3xvyl8v/CpBPgLz+M7t1QnT92hq1RO0HoKBMfypB2ATs6ERBG1\naQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743873364741 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:08.786397Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:08.793866Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-04-04T17:16:08.794456Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:08.806346Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY4LCJpYXQiOjE3NDM3ODY5NjgsInN1YiI6InVzZXIxIn0.Zkg8fW3cJoSrKkSpOubj7S2t79LY7E3pm0OxSWqsBDSUg9fLEA7wboWtsh_7TBHXuiKwko-QLCWYXouKJaJx8My_gmf6W879XQGQr2qvM4WOsbV4S9dOOQZ9kmM8VItq8o4pazULZhVZoiXYHxspfpRrE-_AQiGtcgbbRPxggiji9cPvFRrc68lBZ0T5T55VGm7elja2PbIMf7ZqgUOScZtnFumU_wUXzLNIJ4oUmeEzUXqHnc8n4C_A7ilWvpjtqIFO6cJogQg5NXjXpbkhdI28XNj1kI5L6bxu5oyPlYl27ky1_NVz7g6e02T8FSaFzu1NAm0JajD7E7rNPf7HIw" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY4LCJpYXQiOjE3NDM3ODY5NjgsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-04T17:16:08.806967Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:08.807194Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 243us result status StatusSuccess 2025-04-04T17:16:08.807669Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false 
} DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm6DQrTJHm4JiiJearjgF\n2YMi8hhofFSi5RvvalbyKINFQUQow7HQB6Cm5OCisdwcMW1ur/JzZdj17cPOP7/s\nic2LO8KCN1kY5TKbZkcDyF/CnhWcq342VwvXaS2gANY3mIA3AXdnO6N7WyHruBzB\ngKr13fkhx94qMndmT6blAWvaXiHahF40iy3SCvX3dljdSBJll5f/btn1uLeNCuOZ\nIrUbk2ydyztSjpK/anZ27RJztf3UYnn+vUXnP8ee34oHYpm0M/U/y1/8rYf3JyOA\ne/WW+/SDlqwUzr3xvyl8v/CpBPgLz+M7t1QnT92hq1RO0HoKBMfypB2ATs6ERBG1\naQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743873364741 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::Boot >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::TwoTables >> TSchemeShardMoveTest::Replace >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::ResetCachedPath >> TSchemeShardMoveTest::MoveIndex >> TSchemeShardMoveTest::Reject >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TSchemeShardMoveTest::MoveMigratedTable >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> TSchemeShardMoveTest::ResetCachedPath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
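The token in the successful TTxLogin result above is a PS256-signed JWT; the accompanying SanitizedToken keeps the header and payload but replaces the signature with "**", which is what makes it safe to log. Both segments are plain base64url-encoded JSON and can be read without any key — decoding the payload shown above gives audience "/MyRoot", subject "user1", and a 12-hour lifetime, while the header carries the key id ("kid": "1" here, "3" in the earlier test after more RotateKeys rounds) that matches a PublicKeys entry in the SecurityState block of the describe result. A small decoding sketch using only the standard library:

```python
import base64, json

def b64url_decode(segment: str) -> bytes:
    # JWT segments drop base64 padding; restore it before decoding.
    return base64.urlsafe_b64decode(segment + "=" * (-len(segment) % 4))

# Payload segment copied from the TTxLogin result above. Decoding needs no
# key, which is exactly why the log masks only the signature segment.
payload = b64url_decode(
    "eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTY4"
    "LCJpYXQiOjE3NDM3ODY5NjgsInN1YiI6InVzZXIxIn0"
)
claims = json.loads(payload)
print(claims)
# {'aud': ['/MyRoot'], 'exp': 1743830168, 'iat': 1743786968, 'sub': 'user1'}
print(claims["exp"] - claims["iat"])  # 43200 seconds, i.e. a 12-hour token
```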
2025-04-04T17:16:10.575993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.576066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.645974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.646029Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.656670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.656851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.657025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.668643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.669626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.671144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.672544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.680394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.687784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.687821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.687976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693410Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.817196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.817436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.817625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.817828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.817873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.820071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.820207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.820353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.820442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.820476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.820542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.822385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.822440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.822474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.824057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.824097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.824135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.824182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.827763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.829247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.829427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.830334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.830456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.830508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.830750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.830802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.830940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.831022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.832869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.832910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.833091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.833134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.833407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.833449Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.833537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.833567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.833629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.833664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.833699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.833745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.833793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.833822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.833892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.833930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.833970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.835551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.835645Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.835686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 78944, LocalPathId: 2] was 1 2025-04-04T17:16:11.300696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-04T17:16:11.300716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-04T17:16:11.300783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T17:16:11.300817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:16:11.301122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:16:11.301208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:16:11.301288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:16:11.301335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:16:11.301366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:16:11.304279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:16:11.304341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:16:11.304381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:499:2459] TestWaitNotification: OK eventTxId 103 2025-04-04T17:16:11.304986Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:11.305177Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 203us result status StatusPathDoesNotExist 2025-04-04T17:16:11.305371Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
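A detail worth noting in the describe results that follow: once TwoTables has moved /MyRoot/Table1 away, the old path answers StatusPathDoesNotExist and reports PathId: 18446744073709551615 together with the nearest resolved prefix (/MyRoot, LocalPathId 1), while the destination /MyRoot/TableMove1 exists with TableSchemaVersion bumped to 2. That reported path id is the all-ones 64-bit value, evidently a "no such path" sentinel rather than a real id (the sentinel's name in the schemeshard sources is not shown in this log):

```python
# The "path id" reported for the vanished source path is uint64 max — read
# here as a "no such path" sentinel (an observation from this log; the
# constant's actual name in the source tree is an assumption).
NO_PATH_ID = 2**64 - 1
assert NO_PATH_ID == 18446744073709551615

def path_exists(described_path_id: int) -> bool:
    return described_path_id != NO_PATH_ID
```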
2025-04-04T17:16:11.305803Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:11.306002Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 173us result status StatusSuccess 2025-04-04T17:16:11.306407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:11.307078Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:11.307252Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 179us result status StatusPathDoesNotExist 2025-04-04T17:16:11.307409Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:16:11.307922Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:11.308080Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 163us result status StatusSuccess 2025-04-04T17:16:11.308440Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:11.309060Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:11.309228Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 144us result status StatusSuccess 2025-04-04T17:16:11.309618Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] >> TSchemeShardMoveTest::Replace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:10.575966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.576024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.657355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.657409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.671288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.671445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.671546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.680994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.681243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.681828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.682600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.686974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.688012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.688073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.688223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.688263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.688305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.688429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693930Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.803150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.803394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.803568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.803809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.803861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.805941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.806067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.806294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.806356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.806390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.806423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.808271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.808317Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.808347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.809764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.809799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.809827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.809861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.812892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.814393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.814606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.815511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.815628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.815684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.815910Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.815962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.816109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.816183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.817860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.817904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.818089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.818127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.818444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.818494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.818584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.818627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.818659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.818687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.818721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.818777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.818823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.818854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.818907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.818940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.818975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.820314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.820428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.820462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-04T17:16:11.559880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 157000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 222 } } 2025-04-04T17:16:11.560008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 157000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 222 } } 2025-04-04T17:16:11.560048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-04T17:16:11.560133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.560176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2025-04-04T17:16:11.561846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.561976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.562028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:11.562094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-04-04T17:16:11.562223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:11.563723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-04-04T17:16:11.563837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2025-04-04T17:16:11.564254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:11.564354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-04T17:16:11.564407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-04-04T17:16:11.565781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2025-04-04T17:16:11.565943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-04-04T17:16:11.572137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:11.572205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:16:11.572422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:11.572463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-04-04T17:16:11.572838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.572902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2025-04-04T17:16:11.574071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:11.574181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:16:11.574214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:16:11.574292Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-04-04T17:16:11.574344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:16:11.574428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-04-04T17:16:11.575480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1144 } } 2025-04-04T17:16:11.575537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-04T17:16:11.575670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 
CpuTimeUsec: 1144 } } 2025-04-04T17:16:11.575770Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1144 } } 2025-04-04T17:16:11.577168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 672 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-04T17:16:11.577212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-04-04T17:16:11.577336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 672 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-04T17:16:11.577389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:16:11.577478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 672 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-04-04T17:16:11.577530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:11.577567Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.577602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:16:11.577658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-04-04T17:16:11.580012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:16:11.580499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.580807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.581081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.581126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-04T17:16:11.581230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:16:11.581263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:16:11.581329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:16:11.581364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:16:11.581430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-04T17:16:11.581489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 105 2025-04-04T17:16:11.581535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:16:11.581572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:16:11.581603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T17:16:11.581731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:16:11.583287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:16:11.583333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:838:2758]
TestWaitNotification: OK eventTxId 105
>> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD]
>> TSchemeShardMoveTest::MoveTableWithSequence [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:10.575973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.576027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.647381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.647428Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.656988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.657168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
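The dumps above trace every schemeshard sub-operation through numbered states ("Change state for txid 1:0 2 -> 3", "... 3 -> 128", "... 128 -> 240"; the alter-table in txId 105 goes 3 -> 128 -> 129 -> 240). When triaging a flattened capture like this one, it can help to pull just those transitions out per operation. A minimal Python sketch, assuming only the "Change state for txid <op> <from> -> <to>" wording seen in this log (this is not a ya or YDB tool):

    import re
    from collections import defaultdict

    # Matches schemeshard progress entries such as:
    #   "Change state for txid 105:0 129 -> 240"
    TRANSITION = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

    def state_transitions(log_text):
        """Collect (from, to) state pairs per operation id, in order of appearance."""
        transitions = defaultdict(list)
        for op_id, src, dst in TRANSITION.findall(log_text):
            transitions[op_id].append((int(src), int(dst)))
        return dict(transitions)

    sample = ("Change state for txid 105:0 3 -> 128 ... "
              "Change state for txid 105:0 128 -> 129 ... "
              "Change state for txid 105:0 129 -> 240")
    print(state_transitions(sample))  # {'105:0': [(3, 128), (128, 129), (129, 240)]}

In these dumps a healthy operation ends its chain at 240, the state in which TDone reports "Part operation is done"; a chain that stops earlier marks where to start reading.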
2025-04-04T17:16:10.657278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.668921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.669654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.671144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.672591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.680489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.687828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.687872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.687983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693878Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.820106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.820395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.820630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.820905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.820971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.823667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.823765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.823802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.825648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.825708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.825743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.827478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.827525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.827568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.827623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.831537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.834323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.834504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.835562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.835711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.835769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.836076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.836194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.836367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.836456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.838675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:16:10.838714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.838879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.838910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.839163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.839198Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.839273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.839300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.839327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.839353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.839394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.839436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.839467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.839489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.839537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.839565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.839596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.840861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.840943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.840973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T17:16:11.852255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1240 } } 2025-04-04T17:16:11.852327Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1240 } } FAKE_COORDINATOR: Erasing txId 103 2025-04-04T17:16:11.853247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:16:11.853297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-04-04T17:16:11.853411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:16:11.853461Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:16:11.853547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:16:11.853610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:11.853648Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.853691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:16:11.853759Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-04-04T17:16:11.856671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:16:11.856715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2025-04-04T17:16:11.856834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:16:11.856876Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:16:11.856956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-04-04T17:16:11.857007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:11.857036Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-04T17:16:11.857062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:11.857095Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2025-04-04T17:16:11.858363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.865591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-04T17:16:11.866103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.867290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.867397Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:11.867461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-04T17:16:11.867573Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 2/3 2025-04-04T17:16:11.867613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-04-04T17:16:11.867660Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 2/3 2025-04-04T17:16:11.867696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-04-04T17:16:11.867733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-04-04T17:16:11.869267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-04T17:16:11.869879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-04T17:16:11.869928Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:11.869965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-04T17:16:11.870034Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 3/3 2025-04-04T17:16:11.870075Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-04T17:16:11.870125Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 3/3 2025-04-04T17:16:11.870151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-04T17:16:11.870177Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-04-04T17:16:11.870214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-04-04T17:16:11.870282Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:16:11.870324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:16:11.870450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-04T17:16:11.870493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:16:11.870532Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-04T17:16:11.870553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-04T17:16:11.870583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-04T17:16:11.870624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:16:11.870647Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-04T17:16:11.870666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-04T17:16:11.870710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:16:11.870733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T17:16:11.871376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:16:11.871434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:16:11.871515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:16:11.871564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:16:11.871599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:16:11.871635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:16:11.871667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:11.877953Z 
node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Handle TEvGetProxyServicesRequest 2025-04-04T17:16:11.878405Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 103 2025-04-04T17:16:11.915889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:16:11.915944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:16:11.916389Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:16:11.916487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:16:11.916525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:675:2560]
TestWaitNotification: OK eventTxId 103
>> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
>> TKesusTest::TestAcquireSemaphoreTimeout
>> TKesusTest::TestQuoterHDRRParametersValidation
>> TKesusTest::TestAcquireUpgrade
>> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD]
>> TPersQueueTest::ReadWithoutConsumerFederation
>> TSchemeShardMoveTest::MoveMigratedTable [GOOD]
>> TSchemeShardMoveTest::MoveOldTableWithIndex
>> TSchemeShardMoveTest::Index [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:10.575929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.575985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.655840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.655897Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.666071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.666312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.666434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.675685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.675976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.676584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.677475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.682088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.687780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.687823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.687920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693427Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.801401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.801628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.801810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.802024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.802098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.804237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.804380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.804519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.804567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.804600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.804631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.806314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.806369Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.806404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.807887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.807932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.807968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.808023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.817134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.818870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.819048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.819991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.820109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.820160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.820425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.820480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.820641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.820764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.822692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.822739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.822996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.823377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823422Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.823515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.823551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.823584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.823614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.823663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.823719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.823752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.823781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.823833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.823865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.823905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.825556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.825668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.825709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... wnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:16:12.212342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-04-04T17:16:12.212440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-04T17:16:12.212652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-04T17:16:12.212681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-04-04T17:16:12.212713Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2025-04-04T17:16:12.212809Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:12.212882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.212925Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 
281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-04T17:16:12.212964Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-04T17:16:12.214236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-04T17:16:12.214291Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-04T17:16:12.214363Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:16:12.214382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:16:12.214408Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:16:12.214433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:16:12.214457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-04T17:16:12.214493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:131:2154] message: TxId: 281474976710760 2025-04-04T17:16:12.214519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:16:12.214547Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-04T17:16:12.214567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-04T17:16:12.214616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-04T17:16:12.215719Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-04T17:16:12.215770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-04T17:16:12.215815Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-04T17:16:12.215890Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:16:12.217122Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:16:12.217208Z node 2 :BUILD_INDEX 
DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:16:12.217258Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T17:16:12.218582Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:16:12.218667Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:448:2409], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:16:12.218706Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-04T17:16:12.218803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:12.218847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:625:2574] TestWaitNotification: OK eventTxId 102 2025-04-04T17:16:12.219402Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.219622Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 243us result status StatusSuccess 2025-04-04T17:16:12.220104Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true 
CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
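[editor's note] The fused entries above all follow one recurring shape: an ISO-8601 timestamp, a node id ("node 2"), a component tag (":FLAT_TX_SCHEMESHARD", ":BUILD_INDEX", ...), a severity (TRACE/DEBUG/INFO/NOTICE/WARN), and a free-form message. Below is a minimal Python sketch for splitting such a blob back into individual entries; the header layout and the severity set are inferred from this dump, not taken from any documented YDB log format, so treat the regex as an assumption.

import re
from typing import Iterator, Tuple

# Entry header as it appears in the dump above (inferred, not an official
# YDB format): "<ISO timestamp> node <N> :<COMPONENT> <LEVEL>:".
HEADER = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T[\d:.]+Z) node (?P<node>\d+) "
    r":(?P<comp>[A-Z_]+) (?P<lvl>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):"
)

def split_entries(blob: str) -> Iterator[Tuple[str, str, str, str, str]]:
    """Yield (timestamp, node, component, level, message) per log entry.

    Each entry's message runs from the end of its header to the start of
    the next header (or to the end of the blob for the last entry).
    """
    heads = list(HEADER.finditer(blob))
    for i, m in enumerate(heads):
        end = heads[i + 1].start() if i + 1 < len(heads) else len(blob)
        yield m["ts"], m["node"], m["comp"], m["lvl"], blob[m.end():end].strip()

if __name__ == "__main__":
    sample = ("2025-04-04T17:16:12.214236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: "
              "TTxOperationProgress Execute, operationId: 281474976710760:0 "
              "2025-04-04T17:16:12.214291Z node 2 :FLAT_TX_SCHEMESHARD INFO: "
              "TDone opId# 281474976710760:0 ProgressState")
    for ts, node, comp, lvl, msg in split_entries(sample):
        print(ts, node, comp, lvl, "--", msg[:60])

Run over a fragment like the ones in this report, it yields one tuple per entry, which makes it easier to filter a single component (e.g. only BUILD_INDEX progress, or only SCHEMESHARD_DESCRIBE results) out of the fused test output.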
2025-04-04T17:16:10.575912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.575962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.656694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.656735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.666706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.666917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.667058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.675986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.676246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.676866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.677583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.682202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.687837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.687886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.688014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693956Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.820847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.821090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.821302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.821540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.821621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.824069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.824123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.824170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.824226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.825995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.826053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.826088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.827869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.827917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.827952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.828005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.831683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.833249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.833417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.834368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.834477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.834525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.834782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.834829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.834973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.835050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.836842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.836904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.837106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.837151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.837440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.837489Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.837577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.837616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.837657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.837706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.837744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.837794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.837828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.837857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.837910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.837944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.837986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.839915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.840040Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.840083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 2025-04-04T17:16:12.280508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDropParts HandleReply TEvDropSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 102:1 at tablet 72057594046678944 2025-04-04T17:16:12.280607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:16:12.280659Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240 2025-04-04T17:16:12.282339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:16:12.282479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:16:12.282516Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:12.282566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-04T17:16:12.282685Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-04T17:16:12.282713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-04T17:16:12.282738Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-04T17:16:12.282759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-04T17:16:12.282798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-04-04T17:16:12.282870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:374:2343] message: TxId: 102 2025-04-04T17:16:12.282914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-04T17:16:12.282955Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:12.282982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:12.283079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:16:12.283108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:16:12.283154Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-04T17:16:12.283169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-04T17:16:12.283208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-04T17:16:12.283232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:16:12.283517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:16:12.283568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:16:12.283634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:16:12.283676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:16:12.283709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:16:12.286032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:12.286086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:472:2428] 2025-04-04T17:16:12.286500Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-04T17:16:12.289939Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.290119Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 204us result status StatusPathDoesNotExist 2025-04-04T17:16:12.290283Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:16:12.290680Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.290816Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 159us result status StatusPathDoesNotExist 2025-04-04T17:16:12.290959Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" 
PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:16:12.291322Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.291496Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 191us result status StatusSuccess 2025-04-04T17:16:12.291853Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.292335Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.292487Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/TableMove/myseq" took 169us result status StatusSuccess 2025-04-04T17:16:12.292729Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:10.575971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.576022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577163Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.650975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.651020Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.660584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.660768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.660896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.669269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.669650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.671196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.672601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.680357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.687770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.687888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.688023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693406Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.786476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.788030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.788976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.791459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.791517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.793794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.793913Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.794071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.794114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.794151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.794177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.795893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.795947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.795981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.797451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.797491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.797526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.797575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.806815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.808525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.808687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.809560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.809669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.809717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.809955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.810003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.810146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.810218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.812141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.812182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.812373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.812412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.812706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.812753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.812840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.812869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.812918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.812951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.813000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.813048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.813081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.813107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.813160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.813192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.813262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.814957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.815048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.815080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rdLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 2025-04-04T17:16:12.193724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:16:12.193942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 1 Forgetting tablet 72075186233409546 2025-04-04T17:16:12.196899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:16:12.196945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2025-04-04T17:16:12.197007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-04-04T17:16:12.197045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-04-04T17:16:12.197078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-04T17:16:12.197549Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:16:12.197810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:16:12.197990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-04-04T17:16:12.198492Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 Forgetting tablet 72075186233409547 2025-04-04T17:16:12.201276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:12.201469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 Forgetting tablet 72075186233409548 2025-04-04T17:16:12.209668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:16:12.209742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-04-04T17:16:12.209878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-04-04T17:16:12.209954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:16:12.210006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-04-04T17:16:12.210084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046678944, LocalPathId: 15] was 1 2025-04-04T17:16:12.210136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-04-04T17:16:12.210170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-04-04T17:16:12.210216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-04-04T17:16:12.210278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:12.210926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:16:12.210965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:16:12.211075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:16:12.211124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-04-04T17:16:12.212504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-04-04T17:16:12.212863Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-04T17:16:12.212950Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-04T17:16:12.212990Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-04-04T17:16:12.213358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.213487Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 153us result status StatusPathDoesNotExist 2025-04-04T17:16:12.213609Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:16:12.213985Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.214142Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 168us result status StatusSuccess 2025-04-04T17:16:12.214458Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.215145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.215253Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 122us result status StatusSuccess 2025-04-04T17:16:12.215493Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 26 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:10.575965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.576032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.654782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.654830Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:16:10.668985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.669200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.669319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.678611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.678901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.679526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.680309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.685066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.687771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.687817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.687930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693379Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.798878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.799119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.799315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.799551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.799623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.801598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.801719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.801844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.801879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.801902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.801923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.803484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.803524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.803554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.804906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.804940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.804969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.805003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.807916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.809252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.809394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.810150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.810258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.810302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.810457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.810491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.810651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.810739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.812534Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.812578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.812754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.812798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.813063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.813106Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.813189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.813235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.813268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.813298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.813349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.813407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.813441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.813467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.813629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.813664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.813713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.820468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.820563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.820598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
t: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.328673Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.328843Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 188us result status StatusSuccess 2025-04-04T17:16:12.329211Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.329726Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.329945Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 236us result status StatusSuccess 2025-04-04T17:16:12.330665Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-04-04T17:16:12.331287Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.331487Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 218us result status StatusSuccess 2025-04-04T17:16:12.332158Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::OneTable [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestReleaseLockFailure >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:10.575952Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.576013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.656018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.656082Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.672382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.672654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.672805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.680450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.680695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.681266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.682528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.686906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.688128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.688179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.688219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.688317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693950Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.802926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.803131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.803307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:10.803518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.803592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.806022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.806138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.806295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.806341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.806380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.806409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.808048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.808099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.808132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.811376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.811421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.811457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.811505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.819997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.821732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.821900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.822872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.822997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.823055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.823267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.823307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.823454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.823531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.825530Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.825572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.825750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.825794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.826090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.826132Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.826216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.826265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.826298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.826328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.826359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.826419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.826454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.826482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.826531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.826561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.826610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.828138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.828249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.828285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... : 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.703350Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.703453Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 115us result status StatusSuccess 2025-04-04T17:16:12.703668Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.704050Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.704189Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 161us result status StatusSuccess 2025-04-04T17:16:12.704742Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 
11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 
500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:12.705263Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:16:12.705440Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 188us result status StatusSuccess 2025-04-04T17:16:12.705965Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:10.575078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:10.575169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.575246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:10.575287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:10.575971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-04-04T17:16:10.576045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:10.576148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:10.576216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:10.577157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:10.655223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:10.655272Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:10.665011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:10.665217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:10.665325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:10.673478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:10.673677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:10.674219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.674923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:10.680264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.687765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:10.687826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.687873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:10.687991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.693807Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:10.789364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:10.789536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.789674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-04-04T17:16:10.791104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:10.791209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.793637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.793778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:10.793981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.794018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:10.794081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:10.794102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:10.795665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.795737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:10.795769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:10.797187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.797241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.797275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.797334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.801404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:10.802956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:10.803157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:10.804079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:10.804217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 
4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:10.804274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.805308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:10.805355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:10.805501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:10.805592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:10.807352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:10.807389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:10.807568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:10.807603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:10.807920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:10.807966Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:10.808054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.808083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.808108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:10.808128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.808150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:10.808197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:10.808224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:10.808243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:10.808285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:10.808318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:10.808356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:10.809863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.809978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:10.810014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4T17:16:12.997373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:16:12.997410Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2025-04-04T17:16:12.997485Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-04T17:16:12.997595Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2025-04-04T17:16:12.997702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:12.997759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:16:12.998672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:16:12.999245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:16:12.999745Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:12.999791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:12.999930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:16:13.000045Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:13.000085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-04-04T17:16:13.000122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 108, path id: 4 2025-04-04T17:16:13.000479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:16:13.000528Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:16:13.000595Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:16:13.000630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:13.000666Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-04-04T17:16:13.001315Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:16:13.001407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:16:13.001442Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-04T17:16:13.001477Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-04-04T17:16:13.001513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:13.002578Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:16:13.002663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-04-04T17:16:13.002685Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-04-04T17:16:13.002709Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:16:13.002735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:16:13.002798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-04-04T17:16:13.004134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:16:13.004181Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:13.004397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:16:13.004502Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-04T17:16:13.004541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:16:13.004575Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-04-04T17:16:13.004604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:16:13.004635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-04-04T17:16:13.004691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 108 2025-04-04T17:16:13.004731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-04-04T17:16:13.004767Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-04-04T17:16:13.004797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-04-04T17:16:13.004875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 4] was 2 2025-04-04T17:16:13.006072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-04T17:16:13.007354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-04-04T17:16:13.008125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-04T17:16:13.008169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:833:2791] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-04T17:16:13.008824Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-04T17:16:13.008888Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-04-04T17:16:13.031240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4 2025-04-04T17:16:13.031350Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-04T17:16:13.033078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:16:13.033431Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:16:13.033592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:13.033803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T17:16:13.034108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:16:13.034147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:16:13.034203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409546 2025-04-04T17:16:13.038064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:16:13.038130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:16:13.039124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-04-04T17:16:13.039824Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:13.039994Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot" took 201us result status StatusSuccess 2025-04-04T17:16:13.040327Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestSessionTimeoutAfterDetach >> TKesusTest::TestAcquireWaiterDowngrade >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD] >> DataShardReadIterator::ShouldReadRangeReverse >> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:11.713534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:11.713604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:11.713630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:11.713672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default 
configuration 2025-04-04T17:16:11.713710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:11.713741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:11.713815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:11.713863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:11.714155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:11.777316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:11.777385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:11.787362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:11.787612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:11.787752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:11.798642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:11.798958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:11.799940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:11.800949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:11.806351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:11.807582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:11.807637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:11.807794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:11.807861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:11.807906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:11.808049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.814382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:11.944737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:11.944982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.945197Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:11.945444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:11.945516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.947657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:11.947807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:11.947973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.948027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:11.948080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:11.948120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:11.952369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.952431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:11.952485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:11.954352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.954400Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.954439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:11.954497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:11.964182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:11.965997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:11.966147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:11.966933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:11.967033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:11.967072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:11.967508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:11.967547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:11.967695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:11.967755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:11.969788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:11.969829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:11.970086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:11.970138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:11.970483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:11.970534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:11.970662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:11.970714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:11.970752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:11.970798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:11.970831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:11.970871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:11.970901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:11.970921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:11.970969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:11.971004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:11.971039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:11.972368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-04-04T17:16:11.972467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:11.972497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 821 } } 2025-04-04T17:16:13.709091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:16:13.709175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 821 } } 2025-04-04T17:16:13.709241Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 821 } } FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:16:13.710219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:16:13.710284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-04-04T17:16:13.710441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:16:13.710501Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:16:13.710610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 8589936898 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:16:13.710673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:13.710713Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-04T17:16:13.710753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:13.710793Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2025-04-04T17:16:13.711440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle 
TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:16:13.711474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-04-04T17:16:13.711559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:16:13.711592Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:16:13.711652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:16:13.711690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:13.711716Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:13.711742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:16:13.711769Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:13.715498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-04T17:16:13.716812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:13.718808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-04T17:16:13.718960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-04T17:16:13.719006Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:13.719067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-04T17:16:13.719177Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-04-04T17:16:13.719214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-04-04T17:16:13.719256Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-04-04T17:16:13.719287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-04-04T17:16:13.719324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-04-04T17:16:13.719780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 
72057594046678944 2025-04-04T17:16:13.719955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:13.719989Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:13.720020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-04T17:16:13.720076Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-04-04T17:16:13.720100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-04T17:16:13.720131Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-04-04T17:16:13.720153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-04T17:16:13.720178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-04-04T17:16:13.720248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:375:2343] message: TxId: 102 2025-04-04T17:16:13.720300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-04-04T17:16:13.720344Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:13.720378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:13.720491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-04T17:16:13.720530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:16:13.720562Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-04T17:16:13.720581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-04T17:16:13.720607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-04T17:16:13.720627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:16:13.720648Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-04T17:16:13.720665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-04T17:16:13.720702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:16:13.720723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T17:16:13.721146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:16:13.721196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:16:13.721265Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:16:13.721304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:16:13.721335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:16:13.721362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:16:13.721391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:13.724243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:13.724296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:474:2435] 2025-04-04T17:16:13.724744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestUnregisterProxy >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> TKesusTest::TestSessionDetach >> TKesusTest::TestAttachOutOfSequence >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] |90.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKesusTest::TestAcquireLocks >> TKesusTest::TestQuoterAccountResourcesBurst >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] Test command err: 2025-04-04T17:14:59.585089Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507705052317151:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:59.585355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017b7/r3tmp/tmp8bUQ3R/pdisk_1.dat 2025-04-04T17:15:00.082167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:00.085464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:00.085569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:00.093383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16283, node 1 2025-04-04T17:15:00.318617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:00.318647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:00.318653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:00.318762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:00.631785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:15:02.850755Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:15:02.850858Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507717937219836:2333], Start check tables existence, number paths: 2 2025-04-04T17:15:02.881995Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVmNjVkMzYtNDQyNTU2NWQtNWM1MWNkNGMtYjU5MDE1ODA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjVmNjVkMzYtNDQyNTU2NWQtNWM1MWNkNGMtYjU5MDE1ODA= 2025-04-04T17:15:02.882216Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:02.882298Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-04T17:15:02.884166Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:15:02.884286Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MjVmNjVkMzYtNDQyNTU2NWQtNWM1MWNkNGMtYjU5MDE1ODA=, ActorId: [1:7489507717937219851:2334], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:02.895017Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507717937219836:2333], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:15:02.895116Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507717937219836:2333], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:15:02.895171Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507717937219836:2333], Successfully finished 2025-04-04T17:15:02.895391Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:15:02.944400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:15:02.977254Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489507719232671709:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:02.977297Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:15:02.984598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:02.984684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:02.990395Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:15:02.994084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:03.097928Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098116Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098177Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098237Z node 3 
:HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098476Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098550Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098622Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098716Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.098783Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.143139Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:03.143241Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:03.151681Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:03.279543Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:03.295995Z node 3 :STATISTICS WARN: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2025-04-04T17:15:03.584069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:15:03.660853Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507721165818456:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:03.703753Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:15:03.716900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:03.716966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:03.725492Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:15:03.727425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:03.944561Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:03.944660Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:03.947427Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.947540Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.947605Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.947697Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.947756Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.947840Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.947900Z node 2 :HIVE WARN: 
HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.947964Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.948016Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:15:03.991408Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:15:04.107306Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:04.137110Z node 2 :STATISTICS WARN: [72075186224038895] TTxInit::Complete. EnableColumnStatistics=false 2025-04-04T17:15:04.340161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:15:04.480932Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:04.586359Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507705052317151:2087];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:04.586429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:15:06.579773Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:15:06.580099Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7489507736412541645:2344], Start check tables existence, number paths: 2 2025-04-04T17:15:06.580423Z node 3 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:06.580439Z node 3 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:15:06.581627Z node 3 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Servi ... 
DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5cdfgn4aqg9k58745pv, Created new KQP executer: [11:7489508019829001006:2512] isRollback: 0 2025-04-04T17:16:12.567947Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=YmExNDQzOWYtNmJmZDVmNDUtZjZjY2RmZC1mZTViNjcwNQ==, ActorId: [11:7489507998354163689:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:16:12.568001Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=YmExNDQzOWYtNmJmZDVmNDUtZjZjY2RmZC1mZTViNjcwNQ==, ActorId: [11:7489507998354163689:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:12.568042Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YmExNDQzOWYtNmJmZDVmNDUtZjZjY2RmZC1mZTViNjcwNQ==, ActorId: [11:7489507998354163689:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:16:12.568070Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YmExNDQzOWYtNmJmZDVmNDUtZjZjY2RmZC1mZTViNjcwNQ==, ActorId: [11:7489507998354163689:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:16:12.568167Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=YmExNDQzOWYtNmJmZDVmNDUtZjZjY2RmZC1mZTViNjcwNQ==, ActorId: [11:7489507998354163689:2331], ActorState: unknown state, Session actor destroyed 2025-04-04T17:16:12.575233Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5cdfgn4aqg9k58745pv, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:16:12.575413Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5cdfgn4aqg9k58745pv, txInfo Status: Committed Kind: ReadWrite TotalDuration: 17.296 ServerDuration: 17.138 QueriesCount: 2 2025-04-04T17:16:12.575515Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5cdfgn4aqg9k58745pv, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:16:12.575577Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5cdfgn4aqg9k58745pv, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:12.575602Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5cdfgn4aqg9k58745pv, EndCleanup, isFinal: 0 2025-04-04T17:16:12.575655Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5cdfgn4aqg9k58745pv, Sent query response back to proxy, proxyRequestId: 31, proxyId: [11:7489507981174294074:2267] 2025-04-04T17:16:12.576050Z 
node 11 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, TxId: 2025-04-04T17:16:12.576150Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-04-04T17:16:12.576546Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ReadyState, TraceId: 01jr0wv5d058723c62xt0p3brr, received request, proxyRequestId: 32 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [11:7489508019829001018:2520] database: /Root databaseId: /Root pool id: default 2025-04-04T17:16:12.576589Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ReadyState, TraceId: 01jr0wv5d058723c62xt0p3brr, request placed into pool from cache: default 2025-04-04T17:16:12.577108Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, ExecutePhyTx, tx: 0x000050C0004DD358 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-04T17:16:12.577179Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, Sending to Executer TraceId: 0 8 2025-04-04T17:16:12.577296Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, Created new KQP executer: [11:7489508019829001021:2512] isRollback: 0 2025-04-04T17:16:12.582382Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, TEvTxResponse, 
CurrentTx: 1/2 response.status: SUCCESS 2025-04-04T17:16:12.582464Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, ExecutePhyTx, tx: 0x000050C00012DD98 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:16:12.583356Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:16:12.583498Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, txInfo Status: Committed Kind: ReadOnly TotalDuration: 6.49 ServerDuration: 6.396 QueriesCount: 2 2025-04-04T17:16:12.583612Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:16:12.583674Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:12.583703Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, EndCleanup, isFinal: 0 2025-04-04T17:16:12.583755Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ExecuteState, TraceId: 01jr0wv5d058723c62xt0p3brr, Sent query response back to proxy, proxyRequestId: 32, proxyId: [11:7489507981174294074:2267] 2025-04-04T17:16:12.584200Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, TxId: 2025-04-04T17:16:12.584289Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, TxId: 2025-04-04T17:16:12.584449Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:16:12.584496Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] 
WorkloadServiceCleanup: 0
2025-04-04T17:16:12.584521Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: ReadyState, EndCleanup, isFinal: 1
2025-04-04T17:16:12.584541Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: unknown state, Cleanup temp tables: 0
2025-04-04T17:16:12.584546Z node 11 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7489508002649131077:2337], DatabaseId: /Root, PoolId: sample_pool_id, successfully refreshed pool state, in flight: 0, delayed: 0
2025-04-04T17:16:12.584629Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDAwYTU3ZmUtYTdlOTRlNDYtYzYzN2M3NDAtZDAxM2Q3MWM=, ActorId: [11:7489508019829000986:2512], ActorState: unknown state, Session actor destroyed
>> TKesusTest::TestSemaphoreData [GOOD]
>> TKesusTest::TestSemaphoreReleaseReacquire
>> TKesusTest::TestAcquireWaiterUpgrade [GOOD]
>> TKesusTest::TestAcquireWaiterChangeTimeoutToZero
>> TKesusTest::TestQuoterResourceDescribe
>> TKesusTest::TestAttachNewSessions [GOOD]
>> TKesusTest::TestAttachMissingSession
>> TKesusTest::TestSessionDetach [GOOD]
>> TKesusTest::TestSessionDetachFutureId
>> TKesusTest::TestAttachOutOfSequence [GOOD]
>> TKesusTest::TestAttachOutOfSequenceInTx
>> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD]
>> TKesusTest::TestAcquireBeforeTimeoutViaModeChange
>> TKesusTest::TestUnregisterProxy [GOOD]
>> TKesusTest::TestUnregisterProxyBadGeneration
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD]
Test command err: RandomSeed# 1270143271545088338
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD]
Test command err: RandomSeed# 10098619166519588755
>> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD]
>> TKesusTest::TestAcquireWaiterRelease
>> TKesusTest::TestAttachMissingSession [GOOD]
>> TKesusTest::TestAttachOldGeneration
>> TKesusTest::TestAttachOutOfSequenceInTx [GOOD]
>> TKesusTest::TestAttachThenReRegister
>> TKesusTest::TestUnregisterProxyBadGeneration [GOOD]
>> TKesusTest::TestSessionTimeoutAfterUnregister
>> TKesusTest::TestQuoterResourceDescribe [GOOD]
>> TKesusTest::TestQuoterResourceCreation
>> TKesusTest::TestSessionDetachFutureId [GOOD]
>> TKesusTest::TestSessionDestroy
>> TKesusTest::TestSemaphoreReleaseReacquire [GOOD]
>> TKesusTest::TestSemaphoreSessionFailures
>> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD]
>> TKesusTest::TestQuoterAccountResourcesPaced
>> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD]
|90.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|90.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log}
>> TKesusTest::TestAcquireWaiterRelease [GOOD]
>> TKesusTest::TestAllocatesResources
>> TKesusTest::TestAttachOldGeneration [GOOD]
>> TKesusTest::TestAttachFastPath
>> TKesusTest::TestAttachThenReRegister [GOOD]
>> TKesusTest::TestAttachTimeoutTooBig
>> TKesusTest::TestSessionDestroy [GOOD]
>> TKesusTest::TestSessionStealing
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD]
Test command err:
2025-04-04T17:16:14.941340Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:14.941547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:14.958120Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:14.958318Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:14.978499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:14.979474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=15894741282698219682, session=0, seqNo=0)
2025-04-04T17:16:14.979631Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:15.007001Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=15894741282698219682, session=1)
2025-04-04T17:16:15.007290Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=8035568529494037226, session=0, seqNo=0)
2025-04-04T17:16:15.007399Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:15.019480Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=8035568529494037226, session=2)
2025-04-04T17:16:15.020741Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:15.020933Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:15.021068Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:15.035128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=111)
2025-04-04T17:16:15.035528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=112, session=1, semaphore="Lock2" count=1)
2025-04-04T17:16:15.035686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2"
2025-04-04T17:16:15.035761Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1
2025-04-04T17:16:15.048883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=112)
2025-04-04T17:16:15.049282Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:129:2155], cookie=333, name="Lock1")
2025-04-04T17:16:15.049396Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link
2025-04-04T17:16:15.049589Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock1" count=1)
2025-04-04T17:16:15.049681Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1"
2025-04-04T17:16:15.049761Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2
2025-04-04T17:16:15.049876Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615)
2025-04-04T17:16:15.062378Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:129:2155], cookie=333)
2025-04-04T17:16:15.062463Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222)
2025-04-04T17:16:15.062506Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=223)
2025-04-04T17:16:15.062854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:129:2155], cookie=334, name="Lock2")
2025-04-04T17:16:15.062970Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link
2025-04-04T17:16:15.063025Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2
2025-04-04T17:16:15.074923Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:129:2155], cookie=334)
2025-04-04T17:16:15.075509Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:158:2182], cookie=14035589698435978515, name="Lock1")
2025-04-04T17:16:15.075614Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:158:2182], cookie=14035589698435978515)
2025-04-04T17:16:15.076029Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:161:2185], cookie=2967889516705550588, name="Lock2")
2025-04-04T17:16:15.076094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:161:2185], cookie=2967889516705550588)
2025-04-04T17:16:15.089506Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.089588Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.090139Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.090377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.142368Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.142526Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2
2025-04-04T17:16:15.142622Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2
2025-04-04T17:16:15.142959Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:201:2215], cookie=13323717904838643314, name="Lock1")
2025-04-04T17:16:15.143038Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:201:2215], cookie=13323717904838643314)
2025-04-04T17:16:15.143547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:209:2222], cookie=12858902261182213734, name="Lock2")
2025-04-04T17:16:15.143645Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:209:2222], cookie=12858902261182213734)
2025-04-04T17:16:15.510632Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.510761Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.530643Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.530797Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.556507Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.557335Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=16727870858980875630, session=0, seqNo=0)
2025-04-04T17:16:15.557487Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:15.569567Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=16727870858980875630, session=1)
2025-04-04T17:16:15.569859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=8697479919898572443, session=0, seqNo=0)
2025-04-04T17:16:15.569982Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:15.581876Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=8697479919898572443, session=2)
2025-04-04T17:16:15.583032Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:15.583183Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:15.583267Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:15.595744Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=111)
2025-04-04T17:16:15.596130Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=112, session=1, semaphore="Lock2" count=1)
2025-04-04T17:16:15.596261Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2"
2025-04-04T17:16:15.596338Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1
2025-04-04T17:16:15.610687Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=112)
2025-04-04T17:16:15.611105Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=333, session=1, semaphore="Lock1" count=1)
2025-04-04T17:16:15.611351Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=222, session=2, semaphore="Lock1" count=1)
2025-04-04T17:16:15.611448Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2
2025-04-04T17:16:15.611545Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615)
2025-04-04T17:16:15.623892Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=333)
2025-04-04T17:16:15.623983Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=222)
2025-04-04T17:16:15.624013Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=223)
2025-04-04T17:16:15.624509Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:158:2182], cookie=8861823917913931809, name="Lock1")
2025-04-04T17:16:15.624591Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:158:2182], cookie=8861823917913931809)
2025-04-04T17:16:15.625003Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:161:2185], cookie=3593012759623779985, name="Lock2")
2025-04-04T17:16:15.625070Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:161:2185], cookie=3593012759623779985)
2025-04-04T17:16:15.625451Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2188], cookie=4730879397334598217, name="Lock1")
2025-04-04T17:16:15.625521Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2188], cookie=4730879397334598217)
2025-04-04T17:16:15.625962Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2191], cookie=5322841216866379990, name="Lock2")
2025-04-04T17:16:15.626023Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2191], cookie=5322841216866379990)
2025-04-04T17:16:15.626328Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=444, session=2, semaphore="Lock2" count=1)
2025-04-04T17:16:15.626455Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2
2025-04-04T17:16:15.638421Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=444)
2025-04-04T17:16:15.639030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:172:2196], cookie=5429998083178327268, name="Lock2")
2025-04-04T17:16:15.639159Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:172:2196], cookie=5429998083178327268)
2025-04-04T17:16:15.639626Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:175:2199], cookie=10176151138299673941, name="Lock2")
2025-04-04T17:16:15.639695Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:175:2199], cookie=10176151138299673941)
2025-04-04T17:16:15.653456Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.653547Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.654121Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.654738Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.705021Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.705160Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:15.705206Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2
2025-04-04T17:16:15.705230Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1
2025-04-04T17:16:15.705255Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2
2025-04-04T17:16:15.705632Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:215:2229], cookie=7889158901285429709, name="Lock1")
2025-04-04T17:16:15.705713Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:215:2229], cookie=7889158901285429709)
2025-04-04T17:16:15.706377Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:223:2236], cookie=11061640334041293127, name="Lock2")
2025-04-04T17:16:15.706459Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:223:2236], cookie=11061640334041293127)
>> TKesusTest::TestQuoterResourceCreation [GOOD]
>> TKesusTest::TestQuoterResourceModification
>> TKesusTest::TestRegisterProxy
>> TKesusTest::TestAttachFastPath [GOOD]
>> TKesusTest::TestAttachFastPathBlocked
>> TKesusTest::TestAttachTimeoutTooBig [GOOD]
>> TKesusTest::TestCreateSemaphore
>> TKesusTest::TestSemaphoreSessionFailures [GOOD]
>> TKesusTest::TestSessionStealing [GOOD]
>> TKesusTest::TestSessionStealingAnyKey
>> TKesusTest::TestAllocatesResources [GOOD]
>> TKesusTest::TestRegisterProxy [GOOD]
>> TKesusTest::TestRegisterProxyBadGeneration
>> TKesusTest::TestAttachFastPathBlocked [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD]
Test command err:
2025-04-04T17:16:13.789857Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:13.789963Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:13.810463Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:13.810603Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:13.825566Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:13.826176Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=9337718938499181124, session=0, seqNo=0)
2025-04-04T17:16:13.826379Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:13.851547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=9337718938499181124, session=1)
2025-04-04T17:16:13.851909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=7638925381310485550, session=0, seqNo=0)
2025-04-04T17:16:13.852065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:13.864437Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=7638925381310485550, session=2)
2025-04-04T17:16:13.864845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:129:2155], cookie=111, name="Lock1")
2025-04-04T17:16:13.881021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:129:2155], cookie=111)
2025-04-04T17:16:13.881418Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:13.881613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:13.881729Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:13.894331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=222)
2025-04-04T17:16:13.894649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:129:2155], cookie=333, name="Lock1")
2025-04-04T17:16:13.906909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:129:2155], cookie=333)
2025-04-04T17:16:14.241845Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:14.241955Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:14.256649Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:14.256761Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:14.282389Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:14.282902Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=10954701049373689370, session=0, seqNo=0)
2025-04-04T17:16:14.283058Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:14.295447Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=10954701049373689370, session=1)
2025-04-04T17:16:14.295799Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=12091537808618329981, session=0, seqNo=0)
2025-04-04T17:16:14.295936Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:14.309308Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=12091537808618329981, session=2)
2025-04-04T17:16:14.310334Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:142:2166], cookie=12380347645820529028, name="Sem1", limit=1)
2025-04-04T17:16:14.310511Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1"
2025-04-04T17:16:14.323303Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:142:2166], cookie=12380347645820529028)
2025-04-04T17:16:14.323636Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=111, session=1, semaphore="Sem1" count=1)
2025-04-04T17:16:14.323812Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1
2025-04-04T17:16:14.324030Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Sem1" count=1)
2025-04-04T17:16:14.336638Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=111)
2025-04-04T17:16:14.336729Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222)
2025-04-04T17:16:14.337321Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:150:2174], cookie=715413982064955811, name="Sem1")
2025-04-04T17:16:14.337428Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:150:2174], cookie=715413982064955811)
2025-04-04T17:16:14.337862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:153:2177], cookie=15920487807360675255, name="Sem1")
2025-04-04T17:16:14.337928Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:153:2177], cookie=15920487807360675255)
2025-04-04T17:16:14.338165Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:131:2157], cookie=333, name="Sem1")
2025-04-04T17:16:14.338285Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link
2025-04-04T17:16:14.350353Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:131:2157], cookie=333)
2025-04-04T17:16:14.350902Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:158:2182], cookie=12165277479065623597, name="Sem1")
2025-04-04T17:16:14.350996Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:158:2182], cookie=12165277479065623597)
2025-04-04T17:16:14.351439Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:161:2185], cookie=3499487104264834634, name="Sem1")
2025-04-04T17:16:14.351500Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:161:2185], cookie=3499487104264834634)
2025-04-04T17:16:14.351745Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:131:2157], cookie=444, name="Sem1")
2025-04-04T17:16:14.351841Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link
2025-04-04T17:16:14.363933Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:131:2157], cookie=444)
2025-04-04T17:16:14.364552Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:166:2190], cookie=63967302509847318, name="Sem1")
2025-04-04T17:16:14.364640Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:166:2190], cookie=63967302509847318)
2025-04-04T17:16:14.365123Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:169:2193], cookie=15199863278049098864, name="Sem1")
2025-04-04T17:16:14.365189Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:169:2193], cookie=15199863278049098864)
2025-04-04T17:16:14.705677Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:14.705772Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:14.723705Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:14.724171Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:14.747747Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:14.748009Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:131:2157], cookie=17509397726883300975, name="Sem1", limit=1)
2025-04-04T17:16:14.748123Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1"
2025-04-04T17:16:14.759513Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:131:2157], cookie=17509397726883300975)
2025-04-04T17:16:14.759900Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:140:2164], cookie=4846991171503615214, name="Sem2", limit=1)
2025-04-04T17:16:14.759996Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2"
2025-04-04T17:16:14.772034Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:140:2164], cookie=4846991171503615214)
2025-04-04T17:16:14.772541Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:145:2169], cookie=8281777628221485975, name="Sem1")
2025-04-04T17:16:14.772621Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:145:2169], cookie=8281777628221485975)
2025-04-04T17:16:14.773034Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:148:2172], cookie=13288966958717774259, name="Sem2")
2025-04-04T17:16:14.773089Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:148:2172], cookie=13288966958717774259)
2025-04-04T17:16:14.787745Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:14.787862Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:14.788298Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:14.788745Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:14.840369Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:14.840736Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:188:2202], cookie=11222312233832792234, name="Sem1")
2025-04-04T17:16:14.840812Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:188:2202], cookie=11222312233832792234)
2025-04-04T17:16:14.841416Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:194:2207], cookie=16453745063076830614, name="Sem2")
2025-04-04T17:16:14.841485Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:194:2207], cookie=16453745063076830614)
2025-04-04T17:16:14.841922Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:197:2210], cookie=9870075777553201743, name="Sem1", limit=1)
2025-04-04T17:16:14.854187Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:197:2210], cookie=9870075777553201743)
2025-04-04T17:16:14.854838Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:202:2215], cookie=6717934530417139334, name="Sem2", limit=1)
2025-04-04T17:16:14.870622Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:202:2215], cookie=6717934530417139334)
2025-04-04T17:16:14.871248Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:207:2220], cookie=1703574133099032775, name="Sem1")
2025-04-04T17:16:14.871323Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:207:2220], cookie=1703574 ...
:KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1"
2025-04-04T17:16:15.605918Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:223:2246], cookie=13072817549154326245)
2025-04-04T17:16:15.606496Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:228:2251], cookie=3869537046876652962, name="Sem1", force=0)
2025-04-04T17:16:15.606558Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1"
2025-04-04T17:16:15.620366Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:228:2251], cookie=3869537046876652962)
2025-04-04T17:16:15.621036Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:233:2256], cookie=10731588623215763408, name="Sem1", limit=1)
2025-04-04T17:16:15.621155Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1"
2025-04-04T17:16:15.635804Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:233:2256], cookie=10731588623215763408)
2025-04-04T17:16:15.636489Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:238:2261], cookie=13949229207615206815, name="Sem1", force=0)
2025-04-04T17:16:15.636574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1"
2025-04-04T17:16:15.648657Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:238:2261], cookie=13949229207615206815)
2025-04-04T17:16:15.649371Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:243:2266], cookie=9127317972465137316, name="Sem1", limit=1)
2025-04-04T17:16:15.649502Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1"
2025-04-04T17:16:15.661791Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:243:2266], cookie=9127317972465137316)
2025-04-04T17:16:15.662333Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=111, session=1, semaphore="Sem1" count=1)
2025-04-04T17:16:15.663566Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1
2025-04-04T17:16:15.675913Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=111)
2025-04-04T17:16:15.676559Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=222, session=2, semaphore="Sem1" count=1)
2025-04-04T17:16:15.699131Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=222)
2025-04-04T17:16:15.699699Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:131:2157], cookie=333, name="Sem1")
2025-04-04T17:16:15.699830Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link
2025-04-04T17:16:15.712012Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:131:2157], cookie=333)
2025-04-04T17:16:15.712651Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=444, session=2, semaphore="Sem1" count=1)
2025-04-04T17:16:15.724928Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=444)
2025-04-04T17:16:15.725483Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:131:2157], cookie=555, name="Sem1")
2025-04-04T17:16:15.725585Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link
2025-04-04T17:16:15.725653Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2
2025-04-04T17:16:15.737679Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:131:2157], cookie=555)
2025-04-04T17:16:16.110506Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:16.110584Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:16.128405Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:16.128525Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:16.152451Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:16.152986Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=9542748627092035043, session=0, seqNo=0)
2025-04-04T17:16:16.153147Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:16.165318Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=9542748627092035043, session=1)
2025-04-04T17:16:16.165667Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2157], cookie=112, name="Sem1", limit=5)
2025-04-04T17:16:16.165832Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1"
2025-04-04T17:16:16.177913Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2157], cookie=112)
2025-04-04T17:16:16.178281Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2157], cookie=113, name="Sem1")
2025-04-04T17:16:16.190758Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2157], cookie=113)
2025-04-04T17:16:16.191119Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2157], cookie=114, name="Sem1", force=0)
2025-04-04T17:16:16.191234Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1"
2025-04-04T17:16:16.203552Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2157], cookie=114)
2025-04-04T17:16:16.203824Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[5:131:2157], cookie=9100420604885163445
2025-04-04T17:16:16.204116Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2157], cookie=115, name="Sem1", limit=5)
2025-04-04T17:16:16.216353Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2157], cookie=115)
2025-04-04T17:16:16.216687Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2157], cookie=116, name="Sem1")
2025-04-04T17:16:16.228797Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2157], cookie=116)
2025-04-04T17:16:16.229124Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2157], cookie=117, name="Sem1", force=0)
2025-04-04T17:16:16.241325Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2157], cookie=117)
2025-04-04T17:16:16.241641Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=118, session=1, semaphore="Sem1" count=1)
2025-04-04T17:16:16.253764Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=118)
2025-04-04T17:16:16.254091Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2157], cookie=119, name="Sem1")
2025-04-04T17:16:16.266211Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2157], cookie=119)
2025-04-04T17:16:16.266624Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:131:2157], cookie=120, name="Sem1")
2025-04-04T17:16:16.266713Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:131:2157], cookie=120)
2025-04-04T17:16:16.266951Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:131:2157], cookie=14852540154588983134, session=1)
2025-04-04T17:16:16.267049Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1
2025-04-04T17:16:16.279469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:131:2157], cookie=14852540154588983134)
2025-04-04T17:16:16.279821Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2157], cookie=121, name="Sem1", limit=5)
2025-04-04T17:16:16.292232Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2157], cookie=121)
2025-04-04T17:16:16.292624Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2157], cookie=122, name="Sem1")
2025-04-04T17:16:16.304791Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2157], cookie=122)
2025-04-04T17:16:16.305119Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2157], cookie=123, name="Sem1", force=0)
2025-04-04T17:16:16.317230Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2157], cookie=123)
2025-04-04T17:16:16.317562Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=124, session=1, semaphore="Sem1" count=1)
2025-04-04T17:16:16.329702Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=124)
2025-04-04T17:16:16.330048Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2157], cookie=125, name="Sem1")
2025-04-04T17:16:16.352764Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2157], cookie=125)
2025-04-04T17:16:16.353109Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:131:2157], cookie=126, name="Sem1")
2025-04-04T17:16:16.353192Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:131:2157], cookie=126)
2025-04-04T17:16:16.353786Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:131:2157], cookie=127, name="Sem1", limit=5)
2025-04-04T17:16:16.353861Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:131:2157], cookie=127)
2025-04-04T17:16:16.354154Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:131:2157], cookie=128, name="Sem1")
2025-04-04T17:16:16.354224Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:131:2157], cookie=128)
2025-04-04T17:16:16.354461Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:131:2157], cookie=129, name="Sem1", force=0)
2025-04-04T17:16:16.354517Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:131:2157], cookie=129)
2025-04-04T17:16:16.354737Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=130, session=1, semaphore="Sem1" count=1)
2025-04-04T17:16:16.354797Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=130)
2025-04-04T17:16:16.355079Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2157], cookie=131, name="Sem1")
2025-04-04T17:16:16.355140Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2157], cookie=131)
2025-04-04T17:16:16.355334Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:131:2157], cookie=132, name="Sem1")
2025-04-04T17:16:16.355393Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:131:2157], cookie=132)
>> TKesusTest::TestSessionStealingAnyKey [GOOD]
>> TKesusTest::TestCreateSemaphore [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop
>> TKesusTest::TestQuoterAccountResourcesBurst [GOOD]
>> TKesusTest::TestQuoterAccountResourcesAggregateClients
>> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD]
>> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus
>> TKesusTest::TestQuoterResourceModification [GOOD]
>> TKesusTest::TestQuoterResourceDeletion
>> TKesusTest::TestRegisterProxyBadGeneration [GOOD]
>> TKesusTest::TestRegisterProxyFromDeadActor
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD]
Test command err:
2025-04-04T17:16:14.505539Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:14.505645Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:14.523303Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:14.523452Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:14.538762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:14.539247Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=13028859323294736186, session=0, seqNo=0)
2025-04-04T17:16:14.539411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:14.562990Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=13028859323294736186, session=1)
2025-04-04T17:16:14.563255Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=9103107668837503909, session=0, seqNo=0)
2025-04-04T17:16:14.563336Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:14.579033Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=9103107668837503909, session=2)
2025-04-04T17:16:14.579353Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=111, session=1, semaphore="Lock1" count=1)
2025-04-04T17:16:14.579576Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:14.579699Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:14.592521Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=111)
2025-04-04T17:16:14.592843Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:14.593163Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=333, session=2, semaphore="Lock1" count=1)
2025-04-04T17:16:14.593238Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2
2025-04-04T17:16:14.618793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=222)
2025-04-04T17:16:14.618885Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=333)
2025-04-04T17:16:14.619407Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:145:2169], cookie=9264776012578699518, name="Lock1")
2025-04-04T17:16:14.619476Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:145:2169], cookie=9264776012578699518)
2025-04-04T17:16:14.907727Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:14.907818Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:14.926839Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:14.926967Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:14.950959Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:14.951420Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=15623050258100598093, session=0, seqNo=0)
2025-04-04T17:16:14.951573Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:14.963617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=15623050258100598093, session=1)
2025-04-04T17:16:14.963911Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=1072596491507902982, session=0, seqNo=0)
2025-04-04T17:16:14.964031Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:14.977200Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=1072596491507902982, session=2)
2025-04-04T17:16:14.977527Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:14.977671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:14.977760Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:14.990877Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=111)
2025-04-04T17:16:14.991194Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1)
2025-04-04T17:16:14.991508Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:15.003987Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222)
2025-04-04T17:16:15.004059Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=333)
2025-04-04T17:16:15.004563Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:147:2171], cookie=9734803767816965548, name="Lock1")
2025-04-04T17:16:15.004641Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:147:2171], cookie=9734803767816965548)
2025-04-04T17:16:15.005098Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:150:2174], cookie=14821139603017944683, name="Lock1")
2025-04-04T17:16:15.005162Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:150:2174], cookie=14821139603017944683)
2025-04-04T17:16:15.327358Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.327471Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.346697Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.347127Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.371450Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.371935Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=15646175622894761122, session=0, seqNo=0)
2025-04-04T17:16:15.372260Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:15.384642Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=15646175622894761122, session=1)
2025-04-04T17:16:15.384938Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=16968567589381354856, session=0, seqNo=0)
2025-04-04T17:16:15.385066Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:15.397262Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=16968567589381354856, session=2)
2025-04-04T17:16:15.397912Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:15.398059Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:15.398143Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:15.409963Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:131:2157], cookie=111)
2025-04-04T17:16:15.410275Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:131:2157], cookie=222, session=2, semaphore="Lock1" count=1)
2025-04-04T17:16:15.410526Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:131:2157], cookie=333, session=2, semaphore="Lock1" count=1)
2025-04-04T17:16:15.410605Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link
2025-04-04T17:16:15.423210Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:131:2157], cookie=222)
2025-04-04T17:16:15.423291Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:131:2157], cookie=333)
2025-04-04T17:16:15.423856Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:150:2174], cookie=3320988412343146708, name="Lock1")
2025-04-04T17:16:15.423945Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:150:2174], cookie=3320988412343146708)
2025-04-04T17:16:15.424380Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:153:2177], cookie=11726923039074502504, name="Lock1")
2025-04-04T17:16:15.424447Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:153:2177], cookie=11726923039074502504)
2025-04-04T17:16:15.437355Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.437461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.438013Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.438656Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.475410Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.475565Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:15.475910Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:193:2207], cookie=3192317105143618208, name="Lock1")
2025-04-04T17:16:15.475990Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:193:2207], cookie=3192317105143618208)
2025-04-04T17:16:15.476504Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:201:2214], cookie=4501483247711954863, name="Lock1")
2025-04-04T17:16:15.476570Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:201:2214], cookie=4501483247711954863)
2025-04-04T17:16:15.918737Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.918817Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.933251Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.933370Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.957320Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.957769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:131:2157], cookie=9553829765884222741, session=0, seqNo=0)
2025-04-04T17:16:15.957928Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:15.969901Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:131:2157], cookie=9553829765884222741, session=1)
2025-04-04T17:16:15.970267Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:131:2157], cookie=9807805970941398556, session=0, seqNo=0)
2025-04-04T17:16:15.970404Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:15.982450Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:131:2157], cookie=9807805970941398556, session=2)
2025-04-04T17:16:15.982818Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:15.982972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:15.983097Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:15.995292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=111)
2025-04-04T17:16:15.995624Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:131:2157], cookie=222, session=2, semaphore="Lock1" count=1)
2025-04-04T17:16:15.995997Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:131:2157], cookie=333, name="Lock1")
2025-04-04T17:16:15.996099Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link
2025-04-04T17:16:16.008232Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:131:2157], cookie=222)
2025-04-04T17:16:16.008318Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:131:2157], cookie=333)
2025-04-04T17:16:16.355512Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:16.355610Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:16.375344Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:16.375493Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:16.402348Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:16.411834Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2157], cookie=3225814294395469398, path="/Root", config={ MaxUnitsPerSecond: 100 })
2025-04-04T17:16:16.412103Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-04-04T17:16:16.427154Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2157], cookie=3225814294395469398)
2025-04-04T17:16:16.427788Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2164], cookie=15695225001657772506, path="/Root/Res", config={ })
2025-04-04T17:16:16.428046Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res"
2025-04-04T17:16:16.440556Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2164], cookie=15695225001657772506)
2025-04-04T17:16:16.442411Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2169]. Cookie: 9557051450871705798. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 }
2025-04-04T17:16:16.442483Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2169], cookie=9557051450871705798)
2025-04-04T17:16:16.443024Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:145:2169]. Cookie: 16162768655092791566. Data: { }
2025-04-04T17:16:16.443080Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:145:2169], cookie=16162768655092791566)
2025-04-04T17:16:16.484738Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:145:2169]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-04T17:16:16.526152Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:145:2169]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-04T17:16:16.557326Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:145:2169]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-04T17:16:16.588412Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:145:2169]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
2025-04-04T17:16:16.630956Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:145:2169]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD]
Test command err:
2025-04-04T17:16:15.055030Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.055140Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.071994Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.072108Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.086466Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.086961Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=10269209432186716269, session=0, seqNo=0)
2025-04-04T17:16:15.087111Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:15.109707Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=10269209432186716269, session=1)
2025-04-04T17:16:15.110089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=10704415262389313448, session=0, seqNo=0)
2025-04-04T17:16:15.110206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:15.122319Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=10704415262389313448, session=2)
2025-04-04T17:16:15.471077Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.471178Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.493197Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.493429Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.517768Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.518287Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=13959354822728270270, session=1, seqNo=0)
2025-04-04T17:16:15.531587Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=13959354822728270270, session=1)
2025-04-04T17:16:15.883792Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.883892Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.903203Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.903632Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.927429Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.928204Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=13113523090441682373, session=0, seqNo=0)
2025-04-04T17:16:15.928345Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:15.940032Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=13113523090441682373, session=1)
2025-04-04T17:16:16.312790Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:16.312889Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:16.331233Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:16.331338Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:16.357169Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:16.357613Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:131:2157], cookie=8730989548058700000, path="")
2025-04-04T17:16:16.370097Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:131:2157], cookie=8730989548058700000, status=SUCCESS)
2025-04-04T17:16:16.371034Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=5238196340877709987, session=0, seqNo=0)
2025-04-04T17:16:16.371140Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:16.382830Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=5238196340877709987, session=1)
2025-04-04T17:16:16.383444Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:141:2165], cookie=111, session=0, seqNo=0)
2025-04-04T17:16:16.383535Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:16.383659Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:141:2165], cookie=222, seqNo=0
2025-04-04T17:16:16.395643Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:141:2165], cookie=111, session=2)
2025-04-04T17:16:16.705971Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:16.706065Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:16.724310Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:16.724425Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:16.747985Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:16.748303Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:131:2157], cookie=13551459251877032972, path="")
2025-04-04T17:16:16.760102Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:131:2157], cookie=13551459251877032972, status=SUCCESS)
2025-04-04T17:16:16.761085Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=7131432088533285518, session=0, seqNo=0)
2025-04-04T17:16:16.761212Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:16.773148Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=7131432088533285518, session=1)
2025-04-04T17:16:16.774014Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:140:2164], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:16.774171Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:16.774275Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:16.774645Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2165], cookie=111, session=0, seqNo=0)
2025-04-04T17:16:16.774730Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:16.774837Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2165], cookie=222, session=1, seqNo=0)
2025-04-04T17:16:16.786685Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:140:2164], cookie=123)
2025-04-04T17:16:16.786764Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2165], cookie=111, session=2)
2025-04-04T17:16:16.786810Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2165], cookie=222, session=1)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD]
Test command err:
2025-04-04T17:16:15.211349Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.211452Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.231457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.231572Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.248315Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.248841Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=17045257743596298673, session=0, seqNo=0)
2025-04-04T17:16:15.249003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:15.271439Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=17045257743596298673, session=1)
2025-04-04T17:16:15.272874Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:129:2155], cookie=8854684990067357284, session=2)
2025-04-04T17:16:15.272941Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:129:2155], cookie=8854684990067357284)
2025-04-04T17:16:15.273447Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:129:2155], cookie=1911860115250268076
2025-04-04T17:16:15.274070Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=600813680071216394, session=1, seqNo=0)
2025-04-04T17:16:15.286195Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=600813680071216394, session=1)
2025-04-04T17:16:15.286578Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615)
2025-04-04T17:16:15.286758Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1"
2025-04-04T17:16:15.286845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:15.287012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:129:2155], cookie=16644020216525732812, session=1)
2025-04-04T17:16:15.297386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1)
2025-04-04T17:16:15.297481Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1
2025-04-04T17:16:15.297528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link
2025-04-04T17:16:15.309327Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=111)
2025-04-04T17:16:15.309400Z node 1
:KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:129:2155], cookie=16644020216525732812) 2025-04-04T17:16:15.309457Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-04T17:16:15.586958Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.587041Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.605856Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.606055Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.630101Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.630346Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:131:2157], cookie=12329543694145873179, path="") 2025-04-04T17:16:15.643656Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:131:2157], cookie=12329543694145873179, status=SUCCESS) 2025-04-04T17:16:15.644174Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:140:2164], cookie=111, session=0, seqNo=0) 2025-04-04T17:16:15.644318Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:15.644481Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:140:2164], cookie=682916312279109049, session=1) 2025-04-04T17:16:15.654794Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-04T17:16:15.654848Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-04T17:16:15.666677Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:140:2164], cookie=111, session=1) 2025-04-04T17:16:15.666750Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:140:2164], cookie=682916312279109049) 2025-04-04T17:16:15.666798Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-04T17:16:16.014945Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:16.015047Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:16.034087Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:16.034640Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:16.058285Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:16.058718Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=1479659338906359739, session=0, seqNo=0) 2025-04-04T17:16:16.058843Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:16.070687Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=1479659338906359739, session=1) 2025-04-04T17:16:16.071243Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:131:2157], cookie=7276795511099608115, session=1) 2025-04-04T17:16:16.071315Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-04T17:16:16.082838Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:131:2157], cookie=7276795511099608115) 2025-04-04T17:16:16.083507Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:148:2172], 
cookie=5275443199988672337) 2025-04-04T17:16:16.083557Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:148:2172], cookie=5275443199988672337) 2025-04-04T17:16:16.083998Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:151:2175], cookie=11114021183436422747, session=0, seqNo=0) 2025-04-04T17:16:16.084085Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-04T17:16:16.095690Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:151:2175], cookie=11114021183436422747, session=2) 2025-04-04T17:16:16.096497Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:131:2157], cookie=16174767383770679807, session=2) 2025-04-04T17:16:16.096567Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-04-04T17:16:16.108307Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:131:2157], cookie=16174767383770679807) 2025-04-04T17:16:16.406106Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:16.406210Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:16.426890Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:16.426993Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:16.451236Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:16.452020Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:131:2157], cookie=12345, session=0, seqNo=0) 2025-04-04T17:16:16.452185Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:16.464668Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:131:2157], cookie=12345, session=1) 2025-04-04T17:16:16.465342Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=23456, session=1, seqNo=0) 2025-04-04T17:16:16.477047Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2162], cookie=23456, session=1) 2025-04-04T17:16:16.834914Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:16.835008Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:16.854404Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:16.854535Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:16.878556Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:16.879439Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=12345, session=0, seqNo=0) 2025-04-04T17:16:16.879596Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:16.894465Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=12345, session=1) 2025-04-04T17:16:16.895296Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:138:2162], cookie=23456, session=1, seqNo=0) 2025-04-04T17:16:16.908029Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:138:2162], cookie=23456, session=1) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test 
command err: 2025-04-04T17:16:15.148025Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.148136Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.165230Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.165339Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.179732Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.180619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=1103208966604857017, session=0, seqNo=222) 2025-04-04T17:16:15.180778Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:15.205512Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=1103208966604857017, session=1) 2025-04-04T17:16:15.205883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=11653769285220984099, session=1, seqNo=111) 2025-04-04T17:16:15.218495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=11653769285220984099, session=1) 2025-04-04T17:16:15.584622Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.584719Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.603526Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.603661Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.627602Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.628161Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=111, session=0, seqNo=42) 2025-04-04T17:16:15.628297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:15.628446Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=222, session=1, seqNo=41) 2025-04-04T17:16:15.640521Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=111, session=1) 2025-04-04T17:16:15.640633Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=222, session=1) 2025-04-04T17:16:15.959147Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.959246Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.973733Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.974069Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.997387Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.997845Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=11059844538423088095, session=0, seqNo=0) 2025-04-04T17:16:15.997981Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:16.009426Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=11059844538423088095, session=1) 2025-04-04T17:16:16.010749Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:148:2172], cookie=7269912914241715376) 2025-04-04T17:16:16.010833Z 
node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:148:2172], cookie=7269912914241715376) 2025-04-04T17:16:16.390659Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:16.390754Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:16.408298Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:16.408421Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:16.432750Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:16.776610Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:16.776708Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:16.796789Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:16.796922Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:16.820274Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:16.820632Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=11720423945470642553, session=0, seqNo=0) 2025-04-04T17:16:16.820734Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:16.832461Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=11720423945470642553, session=1) 2025-04-04T17:16:16.832790Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-04T17:16:16.832935Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-04T17:16:16.833020Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-04T17:16:16.844971Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=111) 2025-04-04T17:16:16.845884Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=7851334522199161118, name="Sem1", limit=42) 2025-04-04T17:16:16.846042Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-04-04T17:16:16.857874Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=7851334522199161118) 2025-04-04T17:16:16.858366Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2174], cookie=15477649764771248087, name="Sem1", limit=42) 2025-04-04T17:16:16.870206Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2174], cookie=15477649764771248087) 2025-04-04T17:16:16.870676Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:155:2179], cookie=13860460974449712251, name="Sem1", limit=51) 2025-04-04T17:16:16.882633Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:155:2179], cookie=13860460974449712251) 2025-04-04T17:16:16.883224Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:160:2184], cookie=11662022532664205761, name="Lock1", limit=42) 2025-04-04T17:16:16.895173Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:160:2184], 
cookie=11662022532664205761)
2025-04-04T17:16:16.895763Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:165:2189], cookie=11525064874600730387, name="Lock1", limit=18446744073709551615)
2025-04-04T17:16:16.907800Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:165:2189], cookie=11525064874600730387)
2025-04-04T17:16:16.908469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2194], cookie=10041165274380887307, name="Sem1")
2025-04-04T17:16:16.908555Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2194], cookie=10041165274380887307)
2025-04-04T17:16:16.909033Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:173:2197], cookie=12572130515088021373, name="Sem2")
2025-04-04T17:16:16.909096Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:173:2197], cookie=12572130515088021373)
2025-04-04T17:16:16.921726Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:16.921798Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:16.922300Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:16.922893Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:16.959512Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:16.959662Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1
2025-04-04T17:16:16.959995Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:213:2227], cookie=1868277424184506198, name="Sem1")
2025-04-04T17:16:16.960070Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:213:2227], cookie=1868277424184506198)
2025-04-04T17:16:16.960737Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:220:2233], cookie=15981430253512438820, name="Sem2")
2025-04-04T17:16:16.960827Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:220:2233], cookie=15981430253512438820)
>> TKesusTest::TestRegisterProxyFromDeadActor [GOOD]
>> TKesusTest::TestRegisterProxyLinkFailure
>> TPersQueueTest::MessageMetadata [GOOD]
>> TPersQueueTest::LOGBROKER_7820
>> TKesusTest::TestQuoterResourceDeletion [GOOD]
>> TKesusTest::TestQuoterSubscribeOnResource
>> TKesusTest::TestQuoterAccountResourcesPaced [GOOD]
>> TKesusTest::TestQuoterAccountResourcesDeduplicateClient
>> TKesusTest::TestQuoterSubscribeOnResource [GOOD]
>> TKesusTest::TestRegisterProxyLinkFailure [GOOD]
>> TKesusTest::TestRegisterProxyLinkFailureRace
>> KqpEffects::InsertAbort_Params_Duplicates+UseSink
>> KqpImmediateEffects::InsertExistingKey+UseSink
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly
>> KqpInplaceUpdate::SingleRowPgNotNull+UseSink
>> KqpWrite::ProjectReplace+UseSink
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink
>> KqpEffects::InsertRevert_Literal_Success
>> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD]
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow
------- [TM] {asan, default-linux-x86_64, release}
ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-04-04T17:16:15.372509Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.372607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.391311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.391428Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.406463Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.417886Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:129:2155], cookie=5483011795138034457, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-04T17:16:15.418172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:15.441251Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:129:2155], cookie=5483011795138034457) 2025-04-04T17:16:15.441820Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:138:2162], cookie=1328788246360772565, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-04T17:16:15.442056Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-04-04T17:16:15.454918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:138:2162], cookie=1328788246360772565) 2025-04-04T17:16:15.455535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:143:2167], cookie=13812859810371576928, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-04T17:16:15.455755Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-04-04T17:16:15.468696Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:143:2167], cookie=13812859810371576928) 2025-04-04T17:16:15.469354Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:148:2172], cookie=9582431454694315513, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-04-04T17:16:15.469917Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-04T17:16:15.483820Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:148:2172], cookie=9582431454694315513) 2025-04-04T17:16:15.484501Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:153:2177], cookie=14156465173258982550, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-04T17:16:15.484739Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-04-04T17:16:15.497918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:153:2177], cookie=14156465173258982550) 2025-04-04T17:16:15.498625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:158:2182], cookie=17998367850186038506, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-04-04T17:16:15.498859Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-04-04T17:16:15.511260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:158:2182], 
cookie=17998367850186038506) 2025-04-04T17:16:15.511982Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:163:2187], cookie=9436477370876209771, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-04-04T17:16:15.512172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 2025-04-04T17:16:15.524392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:163:2187], cookie=9436477370876209771) 2025-04-04T17:16:15.525159Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:168:2192], cookie=10559536236959942621, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-04-04T17:16:15.525390Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-04-04T17:16:15.539003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:168:2192], cookie=10559536236959942621) 2025-04-04T17:16:15.539769Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:173:2197], cookie=128731396330304960, ids=[100], paths=[], recursive=0) 2025-04-04T17:16:15.539887Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:173:2197], cookie=128731396330304960) 2025-04-04T17:16:15.540429Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:176:2200], cookie=9129850579678380341, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-04T17:16:15.540528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:176:2200], cookie=9129850579678380341) 2025-04-04T17:16:15.541017Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:179:2203], cookie=6474900264183099728, ids=[], paths=[/Root, ], recursive=0) 2025-04-04T17:16:15.541097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:179:2203], cookie=6474900264183099728) 2025-04-04T17:16:15.541625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:182:2206], cookie=18386792908158548706, ids=[1, 1], paths=[], recursive=0) 2025-04-04T17:16:15.541699Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:182:2206], cookie=18386792908158548706) 2025-04-04T17:16:15.542190Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:185:2209], cookie=865139282412553467, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-04-04T17:16:15.545307Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:185:2209], cookie=865139282412553467) 2025-04-04T17:16:15.545931Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:188:2212], cookie=7136668314901719111, ids=[], paths=[], recursive=1) 2025-04-04T17:16:15.545993Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:188:2212], cookie=7136668314901719111) 2025-04-04T17:16:15.546626Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:191:2215], cookie=187121153334160875, ids=[], paths=[], recursive=0) 2025-04-04T17:16:15.546697Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:191:2215], cookie=187121153334160875) 
2025-04-04T17:16:15.547365Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:194:2218], cookie=7754442853652166494, ids=[3, 2], paths=[], recursive=1) 2025-04-04T17:16:15.547458Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:194:2218], cookie=7754442853652166494) 2025-04-04T17:16:15.548114Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:197:2221], cookie=856916359373985924, ids=[3, 2], paths=[], recursive=0) 2025-04-04T17:16:15.548179Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:197:2221], cookie=856916359373985924) 2025-04-04T17:16:15.548847Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:200:2224], cookie=16102990563298196580, ids=[], paths=[Root2/], recursive=1) 2025-04-04T17:16:15.548977Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:200:2224], cookie=16102990563298196580) 2025-04-04T17:16:15.549544Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:203:2227], cookie=16309240831930071461, ids=[], paths=[Root2/], recursive=0) 2025-04-04T17:16:15.549607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:203:2227], cookie=16309240831930071461) 2025-04-04T17:16:15.563751Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.563861Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.564510Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.565018Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.606339Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.606721Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:243:2257], cookie=14942673752862377416, ids=[100], paths=[], recursive=0) 2025-04-04T17:16:15.606795Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:243:2257], cookie=14942673752862377416) 2025-04-04T17:16:15.607485Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:249:2262], cookie=5064027905774035758, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-04-04T17:16:15.607553Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:249:2262], cookie=5064027905774035758) 2025-04-04T17:16:15.608095Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:252:2265], cookie=11423719983907636405, ids=[], paths=[/Root, ], recursive=0) 2025-04-04T17:16:15.608176Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:252:2265], cookie=11423719983907636405) 2025-04-04T17:16:15.608634Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:255:2268], cookie=11018688183632748060, ids=[1, 1], paths=[], recursive=0) 2025-04-04T17:16:15.608690Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:255:2268], cookie=11018688183632748060) 2025-04-04T17:16:15.609148Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:258:2271], cookie=9278234473135862752, ids=[], paths=[/Root2/Q, 
/Root2/Q], recursive=0) 2025-04-04T17:16:15.609248Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:258:2271], cookie=9278234473135862752) 2025-04-04T17:16:15.609700Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:261:2274], cookie=13138192848727815166, ids=[], paths=[], recursive=1) 2025-04-04T17:16:15.609775Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:261:2274], cookie=13138192848727815166) 2025-04-04T17:16:15.610345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:264:2277], cookie=7339131653301457980, ids=[], paths=[], recursive=0) 2025-04-04T17:16:15.610394Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:264:2277], cookie=7339131653301457980) 2025-04-04T17:16:15.610919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:267:2280], cookie=9356537333760235870, ids=[3, 2], paths=[], recursive=1) 2025-04-04T17:16:15.610977Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:267:2280], cookie=9356537333760235870) 2025-04-04T17:16:15.611521Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:270:2283], cookie=18041442848805102545, ids=[3, 2], paths=[], recursive=0) 2025-0 ... :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:17.485940Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:17.486254Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:131:2157], cookie=7451553908825375400, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-04-04T17:16:17.486415Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:17.497998Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:131:2157], cookie=7451553908825375400) 2025-04-04T17:16:17.498513Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:140:2164], cookie=17047803244811489521, path="/Root/Q", config={ }) 2025-04-04T17:16:17.498714Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-04-04T17:16:17.510710Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:140:2164], cookie=17047803244811489521) 2025-04-04T17:16:17.511243Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:145:2169], cookie=14412912102673492055, path="/Root/Folder", config={ }) 2025-04-04T17:16:17.511440Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-04-04T17:16:17.523261Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:145:2169], cookie=14412912102673492055) 2025-04-04T17:16:17.523924Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:150:2174], cookie=12917660720641311863, path="/Root/Folder/Q1", config={ }) 2025-04-04T17:16:17.524130Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-04-04T17:16:17.536071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:150:2174], cookie=12917660720641311863) 2025-04-04T17:16:17.536549Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:155:2179], cookie=4462823666803895913, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.536627Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:155:2179], cookie=4462823666803895913) 2025-04-04T17:16:17.537257Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:161:2185], cookie=15265439455071242520, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.537323Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:161:2185], cookie=15265439455071242520) 2025-04-04T17:16:17.537970Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:167:2191], cookie=3942679809114823163, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.538029Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:167:2191], cookie=3942679809114823163) 2025-04-04T17:16:17.538396Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:170:2194], cookie=7768318401022222411, id=0, path="/Root/Folder/NonexistingRes") 2025-04-04T17:16:17.538476Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:170:2194], cookie=7768318401022222411) 2025-04-04T17:16:17.538824Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:173:2197], cookie=912481444949726416, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.538872Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:173:2197], cookie=912481444949726416) 2025-04-04T17:16:17.539229Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:176:2200], cookie=2549529936451328207, id=100, path="") 2025-04-04T17:16:17.539279Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:176:2200], cookie=2549529936451328207) 2025-04-04T17:16:17.539619Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:179:2203], cookie=705225923953409484, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.539664Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:179:2203], cookie=705225923953409484) 2025-04-04T17:16:17.540032Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:182:2206], cookie=17015040757002902115, id=3, path="") 2025-04-04T17:16:17.540093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:182:2206], cookie=17015040757002902115) 2025-04-04T17:16:17.540439Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:185:2209], cookie=12398497494722630066, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.540501Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:185:2209], cookie=12398497494722630066) 2025-04-04T17:16:17.540866Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:188:2212], cookie=15455552658085377074, id=0, path="/Root/Folder/Q1") 2025-04-04T17:16:17.540992Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-04-04T17:16:17.552985Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:188:2212], 
cookie=15455552658085377074) 2025-04-04T17:16:17.553702Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:193:2217], cookie=873580207925691239, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.553783Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:193:2217], cookie=873580207925691239) 2025-04-04T17:16:17.565350Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:17.565449Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:17.566070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:17.566730Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:17.614078Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:17.614461Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:233:2247], cookie=987503612590202590, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.614559Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:233:2247], cookie=987503612590202590) 2025-04-04T17:16:17.615317Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:239:2252], cookie=4426043684524774951, id=3, path="") 2025-04-04T17:16:17.615480Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-04-04T17:16:17.627899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:239:2252], cookie=4426043684524774951) 2025-04-04T17:16:17.628665Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:244:2257], cookie=12771219164120634075, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.628763Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:244:2257], cookie=12771219164120634075) 2025-04-04T17:16:17.641183Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:17.641283Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:17.641895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:17.642514Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:17.689810Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:17.690179Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:284:2287], cookie=7979945776699572034, ids=[], paths=[], recursive=1) 2025-04-04T17:16:17.690297Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:284:2287], cookie=7979945776699572034) 2025-04-04T17:16:18.024306Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:18.024409Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:18.043931Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:18.044061Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:18.067820Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:18.068332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2157], cookie=6013825862676611047, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 
2025-04-04T17:16:18.068507Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-04-04T17:16:18.080160Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2157], cookie=6013825862676611047) 2025-04-04T17:16:18.080709Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2164], cookie=4761169145586978592, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-04-04T17:16:18.080866Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-04-04T17:16:18.092939Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2164], cookie=4761169145586978592) 2025-04-04T17:16:18.094707Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2169]. Cookie: 10672811631481523874. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:18.094783Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2169], cookie=10672811631481523874) 2025-04-04T17:16:18.095592Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2169]. Cookie: 2228997181628487941. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." 
} } } ProtocolVersion: 1 }
2025-04-04T17:16:18.095644Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2169], cookie=2228997181628487941)
>> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD]
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD]
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse
>> KqpEffects::InsertAbort_Params_Success
>> KqpImmediateEffects::Delete
>> KqpEffects::InsertAbort_Params_Conflict+UseSink
>> DataShardReadIterator::ShouldReadRangeReverse [GOOD]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight
>> KqpWrite::UpsertNullKey
>> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD]
>> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD]
>> TKesusTest::TestQuoterAccountResourcesAggregateResources
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD]
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected
>> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98
>> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD]
>> DataShardReadIteratorBatchMode::SelectingColumns
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD]
Test command err:
2025-04-04T17:16:16.711159Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:16.711258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:16.726132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:16.726225Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:16.740160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:17.090750Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:17.090816Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:17.105959Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:17.106087Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:17.129629Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:17.480435Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:17.480544Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:17.499150Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:17.499568Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:17.523508Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:17.932053Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:17.932159Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:17.947594Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:17.947774Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:17.972491Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:17.973904Z node 4 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 5
2025-04-04T17:16:17.974541Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:183:2156])
2025-04-04T17:16:18.550054Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:18.550159Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:18.567591Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:18.567804Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
... waiting for register request
2025-04-04T17:16:18.592726Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR
... waiting for register request (done)
... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR
2025-04-04T17:16:18.593485Z node 6 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 7
2025-04-04T17:16:18.593962Z node 6 :KESUS_TABLET TRACE: Got TEvServerDisconnected([6:185:2158])
>> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD]
>> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn
>> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite
>> TPersQueueTest::Delete [GOOD]
>> TPersQueueTest::FetchRequest
>> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD]
>> TKesusTest::TestQuoterAccountResourcesForgetClient
>> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD]
>> KqpWorkloadServiceTables::TestLeaseUpdates
>> KqpImmediateEffects::MultiShardUpsertAfterRead
>> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD]
>> TKesusTest::TestQuoterAccountLabels
>> KqpInplaceUpdate::SingleRowSimple+UseSink
>> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD]
>> KqpWorkloadService::TestHandlerActorCleanup
>> TKesusTest::TestQuoterAccountLabels [GOOD]
>> TKesusTest::TestPassesUpdatedPropsToSession
>> KqpWrite::ProjectReplace+UseSink [GOOD]
>> KqpWrite::ProjectReplace-UseSink
>> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD]
>> KqpImmediateEffects::InsertExistingKey-UseSink
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD]
>> ReadIteratorExternalBlobs::ExtBlobs
>> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD]
>> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink
>> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD]
>> TTopicYqlTest::CreateAndAlterTopicYql
>> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD]
>> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec
>> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD]
>> DataShardReadIterator::ShouldForbidDuplicatedReadId
>> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD]
>> KqpInplaceUpdate::SingleRowPgNotNull-UseSink
>> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
>> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD]
>> KqpEffects::InsertAbort_Params_Duplicates-UseSink
>> KqpEffects::InsertRevert_Literal_Success [GOOD]
>> KqpEffects::UpdateOn_Literal
>> KqpEffects::InsertAbort_Params_Success [GOOD]
>> KqpEffects::InsertAbort_Select_Conflict+UseSink
>> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD]
>> KqpEffects::InsertAbort_Params_Conflict-UseSink
>> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription
>> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD]
>> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD]
>> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds
>> KqpImmediateEffects::Delete [GOOD]
>> KqpImmediateEffects::DeleteAfterInsert
>> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD]
>> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD]
>> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD]
Test command err:
2025-04-04T17:16:15.227762Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:15.227890Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:15.248264Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:15.248412Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:15.263578Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:15.269471Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:129:2155], cookie=10508095856079076079, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 })
2025-04-04T17:16:15.269876Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root"
2025-04-04T17:16:15.293965Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:129:2155], cookie=10508095856079076079)
2025-04-04T17:16:15.294854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:139:2163], cookie=5987283270863322518, path="/Root/Res", config={ })
2025-04-04T17:16:15.295075Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res"
2025-04-04T17:16:15.307271Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:139:2163], cookie=5987283270863322518)
2025-04-04T17:16:15.309005Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:144:2168]. Cookie: 15157186321066309518. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 }
2025-04-04T17:16:15.309068Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:144:2168], cookie=15157186321066309518)
2025-04-04T17:16:15.309674Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:144:2168]. Cookie: 11585635511490436416.
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2025-04-04T17:16:15.309721Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:144:2168], cookie=11585635511490436416) 2025-04-04T17:16:17.421803Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:17.421901Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:17.439770Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:17.439896Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:17.463465Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:17.463860Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:131:2157], cookie=16419110709422107641, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-04T17:16:17.464137Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:17.475959Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:131:2157], cookie=16419110709422107641) 2025-04-04T17:16:17.476754Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:141:2165]. Cookie: 12827356643250842254. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:17.476816Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:141:2165], cookie=12827356643250842254) 2025-04-04T17:16:17.477435Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:141:2165]. Cookie: 12708102261208527787. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:17.477492Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:141:2165], cookie=12708102261208527787) 2025-04-04T17:16:17.478017Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:141:2165]. Cookie: 14496353249381209710. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-04T17:16:17.478068Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:141:2165], cookie=14496353249381209710) 2025-04-04T17:16:17.478536Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:141:2165]. Cookie: 7257715403207208957. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-04T17:16:17.478599Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:141:2165], cookie=7257715403207208957) 2025-04-04T17:16:19.670322Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:19.670425Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:19.688332Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:19.688766Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:19.719322Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:19.719790Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:131:2157], cookie=15576739797995604132, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-04T17:16:19.720126Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:19.732436Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:131:2157], cookie=15576739797995604132) 2025-04-04T17:16:19.733038Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:141:2165], cookie=1592392942706060659, path="/Root/Res1", config={ }) 2025-04-04T17:16:19.733271Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-04-04T17:16:19.745546Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:141:2165], cookie=1592392942706060659) 2025-04-04T17:16:19.746180Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:146:2170], cookie=18158192729191962226, path="/Root/Res2", config={ }) 2025-04-04T17:16:19.746856Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-04-04T17:16:19.759462Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:146:2170], cookie=18158192729191962226) 2025-04-04T17:16:19.760289Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:151:2175]. Cookie: 17437794906323758015. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:19.760359Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:151:2175], cookie=17437794906323758015) 2025-04-04T17:16:19.761003Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:151:2175]. Cookie: 6233776714450650210. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:19.761054Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:151:2175], cookie=6233776714450650210) 2025-04-04T17:16:19.761592Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:151:2175]. Cookie: 711288956656123838. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-04-04T17:16:19.761642Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:151:2175], cookie=711288956656123838) 2025-04-04T17:16:21.944562Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:21.944672Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:21.965334Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:21.965463Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:21.991041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:21.991481Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:131:2157], cookie=17889426633507521875, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-04T17:16:21.991851Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:22.011669Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:131:2157], cookie=17889426633507521875) 2025-04-04T17:16:22.012827Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 15876424784228551582. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:22.012897Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=15876424784228551582) 2025-04-04T17:16:22.013361Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:141:2165]. Cookie: 5388216126113508131. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2025-04-04T17:16:22.013408Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:141:2165], cookie=5388216126113508131) 2025-04-04T17:16:24.177072Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:24.177192Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:24.195991Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:24.196108Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:24.219996Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:24.220394Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2157], cookie=12353016447302022509, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-04T17:16:24.220590Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:24.232509Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2157], cookie=12353016447302022509) 2025-04-04T17:16:24.233073Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:140:2164], cookie=2291551568771796126, path="/Root/Res", config={ }) 2025-04-04T17:16:24.233280Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-04T17:16:24.245247Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:140:2164], cookie=2291551568771796126) 2025-04-04T17:16:24.246084Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2169]. Cookie: 6790751435496718751. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:24.246147Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2169], cookie=6790751435496718751) 2025-04-04T17:16:24.246671Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:149:2173], cookie=14691935556436414741, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-04-04T17:16:24.246834Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2025-04-04T17:16:24.247011Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:145:2169]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-04-04T17:16:24.259167Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:149:2173], cookie=14691935556436414741) 2025-04-04T17:16:24.259780Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:145:2169]. Cookie: 13425877856590978644. Data: { } 2025-04-04T17:16:24.259829Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:145:2169], cookie=13425877856590978644) >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck >> KqpWrite::UpsertNullKey [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::ManyFlushes >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 7774, MsgBus: 6703 2025-04-04T17:16:19.404076Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508046255046988:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:19.404125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d63/r3tmp/tmpPYclOm/pdisk_1.dat 2025-04-04T17:16:19.835545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:19.840742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.840867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.849797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7774, node 1 2025-04-04T17:16:19.946764Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.946788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.946793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.946893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6703 
TClient is connected to server localhost:6703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:20.633147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.662777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.678029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:16:20.838129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.996731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.071531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:22.590216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508059139950642:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.590369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.873460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.898718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.925600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.947926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.973186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.004417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.087530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508063434918453:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.087610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.087728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508063434918458:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.091006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:23.102289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508063434918460:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:23.177035Z node 1 :TX_PROXY ERROR: Actor# [1:7489508063434918515:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:24.404236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508046255046988:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.404336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpImmediateEffects::ConflictingKeyR1WR2 >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-04-04T17:16:13.320875Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:13.321053Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:13.343408Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:13.343572Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:13.369818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:13.380632Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:129:2155], cookie=6305085272675318955, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-04-04T17:16:13.380839Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:129:2155], cookie=6305085272675318955) 2025-04-04T17:16:13.381496Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:135:2160], cookie=8207985114592723452, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-04-04T17:16:13.381613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:135:2160], cookie=8207985114592723452) 2025-04-04T17:16:13.382115Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:138:2163], cookie=617249909697738334, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-04-04T17:16:13.382350Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-04-04T17:16:13.405538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:138:2163], cookie=617249909697738334) 2025-04-04T17:16:13.406180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2168], cookie=6417964595760663544, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-04-04T17:16:13.406445Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 
2025-04-04T17:16:13.419670Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2168], cookie=6417964595760663544) 2025-04-04T17:16:13.885951Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:13.886051Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:13.904399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:13.904545Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:13.928795Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:13.929258Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:131:2157], cookie=16644828167050577849, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-04-04T17:16:13.929692Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:13.941857Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:131:2157], cookie=16644828167050577849) 2025-04-04T17:16:13.942467Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:141:2165], cookie=5512868179372962302, path="/Root/Res", config={ }) 2025-04-04T17:16:13.942695Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-04T17:16:13.960458Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:141:2165], cookie=5512868179372962302) 2025-04-04T17:16:13.964880Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:146:2170]. Cookie: 9177818433752551669. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:13.964988Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:146:2170], cookie=9177818433752551669) 2025-04-04T17:16:13.965701Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:146:2170]. Cookie: 11928087500974353657. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-04-04T17:16:13.965754Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:146:2170], cookie=11928087500974353657) 2025-04-04T17:16:16.129595Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:16.129694Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:16.148590Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:16.148991Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:16.173517Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:16.174030Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:131:2157], cookie=5752446609240029404, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-04T17:16:16.174381Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:16.190953Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:131:2157], cookie=5752446609240029404) 2025-04-04T17:16:16.191586Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:141:2165], cookie=2515657518905992771, path="/Root/Res", config={ }) 2025-04-04T17:16:16.191816Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-04-04T17:16:16.206981Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:141:2165], cookie=2515657518905992771) 2025-04-04T17:16:16.207801Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:146:2170]. Cookie: 17756974626319614669. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:16.207857Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:146:2170], cookie=17756974626319614669) 2025-04-04T17:16:16.208361Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:146:2170]. Cookie: 6633547080892163509. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-04-04T17:16:16.208408Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:146:2170], cookie=6633547080892163509) 2025-04-04T17:16:18.371514Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:18.371636Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:18.389522Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:18.389658Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:18.413762Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:18.414275Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:131:2157], cookie=6141502391043047786, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-04T17:16:18.414667Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:18.426495Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:131:2157], cookie=6141502391043047786) 2025-04-04T17:16:18.427221Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 1037918871779827415. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:18.427272Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=1037918871779827415) 2025-04-04T17:16:18.427643Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:141:2165]. Cookie: 16304270681122615216. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-04T17:16:18.427679Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:141:2165], cookie=16304270681122615216) 2025-04-04T17:16:18.428050Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:141:2165]. Cookie: 2637473365541974087. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-04-04T17:16:18.428096Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:141:2165], cookie=2637473365541974087) 2025-04-04T17:16:20.855834Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:20.855949Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:20.876332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:20.876473Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:20.904571Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:20.905026Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:131:2157], cookie=3362269680434813683, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-04-04T17:16:20.905376Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:20.919019Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:131:2157], cookie=3362269680434813683) 2025-04-04T17:16:20.919951Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:141:2165]. Cookie: 8247452618259542306. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:20.920013Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:141:2165], cookie=8247452618259542306) 2025-04-04T17:16:20.920561Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:141:2165]. Cookie: 10429181701876943890. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-04-04T17:16:20.920618Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:141:2165], cookie=10429181701876943890) 2025-04-04T17:16:23.360720Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:172:2189]. Cookie: 12702304263526734341. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:23.360794Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:172:2189], cookie=12702304263526734341) 2025-04-04T17:16:23.361257Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:172:2189]. Cookie: 13206750734020114224. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-04-04T17:16:23.361301Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:172:2189], cookie=13206750734020114224) 2025-04-04T17:16:25.455748Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:199:2215]. Cookie: 10307933107323316807. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:25.455824Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:199:2215], cookie=10307933107323316807) 2025-04-04T17:16:25.456288Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:199:2215]. Cookie: 17704776023955507253. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-04-04T17:16:25.456332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:199:2215], cookie=17704776023955507253) >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] >> KqpWrite::ProjectReplace-UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:15:10.819462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:15:10.819563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:10.819600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:15:10.819646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:15:10.819689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:15:10.819710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:15:10.819772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-04T17:15:10.819878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:15:10.820224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:10.938671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:10.938722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:10.956531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:10.956744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:10.956861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:10.981026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:10.981321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:10.986308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:10.990397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:11.009669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:11.018901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:11.018984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:11.019197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:11.019236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:11.019334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:11.019434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.028004Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:15:11.165785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:11.170939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.178452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:11.183160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:11.183297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.189190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:11.189439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:11.189680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.189777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:11.189873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:11.189925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:11.192279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.192362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:11.192411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:11.194237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.194303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.194344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:11.194388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.199287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:11.201092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:11.201272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:11.204547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:11.204712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:11.204763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:11.211180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-04-04T17:15:11.211287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:11.211509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:11.211600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:11.214166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:11.214215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:11.214392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:11.214450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:15:11.214786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.214830Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:11.214921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:11.214968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.215015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:11.215048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.215079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:11.215111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.215143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:11.215171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:11.215238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:11.215291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:11.215337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:11.217058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:11.217150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:11.217182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
E: StateWork, received event# 269553210, Sender [3:123:2149], Recipient [3:315:2301]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-04-04T17:16:27.676696Z node 3 :TX_DATASHARD INFO: Started background compaction# 6 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:123:2149], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-04T17:16:27.677735Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 5, ts 1970-01-01T00:00:18.153000Z 2025-04-04T17:16:27.677802Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 5, front# 6 2025-04-04T17:16:27.686231Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1248:3186], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-04T17:16:27.686352Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-04T17:16:27.687248Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:315:2301], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 5 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 18 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 17380 Memory: 124260 Storage: 14054 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-04T17:16:27.687296Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:16:27.687340Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 1.738 2025-04-04T17:16:27.687457Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 18 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:16:27.687492Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T17:16:27.692667Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:303:2292], Recipient [3:315:2301]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:16:27.698436Z node 3 :TX_DATASHARD DEBUG: 
CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.153000Z 2025-04-04T17:16:27.698506Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 6 2025-04-04T17:16:27.698547Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:123:2149]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:27.698859Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:315:2301], Recipient [3:123:2149]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-04-04T17:16:27.698899Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-04-04T17:16:27.698965Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-04-04T17:16:27.699018Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-04T17:16:27.701944Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:303:2292], Recipient [3:315:2301]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:16:27.717085Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.153000Z 2025-04-04T17:16:27.760635Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:16:27.760716Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:16:27.760764Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-04T17:16:27.760860Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:16:27.760897Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-04T17:16:27.761043Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-04T17:16:27.761110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T17:16:27.761174Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-04T17:16:27.761254Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:18.000000Z at schemeshard 72057594046678944 2025-04-04T17:16:27.761369Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:16:27.772080Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, 
Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:16:27.772163Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:16:27.772193Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:16:28.053479Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:16:28.053671Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:16:28.053719Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:16:28.053820Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:16:28.053857Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:16:28.085407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-04T17:16:28.085543Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:18.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-04T17:16:28.085707Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2025-04-04T17:16:28.085990Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [3:123:2149], Recipient [3:315:2301]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-04-04T17:16:28.086181Z node 3 :TX_DATASHARD INFO: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:123:2149], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-04T17:16:28.087399Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.153000Z 2025-04-04T17:16:28.087457Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 7 2025-04-04T17:16:28.091685Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1275:3211], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-04T17:16:28.095935Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:303:2292], Recipient [3:315:2301]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:16:28.101062Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.152000Z 2025-04-04T17:16:28.101135Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2025-04-04T17:16:28.101180Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# 
[3:123:2149]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:28.101533Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:315:2301], Recipient [3:123:2149]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-04-04T17:16:28.101577Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-04-04T17:16:28.101671Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-04-04T17:16:28.101729Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 3 ms, with status# 0, next wakeup in# 0.997000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-04T17:16:28.104388Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:303:2292], Recipient [3:315:2301]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:16:28.116912Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.152000Z >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> DataShardReadIterator::ShouldFailUknownColumns >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19915, MsgBus: 25265 2025-04-04T17:16:18.719994Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508044262306030:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:18.720107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d79/r3tmp/tmpQ3x6Pj/pdisk_1.dat 2025-04-04T17:16:19.091525Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19915, node 1 2025-04-04T17:16:19.136079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.136237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.137893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:19.274951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.274972Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2025-04-04T17:16:19.274980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.275069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25265 TClient is connected to server localhost:25265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:20.001807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.030651Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:20.048876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.219762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.367987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.445478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.675382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508057147209686:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:21.675486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.202346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.236021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.268583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.301004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.332044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.372363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.431341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061442177496:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.431431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.431667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061442177501:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.436921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.449122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508061442177503:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.526691Z node 1 :TX_PROXY ERROR: Actor# [1:7489508061442177556:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19574, MsgBus: 15742 2025-04-04T17:16:24.035155Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508068639158294:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.035291Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d79/r3tmp/tmpOS41XD/pdisk_1.dat 2025-04-04T17:16:24.110846Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19574, node 2 2025-04-04T17:16:24.158047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.158145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.159888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:24.170198Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.170224Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.170233Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.170360Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15742 TClient is connected to server localhost:15742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:24.545475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
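The NOT_FOUND warnings and the TX_PROXY "path exist" error above are the normal first-use bootstrap of the default workload-manager resource pool rather than a failure: the pool fetcher reports /Root/.metadata/workload_manager/pools/default as missing, an ESchemeOpCreateResourcePool suboperation creates it, the creator actor retries after "Transaction ... completed, doublechecking", and the retry then finds the path already present. A minimal client-side sketch of waiting for that pool, assuming a local YDB at grpc://localhost:2136, the Python SDK, and that scheme_client.describe_path raises SchemeError for a missing entry (the endpoint, timeout values, and polling loop are illustrative, not part of the test harness):

    import time
    import ydb

    POOL_PATH = "/Root/.metadata/workload_manager/pools/default"

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    # The pool is created lazily by the first query against the database;
    # poll until the scheme entry exists instead of treating the initial
    # NOT_FOUND warnings as fatal.
    for _ in range(20):
        try:
            driver.scheme_client.describe_path(POOL_PATH)
            break  # bootstrap finished; the warnings above stop here
        except ydb.issues.SchemeError:
            time.sleep(0.5)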
2025-04-04T17:16:24.552431Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:24.564345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:24.654319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:24.869328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:24.973068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.020843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081524061937:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.020937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.061181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.093720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.120945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.152263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.180743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.215026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.299140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081524062453:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.299207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.299250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081524062458:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.302135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.312149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508081524062460:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.365309Z node 2 :TX_PROXY ERROR: Actor# [2:7489508081524062513:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26341, MsgBus: 21580 2025-04-04T17:16:18.709960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508044055984761:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:18.710068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d6b/r3tmp/tmpHU2CSm/pdisk_1.dat 2025-04-04T17:16:19.097654Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:19.141402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.141563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26341, node 1 2025-04-04T17:16:19.143670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:19.278784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.278808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.278816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.278939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21580 TClient is connected to server localhost:21580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:19.989957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.030466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.201692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.349899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.447830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.824727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508056940888281:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:21.824824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.201524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.243749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.273452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.300807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.336026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.403234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.482052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061235856097:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.482136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.482146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061235856102:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.485226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.493613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508061235856104:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.568678Z node 1 :TX_PROXY ERROR: Actor# [1:7489508061235856158:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.303550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.617004Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-04T17:16:23.625492Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:23.626116Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:23.626324Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508065530823908:2496], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7489508065530823742:2496]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7489508065530823908:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:16:23.626805Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508065530823892:2496], SessionActorId: [1:7489508065530823742:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489508065530823742:2496]. isRollback=0 2025-04-04T17:16:23.627107Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgwZjBjOTItM2I5NjBmMzEtZWUzNjNmZTItOTdmNGUzOGQ=, ActorId: [1:7489508065530823742:2496], ActorState: ExecuteState, TraceId: 01jr0wvfzg1mt04dzc3d6dg8c9, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489508065530823902:2496] from: [1:7489508065530823892:2496] 2025-04-04T17:16:23.627197Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508065530823902:2496] TxId: 281474976710674. Ctx: { TraceId: 01jr0wvfzg1mt04dzc3d6dg8c9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTgwZjBjOTItM2I5NjBmMzEtZWUzNjNmZTItOTdmNGUzOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:16:23.627981Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTgwZjBjOTItM2I5NjBmMzEtZWUzNjNmZTItOTdmNGUzOGQ=, ActorId: [1:7489508065530823742:2496], ActorState: ExecuteState, TraceId: 01jr0wvfzg1mt04dzc3d6dg8c9, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 22325, MsgBus: 7167 2025-04-04T17:16:24.175493Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508070125873075:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.175574Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d6b/r3tmp/tmp6DzSrr/pdisk_1.dat 2025-04-04T17:16:24.248785Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22325, node 2 2025-04-04T17:16:24.298175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.298280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.299903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:24.300083Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.300109Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.300123Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.300233Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7167 TClient is connected to server localhost:7167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:24.685530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:24.693319Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:24.701692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
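In the constraint-violation block above, the write fails at datashard transaction prepare with STATUS_CONSTRAINT_VIOLATION and the issue "Duplicate keys have been found." (issue code 2012), which KQP surfaces to the client as PRECONDITION_FAILED. A minimal reproduction sketch, assuming the Python SDK and a pre-existing table t with primary key k (the table, columns, and endpoint are placeholders, not the test's actual /Root/TestImmediateEffects schema):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def insert_same_key_twice(session):
        # First write succeeds; UPSERT carries no uniqueness precondition.
        session.transaction(ydb.SerializableReadWrite()).execute(
            "UPSERT INTO t (k, v) VALUES (1, 'a');", commit_tx=True)
        # INSERT requires the key to be absent, so this trips the
        # datashard check seen in the log (issue code 2012).
        session.transaction(ydb.SerializableReadWrite()).execute(
            "INSERT INTO t (k, v) VALUES (1, 'b');", commit_tx=True)

    try:
        pool.retry_operation_sync(insert_same_key_twice)
    except ydb.issues.PreconditionFailed as e:
        print("duplicate key rejected:", e)  # status PRECONDITION_FAILED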
2025-04-04T17:16:24.786892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:24.948819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.033550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:26.961039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508078715809443:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:26.961122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:26.998504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.022496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.088798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.116200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.141929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.171617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.213006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508083010777252:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.213081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.213083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508083010777257:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.217060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.226713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508083010777259:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.308166Z node 2 :TX_PROXY ERROR: Actor# [2:7489508083010777313:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:28.048323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:28.744129Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508087305745089:2518], TxId: 281474976715676, task: 1. Ctx: { TraceId : 01jr0wvmtpepcb3aewz6hshj83. SessionId : ydb://session/3?node_id=2&id=YmFkZGIwOWYtZTY4ZWUxNWUtMzJiOTBmOTMtZDM0OGFkM2Y=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:16:28.744522Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508087305745090:2519], TxId: 281474976715676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=YmFkZGIwOWYtZTY4ZWUxNWUtMzJiOTBmOTMtZDM0OGFkM2Y=. CustomerSuppliedId : . TraceId : 01jr0wvmtpepcb3aewz6hshj83. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489508087305745086:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:28.744946Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmFkZGIwOWYtZTY4ZWUxNWUtMzJiOTBmOTMtZDM0OGFkM2Y=, ActorId: [2:7489508087305744859:2487], ActorState: ExecuteState, TraceId: 01jr0wvmtpepcb3aewz6hshj83, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6693, MsgBus: 11556 2025-04-04T17:16:19.144849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508047604762044:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:19.145356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d67/r3tmp/tmpC9moYO/pdisk_1.dat 2025-04-04T17:16:19.539030Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6693, node 1 2025-04-04T17:16:19.591301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.591402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.593024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:19.660933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.660966Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.660975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.661125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11556 TClient is connected to server localhost:11556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:20.203917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
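The run above fails inside the KQP compute actor instead (InternalError with KIKIMR_CONSTRAINT_VIOLATION and "Conflict with existing key."), which matches the sink write path, whereas the datashard-prepare path earlier reported "Duplicate keys have been found."; the client-visible status is PRECONDITION_FAILED either way. A small sketch of classifying this portably, assuming the Python SDK (the helper name is illustrative; only the exception types come from the SDK):

    import ydb

    def is_duplicate_key(err: ydb.issues.Error) -> bool:
        # Branch on the status class, not on the issue text, since the
        # message differs between the sink and non-sink write paths.
        return isinstance(err, ydb.issues.PreconditionFailed)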
2025-04-04T17:16:20.217970Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:20.231129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.430952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.587581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.666575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:22.242419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060489665667:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.242496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.453622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.477756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.498565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.523264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.547464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.576175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.608456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060489666173:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.608547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.608679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060489666178:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.611519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.619578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508060489666180:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.698793Z node 1 :TX_PROXY ERROR: Actor# [1:7489508060489666235:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 28080, MsgBus: 15388 2025-04-04T17:16:24.477780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508069672204707:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.477847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d67/r3tmp/tmpeJMgm2/pdisk_1.dat 2025-04-04T17:16:24.584609Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28080, node 2 2025-04-04T17:16:24.624223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.624383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.626525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:24.654769Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.654791Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.654796Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.654900Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15388 TClient is connected to server localhost:15388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:25.200028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.210199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:25.280722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.418502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.489725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.260920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082557108332:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.260964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.296663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.325226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.355600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.396043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.427734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.457114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.536559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082557108848:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.536640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.536659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082557108853:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.539777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.548918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508082557108855:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.602327Z node 2 :TX_PROXY ERROR: Actor# [2:7489508082557108908:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:28.315833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:28.516059Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976715673; 2025-04-04T17:16:28.522484Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508086852076632:2495], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7489508086852076489:2495]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7489508086852076632:2495].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:16:28.522981Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508086852076621:2495], SessionActorId: [2:7489508086852076489:2495], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7489508086852076489:2495]. isRollback=0 2025-04-04T17:16:28.523192Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM3MDA1ZWItN2VmNDkwZGQtZjg2MGQzZWYtNDczNTQ2NjY=, ActorId: [2:7489508086852076489:2495], ActorState: ExecuteState, TraceId: 01jr0wvmwqems90465k3a8kcxn, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7489508086852076622:2495] from: [2:7489508086852076621:2495] 2025-04-04T17:16:28.523278Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489508086852076622:2495] TxId: 281474976715673. Ctx: { TraceId: 01jr0wvmwqems90465k3a8kcxn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmM3MDA1ZWItN2VmNDkwZGQtZjg2MGQzZWYtNDczNTQ2NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:16:28.523456Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM3MDA1ZWItN2VmNDkwZGQtZjg2MGQzZWYtNDczNTQ2NjY=, ActorId: [2:7489508086852076489:2495], ActorState: ExecuteState, TraceId: 01jr0wvmwqems90465k3a8kcxn, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:01.514197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:01.514980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:01.515086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:01.515135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:01.515179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:01.515243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:01.515318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:01.518652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:01.601953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:01.602014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:01.615269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:01.615582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:01.615745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:01.625391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:01.625670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:01.626525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.627453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:01.634876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.641890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:01.641943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.641989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:01.642124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.651042Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:01.790820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:01.791093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.791303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:01.791546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:01.791610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:01.795415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.795496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:01.795548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:01.795600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:01.797456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.797509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:01.797543Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:01.799141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.799214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.799274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.799335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.809356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:01.811387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:01.811573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:01.812616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:01.812747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:01.812816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.813135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:01.813193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:01.813384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:01.813457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:01.815488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:01.815533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:01.815725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:01.815770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:01.816149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:01.816202Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:01.816309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.816352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.816388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:01.816420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.816457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:01.816503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:01.816538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:01.816584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:01.816653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:01.816690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:01.816725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:01.818565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.818681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:01.818735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:27.624981Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:27.625034Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:27.625085Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:27.625133Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:27.625169Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:27.625226Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:27.625317Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:27.625660Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:27.644684Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:27.646065Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:27.646281Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:27.646441Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:27.646478Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:27.646616Z node 5 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:27.647385Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:27.647497Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.647576Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.647981Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.648072Z node 5 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:16:27.648280Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.648392Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.648469Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.648593Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.648670Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.648832Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 
2025-04-04T17:16:27.649122Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.649246Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.649621Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.649716Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.649879Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.649982Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.650108Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.650400Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.650499Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.650629Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.650866Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.651035Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.651109Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.651166Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:16:27.658297Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:27.658365Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:27.658439Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:27.658490Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:27.658534Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:27.659512Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:369:2338] sender: [5:426:2058] recipient: [5:15:2062] 2025-04-04T17:16:27.692509Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:27.692583Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-04-04T17:16:27.855480Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-04-04T17:16:27.855620Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:27.855666Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-04T17:16:27.855888Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:27.855937Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:418:2376], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-04-04T17:16:27.856526Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2025-04-04T17:16:29.859442Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-04-04T17:16:29.871984Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTg5LCJpYXQiOjE3NDM3ODY5ODksInN1YiI6InVzZXIxIn0.BJ0JYv0kzbYMbVnqIvAtgfIm2sLQVPQgJxu_Ok0_Eb27Qph0Ryz1llnpbI4lA_Dij6WzgzMqQS-EgK3Sub1tkzmXQtdPY4Igz62UNm6D2McbV_96Q5u2LkmIiR0Ii33u5OGK2PwUVslAC4_cEqH0KtcwQCN1ji_ojs_da6cneAdVBrJ2lALWstnpxqUmdvIB4P-lK23Q-cFHbvHYMvXMPrCm5_6pcuGB8LanQDDdI8ZApbyZlf8uAtwcqIkIydelgo89Ie0x6UM5n0kflM5Q-K2_K7IwZ-iD4pHhPz-AmMQnwBCKNTJcqaCdUWZByRX0EVvqDdY9fPg9KuVfVrtsSg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzODMwMTg5LCJpYXQiOjE3NDM3ODY5ODksInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-04-04T17:16:29.872671Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:16:29.872895Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 244us result status StatusSuccess 2025-04-04T17:16:29.873363Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyPGJb1JMvI38reL8+NP\n+ffXakL8IIS4ugKFQ0O1WfGx0AamgF30hhAHSLeHtIuL/DYJzij94MXckmV1yLLX\nCR+QQlDMaB+FZbqhwTkBB1HCFICg6u4XrKZhQ44sxcylBvmKOUWc7Rfj4YsxooI6\nJAjMESUf2Se9a6luRCTTV7nhypBK95jKusQMOpspdcfEzMjp0NDDqyECBMmWXYO1\nQAmao7XWop7ez55lxMcRDxXJZY5UJWF6VClShqRyu1l3K68/p3O8mo9OHTlk00Da\nNoJTfryi15jAeNeFE3OoM2AkXanMq/HooiAAqMWH5Y+TSUQk2151gRrEjfhMY8aB\n/wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743873385402 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArVdVm0Nzh7KHpbMeL6uw\nB5iMXYHFHfoxiQn1bxoalOOYIyAS7SukYLOvhXg/hPAuo/4Dz61heizzQMWHNJTu\n+bKuR6BYgIQoKLTGwJ5Vx5Y9yJAyBPVWkZk3ms4xfQACREvqPNcGJGrG/oxWmdpG\njuwmiAjNJ2k8WbXasifwrp27TrPRUMlmNzVhDUBo1edT8Bt7Q6bLrcrOqMblH/VY\npjqc4OayRW77fh91EHlrNRGCGoJ9BcT4h5xQrDfpFqmV4O+sFyZEld6wCx7/A4NH\nIrVhkB+JPJFC/LEFyuTM16Ew46Vh+beRLSgAkkPR8A/KHUxE3M/qTEZ4yx+A7zDv\nTQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743873385565 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7ef5B2YbimsIrOWpm4bL\nLc0DBQahMSAwP7ZKAvF3U+ZPZmSRpWmQ0QMc7k35BwekqtzjTmXIpzyvJ+4Y/3xI\nOdNYfOQ0LzUApoVlw0+5E4BaNcRsJSmU+Ruq5A263hWqADiMHfJQ+rOEqISeWkzm\n+Z5NjL3Gi8hGzw0zY6k5HExT8SOi+xYe+nAndkvgL8Ie6VuF0CSLhZyIjy9eAzjI\nsYr77d5CbX7kbUupGx8Feqbh9MHuEdKQ3KezGMQ+aKBsSNhxUyP+gQO8bnEy1mfz\nkHlojQbR+a6v3nEGkXERO/d6E/dadmDwq+Kj6FVvvBCJB3qrsYvmey/WQoBK51fm\nFQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743873387852 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Literal [GOOD] Test command err: Trying to start YDB, gRPC: 28609, MsgBus: 5269 2025-04-04T17:16:18.718820Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508044193533908:2238];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:18.719039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d6e/r3tmp/tmpVzRLbH/pdisk_1.dat 2025-04-04T17:16:19.111449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:19.135687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.135799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.137965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28609, node 1 2025-04-04T17:16:19.279776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.279794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.279800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.279893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5269 TClient is connected to server localhost:5269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:19.993430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.030526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.180945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.349905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.450318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.892378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508057078437389:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:21.892508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.201593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.259174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.288245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.319516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.346191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.381484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.431295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061373405194:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.431377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.431632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061373405199:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.436954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.447048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508061373405201:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.511755Z node 1 :TX_PROXY ERROR: Actor# [1:7489508061373405254:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.718565Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508044193533908:2238];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:23.718677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15965, MsgBus: 65318 2025-04-04T17:16:24.494541Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508069379747889:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.494636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d6e/r3tmp/tmphoxHXY/pdisk_1.dat 2025-04-04T17:16:24.612094Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:24.641341Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.641433Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.642989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15965, node 2 2025-04-04T17:16:24.686702Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.686723Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.686730Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.686829Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65318 TClient is connected to server localhost:65318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:25.179395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.191043Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:25.220949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.304376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:25.445929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:25.528098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.504309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082264651563:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.504392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.543264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.567927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.594883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.619989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.648106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.676873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.716833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082264652074:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.716897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.716933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082264652079:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.720264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.729253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508082264652081:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.829173Z node 2 :TX_PROXY ERROR: Actor# [2:7489508082264652136:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20539, MsgBus: 1467 2025-04-04T17:16:18.726269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508043507963843:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:18.726394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d72/r3tmp/tmpzH9Ows/pdisk_1.dat 2025-04-04T17:16:19.073802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:19.123073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.123228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.126954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20539, node 1 2025-04-04T17:16:19.278774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.278801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.278809Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.278923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1467 TClient is connected to server localhost:1467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:20.052834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.074905Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:20.088796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.232045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:20.430420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:20.507619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.844139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508056392867499:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:21.844253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.201554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.249272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.274967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.301127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.371432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.416813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.456943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060687835312:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.457000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.457074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060687835317:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.460253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.468018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508060687835319:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.531074Z node 1 :TX_PROXY ERROR: Actor# [1:7489508060687835371:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.302166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.726493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508043507963843:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:23.726609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10054, MsgBus: 22627 2025-04-04T17:16:24.345669Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508069560905316:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.345728Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d72/r3tmp/tmpybQn6x/pdisk_1.dat 2025-04-04T17:16:24.438884Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10054, node 2 2025-04-04T17:16:24.472721Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.473167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.483171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:24.538798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.538819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.538839Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.538962Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22627 TClient is connected to server localhost:22627 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:25.024206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.050421Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:25.084224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.176998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.367547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.437280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.283237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082445808985:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.283404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.299355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.327051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.356216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.382030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.407282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.434261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.480697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082445809493:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.480790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.480997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508082445809498:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.484481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.492838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508082445809500:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.570552Z node 2 :TX_PROXY ERROR: Actor# [2:7489508082445809554:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:28.524799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20855, MsgBus: 6523 2025-04-04T17:16:18.711474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508044757988046:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:18.711514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d70/r3tmp/tmp2xDlAx/pdisk_1.dat 2025-04-04T17:16:19.099024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20855, node 1 2025-04-04T17:16:19.164412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.164482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.165603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:19.275186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.275209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.275216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.275308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6523 TClient is connected to server localhost:6523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:19.988312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.030441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.192878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.381457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.454267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.802814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508057642891710:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:21.802917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.203853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.255353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.290856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.322862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.349259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.382063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.431316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061937859515:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.431411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.431579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061937859520:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.436934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.445484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508061937859522:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.529287Z node 1 :TX_PROXY ERROR: Actor# [1:7489508061937859576:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.419813Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-04T17:16:23.430264Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:23.430428Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:23.430638Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508066232827204:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7489508066232827164:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7489508066232827204:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:16:23.431087Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508066232827197:2496], SessionActorId: [1:7489508066232827164:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489508066232827164:2496]. isRollback=0 2025-04-04T17:16:23.431290Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGM3MjA5YWQtN2UzZDhmNjgtZmM5MDc1NC03ODVlOWU2ZA==, ActorId: [1:7489508066232827164:2496], ActorState: ExecuteState, TraceId: 01jr0wvfvk79qwck4fyh81m9be, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489508066232827198:2496] from: [1:7489508066232827197:2496] 2025-04-04T17:16:23.431389Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508066232827198:2496] TxId: 281474976710671. Ctx: { TraceId: 01jr0wvfvk79qwck4fyh81m9be, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM3MjA5YWQtN2UzZDhmNjgtZmM5MDc1NC03ODVlOWU2ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:16:23.432115Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGM3MjA5YWQtN2UzZDhmNjgtZmM5MDc1NC03ODVlOWU2ZA==, ActorId: [1:7489508066232827164:2496], ActorState: ExecuteState, TraceId: 01jr0wvfvk79qwck4fyh81m9be, Create QueryResponse for error on request, msg: 2025-04-04T17:16:23.711745Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508044757988046:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:23.711822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2769, MsgBus: 27067 2025-04-04T17:16:24.437426Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508071198240107:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.437461Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d70/r3tmp/tmpAs6IUl/pdisk_1.dat 2025-04-04T17:16:24.546158Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:24.574685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.574760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.578756Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2769, node 2 2025-04-04T17:16:24.625004Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.625026Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.625033Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.625130Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27067 TClient is connected to server localhost:27067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:25.066323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:25.075205Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:25.094633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.180736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.356071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.447509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.350861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508084083143770:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.350976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.392936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.419573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.447651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.476620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.503862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.537043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.572568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508084083144279:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.572662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.572692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508084083144284:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.575562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.583614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508084083144286:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.655922Z node 2 :TX_PROXY ERROR: Actor# [2:7489508084083144340:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:28.781756Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508088378111949:2500], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWNmN2ViMTgtZWMwZDUxMTMtMTRjOGZiNGQtNDI3MWY5YWU=. TraceId : 01jr0wvmzc6xz7mb18n55qjtew. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-04T17:16:28.782680Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508088378111950:2501], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jr0wvmzc6xz7mb18n55qjtew. SessionId : ydb://session/3?node_id=2&id=NWNmN2ViMTgtZWMwZDUxMTMtMTRjOGZiNGQtNDI3MWY5YWU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508088378111946:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:28.783101Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWNmN2ViMTgtZWMwZDUxMTMtMTRjOGZiNGQtNDI3MWY5YWU=, ActorId: [2:7489508088378111893:2488], ActorState: ExecuteState, TraceId: 01jr0wvmzc6xz7mb18n55qjtew, Create QueryResponse for error on request, msg: >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> TPersQueueTest::ReadWithoutConsumerFederation [GOOD] >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9171, MsgBus: 21747 2025-04-04T17:16:18.707334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508041999373398:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:18.707910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d6a/r3tmp/tmpbG8Jpj/pdisk_1.dat 2025-04-04T17:16:19.152613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.152726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.154485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9171, node 1 2025-04-04T17:16:19.183400Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:19.183896Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:19.185530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:19.277884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.277911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.277917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.278024Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21747 TClient is connected to server localhost:21747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
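
Note the two surfaces for the same violation in the InsertAbort_Params_Duplicates run above: on node 1 the datashard rejects the prepared transaction (TX_DATASHARD "Prepare transaction failed" with STATUS_CONSTRAINT_VIOLATION), while on node 2 the sink write path reports it as a KQP_COMPUTE InternalError ("Duplicated keys found.") that aborts the sibling task with "Terminate execution". Either way the statement is not retryable as written; when overwriting is acceptable, the usual fix is a blind UPSERT, which does not require the key to be new (a sketch; verify the duplicate-within-one-batch semantics against your YDB version):

    UPSERT INTO TwoShard (Key, Value1) VALUES
        (1u, "first"),
        (1u, "second");  -- overwrites instead of aborting (assumed last-write-wins semantics)
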
2025-04-04T17:16:20.041335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.071010Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:20.087126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:20.229772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:16:20.404785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.485362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.857734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508054884277043:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:21.857847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.202499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.252291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.282861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.311019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.339309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.371487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.447874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508059179244854:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.447976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.447982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508059179244859:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.450846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.458445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508059179244861:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.548224Z node 1 :TX_PROXY ERROR: Actor# [1:7489508059179244916:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.311344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.707436Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508041999373398:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:23.707525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25362, MsgBus: 2414 2025-04-04T17:16:24.442856Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508068425442401:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.442926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d6a/r3tmp/tmpGVkjVl/pdisk_1.dat 2025-04-04T17:16:24.519013Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:24.549776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.549856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.551256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25362, node 2 2025-04-04T17:16:24.598774Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.598799Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.598807Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.598905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2414 TClient is connected to server localhost:2414 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:25.095413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.118802Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:25.130320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.210700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.371940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.444859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.543692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081310346037:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.543778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.585863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.652050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.680112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.706280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.736543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.764976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.813183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081310346548:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.813262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.813464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081310346553:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.816492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.824467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508081310346555:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.897180Z node 2 :TX_PROXY ERROR: Actor# [2:7489508081310346609:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:28.843483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.443402Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508068425442401:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:29.443510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4170, MsgBus: 65249 2025-04-04T17:16:19.295551Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508049004162838:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:19.296063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d65/r3tmp/tmp6PLlbr/pdisk_1.dat 2025-04-04T17:16:19.665306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.665412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.667482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:19.708820Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4170, node 1 2025-04-04T17:16:19.786963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.786989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.786995Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.787129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65249 TClient is connected to server localhost:65249 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:20.280603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.302066Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:20.312946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.462742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.647529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.713714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:22.314578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061889066319:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.316639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.540565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.564425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.588386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.612597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.635084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.701969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.746454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061889066831:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.746544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.746610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508061889066836:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.749821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.757998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508061889066838:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.842285Z node 1 :TX_PROXY ERROR: Actor# [1:7489508061889066892:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.590420Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-04T17:16:23.600452Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:23.600602Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:23.600779Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508066184034504:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7489508066184034476:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7489508066184034504:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:16:23.601254Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508066184034497:2496], SessionActorId: [1:7489508066184034476:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489508066184034476:2496]. isRollback=0 2025-04-04T17:16:23.601493Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGVlMTRjMDctZDkzZGEzNi01NWM3ZmUwNS0yOTA5ODQzZA==, ActorId: [1:7489508066184034476:2496], ActorState: ExecuteState, TraceId: 01jr0wvg1s5nfkmn04cmdpnqmp, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489508066184034498:2496] from: [1:7489508066184034497:2496] 2025-04-04T17:16:23.601593Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508066184034498:2496] TxId: 281474976710671. Ctx: { TraceId: 01jr0wvg1s5nfkmn04cmdpnqmp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVlMTRjMDctZDkzZGEzNi01NWM3ZmUwNS0yOTA5ODQzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:16:23.602459Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGVlMTRjMDctZDkzZGEzNi01NWM3ZmUwNS0yOTA5ODQzZA==, ActorId: [1:7489508066184034476:2496], ActorState: ExecuteState, TraceId: 01jr0wvg1s5nfkmn04cmdpnqmp, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15586, MsgBus: 22163 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d65/r3tmp/tmp5i2pg3/pdisk_1.dat 2025-04-04T17:16:24.671055Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:24.712123Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15586, node 2 2025-04-04T17:16:24.742937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.743028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.746445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:24.806849Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.806873Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.806880Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.806980Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22163 TClient is connected to server localhost:22163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:25.315458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.325540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.404065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:25.543425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.610203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.664970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508080831851072:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.665082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.688206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.713416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.739912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.773952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.806186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.832933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.917032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508080831851588:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.917104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.917182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508080831851593:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.920208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.928331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508080831851595:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:28.013361Z node 2 :TX_PROXY ERROR: Actor# [2:7489508085126818946:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:29.096722Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508089421786546:2500], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jr0wvnae382tha8sh3821wrp. SessionId : ydb://session/3?node_id=2&id=ZTYxY2Y4MzItY2I3MzNmZWMtODAxMjI0NjYtZmQ1NTJkZTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:16:29.096971Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508089421786548:2501], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jr0wvnae382tha8sh3821wrp. SessionId : ydb://session/3?node_id=2&id=ZTYxY2Y4MzItY2I3MzNmZWMtODAxMjI0NjYtZmQ1NTJkZTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508089421786543:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:29.097257Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTYxY2Y4MzItY2I3MzNmZWMtODAxMjI0NjYtZmQ1NTJkZTI=, ActorId: [2:7489508085126819199:2488], ActorState: ExecuteState, TraceId: 01jr0wvnae382tha8sh3821wrp, Create QueryResponse for error on request, msg: |90.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 7449, MsgBus: 6844 2025-04-04T17:16:18.707433Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508043693142039:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:18.707561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d75/r3tmp/tmpxIWokn/pdisk_1.dat 2025-04-04T17:16:19.067542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:19.121792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.122664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.124360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7449, node 1 2025-04-04T17:16:19.276800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.276822Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.276827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.276915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6844 TClient is connected to server localhost:6844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:20.029738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
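
The "Conflict with existing key." abort above is the second failure mode the InsertAbort_Params_Conflict test covers: the batch itself is clean, but one of its keys is already present in the table, so INSERT fails with code 2012 under a different message than the duplicate-keys case. A sketch under the same assumed schema as before:

    UPSERT INTO TwoShard (Key, Value1) VALUES (10u, "seed");   -- key now exists
    INSERT INTO TwoShard (Key, Value1) VALUES (10u, "again");
    -- Expected: Conflict with existing key. (code 2012)
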
2025-04-04T17:16:20.061088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.215840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:20.393940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:20.478121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.678702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508056578045629:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:21.678815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.201606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.235296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.267022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.298408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.328634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.362983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.438847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060873013441:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.438923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.439195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060873013446:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.443058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.451604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508060873013448:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.552320Z node 1 :TX_PROXY ERROR: Actor# [1:7489508060873013503:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.365729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.720656Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508043693142039:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:23.720718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9490, MsgBus: 16142 2025-04-04T17:16:24.725206Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508070256760834:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.727832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d75/r3tmp/tmpmFpKoE/pdisk_1.dat 2025-04-04T17:16:24.875860Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:24.880175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:24.880245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:24.883683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9490, node 2 2025-04-04T17:16:24.961721Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:24.961745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:24.961751Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:24.961845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16142 TClient is connected to server localhost:16142 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:16:25.448782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:25.455870Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:25.466216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.514052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.677441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.752377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.503864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508083141664392:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.503959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.538408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.563756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.588583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.614350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.648023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.691201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.730429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508083141664902:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.730484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.730533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508083141664907:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.735500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.743233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508083141664909:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:27.830875Z node 2 :TX_PROXY ERROR: Actor# [2:7489508083141664965:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:28.661358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |90.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> KqpImmediateEffects::UpdateOn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 6122, MsgBus: 5617 2025-04-04T17:16:19.073875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508047379513315:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:19.073993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d68/r3tmp/tmpRnFbsf/pdisk_1.dat 2025-04-04T17:16:19.414983Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:19.417293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:19.417364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:19.420379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6122, node 1 2025-04-04T17:16:19.543310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:19.543323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:19.543330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:19.543434Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5617 TClient is connected to server localhost:5617 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:20.107573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.139140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.280151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.436163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:20.501816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:22.029646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060264416764:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.029737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.304380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.329244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.355610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.383784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.409255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.443970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:22.481391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060264417277:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.481454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.481458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508060264417282:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:22.484124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:22.491848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508060264417284:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:22.555014Z node 1 :TX_PROXY ERROR: Actor# [1:7489508060264417338:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:23.321909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:24.064918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508047379513315:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.064989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24024, MsgBus: 29398 2025-04-04T17:16:24.953599Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508069100048430:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:24.954767Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d68/r3tmp/tmpnzjHjl/pdisk_1.dat 2025-04-04T17:16:25.057750Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24024, node 2 2025-04-04T17:16:25.083494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:25.083546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:25.084870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:25.134656Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:25.134672Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:25.134678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:25.134792Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29398 TClient is connected to server localhost:29398 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:25.546980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.551572Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:25.561192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.621554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.769457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:25.822328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.701929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081984951916:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.702027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.738919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.769244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.800146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.827053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.851258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.885090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:27.926520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081984952424:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.926616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.926674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508081984952429:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:27.929877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:27.938655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508081984952431:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:28.029947Z node 2 :TX_PROXY ERROR: Actor# [2:7489508086279919784:3435] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:28.889358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.947842Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508069100048430:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:29.947927Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> KqpImmediateEffects::ManyFlushes [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> KqpInplaceUpdate::SingleRowStr+UseSink >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> KqpImmediateEffects::DeleteAfterUpsert >> KqpEffects::InsertAbort_Literal_Success >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpInplaceUpdate::SingleRowArithm+UseSink >> KqpEffects::UpdateOn_Params >> KqpImmediateEffects::UpsertDuplicates >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpImmediateEffects::UpsertAfterInsertWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] Test command err: Trying to start YDB, gRPC: 22217, MsgBus: 28507 2025-04-04T17:16:20.717840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508053368189108:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:20.726422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d60/r3tmp/tmpsWqLf1/pdisk_1.dat 2025-04-04T17:16:21.089245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:21.133835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:21.133937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:21.135965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22217, node 1 2025-04-04T17:16:21.192025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:21.192060Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: (empty maybe) 2025-04-04T17:16:21.192069Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:21.192173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28507 TClient is connected to server localhost:28507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:21.662462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.695201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.807764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:21.969759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:22.046262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:23.527221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508066253092605:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.527298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.786290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.810458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.834754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.858293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.884099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.910386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:23.946978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508066253093114:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.947049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508066253093119:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.947054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:23.950327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:23.958640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508066253093121:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:24.041754Z node 1 :TX_PROXY ERROR: Actor# [1:7489508070548060471:3436] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:24.887720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13210, MsgBus: 29846 2025-04-04T17:16:26.187119Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508077260386512:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:26.187174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d60/r3tmp/tmpr5tvSY/pdisk_1.dat 2025-04-04T17:16:26.281341Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13210, node 2 2025-04-04T17:16:26.313693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:26.313891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:26.315417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:26.344825Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:26.344852Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:26.344862Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:26.344964Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29846 TClient is connected to server localhost:29846 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:26.720436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:26.738812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:26.821127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:26.970953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.046768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:28.946523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508085850322881:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:28.946610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:28.963430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:28.990100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.053570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.080934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.106568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.134127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.212578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508090145290696:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.212669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508090145290701:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.212691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.215534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:29.224991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508090145290703:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:29.294094Z node 2 :TX_PROXY ERROR: Actor# [2:7489508090145290757:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:30.071886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:31.180139Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508077260386512:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:31.180202Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28974, MsgBus: 4237 2025-04-04T17:16:21.942332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508054610472584:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:21.942645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5f/r3tmp/tmpMxQKiq/pdisk_1.dat 2025-04-04T17:16:22.290754Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28974, node 1 2025-04-04T17:16:22.345290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:22.345399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:22.346699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:22.357225Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:22.357244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:22.357253Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:22.357373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4237 TClient is connected to server localhost:4237 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:22.814830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:22.845986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:22.967963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:23.104527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:23.158722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:24.665212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508067495376242:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:24.665326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:24.914945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:24.981389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:25.030568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:25.110558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:25.168016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:25.206083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:25.259601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508071790344056:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:25.259674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:25.259890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508071790344061:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:25.264354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:25.278415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508071790344063:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:25.359735Z node 1 :TX_PROXY ERROR: Actor# [1:7489508071790344117:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:26.127422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32221, MsgBus: 11275 2025-04-04T17:16:27.350333Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508081684822780:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:27.350430Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5f/r3tmp/tmprSrFMR/pdisk_1.dat 2025-04-04T17:16:27.426809Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32221, node 2 2025-04-04T17:16:27.474433Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:27.474510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:27.476349Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:27.488768Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:27.488781Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:27.488788Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:27.488860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11275 TClient is connected to server localhost:11275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:27.881929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:27.899891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.945498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:28.069557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:28.174434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:30.010206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508094569726424:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.010323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.052792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:30.079087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:30.149417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:30.180674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:30.211059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:30.246644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:30.287342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508094569726939:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.287433Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.287437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508094569726944:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.290833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:30.300350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508094569726946:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:30.358411Z node 2 :TX_PROXY ERROR: Actor# [2:7489508094569726999:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:31.196113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> KqpImmediateEffects::WriteThenReadWithCommit >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-04-04T17:16:15.161362Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.161499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.178062Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.178180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.192863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.559466Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.559558Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.577527Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.577674Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.601431Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.971261Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.971356Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.993373Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.993847Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:16.017810Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:16.018276Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=9905202815920931941, session=0, seqNo=0) 2025-04-04T17:16:16.018443Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:16.030534Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=9905202815920931941, session=1) 2025-04-04T17:16:16.031049Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:140:2164], cookie=16386596071874617522) 2025-04-04T17:16:16.031132Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:140:2164], cookie=16386596071874617522) 2025-04-04T17:16:16.341710Z node 3 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.358904Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.626165Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.638093Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.885003Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.896637Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.143867Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.155573Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.413107Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.424735Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.671628Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.683452Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.920464Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.932201Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.169322Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.181367Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.421356Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.433461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.751072Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.767189Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.046637Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.063677Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.337484Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.351005Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.621828Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.634805Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.902763Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.919217Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.227997Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.243147Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.538485Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.558881Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.833196Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.855647Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.132407Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.144797Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.407897Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.420049Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.701046Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.714223Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.980833Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.994969Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.247702Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.260310Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.528924Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.540942Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.804532Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.816928Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.081399Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.098177Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.360725Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.373881Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.631662Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.644195Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.912639Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.924792Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.173526Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.191053Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.485629Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.499404Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.786871Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.804242Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.088773Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.103527Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.388430Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.402341Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.698600Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.712156Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.988257Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.000534Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:26.273395Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.285484Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:26.546030Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.557838Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:26.828462Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.840550Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:27.107577Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:27.121043Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:27.428972Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:27.443038Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:27.705039Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:27.723361Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:28.004994Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:28.019126Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:28.282352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:28.294877Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:28.564483Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:28.576836Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:28.863318Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:28.876817Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.140529Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.152795Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.421838Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.433715Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.697715Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.709520Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.980830Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.994933Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:30.271702Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:30.284093Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:30.542393Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:30.554988Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:30.817984Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:30.830834Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:31.097954Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:31.111588Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:31.380000Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:31.399134Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:31.657587Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:31.676642Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:31.928603Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:31.941747Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:32.191399Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:32.206769Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:32.490561Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:32.510871Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:32.766636Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:32.787706Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:33.132411Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-04T17:16:33.132517Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-04T17:16:33.147003Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-04T17:16:33.158757Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:471:2478], cookie=11277416189671012707) 2025-04-04T17:16:33.158851Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:471:2478], cookie=11277416189671012707) 2025-04-04T17:16:33.756843Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:33.756949Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:33.775688Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:33.775821Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:33.808488Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:33.814618Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:131:2157], cookie=8717775996083381457, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-04-04T17:16:33.814880Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-04-04T17:16:33.827417Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:131:2157], cookie=8717775996083381457) 2025-04-04T17:16:33.829025Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:33.829079Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=0) 2025-04-04T17:16:33.829230Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:142:2166]. Cookie: 0. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-04-04T17:16:33.829250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:142:2166], cookie=0) 2025-04-04T17:16:33.872134Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:140:2164]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-04T17:16:33.872246Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:142:2166]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-04-04T17:16:33.872521Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:145:2169]) 2025-04-04T17:16:33.872661Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:142:2166]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-04-04T17:16:33.917890Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:140:2164]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange >> TPersQueueTest::SetupReadSession [GOOD] >> TPersQueueTest::TestBigMessage >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey >> KqpImmediateEffects::InsertDuplicates+UseSink >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert >> 
KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertExistingKey >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> TTopicYqlTest::CreateAndAlterTopicYql [GOOD] >> TTopicYqlTest::BadRequests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 6832, MsgBus: 27553 2025-04-04T17:16:26.869752Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508076160568416:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:26.869791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5e/r3tmp/tmpNUNwhN/pdisk_1.dat 2025-04-04T17:16:27.170498Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6832, node 1 2025-04-04T17:16:27.233678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:27.233831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:27.235630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:27.250650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:27.250673Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:27.250680Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:27.250850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27553 TClient is connected to server localhost:27553 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:27.692136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.733993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:27.856680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:28.004394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:28.067407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.469956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508089045472067:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.470063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.680062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.707449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.733396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.762846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.795852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.827845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:29.899057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508089045472579:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.899147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.899235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508089045472584:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:29.902771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:29.911316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508089045472586:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:29.993519Z node 1 :TX_PROXY ERROR: Actor# [1:7489508089045472641:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:30.870233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30076, MsgBus: 11305 2025-04-04T17:16:32.298205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508104786347911:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.299158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5e/r3tmp/tmppnxGeN/pdisk_1.dat 2025-04-04T17:16:32.424504Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:32.440104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:32.444021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:32.448220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30076, node 2 2025-04-04T17:16:32.507656Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:32.507679Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:32.507688Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:32.507795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11305 TClient is connected to server localhost:11305 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:32.975492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:32.981251Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:32.993628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.080627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:33.229990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.332824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.344090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508117671251533:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.344390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.385411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.414259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.446463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.477752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.509961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.577946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.619952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508117671252048:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.620035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.620047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508117671252053:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.624154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:35.636638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508117671252055:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:35.716133Z node 2 :TX_PROXY ERROR: Actor# [2:7489508117671252108:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:36.596282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.297969Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508104786347911:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.298088Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpInplaceUpdate::BigRow >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1615, MsgBus: 3574 2025-04-04T17:16:28.096877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508087349378345:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:28.096968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5c/r3tmp/tmpeIuVnm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1615, node 1 2025-04-04T17:16:28.453642Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:28.453694Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:28.465955Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:28.483664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:28.483809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:28.486536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:28.503922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:28.503942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:28.503948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:28.504077Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3574 TClient is connected to server localhost:3574 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:28.993669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:29.014113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:29.135956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:29.259217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:29.333264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:30.932370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508095939314727:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.932453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:31.279114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:31.311648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:31.353301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:31.388130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:31.456094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:31.495981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:31.549047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508100234282539:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:31.549193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:31.549519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508100234282545:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:31.552808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:31.561423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508100234282547:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:31.663580Z node 1 :TX_PROXY ERROR: Actor# [1:7489508100234282602:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:32.467999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.097210Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508087349378345:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:33.097314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23606, MsgBus: 20209 2025-04-04T17:16:33.815748Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508107501811566:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:33.815946Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5c/r3tmp/tmpz4QuiH/pdisk_1.dat 2025-04-04T17:16:33.910479Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23606, node 2 2025-04-04T17:16:33.947400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:33.947509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:33.970885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:34.001728Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:34.001745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:34.001751Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:34.001847Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20209 TClient is connected to server localhost:20209 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:16:34.448895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.464584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.534711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.673733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.747427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.728260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508120386715193:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.728340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.760249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.791148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.848626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.914874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.946954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.017446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.064372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508124681683008:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:37.064434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:37.064673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508124681683013:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:37.067850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:37.075789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508124681683015:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:37.171175Z node 2 :TX_PROXY ERROR: Actor# [2:7489508124681683069:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:38.116455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> TPersQueueTest::FetchRequest [GOOD] >> TPersQueueTest::EventBatching >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> TPersQueueTest::LOGBROKER_7820 [GOOD] >> TPersQueueTest::InflightLimit >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 11281, msgbus: 64831 2025-04-04T17:13:00.180998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507194326381581:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:00.181051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f87/r3tmp/tmpQ0k61P/pdisk_1.dat 2025-04-04T17:13:00.566869Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:00.573555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:00.573625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11281, node 1 2025-04-04T17:13:00.608390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:00.616551Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:13:00.616589Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:13:00.695635Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:00.695689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:00.695700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:00.695870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64831 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:13:00.919088Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] Handle TEvNavigate describe path dc-1 2025-04-04T17:13:00.919139Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382310:2439] HANDLE EvNavigateScheme dc-1 2025-04-04T17:13:00.920198Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382310:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:00.955912Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382310:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T17:13:00.965972Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382310:2439] Handle TEvDescribeSchemeResult Forward to# [1:7489507194326382309:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:13:00.984304Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] Handle TEvProposeTransaction 2025-04-04T17:13:00.984334Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T17:13:00.984469Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489507194326382326:2448] 2025-04-04T17:13:01.065094Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:13:01.065214Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T17:13:01.065232Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:13:01.065299Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:01.065556Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.065678Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T17:13:01.065735Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T17:13:01.065876Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T17:13:01.066599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:01.071708Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T17:13:01.071791Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507194326382326:2448] txid# 281474976710657 SEND to# [1:7489507194326382325:2447] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T17:13:01.103089Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] Handle TEvProposeTransaction 2025-04-04T17:13:01.103117Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T17:13:01.103148Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489507198621349662:2484] 2025-04-04T17:13:01.105463Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T17:13:01.105543Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T17:13:01.105561Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:13:01.105619Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:13:01.105893Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:13:01.105979Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:13:01.106021Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T17:13:01.106169Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T17:13:01.106678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:13:01.108783Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T17:13:01.108849Z node 1 :TX_PROXY DEBUG: Actor# [1:7489507198621349662:2484] txid# 281474976710658 SEND to# [1:7489507198621349661:2483] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T17:13:03.118990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507207211284359:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:03.118999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507207211284351:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:03.119115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:03.119388Z node 1 :TX_PROXY DEBUG: actor# [1:7489507194326381809:2114] H ... root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T17:16:37.585466Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] txid# 281474976710665 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T17:16:37.585584Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] txid# 281474976710665 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:16:37.586145Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] txid# 281474976710665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:16:37.586374Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] HANDLE EvNavigateKeySetResult, txid# 281474976710665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-04-04T17:16:37.586495Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] txid# 281474976710665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976710665 TabletId# 72075186224037891} 2025-04-04T17:16:37.587294Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] txid# 281474976710665 HANDLE EvClientConnected 2025-04-04T17:16:37.590405Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976710665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:39268" , at schemeshard: 72075186224037891 2025-04-04T17:16:37.590833Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-04T17:16:37.591091Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-04T17:16:37.591198Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-04T17:16:37.591478Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-04-04T17:16:37.591525Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72075186224037891 2025-04-04T17:16:37.591661Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-04T17:16:37.591685Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-04T17:16:37.591723Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2025-04-04T17:16:37.591742Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready 
parts: 1/1 2025-04-04T17:16:37.591799Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-04-04T17:16:37.591871Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-04-04T17:16:37.591902Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-04-04T17:16:37.591922Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-04-04T17:16:37.591943Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2025-04-04T17:16:37.591962Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-04-04T17:16:37.591981Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-04-04T17:16:37.595655Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-04-04T17:16:37.595987Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-04-04T17:16:37.596261Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] txid# 281474976710665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710665} 2025-04-04T17:16:37.596328Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-04-04T17:16:37.596367Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-04T17:16:37.596387Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508124061692996:2877] txid# 281474976710665 SEND to# [59:7489508124061692995:2359] Source {TEvProposeTransactionStatus txid# 281474976710665 Status# 48} 2025-04-04T17:16:37.596695Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-04-04T17:16:37.596838Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-04-04T17:16:37.596869Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7489508101361734917:2308], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-04T17:16:37.596893Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7489508101361734917:2308], at schemeshard: 72075186224037891, txId: 281474976710665, path id: 1 2025-04-04T17:16:37.597769Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-04T17:16:37.597898Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 
Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710665 2025-04-04T17:16:37.597921Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710665 2025-04-04T17:16:37.597948Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-04-04T17:16:37.597976Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-04-04T17:16:37.598100Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710665, subscribers: 0 TEST clusteradmin triggers auth on tenant 2025-04-04T17:16:37.601022Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710665 TClient is connected to server localhost:27752 TClient::Ls request: /dc-1/tenant-db 2025-04-04T17:16:38.020981Z node 59 :TX_PROXY DEBUG: actor# [59:7489508089701953419:2112] Handle TEvNavigate describe path /dc-1/tenant-db 2025-04-04T17:16:38.021060Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508128356660315:2888] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-04-04T17:16:38.021648Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508128356660315:2888] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:16:38.021828Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508128356660315:2888] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-04-04T17:16:38.027905Z node 59 :TX_PROXY DEBUG: Actor# [59:7489508128356660315:2888] Handle TEvDescribeSchemeResult Forward to# [59:7489508128356660314:2887] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids 
{ Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 Pare... (TRUNCATED) 2025-04-04T17:16:38.065255Z node 59 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-04-04T17:16:38.065879Z node 59 :HIVE WARN: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:16:38.066641Z node 60 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 25094, MsgBus: 25852 2025-04-04T17:16:29.986625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508090843525510:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:29.986708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5b/r3tmp/tmpIKv1cZ/pdisk_1.dat 2025-04-04T17:16:30.306706Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25094, node 1 2025-04-04T17:16:30.373839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:30.373943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:30.379431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:30.396055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:30.396101Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:30.396114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:30.396247Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25852 TClient is connected to server localhost:25852 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:30.819977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:30.843810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:31.005192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:31.177469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:31.253703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:32.674946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508103728429176:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:32.675152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:32.917125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:32.946917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.020544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.047195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.084527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.131815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.235535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508108023396992:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:33.235617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:33.235941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508108023396997:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:33.240072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:33.251605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508108023396999:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:33.337688Z node 1 :TX_PROXY ERROR: Actor# [1:7489508108023397055:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:34.248371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.986900Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508090843525510:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:34.986991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6545, MsgBus: 5521 2025-04-04T17:16:35.544689Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508115097552101:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:35.544862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d5b/r3tmp/tmpn7AO00/pdisk_1.dat 2025-04-04T17:16:35.640832Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6545, node 2 2025-04-04T17:16:35.684868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:35.689548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:35.695532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:35.712007Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:35.712030Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:35.712037Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:35.712166Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5521 TClient is connected to server localhost:5521 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:36.170621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.179194Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:36.198208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.271343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.418667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.476681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.229925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508127982455749:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.229999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.256320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.280162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.302685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.328810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.358139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.392079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.432079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508127982456258:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.432179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.432333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508127982456263:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.436051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:38.446815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508127982456265:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:38.528981Z node 2 :TX_PROXY ERROR: Actor# [2:7489508127982456320:3435] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:39.392128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.047275Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWNkYWMwODUtNWQxNDMyOWQtYzRiYzFjODktZTI0YWQ1N2Q=, ActorId: [2:7489508132277423874:2489], ActorState: ExecuteState, TraceId: 01jr0ww04r6v3w3hc6t70pnx3s, Create QueryResponse for error on request, msg: >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne >> KqpImmediateEffects::Replace >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted |90.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 |90.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-04-04T17:14:50.029557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507666545305381:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:50.035487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:14:50.057782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507665226451610:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:50.057820Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017d5/r3tmp/tmpsWYlH8/pdisk_1.dat 2025-04-04T17:14:51.050812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:51.050921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:51.051552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:51.051596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:51.135392Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:14:51.136439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:51.137674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:51.199315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:14:51.190449Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:14:51.209490Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2033, node 1 2025-04-04T17:14:51.537497Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:51.537522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:51.537528Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:51.537645Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26878 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:51.962658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:54.418877Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:14:54.419846Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:14:54.419863Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:14:54.419918Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7489507682406321092:2305], Start check tables existence, number paths: 2 2025-04-04T17:14:54.425144Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-04-04T17:14:54.425214Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7489507682406321092:2305], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:14:54.425269Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7489507682406321092:2305], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:14:54.425323Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7489507682406321092:2305], Successfully finished 2025-04-04T17:14:54.425398Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:14:54.458370Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:14:54.469456Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTIzZmFkN2EtNTRhMzYzNmQtZWIxZDMyNWQtOWMyMGM2ZWU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTIzZmFkN2EtNTRhMzYzNmQtZWIxZDMyNWQtOWMyMGM2ZWU= 2025-04-04T17:14:54.469724Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507683725175310:2332], Start check tables existence, number paths: 2 2025-04-04T17:14:54.470110Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTIzZmFkN2EtNTRhMzYzNmQtZWIxZDMyNWQtOWMyMGM2ZWU=, ActorId: [1:7489507683725175311:2333], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:14:54.472211Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 
2025-04-04T17:14:54.472229Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:14:54.472241Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:14:54.489991Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507683725175338:2529], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:54.490076Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507683725175310:2332], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:14:54.490127Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507683725175310:2332], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:14:54.490150Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507683725175310:2332], Successfully finished 2025-04-04T17:14:54.490393Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:14:54.494496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:14:54.510113Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507683725175338:2529], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-04T17:14:54.512110Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507683725175338:2529], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:14:54.539481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507683725175338:2529], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:14:54.596824Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507683725175338:2529], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:14:54.601288Z node 1 :TX_PROXY ERROR: Actor# [1:7489507683725175409:2581] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:14:54.601394Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507683725175338:2529], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:14:54.604098Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmU5MTE4NjUtNzA1ZGU2MWMtZmYyNDhlYWQtNzIzMDc5YmU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmU5MTE4NjUtNzA1ZGU2MWMtZmYyNDhlYWQtNzIzMDc5YmU= 2025-04-04T17:14:54.604353Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:14:54.604367Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:14:54.604411Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmU5MTE4NjUtNzA1ZGU2MWMtZmYyNDhlYWQtNzIzMDc5YmU=, ActorId: [1:7489507683725175419:2336], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:14:54.604593Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmU5MTE4NjUtNzA1ZGU2MWMtZmYyNDhlYWQtNzIzMDc5YmU=, ActorId: [1:7489507683725175419:2336], ActorState: ReadyState, TraceId: 01jr0wrs8ca0js07an4763052p, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7489507683725175418:2589] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-04T17:14:54.604666Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7489507683725175419:2336], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmU5MTE4NjUtNzA1ZGU2MWMtZmYyNDhlYWQtNzIzMDc5YmU= 2025-04-04T17:14:54.604735Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507683725175421:2337], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:14:54.604810Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507683725175422:2338], Database: /Root, Start database fetching 2025-04-04T17:14:54.606363Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507683725175422:2338], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-04T17:14:54.606535Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-04T17:14:54.606552Z node 1 :KQP_WORKLOAD_SERVI ... 
WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:35.749220Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:35.749395Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:36.082331Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.093864Z node 12 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:40.158216Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:16:40.161674Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ= 2025-04-04T17:16:40.162606Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7489508137354098501:2328], Start check tables existence, number paths: 2 2025-04-04T17:16:40.162817Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:16:40.163003Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-04T17:16:40.163040Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:16:40.163075Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:16:40.165166Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7489508137354098501:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:16:40.165235Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7489508137354098501:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:16:40.165279Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7489508137354098501:2328], Successfully finished 2025-04-04T17:16:40.165362Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: 
[WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:16:40.168501Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489508137354098529:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:16:40.174566Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:16:40.177630Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489508137354098529:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-04-04T17:16:40.181114Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489508137354098529:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:16:40.190453Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489508137354098529:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:16:40.262426Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489508137354098529:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:16:40.265546Z node 12 :TX_PROXY ERROR: Actor# [12:7489508137354098580:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:40.265698Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7489508137354098529:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:16:40.265970Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-04T17:16:40.265999Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-04T17:16:40.266077Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489508137354098587:2332], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:16:40.267579Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489508137354098587:2332], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:16:40.267684Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-04T17:16:40.267722Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-04T17:16:40.267985Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7489508137354098596:2333], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-04T17:16:40.269117Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7489508137354098596:2333], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-04T17:16:40.275950Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-04-04T17:16:40.275983Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:16:40.276037Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: ReadyState, TraceId: 01jr0ww0ekaq2gnvytst6k6sd0, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-04T17:16:40.276049Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489508137354098608:2335], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:16:40.277421Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7489508137354098608:2335], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:40.277519Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.288894Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.290831Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: ExecuteState, TraceId: 01jr0ww0ekaq2gnvytst6k6sd0, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [12:7489508137354098617:2329] WorkloadServiceCleanup: 0 2025-04-04T17:16:40.291292Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7489508137354098596:2333], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-04T17:16:40.292699Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: CleanupState, TraceId: 01jr0ww0ekaq2gnvytst6k6sd0, EndCleanup, isFinal: 0 2025-04-04T17:16:40.292761Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: CleanupState, TraceId: 01jr0ww0ekaq2gnvytst6k6sd0, Sent query response back to proxy, proxyRequestId: 3, proxyId: [12:7489508115879261414:2084] 2025-04-04T17:16:40.302073Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:16:40.302118Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:40.302155Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:16:40.302194Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:16:40.302320Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=OWEzODg1NjUtY2YxY2NhZjAtOGRjZTkzOGQtNjIzMTEzNjQ=, ActorId: [12:7489508137354098502:2329], ActorState: unknown state, Session actor destroyed 2025-04-04T17:16:40.516929Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7489508115879261602:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:40.517043Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4728, MsgBus: 2486 2025-04-04T17:16:31.174480Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508098745401744:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:31.175349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d59/r3tmp/tmp5qAZ0R/pdisk_1.dat 2025-04-04T17:16:31.539364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:31.543286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:31.543397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:31.547729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4728, node 1 2025-04-04T17:16:31.627992Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:31.628015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:31.628033Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:31.628180Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2486 TClient is connected to server localhost:2486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:32.184198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:32.220830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:32.346268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:32.522280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:32.598654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T17:16:34.212381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508111630305262:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:34.212512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:34.412576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.443728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.476113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.506105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.536493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.577924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.658113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508111630305779:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:34.658174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:34.658316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508111630305784:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:34.662335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:34.673773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508111630305786:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:16:34.764201Z node 1 :TX_PROXY ERROR: Actor# [1:7489508111630305841:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:35.327977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:16:35.580815Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710673;
2025-04-04T17:16:35.604252Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508115925273565:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7489508115925273417:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7489508115925273565:2496].{
: Error: Duplicate keys have been found., code: 2012 }
2025-04-04T17:16:35.604905Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508115925273552:2496], SessionActorId: [1:7489508115925273417:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489508115925273417:2496]. isRollback=0
2025-04-04T17:16:35.605139Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRiZThkZGQtNzg0N2I3NTMtY2QyZDQ1MGUtNjlmMDE3YmY=, ActorId: [1:7489508115925273417:2496], ActorState: ExecuteState, TraceId: 01jr0wvvqse5hdg1ggqfwev9ka, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489508115925273553:2496] from: [1:7489508115925273552:2496]
2025-04-04T17:16:35.605235Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508115925273553:2496] TxId: 281474976710673. Ctx: { TraceId: 01jr0wvvqse5hdg1ggqfwev9ka, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRiZThkZGQtNzg0N2I3NTMtY2QyZDQ1MGUtNjlmMDE3YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:16:35.605415Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRiZThkZGQtNzg0N2I3NTMtY2QyZDQ1MGUtNjlmMDE3YmY=, ActorId: [1:7489508115925273417:2496], ActorState: ExecuteState, TraceId: 01jr0wvvqse5hdg1ggqfwev9ka, Create QueryResponse for error on request, msg: 2025-04-04T17:16:36.154357Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508098745401744:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:36.154453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7186, MsgBus: 26130 2025-04-04T17:16:36.757752Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508121316168748:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:36.757808Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d59/r3tmp/tmpYcykup/pdisk_1.dat 2025-04-04T17:16:36.831539Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7186, node 2 2025-04-04T17:16:36.880075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:36.880173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:36.888751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:36.907030Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:36.907049Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:36.907059Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:36.907159Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26130 TClient is connected to server localhost:26130 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:16:37.292436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.300023Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:37.309327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:37.367764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:37.508579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:37.584646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.438371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508134201072385:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:39.438452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:39.471245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.505393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.535965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.566642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.604648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.634367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.697821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508134201072894:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:39.697926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:39.698142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508134201072899:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:39.702508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:39.711223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508134201072902:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:39.769979Z node 2 :TX_PROXY ERROR: Actor# [2:7489508134201072957:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:40.537644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.907138Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508138496040683:2515], TxId: 281474976715675, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODU4M2RlOTUtZmUyYmE3YmYtZmU1ZmM3YzUtNzI1M2E3ZWM=. TraceId : 01jr0ww0tb49t0atan29wb8w7b. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }.
2025-04-04T17:16:40.907373Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508138496040685:2516], TxId: 281474976715675, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ODU4M2RlOTUtZmUyYmE3YmYtZmU1ZmM3YzUtNzI1M2E3ZWM=. TraceId : 01jr0ww0tb49t0atan29wb8w7b. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508138496040680:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:40.907664Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODU4M2RlOTUtZmUyYmE3YmYtZmU1ZmM3YzUtNzI1M2E3ZWM=, ActorId: [2:7489508138496040505:2487], ActorState: ExecuteState, TraceId: 01jr0ww0tb49t0atan29wb8w7b, Create QueryResponse for error on request, msg: >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpWrite::CastValues >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> KqpEffects::UpdateOn_Select [GOOD] >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15148, MsgBus: 32671 2025-04-04T17:16:32.402695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508105272975503:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.402757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d53/r3tmp/tmpzyEhKJ/pdisk_1.dat 2025-04-04T17:16:32.861365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:32.861473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:32.863004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:32.886323Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15148, node 1 2025-04-04T17:16:32.958041Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:32.958069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:32.958076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:32.958223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32671 TClient is connected to server localhost:32671 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:33.544969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.563973Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:33.570306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.681869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.825911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.889754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.438349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508118157878925:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:35.438474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.704214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.740401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.780252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.813851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.845386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.874676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.973538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508118157879436:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:35.973630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:35.975326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508118157879441:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:35.978439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:35.992494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508118157879443:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.056489Z node 1 :TX_PROXY ERROR: Actor# [1:7489508122452846794:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:36.800641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7666, MsgBus: 17295 2025-04-04T17:16:37.937434Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508126982762053:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.937501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d53/r3tmp/tmp6dWsqb/pdisk_1.dat 2025-04-04T17:16:38.016411Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7666, node 2 2025-04-04T17:16:38.046217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.046380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.047769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.085464Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.085479Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.085484Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.085572Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17295 TClient is connected to server localhost:17295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:38.481988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:38.486317Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:38.493346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.550936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.696484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:38.781057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.759776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508139867665696:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:40.759857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.797940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.828675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.859487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.884278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.909061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.938441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.981519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508139867666206:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:40.981610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:40.982008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508139867666211:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:40.985314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:40.996681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508139867666213:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.096303Z node 2 :TX_PROXY ERROR: Actor# [2:7489508144162633564:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:42.000417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4172, MsgBus: 13743 2025-04-04T17:16:32.552217Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508105173757437:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.552278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d51/r3tmp/tmp8l1aaS/pdisk_1.dat 2025-04-04T17:16:33.026679Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:33.055329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:33.055490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:33.056801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4172, node 1 2025-04-04T17:16:33.205031Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:33.205057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:33.205064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:33.205192Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13743 TClient is connected to server localhost:13743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:33.905201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.951198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:34.080310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:16:34.253952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.334383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.775333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508118058661133:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:35.775448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.026839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.058995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.085471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.115263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.144852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.181589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.243909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508122353628940:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:36.243983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:36.244207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508122353628945:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:36.247575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:36.258149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508122353628947:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.349909Z node 1 :TX_PROXY ERROR: Actor# [1:7489508122353629003:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:37.552355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508105173757437:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.552433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61282, MsgBus: 27035 2025-04-04T17:16:38.332094Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508128480100340:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.347187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d51/r3tmp/tmp7Cu458/pdisk_1.dat 2025-04-04T17:16:38.427946Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:38.457311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.457394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61282, node 2 2025-04-04T17:16:38.458925Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.504658Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.504680Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.504689Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.504781Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27035 TClient is connected to server localhost:27035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:38.892318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.900581Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:38.905354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.981972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.121456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.185704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:41.073318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508141365003830:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:41.073392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.105457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.172895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.201413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.234452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.287918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.322345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.405719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508141365004349:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:41.405807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:41.406275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508141365004354:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:41.409824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:41.420246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508141365004356:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.499107Z node 2 :TX_PROXY ERROR: Actor# [2:7489508141365004410:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:42.526644Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508145659972019:2501], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0ww2es1c5w1092223rqwej. SessionId : ydb://session/3?node_id=2&id=NjdkMDY3MTctMjA1ODA0NzItYzM4MjllZTEtNDM5YmFkNw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }.
2025-04-04T17:16:42.527185Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508145659972021:2502], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jr0ww2es1c5w1092223rqwej. SessionId : ydb://session/3?node_id=2&id=NjdkMDY3MTctMjA1ODA0NzItYzM4MjllZTEtNDM5YmFkNw==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508145659972016:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:42.527602Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdkMDY3MTctMjA1ODA0NzItYzM4MjllZTEtNDM5YmFkNw==, ActorId: [2:7489508145659971963:2487], ActorState: ExecuteState, TraceId: 01jr0ww2es1c5w1092223rqwej, Create QueryResponse for error on request, msg: >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4155, MsgBus: 10648 2025-04-04T17:16:32.813776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508101945886209:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.815648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d4e/r3tmp/tmpJnlrup/pdisk_1.dat 2025-04-04T17:16:33.250598Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:33.252889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:33.253103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:33.256358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4155, node 1 2025-04-04T17:16:33.409527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:33.409553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:33.409560Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:33.409682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10648 TClient is connected to server localhost:10648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:33.939082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:33.953322Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:33.970094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.156196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.340501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.417241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.802830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508114830789736:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.802936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.053379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.085470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.114327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.144061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.180190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.214142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.261013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119125757543:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.261083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.261269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119125757548:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.264868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:36.275383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508119125757550:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.366090Z node 1 :TX_PROXY ERROR: Actor# [1:7489508119125757604:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:37.091307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30503, MsgBus: 8521 2025-04-04T17:16:38.166265Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508127842742426:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.166341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d4e/r3tmp/tmpvAlJdz/pdisk_1.dat 2025-04-04T17:16:38.240676Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30503, node 2 2025-04-04T17:16:38.271670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.271752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.273474Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.294172Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.294202Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.294207Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.294305Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8521 TClient is connected to server localhost:8521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:38.720264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:38.724687Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:38.728868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.788125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.915096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.981370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:40.843902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508136432678777:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.844085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.861990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.890361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.917425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.944139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.972233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.000962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.080341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508140727646585:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.080413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508140727646590:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.080432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.083816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:41.092057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508140727646592:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.183114Z node 2 :TX_PROXY ERROR: Actor# [2:7489508140727646650:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:42.026425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 16549, MsgBus: 21882 2025-04-04T17:16:31.924831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508100211805014:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:31.925080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d58/r3tmp/tmpUItHYa/pdisk_1.dat 2025-04-04T17:16:32.244930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16549, node 1 2025-04-04T17:16:32.310900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:32.310981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:32.325000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:32.365160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:32.365191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:32.365197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:32.365330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21882 TClient is connected to server localhost:21882 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:32.959212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:32.982999Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:32.992320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.123725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:33.293843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:33.376478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.863302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508113096708509:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:34.863431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.058948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.111169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.135753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.159609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.184910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.253672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.340849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508117391676327:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.340932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.341231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508117391676332:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.345763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:35.357909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508117391676334:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:35.416949Z node 1 :TX_PROXY ERROR: Actor# [1:7489508117391676388:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:36.291834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.925019Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508100211805014:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:36.925163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11004, MsgBus: 62687 2025-04-04T17:16:38.025871Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508130386698771:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.025953Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d58/r3tmp/tmpgKKSiN/pdisk_1.dat 2025-04-04T17:16:38.091752Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11004, node 2 2025-04-04T17:16:38.145408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.145498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.147244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.151720Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.151742Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.151748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.151849Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62687 TClient is connected to server localhost:62687 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:38.523570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.528429Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.536175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.609227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.748384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.837501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:40.755941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508138976635118:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.756005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.791074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.821231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.845199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.870082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.898208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.928648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:40.976774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508138976635626:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.976867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.977047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508138976635631:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:40.980374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:40.990827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508138976635633:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.048878Z node 2 :TX_PROXY ERROR: Actor# [2:7489508143271602985:3435] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:41.926788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> KqpInplaceUpdate::BigRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 7601, MsgBus: 4159 2025-04-04T17:16:32.720526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508105562462531:2179];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.720971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d4f/r3tmp/tmpjdo2kN/pdisk_1.dat 2025-04-04T17:16:33.198667Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:33.200172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:33.200268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:33.205203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7601, node 1 2025-04-04T17:16:33.383127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:33.383146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:33.383153Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:33.383260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4159 TClient is connected to server localhost:4159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:33.947880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.974419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:33.988488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.116300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.282603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.391473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.831451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508118447366083:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.831562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.099573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.133157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.166274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.199825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.229852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.297070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.340512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508122742333898:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.340566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.340834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508122742333903:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.343611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:36.352693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508122742333905:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.437521Z node 1 :TX_PROXY ERROR: Actor# [1:7489508122742333958:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:37.291062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.716431Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508105562462531:2179];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.716512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29399, MsgBus: 15887 2025-04-04T17:16:38.463374Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508130286914950:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.463455Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d4f/r3tmp/tmp59klzo/pdisk_1.dat 2025-04-04T17:16:38.550354Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:38.592619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.592825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.594340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29399, node 2 2025-04-04T17:16:38.638822Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.638850Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.638857Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.638976Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15887 TClient is connected to server localhost:15887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:16:39.063125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.076489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.154204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.296476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.373886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:41.030946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508143171818589:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.031022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.080162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.109117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.131519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.155701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.182565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.206991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.264445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508143171819096:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.264531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.264699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508143171819101:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.268420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:41.277232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508143171819103:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.332308Z node 2 :TX_PROXY ERROR: Actor# [2:7489508143171819157:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:42.213242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.872093Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjIxOWQ4ZDYtYWRkYzk5Y2YtYmEzNWM2NWMtZTJmMWRlNzE=, ActorId: [2:7489508147466786711:2489], ActorState: ExecuteState, TraceId: 01jr0ww2xba45gs0pgn8x4scb1, Create QueryResponse for error on request, msg: Error while locks merge >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-04-04T17:16:13.320898Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:13.321027Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:13.342949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:13.343082Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:13.360984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:13.362541Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=12198531714703544750, session=0, seqNo=0) 2025-04-04T17:16:13.362748Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:13.386354Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=12198531714703544750, session=1) 2025-04-04T17:16:13.386739Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=12433142497144299054, session=0, seqNo=0) 2025-04-04T17:16:13.386864Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-04T17:16:13.398983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=12433142497144299054, session=2) 2025-04-04T17:16:13.399587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:140:2164], cookie=9458703050284981253, name="Sem1", limit=1) 2025-04-04T17:16:13.399752Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-04T17:16:13.411963Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:140:2164], cookie=9458703050284981253) 2025-04-04T17:16:13.412427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-04T17:16:13.412637Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-04T17:16:13.412827Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=222, session=2, semaphore="Sem1" count=1) 2025-04-04T17:16:13.424972Z 
node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=111) 2025-04-04T17:16:13.425071Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=222) 2025-04-04T17:16:13.425689Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=1185908116215421007, name="Sem1") 2025-04-04T17:16:13.426762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=1185908116215421007) 2025-04-04T17:16:13.428154Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2175], cookie=9744606488846365106, name="Sem1") 2025-04-04T17:16:13.428232Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2175], cookie=9744606488846365106) 2025-04-04T17:16:13.734539Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:13.750407Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:14.018625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:14.039219Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:14.312894Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:14.325035Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:14.589909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:14.603844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:14.871854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:14.888002Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.140306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.153201Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.408111Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.427306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.685004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.697194Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.945827Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.957712Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.237266Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.251218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.524108Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.537692Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.789019Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.804304Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.068810Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.081091Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.339369Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-04T17:16:17.351598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.640475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.652467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.920191Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.931997Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.189765Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.201917Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.461428Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.473471Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.733579Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.747012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.043896Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.063294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.343410Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.358880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.635614Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.651278Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.934331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.954726Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.241619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.261165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.558624Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.578835Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.862736Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.878519Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.147428Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.159606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.424140Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.439401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.690053Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.702454Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.005708Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.017896Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.300132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.312482Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.591944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-04-04T17:16:22.604165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.867794Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.879955Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.138545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.155071Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.436962Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.450045Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.711573Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.724048Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.993403Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.005446Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.264280Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.276492Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.545912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.561005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.889761Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.901658Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.207331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.224197Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.526023Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.540508Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.806536Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.820061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:26.091104Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.107123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:26.416796Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.4292 ... 
[72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:36.994938Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
[... identical TTxSelfCheck::Execute / TTxSelfCheck::Complete pairs on node 4, tablet 72057594037927937, repeating at ~270-300 ms intervals from 17:16:37.274607 through 17:16:41.894961 ...]
2025-04-04T17:16:42.163144Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:42.176215Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete
[... four more TTxSelfCheck::Execute / TTxSelfCheck::Complete pairs on node 4 between 17:16:42.458050 and 17:16:43.314461 ...]
2025-04-04T17:16:43.570838Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1)
2025-04-04T17:16:43.570924Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1
2025-04-04T17:16:43.570975Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link
2025-04-04T17:16:43.584125Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1)
2025-04-04T17:16:43.598727Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:410:2410], cookie=3541192016011401641, name="Sem1")
2025-04-04T17:16:43.598830Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:410:2410], cookie=3541192016011401641)
2025-04-04T17:16:44.082876Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:44.082978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute
2025-04-04T17:16:44.102805Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:44.102933Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:44.126567Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:44.126974Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=8770648297758669941, session=0, seqNo=0)
2025-04-04T17:16:44.127078Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1
2025-04-04T17:16:44.138807Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=8770648297758669941, session=1)
2025-04-04T17:16:44.139092Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=17890517619690097902, session=0, seqNo=0)
2025-04-04T17:16:44.139220Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2
2025-04-04T17:16:44.151113Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=17890517619690097902, session=2)
2025-04-04T17:16:44.151422Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=13900559412295346827, session=0, seqNo=0)
2025-04-04T17:16:44.151535Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3
2025-04-04T17:16:44.163700Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=13900559412295346827, session=3)
2025-04-04T17:16:44.164277Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2168], cookie=8179381065528150361, name="Sem1", limit=3)
2025-04-04T17:16:44.164440Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1"
2025-04-04T17:16:44.176607Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2168], cookie=8179381065528150361)
2025-04-04T17:16:44.176963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=111, session=1, semaphore="Sem1" count=2)
2025-04-04T17:16:44.177109Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1
2025-04-04T17:16:44.177312Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=222, session=2, semaphore="Sem1" count=1)
2025-04-04T17:16:44.177395Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2
2025-04-04T17:16:44.177492Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=333, session=3, semaphore="Sem1" count=1)
2025-04-04T17:16:44.190187Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=111)
2025-04-04T17:16:44.190290Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=222)
2025-04-04T17:16:44.190323Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=333)
2025-04-04T17:16:44.191017Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:152:2176], cookie=306523443634833062, name="Sem1")
2025-04-04T17:16:44.191115Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:152:2176], cookie=306523443634833062)
2025-04-04T17:16:44.191577Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=6076642670078343322, name="Sem1")
2025-04-04T17:16:44.191655Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], cookie=6076642670078343322)
2025-04-04T17:16:44.191901Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=444, session=1, semaphore="Sem1" count=1)
2025-04-04T17:16:44.192036Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3
2025-04-04T17:16:44.207000Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=444)
2025-04-04T17:16:44.207731Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2184], cookie=11861363033665440038, name="Sem1")
2025-04-04T17:16:44.207834Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2184], cookie=11861363033665440038)
2025-04-04T17:16:44.208323Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2187], cookie=10287308242852005168, name="Sem1")
2025-04-04T17:16:44.208406Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2187], cookie=10287308242852005168)
2025-04-04T17:16:44.223433Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937
2025-04-04T17:16:44.223538Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 
2025-04-04T17:16:44.224114Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete
2025-04-04T17:16:44.224782Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute
2025-04-04T17:16:44.264909Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete
2025-04-04T17:16:44.265135Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1
2025-04-04T17:16:44.265192Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2
2025-04-04T17:16:44.265222Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3
2025-04-04T17:16:44.265568Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:203:2217], cookie=9248701839149508332, name="Sem1")
2025-04-04T17:16:44.265654Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:203:2217], cookie=9248701839149508332)
2025-04-04T17:16:44.266207Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:212:2225], cookie=5019683019958806749, name="Sem1")
2025-04-04T17:16:44.266300Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:212:2225], cookie=5019683019958806749)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD]
Test command err:
Trying to start YDB, gRPC: 7286, MsgBus: 14371
2025-04-04T17:16:32.878661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508102660816211:2071];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:32.879406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d4b/r3tmp/tmp8toDa4/pdisk_1.dat
2025-04-04T17:16:33.330973Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7286, node 1
2025-04-04T17:16:33.357197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:33.357279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:33.365534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:16:33.412649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:33.412667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:33.412672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:33.412759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14371
TClient is connected to server localhost:14371
WaitRootIsUp 'Root'... 
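Before the test dump continues, the node-5 Kesus trace above is worth annotating: "Sem1" has limit=3; session 1 acquires count=2 (order #1) and session 2 count=1 (order #2), exhausting the limit, so session 3's acquire is parked with no order assigned; when session 1 re-acquires with count=1 (cookie=444) its hold shrinks from 2 to 1 and the freed unit admits session 3 as order #3. After the tablet restarts, TTxInit replays the same queue in the same order. A small model of that accounting (plain C++; it mirrors the observable behaviour in the trace, not Kesus's actual implementation):

```cpp
#include <cstdint>
#include <cstdio>
#include <deque>
#include <map>
#include <utility>

// Model of the counting-semaphore behaviour visible in the trace above.
// Assumption: a session's re-acquire replaces its previously held count.
struct TSemaphoreModel {
    uint64_t Limit = 0;                              // "Sem1" was created with limit=3
    std::map<uint64_t, uint64_t> Owners;             // session -> held count
    std::deque<std::pair<uint64_t, uint64_t>> Queue; // waiting (session, count), FIFO
    uint64_t NextOrder = 1;

    uint64_t Held() const {
        uint64_t sum = 0;
        for (const auto& [session, count] : Owners) sum += count;
        return sum;
    }

    void Acquire(uint64_t session, uint64_t count) {
        auto it = Owners.find(session);
        if (it != Owners.end()) {
            it->second = count;                 // re-acquire: shrinking can unblock waiters
        } else {
            Queue.emplace_back(session, count); // newcomers wait their turn
        }
        Drain();
    }

    void Drain() { // admit queued sessions in order while capacity allows
        while (!Queue.empty() && Held() + Queue.front().second <= Limit) {
            auto [session, count] = Queue.front();
            Queue.pop_front();
            Owners[session] = count;
            std::printf("Processing semaphore queue: next order #%llu session %llu\n",
                        (unsigned long long)NextOrder++, (unsigned long long)session);
        }
    }
};

int main() {
    TSemaphoreModel sem{/*Limit=*/3};
    sem.Acquire(1, 2); // -> next order #1 session 1
    sem.Acquire(2, 1); // -> next order #2 session 2 (limit now exhausted)
    sem.Acquire(3, 1); // parked: no order assigned yet
    sem.Acquire(1, 1); // session 1 shrinks 2 -> 1 -> next order #3 session 3
}
```

A session timeout (the node-4 trace earlier: "Deleting session 1 / semaphore 1 \"Sem1\" owner link") would correspond to erasing the session from Owners and draining the queue again.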
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:33.998837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.019138Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:34.029719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.184398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.340509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.422382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.090277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119840687178:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.090420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.332131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.364917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.434809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.464996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.498366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.529425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.622131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119840687694:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.622209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.622360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119840687699:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.625766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:36.638349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508119840687701:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.716553Z node 1 :TX_PROXY ERROR: Actor# [1:7489508119840687757:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:37.878801Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508102660816211:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.878878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13231, MsgBus: 16345 2025-04-04T17:16:38.702029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508131223554664:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.702112Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d4b/r3tmp/tmpGPwCUQ/pdisk_1.dat 2025-04-04T17:16:38.778501Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13231, node 2 2025-04-04T17:16:38.830878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.830964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.834445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.938820Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.938845Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.938852Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.938970Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16345 TClient is connected to server localhost:16345 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
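The resource-pool WARN/ERROR lines in these dumps, up to and including the "path exist, request accepts it" TX_PROXY error, are the expected first-use race on /Root/.metadata/workload_manager/pools/default rather than test failures: several actors see NOT_FOUND, each proposes a create, one wins, and the losers re-read after their transaction completes ("doublechecking"). A hedged sketch of that create-if-missing pattern (generic C++; Describe/Create, the status names, and the in-memory store are placeholders, not the real KQP workload-service API):

```cpp
#include <cstdio>
#include <set>
#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// In-memory stand-in for the scheme board, for illustration only.
static std::set<std::string> g_paths;
EStatus Describe(const std::string& p) { return g_paths.count(p) ? EStatus::Ok : EStatus::NotFound; }
EStatus Create(const std::string& p)   { return g_paths.insert(p).second ? EStatus::Ok : EStatus::AlreadyExists; }

// Ensure a well-known object exists while tolerating a concurrent creator:
// NOT_FOUND -> try to create -> on "already exists", doublecheck by re-reading.
EStatus EnsureDefaultPool() {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    if (Describe(path) == EStatus::Ok) return EStatus::Ok;  // fast path
    switch (Create(path)) {
        case EStatus::Ok:
            return EStatus::Ok;                             // we won the race
        case EStatus::AlreadyExists:                        // a peer won: doublecheck
            std::puts("Scheduled retry: doublechecking");
            return Describe(path);
        default:
            return EStatus::NotFound;                       // propagate failure
    }
}

int main() {
    EnsureDefaultPool(); // first caller creates the pool
    EnsureDefaultPool(); // later callers hit the fast path
}
```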
2025-04-04T17:16:39.360325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.376438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.435354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:39.576364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.636430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.638891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508144108458312:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.638947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.670487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.699436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.731697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.800998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.834967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.882463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.925523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508144108458825:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.925598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.925905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508144108458830:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.929523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:41.941226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508144108458832:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:42.022658Z node 2 :TX_PROXY ERROR: Actor# [2:7489508148403426181:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 13594, MsgBus: 2333 2025-04-04T17:16:32.988191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508102169694254:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.988229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d45/r3tmp/tmpx2kidK/pdisk_1.dat 2025-04-04T17:16:33.419837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:33.419958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:33.427704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:33.475494Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13594, node 1 2025-04-04T17:16:33.619378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:33.619405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:33.619412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:33.619535Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2333 TClient is connected to server localhost:2333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:34.103520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:34.134933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.264671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.423620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.485898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.115236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119349565200:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.115342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.340015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.370011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.406272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.482737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.513754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.583011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.653896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119349565718:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.653956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.654193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119349565723:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.657299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:36.666827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508119349565725:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.770441Z node 1 :TX_PROXY ERROR: Actor# [1:7489508119349565781:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:37.583757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.988411Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508102169694254:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.988469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13256, MsgBus: 10893 2025-04-04T17:16:38.769316Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508129585264588:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.804094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d45/r3tmp/tmpCZNa9U/pdisk_1.dat 2025-04-04T17:16:38.856714Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13256, node 2 2025-04-04T17:16:38.915214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.915299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.927431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.978780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.978804Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.978810Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.978907Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10893 TClient is connected to server localhost:10893 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:39.347982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.355769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.428045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.596149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:39.659252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.347808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508142470168109:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.347940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.382035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.419673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.492579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.519191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.547374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.579236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.632737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508142470168620:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.632806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.632930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508142470168625:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.636482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:41.644718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508142470168627:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.740640Z node 2 :TX_PROXY ERROR: Actor# [2:7489508142470168681:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:42.636641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.263924Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDhhNTE1NjQtZDljZjNjNjUtMWJlY2U0OTUtOTdmM2Q5YjA=, ActorId: [2:7489508146765136238:2489], ActorState: ExecuteState, TraceId: 01jr0ww38vdj8pg8n6z7j9swhm, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 6931, MsgBus: 11270 2025-04-04T17:16:32.922723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508102300283322:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.922946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d46/r3tmp/tmpQ2Y1u1/pdisk_1.dat 2025-04-04T17:16:33.319853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:33.325591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:33.325693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:33.326929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6931, node 1 2025-04-04T17:16:33.439377Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:33.439399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:33.439405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:33.439533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11270 TClient is connected to server localhost:11270 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:34.001504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.020941Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:34.044924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.197548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.371105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.438815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.016511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119480154068:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.016648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.252837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.281273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.311556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.340360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.370677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.445098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.497194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119480154584:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.497271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.497483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119480154589:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.501034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:36.512526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508119480154591:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.581858Z node 1 :TX_PROXY ERROR: Actor# [1:7489508119480154644:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:37.342963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.922268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508102300283322:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.922373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22013, MsgBus: 5313 2025-04-04T17:16:38.563232Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508128405231243:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.563511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d46/r3tmp/tmpBy9D90/pdisk_1.dat 2025-04-04T17:16:38.678594Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22013, node 2 2025-04-04T17:16:38.712616Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.712728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.713835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.762782Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.762807Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.762815Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.762918Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5313 TClient is connected to server localhost:5313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:16:39.194546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.216920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.277060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.422472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.501760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:41.418365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508141290134735:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.418441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.459900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.487536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.517153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.570148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.597566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.670000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.715675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508141290135248:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.715799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.715900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508141290135253:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.719727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:41.730832Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508141290135255:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.804289Z node 2 :TX_PROXY ERROR: Actor# [2:7489508141290135308:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:42.709289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.562708Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508128405231243:2216];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:43.562778Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:07.374359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:07.374492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.374565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:07.374615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:07.375783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:07.375830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:07.375900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.375988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:07.377702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.458531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:07.458578Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.469300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:07.469541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:07.469676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-04-04T17:16:07.479823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:07.480085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:07.480673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.481432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:07.486204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.487308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.487366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.487505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:07.487545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.487605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:07.487733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.493451Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:07.633993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:07.634201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.634456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:07.634690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:07.634786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.639174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.639311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:07.639522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.639593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:07.639648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:07.639677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:16:07.641520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.641567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:07.641597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:07.643239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.643307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.643356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.643395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.648808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:07.651366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:07.651645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:07.652832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.652982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:07.653055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.653356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:07.653428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.653631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:07.653712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:07.655760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.655813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.655996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.656049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:07.656449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.656507Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:07.656600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.656634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.656696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.656726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.656769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:07.656813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.656854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:16:07.656883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:07.656944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:07.657000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:07.657042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:07.658869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.658998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.659052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:43.431988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:16:43.432130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:43.435295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:16:43.435415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:16:43.436219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:43.436333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:43.436382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:16:43.436488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:16:43.436603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:43.448577Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3460:5424], attempt# 0 2025-04-04T17:16:43.473034Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3460:5424], sender# [1:3459:5423] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:63613 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 292B37B8-B58D-4B79-9310-EE5FE8AFB3C9 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-04-04T17:16:43.484266Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3460:5424], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:16:43.488403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:43.488492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:16:43.488681Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:43.488708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:63613 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 20EAED88-D295-4649-A462-0969E4C4AA8F amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-04-04T17:16:43.489244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:43.489304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:43.489926Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3460:5424], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-04-04T17:16:43.490278Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3459:5423] 2025-04-04T17:16:43.490637Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3460:5424], sender# [1:3459:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-04-04T17:16:43.491107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:43.491205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:16:43.491233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:16:43.491261Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:16:43.491290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:16:43.491352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:63613 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CC2381C6-D04D-490C-A4A1-CAE76D170FAB amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-04-04T17:16:43.495642Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3460:5424], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-04-04T17:16:43.495699Z node 1 
:DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3460:5424], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-04-04T17:16:43.495962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:16:43.496058Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3459:5423], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T17:16:43.532862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T17:16:43.532945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:16:43.533109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T17:16:43.533224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T17:16:43.533294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:43.533328Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:43.533367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:43.533420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:43.533564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:43.537554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:43.537991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:43.538042Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:16:43.538144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:43.538187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:43.538403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2025-04-04T17:16:43.538441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:43.538494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:16:43.538573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:16:43.538630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:43.538681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:16:43.538723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:43.538828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:43.544117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:43.544181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3444:5409] TestWaitNotification: OK eventTxId 102 >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 26972, MsgBus: 62623 2025-04-04T17:16:33.823980Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508106228255865:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:33.824471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d42/r3tmp/tmppXHsoJ/pdisk_1.dat 2025-04-04T17:16:34.228183Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:34.232309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:34.232408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:34.234866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26972, node 1 2025-04-04T17:16:34.315382Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:34.315406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:34.315412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:34.315528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62623 TClient is connected to server localhost:62623 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:34.811895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.826292Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:34.836824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.946036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.079083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.153554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.804316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119113159383:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.804412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:37.044286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.073820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.103920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.133464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.165344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.201572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.299442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508123408127197:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:37.299555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:37.300896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508123408127202:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:37.304953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:37.318102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508123408127204:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:37.387330Z node 1 :TX_PROXY ERROR: Actor# [1:7489508123408127258:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:38.223103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.644209Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095074:2488] TxId: 281474976710675. Ctx: { TraceId: 01jr0wvys218ja56dr2w94gzbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-04-04T17:16:38.644272Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-04-04T17:16:38.644492Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key sets: 1 2025-04-04T17:16:38.644642Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T17:16:38.644683Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095074:2488] TxId: 281474976710675. Ctx: { TraceId: 01jr0wvys218ja56dr2w94gzbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-04-04T17:16:38.644847Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-04-04T17:16:38.645013Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710675. Shard resolve complete, resolved shards: 1 2025-04-04T17:16:38.645042Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095074:2488] TxId: 281474976710675. Ctx: { TraceId: 01jr0wvys218ja56dr2w94gzbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-04-04T17:16:38.645071Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095074:2488] TxId: 281474976710675. Ctx: { TraceId: 01jr0wvys218ja56dr2w94gzbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2025-04-04T17:16:38.645123Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. 
Ctx: { TraceId: 01jr0wvys218ja56dr2w94gzbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1743786998583} 2025-04-04T17:16:38.645354Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7489508127703095074:2488] TxId: 281474976710675. Ctx: { TraceId: 01jr0wvys218ja56dr2w94gzbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:16:38.645386Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095074:2488] TxId: 281474976710675. Ctx: { TraceId: 01jr0wvys218ja56dr2w94gzbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7489508127703095078:2488], 2025-04-04T17:16:38.645416Z nod ... se: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037919, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976710675 DataShard: 72075186224037919 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 16 HasWrites: true } SendingShards: 72075186224037919 ReceivingShards: 72075186224037919 Op: Commit, immediate: 1 2025-04-04T17:16:38.863529Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-04T17:16:38.863561Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:16:38.863577Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037919 not finished yet: Executing 2025-04-04T17:16:38.863596Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037919 (Executing), 2025-04-04T17:16:38.863611Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-04T17:16:38.867770Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037919, status: COMPLETE, error: 2025-04-04T17:16:38.867865Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:16:38.867894Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489508127703095147:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wvyyj5ad9ct9qhcvf80m2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ0M2VhZTQtMzFhZWI4ZTgtNzc5ODM3YzgtNjc5ZjJkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 Trying to start YDB, gRPC: 22828, MsgBus: 23967 2025-04-04T17:16:39.639520Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508135947824742:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:39.639591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d42/r3tmp/tmp0CY6sY/pdisk_1.dat 2025-04-04T17:16:39.712327Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22828, node 2 2025-04-04T17:16:39.766125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:39.766193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:39.767851Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:39.778965Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:39.778986Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:39.778992Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:39.779097Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23967 TClient is connected to server localhost:23967 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:40.153568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:40.160044Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:40.170378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:40.223196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:40.353663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:40.423040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.304634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508148832728389:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:42.304710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:42.344012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.407213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.437694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.471753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.503021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.572968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.624117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508148832728905:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:42.624212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:42.624460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508148832728910:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:42.627817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:42.634949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508148832728912:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:42.728242Z node 2 :TX_PROXY ERROR: Actor# [2:7489508148832728967:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:43.615767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ReplaceDuplicates >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> KqpEffects::InsertAbort_Select_Success >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:16:07.374347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:16:07.374487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.374526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:16:07.374564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:16:07.375742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:16:07.375792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:16:07.375914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:16:07.376000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:16:07.377662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.458409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:16:07.458456Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.469055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:07.469263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:16:07.469386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:16:07.478360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:16:07.478630Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:16:07.479179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.480002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:16:07.484416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.485914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:16:07.485971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.486002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:16:07.486096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.490863Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:16:07.645882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:16:07.646097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.646345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:16:07.646612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:16:07.646669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.648630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.648753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:16:07.648935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.649004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:16:07.649046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:16:07.649090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2025-04-04T17:16:07.650706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.650772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:16:07.650813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:16:07.652323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.652376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.652424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.652462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.655853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:16:07.657404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:16:07.657583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:16:07.658635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:07.658758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:16:07.658804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.659063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:16:07.659112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:16:07.659245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:16:07.659318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:16:07.661148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:16:07.661191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:07.661333Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:16:07.661365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:16:07.661695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:16:07.661739Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:16:07.661827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.661856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.661890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:16:07.661917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.661949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:16:07.661983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:16:07.662012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 1:0 2025-04-04T17:16:07.662036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:16:07.662101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:16:07.662149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:16:07.662180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:16:07.663926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.664017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:16:07.664049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ksum: } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8773 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EA66C405-2F35-414C-8F27-B153AA9932FF amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-04-04T17:16:44.086138Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3461:5425], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-04-04T17:16:44.086360Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3460:5424] 2025-04-04T17:16:44.086525Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3461:5425], sender# [1:3460:5424], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8773 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A2AAF15D-D504-4A7C-A5CC-BE2324FCA944 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-04-04T17:16:44.089438Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3461:5425], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-04-04T17:16:44.089650Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3460:5424] 2025-04-04T17:16:44.089725Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3461:5425], sender# [1:3460:5424], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8773 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E86E3409-2109-4DCC-BB14-0C523FCCFE35 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-04-04T17:16:44.093036Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3461:5425], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-04-04T17:16:44.093087Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3461:5425], success# 1, error# , multipart# 1, uploadId# 1 2025-04-04T17:16:44.097943Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3461:5425], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:8773 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
934C89FD-8AE5-491B-A585-72583FA2A7FF amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-04-04T17:16:44.109682Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3461:5425], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-04-04T17:16:44.110063Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3460:5424], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-04-04T17:16:44.141484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T17:16:44.141549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:16:44.141694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T17:16:44.141789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-04-04T17:16:44.141848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:16:44.141882Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes have been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:44.141916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:16:44.141954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:16:44.142094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:16:44.145176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:44.145621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:16:44.145678Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:16:44.145769Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:44.145800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:44.145836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:16:44.145870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:44.145917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:16:44.145981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-04-04T17:16:44.146014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:16:44.146047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 102:0 2025-04-04T17:16:44.146070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:16:44.146155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:16:44.149034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:16:44.149083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3444:5409] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2025-04-04T17:14:57.445929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507694139586400:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:57.445966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017bf/r3tmp/tmpPl9mGf/pdisk_1.dat 2025-04-04T17:14:57.853273Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:14:57.853597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:57.853685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:57.858532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6072, node 1 2025-04-04T17:14:57.942697Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:57.942716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:57.942723Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:57.942824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63240 WaitRootIsUp 'Root'... 
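The data_00.csv.zst export earlier in this log is a standard S3 multipart upload: each PUT carries partNumber and uploadId query parameters plus a base64 Content-MD5 header, the mock echoes each part's MD5 back as its ETag (the zero-byte final part hashes to d41d8cd98f00b204e9800998ecf8427e, the MD5 of the empty string), and a closing POST with the accumulated part list completes the object. A minimal sketch of the same exchange using boto3; the endpoint URL, bucket name, credentials, and part contents are assumptions for illustration, not values from this run:

    import base64, hashlib
    import boto3

    # Assumed mock endpoint, standing in for the test's S3_MOCK on localhost:8773.
    s3 = boto3.client("s3", endpoint_url="http://localhost:8773",
                      aws_access_key_id="key", aws_secret_access_key="secret")
    bucket, key = "test-bucket", "data_00.csv.zst"

    upload_id = s3.create_multipart_upload(Bucket=bucket, Key=key)["UploadId"]
    parts = []
    # Real S3 requires >= 5 MiB for every non-final part; mocks are typically laxer.
    for number, chunk in enumerate([b"x" * 55, b"y" * 55, b""], start=1):
        md5_b64 = base64.b64encode(hashlib.md5(chunk).digest()).decode()  # Content-MD5
        resp = s3.upload_part(Bucket=bucket, Key=key, UploadId=upload_id,
                              PartNumber=number, Body=chunk, ContentMD5=md5_b64)
        parts.append({"PartNumber": number, "ETag": resp["ETag"]})

    # The final POST /data_00.csv.zst?uploadId=... seen above, with the part
    # list serialized as XML in the request body.
    s3.complete_multipart_upload(Bucket=bucket, Key=key, UploadId=upload_id,
                                 MultipartUpload={"Parts": parts})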
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:58.221575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:58.239403Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:15:00.425995Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:15:00.426199Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:00.426222Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-04-04T17:15:00.429814Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA== 2025-04-04T17:15:00.430199Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-04-04T17:15:00.442936Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507707024488927:2328], Start check tables existence, number paths: 2 2025-04-04T17:15:00.443151Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:00.444521Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507707024488927:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:15:00.444614Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507707024488927:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:15:00.444651Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507707024488927:2328], Successfully finished 2025-04-04T17:15:00.444708Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exist: 0 2025-04-04T17:15:00.454359Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507707024488949:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:00.459030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:00.463292Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507707024488949:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-04T17:15:00.465753Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507707024488949:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:15:00.485348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507707024488949:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:15:00.566678Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507707024488949:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:00.571807Z node 1 :TX_PROXY ERROR: Actor# [1:7489507707024489002:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:00.571909Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507707024488949:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:15:00.574496Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-04-04T17:15:00.574534Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-04-04T17:15:00.574663Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507707024489009:2331], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:15:00.576133Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507707024489009:2331], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:15:00.576179Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-04-04T17:15:00.576197Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-04T17:15:00.576487Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507707024489018:2332], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-04T17:15:00.581538Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507707024489018:2332], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-04-04T17:15:00.603972Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2025-04-04T17:15:00.603997Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:15:00.604041Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507707024489030:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:15:00.604208Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: ReadyState, TraceId: 01jr0wrz3v1yr9ndfznyfqd9xm, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-04T17:15:00.607084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507707024489030:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:00.607187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:15:00.800939Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507707024489018:2332], DatabaseId: Root, PoolId: sample_pool_id, Got delete notification 2025-04-04T17:15:00.804426Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: ExecuteState, TraceId: 01jr0wrz3v1yr9ndfznyfqd9xm, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7489507707024489031:2329] WorkloadServiceCleanup: 0 2025-04-04T17:15:00.805810Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: CleanupState, TraceId: 01jr0wrz3v1yr9ndfznyfqd9xm, EndCleanup, isFinal: 0 2025-04-04T17:15:00.805873Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: CleanupState, TraceId: 01jr0wrz3v1yr9ndfznyfqd9xm, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7489507694139586635:2276] 2025-04-04T17:15:00.824317Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:15:00.824372Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:15:00.824392Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODBiYmU3ZS0zNzU1Yzg1MC1mM2Q2MmMyZS00MjI5N2I5OA==, ActorId: [1:7489507707024488932:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025- ... 
ode_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, Sending to Executer TraceId: 0 8 2025-04-04T17:16:43.830168Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, Created new KQP executer: [10:7489508149979855144:2544] isRollback: 0 2025-04-04T17:16:43.838043Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:16:43.838201Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, txInfo Status: Committed Kind: ReadWrite TotalDuration: 20.327 ServerDuration: 20.149 QueriesCount: 2 2025-04-04T17:16:43.838359Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:16:43.838436Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:43.838469Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, EndCleanup, isFinal: 0 2025-04-04T17:16:43.838526Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3x90hz04dxz7eqam4jq, Sent query response back to proxy, proxyRequestId: 28, proxyId: [10:7489508051195605973:2276] 2025-04-04T17:16:43.839392Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, TxId: 2025-04-04T17:16:43.839525Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= 
CurrentUtcTimestamp(); 2025-04-04T17:16:43.840377Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ReadyState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, received request, proxyRequestId: 29 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [10:7489508149979855150:2551] database: /Root databaseId: /Root pool id: default 2025-04-04T17:16:43.840426Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ReadyState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, request placed into pool from cache: default 2025-04-04T17:16:43.841020Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, ExecutePhyTx, tx: 0x000050C000146098 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-04-04T17:16:43.841211Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, Sending to Executer TraceId: 0 8 2025-04-04T17:16:43.841297Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, Created new KQP executer: [10:7489508149979855154:2544] isRollback: 0 2025-04-04T17:16:43.847028Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-04-04T17:16:43.847126Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, ExecutePhyTx, tx: 0x000050C000366DD8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-04-04T17:16:43.848170Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-04-04T17:16:43.848342Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, txInfo Status: Committed Kind: ReadOnly TotalDuration: 7.419 ServerDuration: 7.315 QueriesCount: 
2 2025-04-04T17:16:43.848516Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-04-04T17:16:43.848591Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:43.848625Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, EndCleanup, isFinal: 0 2025-04-04T17:16:43.848717Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ExecuteState, TraceId: 01jr0ww3y0afg7xf75zcz2gf5r, Sent query response back to proxy, proxyRequestId: 29, proxyId: [10:7489508051195605973:2276] 2025-04-04T17:16:43.849100Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, TxId: 2025-04-04T17:16:43.849209Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, TxId: 2025-04-04T17:16:43.849495Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:16:43.849529Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:43.849556Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:16:43.849610Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:16:43.849697Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Y2MyYzBjMmYtNzYzMTgwZTItZDNlMWNhY2ItOTZjMzM4YTg=, ActorId: [10:7489508149979855120:2544], ActorState: unknown state, Session actor destroyed 2025-04-04T17:16:43.866285Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=MzUwYTE1NjktY2VmMWEzZGItOGQxZGEwNzgtMWNjOTg1MDg=, ActorId: [10:7489508072670442864:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:16:43.866349Z node 10 :KQP_SESSION INFO: 
SessionId: ydb://session/3?node_id=10&id=MzUwYTE1NjktY2VmMWEzZGItOGQxZGEwNzgtMWNjOTg1MDg=, ActorId: [10:7489508072670442864:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:43.866404Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzUwYTE1NjktY2VmMWEzZGItOGQxZGEwNzgtMWNjOTg1MDg=, ActorId: [10:7489508072670442864:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:16:43.866439Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzUwYTE1NjktY2VmMWEzZGItOGQxZGEwNzgtMWNjOTg1MDg=, ActorId: [10:7489508072670442864:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:16:43.866575Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=MzUwYTE1NjktY2VmMWEzZGItOGQxZGEwNzgtMWNjOTg1MDg=, ActorId: [10:7489508072670442864:2329], ActorState: unknown state, Session actor destroyed >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> TKesusTest::TestAcquireSemaphore [GOOD] |90.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61927, MsgBus: 2061 2025-04-04T17:16:35.148878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508116318440474:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:35.149066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d40/r3tmp/tmpE57pFY/pdisk_1.dat 2025-04-04T17:16:35.458440Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61927, node 1 2025-04-04T17:16:35.537997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:35.538085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:35.554823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:35.574285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:35.574315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:35.574320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:35.574419Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2061 TClient is connected to server localhost:2061 WaitRootIsUp 'Root'... 
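The TRefreshPoolStateQuery text quoted above is how the workload service validates leases: after bumping the lease in a read-write transaction, it runs a read-only transaction that counts delayed and running requests whose lease_deadline is still in the future. A sketch of issuing that same YQL through the YDB Python SDK; the query text is taken verbatim from the trace, while the endpoint, database path, and the use of SessionPool with a prepared query are client-side assumptions:

    import ydb

    LEASE_QUERY = """
    -- TRefreshPoolStateQuery::OnLeaseUpdated (query text verbatim from the trace)
    DECLARE $database_id AS Text;
    DECLARE $pool_id AS Text;
    SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests`
    WHERE database = $database_id AND pool_id = $pool_id
      AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp())
      AND lease_deadline >= CurrentUtcTimestamp();
    SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests`
    WHERE database = $database_id AND pool_id = $pool_id
      AND lease_deadline >= CurrentUtcTimestamp();
    """

    def refresh_pool_state(session):
        prepared = session.prepare(LEASE_QUERY)
        delayed, running = session.transaction(ydb.SerializableReadWrite()).execute(
            prepared,
            {"$database_id": "/Root", "$pool_id": "sample_pool_id"},
            commit_tx=True,
        )  # one result set per SELECT
        return delayed.rows[0].delayed_requests, running.rows[0].running_requests

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
    driver.wait(timeout=5)
    print(ydb.SessionPool(driver).retry_operation_sync(refresh_pool_state))
    driver.stop()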
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:36.121167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.152971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:36.286911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.450465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.507119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.029618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508129203344125:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.029720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.236032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.261193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.305211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.331722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.359516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.426516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.464568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508129203344641:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.464642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.464692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508129203344646:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.468885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:38.479716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508129203344648:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:38.556766Z node 1 :TX_PROXY ERROR: Actor# [1:7489508129203344702:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:39.326229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17317, MsgBus: 7712 2025-04-04T17:16:40.499125Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508139948475316:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:40.499216Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d40/r3tmp/tmpkG8Fbp/pdisk_1.dat 2025-04-04T17:16:40.594536Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17317, node 2 2025-04-04T17:16:40.634180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:40.634267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:40.635810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:40.645736Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:40.645762Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:40.645768Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:40.645860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7712 TClient is connected to server localhost:7712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:41.011052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
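Both test runs show the same benign race when provisioning a pool: TPoolCreatorActor's CREATE comes back with "Transaction ... completed, doublechecking", the retry hits "path exist, request accepts it", and the pool is then reported as successfully created. Resource pools are managed with ordinary YQL DDL; a sketch below, where the DROP statement mirrors the one the test executed verbatim earlier in this log, the CREATE settings are illustrative assumptions, and whether execute_scheme is the right entry point for this DDL may depend on SDK version:

    import ydb

    def recreate_pool(session):
        # CREATE settings are assumed for illustration, not taken from this run.
        session.execute_scheme(
            "CREATE RESOURCE POOL sample_pool_id WITH ("
            "  CONCURRENT_QUERY_LIMIT = 10,"
            "  QUEUE_SIZE = 100"
            ");"
        )
        # Matches the statement the test issued: DROP RESOURCE POOL sample_pool_id;
        session.execute_scheme("DROP RESOURCE POOL sample_pool_id;")

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed
    driver.wait(timeout=5)
    ydb.SessionPool(driver).retry_operation_sync(recreate_pool)
    driver.stop()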
2025-04-04T17:16:41.028849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:41.080311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.205700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:41.281864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:43.133912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508152833378986:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.134019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.154073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.183319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.213463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.241918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.309180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.379604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.464776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508152833379506:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.464855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.465176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508152833379511:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.468633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:43.481331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508152833379513:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:43.554496Z node 2 :TX_PROXY ERROR: Actor# [2:7489508152833379571:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:44.458820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20569, MsgBus: 3298 2025-04-04T17:16:35.230443Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508117153283530:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:35.230527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d3f/r3tmp/tmpgHMPev/pdisk_1.dat 2025-04-04T17:16:35.527378Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20569, node 1 2025-04-04T17:16:35.590841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:35.590864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:35.590871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:35.591023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:16:35.600141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:35.600274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:35.605265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3298 TClient is connected to server localhost:3298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:36.110010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.135891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.272815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.412487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.489499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:38.078257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508130038187197:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.078337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.338648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.367897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.392525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.422017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.454088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.486746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.535554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508130038187709:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.535612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.535804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508130038187714:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:38.538969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:38.547522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508130038187716:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:38.627725Z node 1 :TX_PROXY ERROR: Actor# [1:7489508130038187770:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:39.364403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28205, MsgBus: 11942 2025-04-04T17:16:40.414792Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508139605599417:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:40.414944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d3f/r3tmp/tmphFK5i0/pdisk_1.dat 2025-04-04T17:16:40.514726Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28205, node 2 2025-04-04T17:16:40.556575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:40.556675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:40.565225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:40.579905Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:40.579926Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:40.579930Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:40.580010Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11942 TClient is connected to server localhost:11942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:40.952052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:16:40.968444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.026480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:41.165617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:41.221696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:43.170868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508152490503078:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.170952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.204961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.237770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.264607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.297511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.326390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.358264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.400067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508152490503591:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.400127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.400189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508152490503596:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.403420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:43.414210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508152490503598:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:43.509141Z node 2 :TX_PROXY ERROR: Actor# [2:7489508152490503654:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:44.384065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-04-04T17:16:15.260646Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.260728Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.274951Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.275087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.294527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.295022Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=5206761124239919559, session=0, seqNo=0) 2025-04-04T17:16:15.295192Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:15.318039Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=5206761124239919559, session=1) 2025-04-04T17:16:15.318370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=7760913115325510070, session=0, seqNo=0) 2025-04-04T17:16:15.318500Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-04T17:16:15.330344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=7760913115325510070, session=2) 2025-04-04T17:16:15.331103Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-04T17:16:15.331277Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-04T17:16:15.331356Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-04T17:16:15.331506Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=222, session=2, semaphore="Lock2" count=1) 2025-04-04T17:16:15.331569Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-04T17:16:15.331615Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-04T17:16:15.331735Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=333, session=1, semaphore="Lock2" count=1) 2025-04-04T17:16:15.331790Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-04T17:16:15.343280Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=111) 2025-04-04T17:16:15.343350Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=222) 2025-04-04T17:16:15.343373Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=333) 2025-04-04T17:16:15.343856Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:145:2169], cookie=12612640171545073752, name="Lock1") 2025-04-04T17:16:15.343929Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:145:2169], cookie=12612640171545073752) 2025-04-04T17:16:15.344298Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:148:2172], cookie=10370062282397176748, name="Lock2") 2025-04-04T17:16:15.344355Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:148:2172], cookie=10370062282397176748) 2025-04-04T17:16:15.355620Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:15.355739Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:15.356363Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:15.356996Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:15.403675Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:15.403857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-04T17:16:15.403902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-04-04T17:16:15.403941Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-04-04T17:16:15.404255Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:188:2202], cookie=10691622915240201520, name="Lock1") 2025-04-04T17:16:15.404344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:188:2202], cookie=10691622915240201520) 2025-04-04T17:16:15.404832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:196:2209], cookie=1312176325216161905, name="Lock2") 2025-04-04T17:16:15.404897Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:196:2209], cookie=1312176325216161905) 2025-04-04T17:16:15.734030Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.746143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.993290Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.004947Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.252166Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.264005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.528873Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.540477Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.797224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.809006Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.050070Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.062486Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.309638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.321500Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.568050Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.579511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.836806Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.848568Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.128652Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.140988Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.399853Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.412311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.670684Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.682912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.955919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.968697Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.258950Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.270936Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.580771Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.598212Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.872914Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.888372Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.162254Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.182912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.454402Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.470946Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.752252Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.766948Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.086479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.099941Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.356937Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.368942Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.638428Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.650850Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.910160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.922934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.194502Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.207064Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.474910Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.487257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.751242Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.765106Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.027541Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.039648Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.296087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.311086Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.589797Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.601835Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.887198Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:363:2364], cookie=9005361781347595705, name="Lock1") 2025-04-04T17:16:23.887294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:363:2364], cookie=9005361781347595705) 2025-04-04T17:16:23.887773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:366:2367], cookie=14388439884738159180, name="Lock2") 2025-04-04T17:16:23.887837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:366:2367], cookie=14388439884738159180) 2025-04-04T17:16:23.929925Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.942096Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.195525Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.207478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.474314Z node 1 :KESUS_TABLET DEBUG: [720575 ... 
25-04-04T17:16:40.018144Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:40.293043Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:40.305934Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:40.573819Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:40.587065Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:40.849653Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:40.862049Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:41.134541Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:41.147928Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:41.448195Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:41.460362Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:41.729970Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:41.741873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:42.006473Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:42.019478Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:42.277929Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:42.290839Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:42.574515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:42.590780Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:42.883677Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:42.896685Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:43.164230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:43.182606Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:43.463807Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:43.479998Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:43.760122Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:43.775060Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:44.048392Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:44.060314Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:44.312012Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:44.327179Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:44.587885Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:44.599777Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:44.889328Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:44.901792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:45.163303Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-04T17:16:45.177764Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:45.446494Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:45.460396Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:45.705267Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-04T17:16:45.705370Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-04T17:16:45.705416Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-04-04T17:16:45.705498Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-04T17:16:45.705547Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-04-04T17:16:45.705574Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-04T17:16:45.718915Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-04T17:16:45.719548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:330:2343], cookie=7508585628727130003, name="Lock1") 2025-04-04T17:16:45.719624Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:330:2343], cookie=7508585628727130003) 2025-04-04T17:16:45.720067Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:333:2346], cookie=12857369017469828209, name="Lock2") 2025-04-04T17:16:45.720124Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:333:2346], cookie=12857369017469828209) 2025-04-04T17:16:45.720513Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:336:2349], cookie=17232261003650651831) 2025-04-04T17:16:45.720566Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:336:2349], cookie=17232261003650651831) 2025-04-04T17:16:45.738521Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:45.738629Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:45.739152Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:45.739659Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:45.798959Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:45.799097Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-04-04T17:16:45.799139Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-04-04T17:16:45.799446Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:376:2379], cookie=16517125362498479357) 2025-04-04T17:16:45.799516Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:376:2379], cookie=16517125362498479357) 2025-04-04T17:16:45.800104Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:383:2385], cookie=9019198930891705293, name="Lock1") 2025-04-04T17:16:45.800181Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:383:2385], cookie=9019198930891705293) 2025-04-04T17:16:45.800656Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:386:2388], cookie=2835806014602583909, name="Lock2") 2025-04-04T17:16:45.800717Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:386:2388], cookie=2835806014602583909) 2025-04-04T17:16:46.185071Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:46.185169Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:46.209270Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:46.209395Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:46.241505Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:46.242009Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=7624180983351612049, session=0, seqNo=0) 2025-04-04T17:16:46.242163Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:46.254258Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=7624180983351612049, session=1) 2025-04-04T17:16:46.254547Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=10419248761119400501, session=0, seqNo=0) 2025-04-04T17:16:46.254699Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-04T17:16:46.267481Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=10419248761119400501, session=2) 2025-04-04T17:16:46.267791Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=111, session=1, semaphore="Sem1" count=1) 2025-04-04T17:16:46.281651Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=111) 2025-04-04T17:16:46.282147Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2168], cookie=6339923576258988037, name="Sem1", limit=1) 2025-04-04T17:16:46.282296Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-04T17:16:46.295147Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2168], cookie=6339923576258988037) 2025-04-04T17:16:46.295565Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-04-04T17:16:46.310522Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=333) 2025-04-04T17:16:46.310846Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=222, session=1, semaphore="Sem1" count=1) 2025-04-04T17:16:46.310977Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-04T17:16:46.311150Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=333, session=2, semaphore="Sem1" count=1) 2025-04-04T17:16:46.326905Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=222) 2025-04-04T17:16:46.326984Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=333) 2025-04-04T17:16:46.327548Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=870458170346805791, name="Sem1") 2025-04-04T17:16:46.327632Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=870458170346805791) 2025-04-04T17:16:46.328066Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2181], cookie=12419729982397997891, name="Sem1") 2025-04-04T17:16:46.328133Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2181], cookie=12419729982397997891) 2025-04-04T17:16:46.328528Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:160:2184], cookie=2289300637674475775, name="Sem1", force=0) 2025-04-04T17:16:46.340460Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:160:2184], cookie=2289300637674475775) 2025-04-04T17:16:46.340943Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:165:2189], cookie=3601177348759725171, name="Sem1", force=1) 2025-04-04T17:16:46.341022Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-04-04T17:16:46.358658Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:165:2189], cookie=3601177348759725171) >> KqpInplaceUpdate::SingleRowIf+UseSink >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13621, MsgBus: 3164 2025-04-04T17:16:36.591117Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508123078297315:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:36.591177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d3b/r3tmp/tmpKCuCbg/pdisk_1.dat 2025-04-04T17:16:36.942964Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13621, node 1 2025-04-04T17:16:37.001067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:37.002262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:37.004568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:37.019426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:37.019444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-04T17:16:37.019452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:37.020085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3164 TClient is connected to server localhost:3164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:37.484662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:37.505264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:37.660250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:37.793475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:37.842906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.367944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508135963200792:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:39.368036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:39.594493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.664989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.729179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.756947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.788625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.821033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.865130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508135963201308:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:39.865246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:39.865396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508135963201313:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:39.868355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:39.875811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508135963201315:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:39.950201Z node 1 :TX_PROXY ERROR: Actor# [1:7489508135963201368:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:40.699834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.048773Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-04-04T17:16:41.058132Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:41.058297Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:41.058473Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508144553136420:2496], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7489508140258168955:2496]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7489508144553136420:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:16:41.058914Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508144553136404:2496], SessionActorId: [1:7489508140258168955:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489508140258168955:2496]. isRollback=0 2025-04-04T17:16:41.059170Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTVlMDQwOWUtNDRkNTdlZTUtNDA5MWRlOWMtZTMyNGFiODk=, ActorId: [1:7489508140258168955:2496], ActorState: ExecuteState, TraceId: 01jr0ww10705tza760vq5vz6g4, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489508144553136414:2496] from: [1:7489508144553136404:2496] 2025-04-04T17:16:41.059300Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508144553136414:2496] TxId: 281474976710674. Ctx: { TraceId: 01jr0ww10705tza760vq5vz6g4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVlMDQwOWUtNDRkNTdlZTUtNDA5MWRlOWMtZTMyNGFiODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:16:41.060129Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTVlMDQwOWUtNDRkNTdlZTUtNDA5MWRlOWMtZTMyNGFiODk=, ActorId: [1:7489508140258168955:2496], ActorState: ExecuteState, TraceId: 01jr0ww10705tza760vq5vz6g4, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8755, MsgBus: 61557 2025-04-04T17:16:41.853271Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508141550791677:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:41.875857Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d3b/r3tmp/tmpeKMa78/pdisk_1.dat 2025-04-04T17:16:41.972636Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8755, node 2 2025-04-04T17:16:42.011579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:42.011683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:42.013441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:42.031414Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:42.031436Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:42.031443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:42.031547Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61557 TClient is connected to server localhost:61557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:42.401824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:42.419288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:42.499768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:42.622124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:42.700106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.775312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508154435695178:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:44.775403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:44.818976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.852043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.883968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.913304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.949379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.979250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:45.035934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508158730662987:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:45.036020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:45.036333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508158730662992:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:45.039858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:45.058627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508158730662994:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:45.112396Z node 2 :TX_PROXY ERROR: Actor# [2:7489508158730663047:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:45.993701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.654298Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508163025630815:2518], TxId: 281474976715676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MTQ5NDc5NGMtYTRiMjAzZjctNTU5ZGFiMTgtNzY0YTQyNA==. TraceId : 01jr0ww69q39v9pm6ze6w5asab. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-04T17:16:46.654706Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508163025630816:2519], TxId: 281474976715676, task: 2. Ctx: { TraceId : 01jr0ww69q39v9pm6ze6w5asab. SessionId : ydb://session/3?node_id=2&id=MTQ5NDc5NGMtYTRiMjAzZjctNTU5ZGFiMTgtNzY0YTQyNA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508163025630812:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:46.655064Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTQ5NDc5NGMtYTRiMjAzZjctNTU5ZGFiMTgtNzY0YTQyNA==, ActorId: [2:7489508158730663303:2488], ActorState: ExecuteState, TraceId: 01jr0ww69q39v9pm6ze6w5asab, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 14988, MsgBus: 25301 2025-04-04T17:16:32.455168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508101866554505:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:32.455220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d52/r3tmp/tmpA5FgVe/pdisk_1.dat 2025-04-04T17:16:32.854693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:32.871087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:32.871175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:32.872709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14988, node 1 2025-04-04T17:16:32.976063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:32.976122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:32.976127Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:32.976246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25301 TClient is connected to server localhost:25301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:33.514533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:33.548023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:33.563858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.708715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.861952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:33.945507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:35.550223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508114751457959:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.550320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:35.789850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.821906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.848561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.873604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.899458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:35.936034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.024517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119046425770:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.024561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.024757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119046425775:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.027860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:36.040101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508119046425777:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.125638Z node 1 :TX_PROXY ERROR: Actor# [1:7489508119046425832:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:36.979172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.454539Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508101866554505:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:37.454657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24151, MsgBus: 23000 2025-04-04T17:16:38.336576Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508127917925695:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.336611Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d52/r3tmp/tmpMcvGJG/pdisk_1.dat 2025-04-04T17:16:38.470862Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24151, node 2 2025-04-04T17:16:38.498627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:38.498701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:38.504578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:38.539207Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:38.539239Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:38.539245Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:38.539359Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23000 TClient is connected to server localhost:23000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:39.032862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:39.045083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:16:39.123817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.281767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:39.356338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:41.217760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508140802829338:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.217850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.254289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.331864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.379069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.411273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.441406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.471492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:41.552552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508140802829858:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.552624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.552684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508140802829863:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:41.555075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:41.564054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508140802829865:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:41.662091Z node 2 :TX_PROXY ERROR: Actor# [2:7489508140802829921:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:42.589342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.661543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:16:42.702392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:16:43.338428Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508127917925695:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:43.338489Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpExplain::PrecomputeRange >> KqpQuery::DdlInDataQuery >> KqpTypes::QuerySpecialTypes >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpQuery::QueryTimeout >> KqpQuery::QueryCacheTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 18398, MsgBus: 22820 2025-04-04T17:16:33.104172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508106806769731:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:33.104449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d44/r3tmp/tmpDsKeKa/pdisk_1.dat 2025-04-04T17:16:33.604493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:33.608933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:33.609029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:33.611966Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18398, node 1 2025-04-04T17:16:33.693791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:33.693811Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:33.693820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:33.693910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22820 TClient is connected to server localhost:22820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:34.205060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.236903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:34.248344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.372704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.564437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:34.649012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:36.108577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119691673257:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.108680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.374270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.409095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.474518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.545461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.576805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.646556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:36.728085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119691673783:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.728154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.728375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508119691673788:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:36.732183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:36.743296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508119691673790:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:36.831014Z node 1 :TX_PROXY ERROR: Actor# [1:7489508119691673846:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:37.635459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.703200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:16:37.731986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:16:38.104259Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508106806769731:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:38.104320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19819, MsgBus: 1250 2025-04-04T17:16:42.543171Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508148567360802:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:42.548198Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d44/r3tmp/tmpkM9uho/pdisk_1.dat 2025-04-04T17:16:42.700557Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:42.713761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:42.713848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:42.715849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19819, node 2 2025-04-04T17:16:42.768398Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:42.768416Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:42.768424Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:42.768519Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1250 TClient is connected to server localhost:1250 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:43.172018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:43.180685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:43.193828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:43.271262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:43.449282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:43.518056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:45.769800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508161452264362:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:45.769890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:45.804175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:45.841661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:45.883266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:45.913961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:45.946041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:45.979475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.057162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508165747232172:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.057266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.057473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508165747232177:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.061073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:46.074085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508165747232179:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:46.141754Z node 2 :TX_PROXY ERROR: Actor# [2:7489508165747232232:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:47.019790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:47.542383Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508148567360802:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:47.542442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> KqpQuery::RowsLimit >> KqpStats::RequestUnitForBadRequestExecute >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink >> KqpStats::MultiTxStatsFullYql >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> KqpQuery::QueryClientTimeout >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::Interactive >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2025-04-04T17:13:08.447575Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507226233164778:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:08.452756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507228187536504:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:08.456768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:08.452823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:08.777366Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e12/r3tmp/tmpGAg5lF/pdisk_1.dat 2025-04-04T17:13:08.832688Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:13:09.203190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:09.203315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:09.206911Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:09.206977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:09.239461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:09.239818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:09.245980Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:13:09.247479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14104, node 1 2025-04-04T17:13:09.394910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e12/r3tmp/yandexfJZIts.tmp 2025-04-04T17:13:09.394946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e12/r3tmp/yandexfJZIts.tmp 2025-04-04T17:13:09.395124Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e12/r3tmp/yandexfJZIts.tmp 2025-04-04T17:13:09.395257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:09.453422Z INFO: TTestServer started on Port 17849 GrpcPort 14104 TClient is connected to server localhost:17849 PQClient connected to localhost:14104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:09.796859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:09.862905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:13:11.978732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507241072439324:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.978946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.979349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507241072439339:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.983185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:13:12.018536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507241072439341:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:13:12.212558Z node 1 :TX_PROXY ERROR: Actor# [1:7489507245367406726:2766] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:12.237225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:12.240577Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507243413034312:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:12.240846Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdiNThhZjUtZDM3OGUwODItNjNkZmNhYjAtMmFkOWUyNTE=, ActorId: [2:7489507243413034288:2312], ActorState: ExecuteState, TraceId: 01jr0wnn2529z91r8arpg3bhr1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:12.243029Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:12.259182Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507245367406743:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:12.260082Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzVkMGVhMDItMzY4Y2VjM2QtYzllY2IzMGUtZTI5YzVjZDM=, ActorId: [1:7489507241072439322:2337], ActorState: ExecuteState, TraceId: 01jr0wnn0z7zvve3fwxrjca75s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:12.260545Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:12.312266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:12.387697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:13:12.635525Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0wnnhgcqpsc7nbqx35j5yr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg1MzVjOGEtMjA4N2Y2OWMtM2M1ZDMxOWMtZGQzMjIyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489507245367407177:3098] 2025-04-04T17:13:13.418299Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507228187536504:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:13.418439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:13.447399Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507226233164778:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:13.447474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:14104 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-04T17:13:18.755661Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2025-04-04T17:13:18.755700Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2025-04-04T17:13:18.755710Z node 1 :PQ_METACACHE DEBUG: Describe all topics - send empty response 2025-04-04T17:13:18.755727Z node 1 :PQ_METACACHE DEBUG: Send describe all topics response with 0 topics 2025-04-04T17:13:18.755830Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, ... DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 Process answer. Aval parts: 1 2025-04-04T17:16:46.902364Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:16:46.902443Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 4 2025-04-04T17:16:46.902642Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:16:46.902674Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 4 2025-04-04T17:16:46.902901Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] read cookie 32 Topic 'rt3.dc1--topic1' partition 4 user $without_consumer offset 38 count 2 size 168 endOffset 40 max time lag 0ms effective offset 38 2025-04-04T17:16:46.902961Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] read cookie 32 added 0 blobs, size 0 count 0 last offset 38, current partition end offset: 40 2025-04-04T17:16:46.903051Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] Reading cookie 32. All data is from uncompacted head. 2025-04-04T17:16:46.903095Z node 25 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:16:46.903246Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] read cookie 33 Topic 'rt3.dc1--topic1' partition 4 user $without_consumer offset 35 count 6 size 290 endOffset 40 max time lag 0ms effective offset 35 2025-04-04T17:16:46.903285Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] read cookie 33 added 0 blobs, size 0 count 0 last offset 35, current partition end offset: 40 Bytes readed: 352 Offset: 38 from session 5 2025-04-04T17:16:46.903368Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] Reading cookie 33. All data is from uncompacted head. 
Offset: 39 from session 5 2025-04-04T17:16:46.903389Z node 25 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:16:46.903510Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 4 messageNo: 0 requestId: cookie: 38 2025-04-04T17:16:46.903621Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 4 messageNo: 0 requestId: cookie: 35 Bytes readed: 522 Offset: 35 from session 5 Offset: 36 from session 5 Offset: 37 from session 5 2025-04-04T17:16:46.957717Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_2438846032833085692_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 5 offsets { end: 39 } } } } 2025-04-04T17:16:46.957805Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_2438846032833085692_v1 closed with error: reason# can't commit when reading without a consumer 2025-04-04T17:16:46.958122Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_2438846032833085692_v1 is DEAD 2025-04-04T17:16:46.958365Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2025-04-04T17:16:46.958455Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 got read request: guid# ece9466f-ac2f6466-81694222-5896ec81 2025-04-04T17:16:46.958508Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 performing read request: guid# 3d3ef326-b660561f-325a20f9-e4aaeab5, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 4(assignId:5), count# 2, size# 168, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-04T17:16:46.958765Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 4(assignId:5)maxCount 2 maxSize 168 maxTimeLagMs 0 readTimestampMs 0 readOffset 38 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid 3d3ef326-b660561f-325a20f9-e4aaeab5 2025-04-04T17:16:46.960020Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960114Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073156:2589] destroyed 2025-04-04T17:16:46.960147Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960167Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073154:2587] destroyed 2025-04-04T17:16:46.960322Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:16:46.960368Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 4 2025-04-04T17:16:46.960466Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960493Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073159:2595] destroyed 2025-04-04T17:16:46.960511Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960532Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073158:2594] destroyed 2025-04-04T17:16:46.960550Z node 25 :PERSQUEUE DEBUG: 
[PQ: 72075186224037892] Destroy direct read session _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960569Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073157:2591] destroyed 2025-04-04T17:16:46.960649Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960682Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960700Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960717Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960732Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_2438846032833085692_v1 2025-04-04T17:16:46.960847Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 4 user $without_consumer offset 38 count 2 size 168 endOffset 40 max time lag 0ms effective offset 38 2025-04-04T17:16:46.960892Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 38, current partition end offset: 40 2025-04-04T17:16:46.960981Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 4, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2025-04-04T17:16:46.961013Z node 25 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:16:46.961175Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 4 messageNo: 0 requestId: cookie: 38 2025-04-04T17:16:46.961970Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 4(assignId:5) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1743787006640 CreateTimestampMS: 1743787006637 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1743787006653 CreateTimestampMS: 1743787006650 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 39 WaitQuotaTimeMs: 0 } Cookie: 38 } 2025-04-04T17:16:46.962284Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 4(assignId:5) wait data in partition inited, cookie 1 from offset40 2025-04-04T17:16:46.962358Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 4(assignId:5) EndOffset 40 ReadOffset 40 ReadGuid 3d3ef326-b660561f-325a20f9-e4aaeab5 has messages 1 2025-04-04T17:16:46.962503Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 read done: guid# 3d3ef326-b660561f-325a20f9-e4aaeab5, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 4(assignId:5), size# 352 2025-04-04T17:16:46.962567Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 response to read: guid# 3d3ef326-b660561f-325a20f9-e4aaeab5 2025-04-04T17:16:46.962823Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 Process answer. Aval parts: 0 Bytes readed: 352 Offset: 38 from session 5 Offset: 39 from session 5 2025-04-04T17:16:46.963852Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_11921760309466912993_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 5 offsets { end: 39 } } } } 2025-04-04T17:16:46.963888Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_11921760309466912993_v1 closed with error: reason# can't commit when reading without a consumer 2025-04-04T17:16:46.964116Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_11921760309466912993_v1 is DEAD 2025-04-04T17:16:46.964871Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_11921760309466912993_v1 2025-04-04T17:16:46.964936Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073153:2592] destroyed 2025-04-04T17:16:46.964965Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_11921760309466912993_v1 2025-04-04T17:16:46.964984Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073151:2588] destroyed 2025-04-04T17:16:46.965003Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_11921760309466912993_v1 2025-04-04T17:16:46.965035Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073150:2586] destroyed 2025-04-04T17:16:46.965051Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_11921760309466912993_v1 2025-04-04T17:16:46.965082Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073152:2590] destroyed 2025-04-04T17:16:46.965105Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_11921760309466912993_v1 2025-04-04T17:16:46.965126Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7489508164343073155:2593] destroyed 2025-04-04T17:16:46.965199Z node 25 :PQ_READ_PROXY DEBUG: Direct read 
cache: server session deregistered: _24_1_11921760309466912993_v1 2025-04-04T17:16:46.965228Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_11921760309466912993_v1 2025-04-04T17:16:46.965245Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_11921760309466912993_v1 2025-04-04T17:16:46.965262Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_11921760309466912993_v1 2025-04-04T17:16:46.965295Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_11921760309466912993_v1 >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-04-04T17:16:13.320871Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:13.321023Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:13.342968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:13.343113Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:13.368344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:13.368906Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=9352913837169080772, session=0, seqNo=0) 2025-04-04T17:16:13.369089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:13.403237Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=9352913837169080772, session=1) 2025-04-04T17:16:13.404196Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=111, session=1, semaphore="Lock1" count=1) 2025-04-04T17:16:13.405792Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-04T17:16:13.405928Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-04T17:16:13.421436Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=111) 2025-04-04T17:16:13.421815Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:129:2155], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-04T17:16:13.434128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:129:2155], cookie=222) 2025-04-04T17:16:13.434706Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:145:2169], cookie=11493738887408019767, name="Lock1") 2025-04-04T17:16:13.434806Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:145:2169], cookie=11493738887408019767) 2025-04-04T17:16:13.874938Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:13.875038Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:13.894414Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:13.894546Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2025-04-04T17:16:13.918721Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:13.919594Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=1321104632110192590, session=0, seqNo=0) 2025-04-04T17:16:13.919752Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:13.931935Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=1321104632110192590, session=1) 2025-04-04T17:16:13.932279Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=15728088411753745776, session=0, seqNo=0) 2025-04-04T17:16:13.932401Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-04T17:16:13.947686Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=15728088411753745776, session=2) 2025-04-04T17:16:13.948774Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-04-04T17:16:13.948932Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-04-04T17:16:13.949023Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-04-04T17:16:13.961128Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=111) 2025-04-04T17:16:13.961477Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=112, session=1, semaphore="Lock2" count=1) 2025-04-04T17:16:13.961641Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-04-04T17:16:13.961733Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-04-04T17:16:13.973873Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=112) 2025-04-04T17:16:13.974229Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-04-04T17:16:13.974421Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-04T17:16:13.986556Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=222) 2025-04-04T17:16:13.986679Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=223) 2025-04-04T17:16:13.987154Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=333, session=2, semaphore="Lock1" count=1) 2025-04-04T17:16:13.987428Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-04-04T17:16:13.999868Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=333) 2025-04-04T17:16:13.999947Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=334) 2025-04-04T17:16:14.329783Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-04T17:16:14.346374Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:14.615363Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:14.628107Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:14.896245Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:14.913461Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.163314Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.176194Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.445294Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.462887Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.716601Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.730962Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.980837Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.992776Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.242926Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.255868Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.515524Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.527748Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.827572Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.839612Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.120545Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.132245Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.400762Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.412935Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.681191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.693054Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.951197Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.963350Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.254578Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.268396Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.552471Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.564578Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.835078Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.847775Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.127300Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.139235Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.417697Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-04T17:16:19.430632Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.738867Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.759032Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.056769Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.069159Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.339746Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.353566Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.624853Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.640832Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.914671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.931060Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.202711Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.218514Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.505638Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.518487Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.802603Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.822881Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.087860Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.100138Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.357441Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.370993Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.644383Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2025-04-04T17:16:22.644565Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2025-04-04T17:16:22.656610Z node 2 :KESUS_TABLET DEBUG: [ ... 
025-04-04T17:16:42.484807Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:42.765414Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:42.778509Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:43.043057Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:43.063289Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:43.334104Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:43.351510Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:43.621818Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:43.634829Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:43.908403Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:43.920690Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:44.205944Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:44.219899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:44.508256Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:44.520411Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:44.786679Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:44.806978Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:45.086569Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:45.110264Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:45.381499Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:45.399128Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:45.710091Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:45.726108Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:45.996200Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:46.019383Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:46.287020Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:46.302975Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:46.574573Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:46.590675Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:46.862590Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:46.882940Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:47.174617Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:47.190336Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:47.458599Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:47.475921Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:47.737551Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-04-04T17:16:47.749854Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:47.988877Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:48.000839Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:48.249005Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:48.261470Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:48.511143Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:48.522932Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:48.772605Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:48.784704Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:49.046476Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:49.064790Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:49.297106Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:49.309341Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:49.561176Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:49.573605Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:49.946777Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-04-04T17:16:49.946874Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-04-04T17:16:49.960302Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-04-04T17:16:49.987105Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:535:2531], cookie=9684667885395770614) 2025-04-04T17:16:49.987233Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:535:2531], cookie=9684667885395770614) 2025-04-04T17:16:49.987792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:538:2534], cookie=10274011494625330087) 2025-04-04T17:16:49.987861Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:538:2534], cookie=10274011494625330087) 2025-04-04T17:16:49.988376Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:541:2537], cookie=10877439392324815372, name="Lock1") 2025-04-04T17:16:49.988455Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:541:2537], cookie=10877439392324815372) 2025-04-04T17:16:49.988984Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:544:2540], cookie=10889375320851664232, name="Lock1") 2025-04-04T17:16:49.989049Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:544:2540], cookie=10889375320851664232) 2025-04-04T17:16:50.466268Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:50.466417Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:50.487850Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:50.488008Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2025-04-04T17:16:50.513831Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:50.514355Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=10828398066820557396, session=0, seqNo=0) 2025-04-04T17:16:50.514507Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:50.526906Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=10828398066820557396, session=1) 2025-04-04T17:16:50.527253Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=17072896499279647737, session=0, seqNo=0) 2025-04-04T17:16:50.527408Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-04-04T17:16:50.539938Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=17072896499279647737, session=2) 2025-04-04T17:16:50.540246Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:131:2157], cookie=593055194626661165, session=0, seqNo=0) 2025-04-04T17:16:50.540377Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-04-04T17:16:50.552763Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:131:2157], cookie=593055194626661165, session=3) 2025-04-04T17:16:50.553350Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2168], cookie=9921855942800352045, name="Sem1", limit=3) 2025-04-04T17:16:50.553502Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-04-04T17:16:50.565949Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2168], cookie=9921855942800352045) 2025-04-04T17:16:50.566345Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=111, session=1, semaphore="Sem1" count=2) 2025-04-04T17:16:50.566513Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-04-04T17:16:50.566707Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=222, session=2, semaphore="Sem1" count=2) 2025-04-04T17:16:50.566870Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:131:2157], cookie=333, session=3, semaphore="Sem1" count=1) 2025-04-04T17:16:50.582783Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=111) 2025-04-04T17:16:50.582865Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=222) 2025-04-04T17:16:50.582894Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:131:2157], cookie=333) 2025-04-04T17:16:50.583474Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=9869040295381011607, name="Sem1") 2025-04-04T17:16:50.583562Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=9869040295381011607) 2025-04-04T17:16:50.584015Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=11025559521983241245, name="Sem1") 2025-04-04T17:16:50.584391Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], 
cookie=11025559521983241245) 2025-04-04T17:16:50.584654Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:131:2157], cookie=444, name="Sem1") 2025-04-04T17:16:50.584751Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-04-04T17:16:50.584827Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-04-04T17:16:50.584883Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-04-04T17:16:50.597519Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:131:2157], cookie=444) 2025-04-04T17:16:50.598179Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2185], cookie=3600448688821215205, name="Sem1") 2025-04-04T17:16:50.598284Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2185], cookie=3600448688821215205) 2025-04-04T17:16:50.598737Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2188], cookie=8820340958676467399, name="Sem1") 2025-04-04T17:16:50.598801Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2188], cookie=8820340958676467399) >> KqpExplain::ExplainStream >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::ReplaceExistingKey >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> KqpAnalyze::AnalyzeTable+ColumnStore >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink >> TPersQueueTest::TestBigMessage [GOOD] >> TPersQueueTest::SetMeteringMode >> KqpTypes::UnsafeTimestampCastV0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18735, MsgBus: 22339 2025-04-04T17:16:41.111062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508140872064536:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:41.111106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d39/r3tmp/tmp1UDwmQ/pdisk_1.dat 2025-04-04T17:16:41.480448Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18735, node 1 2025-04-04T17:16:41.525309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:41.525509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:41.527233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:41.569672Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:41.569693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:41.569700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:41.569823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22339 TClient is connected to server localhost:22339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:42.095847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:42.123665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:42.249484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:42.396249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:42.459253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:43.996134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508149462000883:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:43.996246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:44.202489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.259523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.293889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.323141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.349290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.383157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:44.424774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508153756968693:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:44.424860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:44.425082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508153756968698:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:44.428668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:44.436282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508153756968700:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:44.533491Z node 1 :TX_PROXY ERROR: Actor# [1:7489508153756968754:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:45.287982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28801, MsgBus: 19301 2025-04-04T17:16:46.341276Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508164568966163:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:46.341324Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d39/r3tmp/tmpQ0DUoc/pdisk_1.dat 2025-04-04T17:16:46.494983Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:46.507503Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:46.507614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:46.509365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28801, node 2 2025-04-04T17:16:46.553584Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:46.553602Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:46.553607Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:46.553690Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19301 TClient is connected to server localhost:19301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:46.937574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:46.950569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.998146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:47.157253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:47.224758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:49.100050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508177453869813:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.100142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.156361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.184016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.215173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.242652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.275199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.313401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.393091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508177453870329:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.393172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.393372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508177453870334:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.396810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:49.407835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508177453870336:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:49.509496Z node 2 :TX_PROXY ERROR: Actor# [2:7489508177453870391:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:50.571315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> KqpWrite::CastValuesOptional [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> TTopicYqlTest::BadRequests [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-04-04T17:16:14.466183Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:14.466311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:14.487523Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:14.487651Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:14.504497Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:14.504999Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:129:2155], cookie=18132121622550840038, session=0, seqNo=0) 2025-04-04T17:16:14.505161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:14.530825Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:129:2155], cookie=18132121622550840038, session=1) 2025-04-04T17:16:14.531635Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:129:2155], cookie=15313398435512868589 2025-04-04T17:16:14.532142Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:142:2166], cookie=4261335747816353218) 2025-04-04T17:16:14.532214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:142:2166], cookie=4261335747816353218) 2025-04-04T17:16:14.860235Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:14.875059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.142656Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.155299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.411929Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.428140Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:15.674307Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.687019Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-04T17:16:15.959065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:15.970876Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.222165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.234331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.471626Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.484789Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.731699Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:16.743353Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:16.990527Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.002678Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.293649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.306099Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.565382Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.577347Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:17.834984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:17.846971Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.104997Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.117043Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.377200Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.392165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.691408Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.703625Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:18.965256Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:18.979427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.245174Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.263100Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.519077Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.535971Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:19.806319Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:19.818826Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.148655Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.163108Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.454476Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.473843Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:20.744875Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:20.757808Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-04T17:16:21.028286Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.042513Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.310333Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.332068Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.628465Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.640662Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:21.895404Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:21.907835Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.191288Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.207128Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.465968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.478363Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:22.730845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:22.744167Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.055538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.069815Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.340008Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.356613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.627493Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.643007Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:23.900916Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:23.912782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.164899Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.177137Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.446781Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.463389Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:24.746574Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:24.762647Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.040960Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.066867Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.334961Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.348036Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.629218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.642831Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:25.965152Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:25.978259Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-04-04T17:16:26.249659Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.262236Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:26.514144Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.526427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:26.791467Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:26.807247Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:27.063130Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:27.075754Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:27.381643Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:27.396415Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:27.670739Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:27.682777Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:27.936137Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:27.948060Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:28.204435Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:28.216956Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:28.500141Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:28.515257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:28.827646Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:28.840448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.108705Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.121037Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.370192Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.382508Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.630687Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.643119Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:29.894097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:29.906492Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TT ... 
eck::Execute 2025-04-04T17:16:39.156686Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete [... identical TTxSelfCheck::Execute/Complete DEBUG pairs, repeating at sub-second intervals from 17:16:39 through 17:16:52, elided ...] 2025-04-04T17:16:52.286649Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-04-04T17:16:52.310887Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-04-04T17:16:52.603420Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-04-04T17:16:52.603513Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-04-04T17:16:52.615793Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-04-04T17:16:52.626788Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:571:2565], cookie=4068893644701248594) 2025-04-04T17:16:52.626883Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:571:2565], cookie=4068893644701248594) 2025-04-04T17:16:53.133057Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:53.133161Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:53.152732Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:53.153142Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:53.177436Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:53.178325Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:131:2157], cookie=12345, session=0, seqNo=0) 2025-04-04T17:16:53.178485Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:53.190658Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:131:2157], cookie=12345, session=1) 2025-04-04T17:16:53.191423Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=23456,
session=1, seqNo=0) 2025-04-04T17:16:53.203414Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=23456, session=1) 2025-04-04T17:16:53.578725Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-04-04T17:16:53.578830Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-04-04T17:16:53.599517Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-04-04T17:16:53.599649Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-04-04T17:16:53.623788Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-04-04T17:16:53.624628Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:131:2157], cookie=12345, session=0, seqNo=0) 2025-04-04T17:16:53.624797Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-04-04T17:16:53.640673Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:131:2157], cookie=12345, session=1) 2025-04-04T17:16:53.641450Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:138:2162], cookie=23456, session=1, seqNo=0) 2025-04-04T17:16:53.666969Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:138:2162], cookie=23456, session=1) |90.5%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::BigParameter >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 1743, MsgBus: 29564 2025-04-04T17:16:43.819179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508152050648115:2112];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:43.819511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d31/r3tmp/tmpwGJIAY/pdisk_1.dat 2025-04-04T17:16:44.132756Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1743, node 1 2025-04-04T17:16:44.195541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:44.195664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:44.197200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:44.208364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:44.208388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:44.208395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:44.208485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29564 TClient is connected to server localhost:29564 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:44.740873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:44.753157Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:44.767739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:44.916230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:45.080777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:45.162567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.654572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508164935551713:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.654702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.867518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.902551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.932959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.963743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.991827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:47.024280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:47.064940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508169230519518:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:47.065007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:47.065027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508169230519523:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:47.068334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:47.077796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508169230519525:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:47.138385Z node 1 :TX_PROXY ERROR: Actor# [1:7489508169230519579:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11136, MsgBus: 65517 2025-04-04T17:16:48.835905Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508173451341321:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:48.835973Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d31/r3tmp/tmpKG9yDJ/pdisk_1.dat 2025-04-04T17:16:48.926100Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11136, node 2 2025-04-04T17:16:48.964461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:48.964565Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:48.966173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:48.986839Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:48.986861Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:48.986869Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:48.986992Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65517 TClient is connected to server localhost:65517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:49.367181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
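
The "Scheduled retry for error: ... Transaction ... completed, doublechecking" warning followed by the TX_PROXY "path exist, request accepts it" message above is the expected race when several actors create the default resource pool concurrently: the losers schedule a retry and then treat an already-existing path as success. Below is a minimal C++ sketch of that create-then-tolerate-exists retry pattern; it is an illustration only, and EStatus/CreateWithRetry are invented names, not YDB code.

#include <chrono>
#include <functional>
#include <thread>

enum class EStatus { Ok, AlreadyExists, TransientError };

// Retries a create-if-absent operation, treating AlreadyExists as success --
// the same convergence rule the workload-service log above demonstrates.
EStatus CreateWithRetry(const std::function<EStatus()>& create,
                        int maxAttempts = 5) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        EStatus st = create();
        if (st == EStatus::Ok || st == EStatus::AlreadyExists) {
            return EStatus::Ok;  // either we created it or a peer already did
        }
        std::this_thread::sleep_for(delay);  // the "scheduled retry"
        delay *= 2;                          // exponential backoff
    }
    return EStatus::TransientError;
}
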
2025-04-04T17:16:49.374185Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:49.384247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:49.435321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:49.579176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:49.633209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.761879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508186336244973:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:51.761951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:51.790845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:51.830129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:51.872660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:51.916222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:51.991490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.070265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.128039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508190631212790:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.128139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.128546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508190631212795:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.132186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:52.144075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508190631212797:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:52.236354Z node 2 :TX_PROXY ERROR: Actor# [2:7489508190631212852:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::MultiUsedStage >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::StatsProfile >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2025-04-04T17:13:07.303102Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507223349345940:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:07.303142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:07.410426Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507224743914411:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:07.643626Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:13:07.655590Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:07.655911Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e1a/r3tmp/tmp3qzGnG/pdisk_1.dat 2025-04-04T17:13:08.035457Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:08.039534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:08.039616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:08.046774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:08.046843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:08.062258Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:13:08.062396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:08.063056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 
2691, node 1 2025-04-04T17:13:08.560960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e1a/r3tmp/yandexQsm6Tv.tmp 2025-04-04T17:13:08.560989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e1a/r3tmp/yandexQsm6Tv.tmp 2025-04-04T17:13:08.561149Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e1a/r3tmp/yandexQsm6Tv.tmp 2025-04-04T17:13:08.561292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:08.713531Z INFO: TTestServer started on Port 22474 GrpcPort 2691 TClient is connected to server localhost:22474 PQClient connected to localhost:2691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:09.013132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:09.106171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:13:11.171667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507241923783997:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.171801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.172074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507241923784009:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:11.185750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:13:11.222539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507241923784011:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:13:11.446421Z node 2 :TX_PROXY ERROR: Actor# [2:7489507241923784038:2179] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:11.468848Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507240529216091:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:11.469062Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTk1MmFmZDktMjc3MzFiMDEtNjczZDgxYWMtN2UxMjAzZjY=, ActorId: [1:7489507240529216063:2337], ActorState: ExecuteState, TraceId: 01jr0wnma93bfsc0txkxzbpxph, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:11.471806Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:11.473087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:11.474572Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507241923784045:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:11.474793Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmE2OTg3MzYtYmIxNWJhMi02ZTA0ZjBkOS1lNjMxMDIwYQ==, ActorId: [2:7489507241923783995:2312], ActorState: ExecuteState, TraceId: 01jr0wnm809kntcpv0w9m9vsfj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:11.475131Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:11.551385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:11.674462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:13:11.868312Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wnmsx58pqz0gv0b6hadzy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdlZGI1NzktNmE1YzgwYjktNmQzZjQxNDktMmY1MGFkYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489507240529216533:3077] 2025-04-04T17:13:12.303069Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507223349345940:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:12.303154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:12.372688Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507224743914411:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:12.372789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:2691 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-04-04T17:13:17.923982Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:2691 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 2 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2 ... RawX2: 107374184605 } Partitions { Partition { PartitionId: 0 } } 2025-04-04T17:16:51.285973Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:16:51.289780Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:16:51.289825Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-04-04T17:16:51.289849Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, State CALCULATED 2025-04-04T17:16:51.289876Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 State CALCULATED FrontTxId 281474976715676 2025-04-04T17:16:51.289902Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, NewState WAIT_RS 2025-04-04T17:16:51.289931Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 moved from CALCULATED to WAIT_RS 2025-04-04T17:16:51.289988Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-04T17:16:51.290045Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-04-04T17:16:51.290124Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, NewState EXECUTING 2025-04-04T17:16:51.290155Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 moved from WAIT_RS to EXECUTING 2025-04-04T17:16:51.290178Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-04-04T17:16:51.290258Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743787011309, TxId 281474976715676 2025-04-04T17:16:51.290871Z node 25 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-04T17:16:51.295048Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:16:51.295092Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743787011309, TxId 281474976715676, Partition 0 2025-04-04T17:16:51.295124Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-04-04T17:16:51.295149Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, State EXECUTING 2025-04-04T17:16:51.295191Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 State EXECUTING FrontTxId 281474976715676 2025-04-04T17:16:51.295209Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-04T17:16:51.295249Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976715676 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-04T17:16:51.295296Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976715676 2025-04-04T17:16:51.295784Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-04-04T17:16:51.295942Z node 25 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:16:51.296058Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976715676 2025-04-04T17:16:51.296087Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, NewState EXECUTED 2025-04-04T17:16:51.296121Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 moved from EXECUTING to EXECUTED 2025-04-04T17:16:51.296633Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976715676] save tx TxId: 281474976715676 State: EXECUTED MinStep: 1743787011106 MaxStep: 18446744073709551615 Step: 1743787011309 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { 
MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7489508135181066758 RawX2: 107374184605 } Partitions { Partition { PartitionId: 0 } } 2025-04-04T17:16:51.296912Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:16:51.304271Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:16:51.304308Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-04T17:16:51.304324Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, State EXECUTED 2025-04-04T17:16:51.304344Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 State EXECUTED FrontTxId 281474976715676 2025-04-04T17:16:51.304364Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:16:51.304382Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, NewState WAIT_RS_ACKS 2025-04-04T17:16:51.304401Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:16:51.304426Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-04T17:16:51.304434Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:16:51.304447Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976715676] PredicateAcks: 0/0 2025-04-04T17:16:51.304466Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715676 to the list for deletion 2025-04-04T17:16:51.304490Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, NewState DELETING 2025-04-04T17:16:51.304513Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715676 2025-04-04T17:16:51.304565Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:16:51.326999Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:16:51.327041Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-04T17:16:51.327065Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715676, State DELETING 2025-04-04T17:16:51.327094Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715676 2025-04-04T17:16:52.600238Z node 25 :KQP_EXECUTER ERROR: ActorId: [25:7489508191015643579:2507] TxId: 281474976715679. Ctx: { TraceId: 01jr0wwbzc2f4wgcd8txyvth22, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=YjZhMDg4NDEtMjIyNjk0OTQtNTc1YjA2ZjQtYzQ0MzhmMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 26 2025-04-04T17:16:52.600431Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7489508191015643592:2516], TxId: 281474976715679, task: 4. Ctx: { SessionId : ydb://session/3?node_id=25&id=YjZhMDg4NDEtMjIyNjk0OTQtNTc1YjA2ZjQtYzQ0MzhmMWU=. TraceId : 01jr0wwbzc2f4wgcd8txyvth22. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [25:7489508191015643579:2507], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:16:52.600437Z node 25 :KQP_COMPUTE ERROR: SelfId: [25:7489508191015643590:2515], TxId: 281474976715679, task: 2. Ctx: { TraceId : 01jr0wwbzc2f4wgcd8txyvth22. SessionId : ydb://session/3?node_id=25&id=YjZhMDg4NDEtMjIyNjk0OTQtNTc1YjA2ZjQtYzQ0MzhmMWU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [25:7489508191015643579:2507], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:16:52.962944Z node 25 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715680. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T17:16:52.963149Z node 25 :KQP_EXECUTER WARN: ActorId: [25:7489508191015643601:2517] TxId: 281474976715680. Ctx: { TraceId: 01jr0wwcnfed0k1jxs82wxhpwz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=ZmFlM2ZjZjktMWE1ZjIwM2UtNjJkN2FkNzgtNmI2MDRjY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T17:16:52.963453Z node 25 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=25&id=ZmFlM2ZjZjktMWE1ZjIwM2UtNjJkN2FkNzgtNmI2MDRjY2Y=, ActorId: [25:7489508191015643598:2517], ActorState: ExecuteState, TraceId: 01jr0wwcnfed0k1jxs82wxhpwz, Create QueryResponse for error on request, msg: 2025-04-04T17:16:52.965774Z node 25 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0wwcnfed0k1jxs8628x6jm" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> TPersQueueTest::EventBatching [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows >> TPersQueueTest::DisableWrongSettings >> KqpQuery::ExecuteDataQueryCollectMeta >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 10224, MsgBus: 4744 2025-04-04T17:16:43.532027Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508151479639692:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:43.532154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d33/r3tmp/tmpiqTPSP/pdisk_1.dat 2025-04-04T17:16:43.882984Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10224, node 1 2025-04-04T17:16:43.954606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:43.954742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:43.956610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:43.973147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:43.973163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:43.973169Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:43.973279Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4744 TClient is connected to server localhost:4744 WaitRootIsUp 'Root'... 
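
The PERSQUEUE trace above walks one distributed transaction through its full state machine: CALCULATED -> WAIT_RS -> EXECUTING -> EXECUTED -> WAIT_RS_ACKS -> DELETING, persisting each step via TEvKeyValue (WRITE_TX_COOKIE). The following C++ sketch models only that happy-path progression as observed in the log; it is an illustrative reconstruction, not the actual tablet implementation, which also handles aborts, restarts, and persistence.

#include <cstdint>
#include <stdexcept>

// Transaction states observed in the PERSQUEUE trace above (happy path only).
enum class ETxState : uint8_t {
    Calculated,  // predicate computed and persisted (WRITE_TX_COOKIE)
    WaitRs,      // waiting for readsets from other participants
    Executing,   // applying the commit to partitions (TEvTxCommit)
    Executed,    // all partitions confirmed (Received == Expected)
    WaitRsAcks,  // waiting for readset acks before cleanup
    Deleting     // tx key deleted from the tablet's local KV store
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::Calculated: return ETxState::WaitRs;
        case ETxState::WaitRs:     return ETxState::Executing;
        case ETxState::Executing:  return ETxState::Executed;
        case ETxState::Executed:   return ETxState::WaitRsAcks;
        case ETxState::WaitRsAcks: return ETxState::Deleting;
        case ETxState::Deleting:   throw std::logic_error("tx already deleted");
    }
    throw std::logic_error("unreachable");
}
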
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:44.422969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:44.453086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:44.586540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:44.766422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:44.837036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.511446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508164364543378:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.511551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.728265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.758808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.787160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.814536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.841325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.869498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:46.913464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508164364543888:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.913522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508164364543893:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.913523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:46.916520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:46.925187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508164364543895:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:46.982873Z node 1 :TX_PROXY ERROR: Actor# [1:7489508164364543949:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:47.803178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9956, MsgBus: 29046 2025-04-04T17:16:49.186754Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508176817241279:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.186832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d33/r3tmp/tmpN29eV7/pdisk_1.dat 2025-04-04T17:16:49.295746Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9956, node 2 2025-04-04T17:16:49.341949Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:49.342006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:49.343624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:49.362771Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:49.362793Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:49.362800Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:49.362914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29046 TClient is connected to server localhost:29046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:49.740989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
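
For context on the KESUS_TABLET session trace at the start of this excerpt: attaching with session=0 allocates a fresh session id ("Created new session 1"), a second sender can re-attach to the same id with a new cookie, and an unrefreshed session is eventually removed by TTxSessionTimeout ("Deleting session 1"). A toy C++ model of that lifecycle follows; the names are invented for illustration and this is not the Kesus tablet or SDK API.

#include <cstdint>
#include <unordered_map>

// Toy model of the Kesus session lifecycle seen in the log.
class TSessionTable {
public:
    // Returns the session id the attach resolved to.
    uint64_t Attach(uint64_t requestedSession) {
        if (requestedSession == 0) {
            uint64_t id = ++LastId;        // fresh session ("Created new session 1")
            Alive[id] = true;
            return id;
        }
        Alive[requestedSession] = true;    // re-attach from another sender/cookie
        return requestedSession;
    }

    void OnTimeout(uint64_t session) {     // TTxSessionTimeout::Execute
        Alive.erase(session);              // "Deleting session N"
    }

    bool IsAlive(uint64_t session) const {
        return Alive.count(session) != 0;
    }

private:
    uint64_t LastId = 0;
    std::unordered_map<uint64_t, bool> Alive;
};
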
2025-04-04T17:16:49.746657Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:49.761064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:49.856842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:50.036590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:50.112646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:52.352566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508189702144945:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.352650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.391100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.428722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.458651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.486254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.514091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.543812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.582082Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508189702145455:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.582164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.582390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508189702145460:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:52.586227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:52.595821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508189702145462:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:52.693094Z node 2 :TX_PROXY ERROR: Actor# [2:7489508189702145517:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:53.624585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:54.187742Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508176817241279:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.187805Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:16:54.402818Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzZkMzY4ZGYtZjU5YWM2MDEtYmQ1ZGRiZTktNzVjZGRhNWM=, ActorId: [2:7489508193997113073:2490], ActorState: ExecuteState, TraceId: 01jr0wwe4vaqhg0rk7sa8h1w8t, Create QueryResponse for error on request, msg: Error while locks merge >> KqpImmediateEffects::Interactive [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> KqpStats::DataQueryWithEffects+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25105, MsgBus: 10894 2025-04-04T17:16:45.079409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508160661969757:2088];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:45.082217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d2f/r3tmp/tmpIbWaEf/pdisk_1.dat 2025-04-04T17:16:45.435603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:45.456294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:45.456673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:45.458616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25105, node 1 2025-04-04T17:16:45.532614Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:45.532639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:45.532647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:45.532798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10894 TClient is connected to server localhost:10894 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:46.053810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.072320Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:46.080693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.197022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.349806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.433644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:47.979197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508169251906103:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:47.979305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.201737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.246883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.269942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.292920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.317656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.344701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.418107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508173546873914:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.418163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.418199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508173546873919:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.420959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:48.428897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508173546873921:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:48.511855Z node 1 :TX_PROXY ERROR: Actor# [1:7489508173546873977:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:49.212396Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-04T17:16:49.219557Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:49.219705Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:16:49.219908Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508177841841583:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7489508177841841557:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7489508177841841583:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:16:49.220358Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508177841841576:2496], SessionActorId: [1:7489508177841841557:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489508177841841557:2496]. isRollback=0 2025-04-04T17:16:49.220545Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWU4NmYzZjMtYjU5OTI3OGYtZjJjOTk1MmMtODhlNjYyZTI=, ActorId: [1:7489508177841841557:2496], ActorState: ExecuteState, TraceId: 01jr0ww942dmhmqg1say7j23yr, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489508177841841577:2496] from: [1:7489508177841841576:2496] 2025-04-04T17:16:49.220622Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508177841841577:2496] TxId: 281474976710671. Ctx: { TraceId: 01jr0ww942dmhmqg1say7j23yr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU4NmYzZjMtYjU5OTI3OGYtZjJjOTk1MmMtODhlNjYyZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:16:49.221243Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWU4NmYzZjMtYjU5OTI3OGYtZjJjOTk1MmMtODhlNjYyZTI=, ActorId: [1:7489508177841841557:2496], ActorState: ExecuteState, TraceId: 01jr0ww942dmhmqg1say7j23yr, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 22681, MsgBus: 2864 2025-04-04T17:16:50.169824Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508179665323393:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d2f/r3tmp/tmp1y8c0b/pdisk_1.dat 2025-04-04T17:16:50.276830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:16:50.355695Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.377223Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.377301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22681, node 2 2025-04-04T17:16:50.378891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:50.458070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.458090Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.458097Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.458196Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2864 TClient is connected to server localhost:2864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.007356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.015224Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.035336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:51.175657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.337614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.415470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.496966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508192550226877:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.497088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.533147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.576108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.625637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.670372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.710225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.785831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.832859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508192550227396:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.833022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.834084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508192550227401:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.837495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.849934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508192550227403:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:53.915590Z node 2 :TX_PROXY ERROR: Actor# [2:7489508192550227456:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:55.062759Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508201140162358:2501], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jr0wwens6nss2aw4nc7xr14y. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NzIwMTljMTItM2Q3ZGFiMjEtODE5MWY1OWMtN2VhYjYzNGE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:16:55.063099Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508201140162359:2502], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jr0wwens6nss2aw4nc7xr14y. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NzIwMTljMTItM2Q3ZGFiMjEtODE5MWY1OWMtN2VhYjYzNGE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508201140162355:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:55.063393Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzIwMTljMTItM2Q3ZGFiMjEtODE5MWY1OWMtN2VhYjYzNGE=, ActorId: [2:7489508196845195010:2488], ActorState: ExecuteState, TraceId: 01jr0wwens6nss2aw4nc7xr14y, Create QueryResponse for error on request, msg: 2025-04-04T17:16:55.144681Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508179665323393:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.144747Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> KqpLimits::StreamWrite+Allowed >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 28095, MsgBus: 17911 2025-04-04T17:16:45.097860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508158755815888:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:45.097938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d29/r3tmp/tmp4ZDcKf/pdisk_1.dat 2025-04-04T17:16:45.485279Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28095, node 1 2025-04-04T17:16:45.498562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:45.498665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:45.510651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:45.568888Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:45.568912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:45.568918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:45.569047Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17911 TClient is connected to server localhost:17911 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:46.056588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.088424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.224204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.383555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:46.450057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:47.894538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508167345752256:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:47.894665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.073280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.098187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.120614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.142017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.206309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.234773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:48.307499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508171640720071:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.307540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.307634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508171640720076:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:48.310814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:48.319266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508171640720078:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:48.415388Z node 1 :TX_PROXY ERROR: Actor# [1:7489508171640720132:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:49.128419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6130, MsgBus: 21179 2025-04-04T17:16:50.659857Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508182103424214:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:50.659909Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d29/r3tmp/tmpIY2Xgl/pdisk_1.dat 2025-04-04T17:16:50.785214Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6130, node 2 2025-04-04T17:16:50.831271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.831356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.836888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:50.905208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.905224Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.905229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.905338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21179 TClient is connected to server localhost:21179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.386462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:51.404707Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.416537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.538314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.828796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.907897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.799334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508194988327862:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.799413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.831070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.868079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.898222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.924262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.950721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.985701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:54.045351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508199283295666:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.045421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.045649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508199283295671:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.048618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:54.056861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508199283295673:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:54.121281Z node 2 :TX_PROXY ERROR: Actor# [2:7489508199283295727:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:55.032764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.659902Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508182103424214:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.659959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::JoinNoStatsYql >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryCancelWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18190, MsgBus: 23027 2025-04-04T17:16:46.320959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508165323660788:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:46.321034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d1d/r3tmp/tmpzQ6QUg/pdisk_1.dat 2025-04-04T17:16:46.676573Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:46.678835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:46.678956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:46.684208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18190, node 1 2025-04-04T17:16:46.746554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:46.746617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:46.746625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:46.746763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23027 TClient is connected to server localhost:23027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:47.231458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:47.255173Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:47.265646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:47.386566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:16:47.537957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:47.611352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:49.105823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508178208564262:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.105937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.358973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.415824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.442568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.465999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.490594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.518978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:49.595724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508178208564778:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.595779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.595800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508178208564783:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:49.599140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:49.608940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508178208564785:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:49.688765Z node 1 :TX_PROXY ERROR: Actor# [1:7489508178208564841:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:50.557465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:51.321095Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508165323660788:2231];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:51.321178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5219, MsgBus: 12167 2025-04-04T17:16:52.053897Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508188663056602:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:52.053955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d1d/r3tmp/tmptKeiaV/pdisk_1.dat 2025-04-04T17:16:52.195899Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:52.209332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:52.209409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:52.211257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5219, node 2 2025-04-04T17:16:52.278825Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:52.278847Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:52.278853Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:52.278962Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12167 TClient is connected to server localhost:12167 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:52.716678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:52.722628Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:52.733182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:52.781385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:52.949807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.009639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:55.001111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508197252992927:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.001188Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.038347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.073073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.113642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.142381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.166421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.196814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.238410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201547960735:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.238487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.238702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201547960740:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.241798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:55.249566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508201547960742:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:55.331137Z node 2 :TX_PROXY ERROR: Actor# [2:7489508201547960795:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:56.282127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:57.056039Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508188663056602:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:57.056094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 8950, MsgBus: 12855 2025-04-04T17:16:42.267051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508146596289138:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:42.267100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d37/r3tmp/tmpVBb9Ov/pdisk_1.dat 2025-04-04T17:16:42.617188Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:42.652018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:42.652082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8950, node 1 2025-04-04T17:16:42.656273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:42.702029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:42.702049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:42.702054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:42.702156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12855 TClient is connected to server localhost:12855 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:43.149347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:43.168343Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:16:43.174743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:43.318960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:43.485898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:43.544386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:45.115287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508159481192807:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:45.115406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:45.337140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:16:45.368487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:16:45.401458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:16:45.468176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:16:45.503477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:16:45.536763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:16:45.612724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508159481193329:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:45.612800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:45.612947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508159481193334:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:45.616751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:45.626610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508159481193336:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:16:45.682867Z node 1 :TX_PROXY ERROR: Actor# [1:7489508159481193389:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:46.599677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:16:47.267929Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508146596289138:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:47.267980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 1524, MsgBus: 12884
2025-04-04T17:16:48.087235Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508171968510039:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:48.087340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d37/r3tmp/tmpxn3fHM/pdisk_1.dat
2025-04-04T17:16:48.181933Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 1524, node 2
2025-04-04T17:16:48.212919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:48.213015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:48.214512Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:16:48.238784Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:48.238809Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:48.238816Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:48.238921Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12884
TClient is connected to server localhost:12884
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:48.574964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:48.582079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:48.654666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:48.805911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:48.865583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:50.815329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508180558446410:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:50.815417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:50.852697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:16:50.888296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:16:50.927876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:16:50.996285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:16:51.034157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:16:51.112202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:16:51.182566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508184853414229:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:51.182691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:51.183154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508184853414234:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:51.186893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:51.199357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508184853414236:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:16:51.256213Z node 2 :TX_PROXY ERROR: Actor# [2:7489508184853414289:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:52.283810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:16:52.331556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-04T17:16:52.374310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-04T17:16:53.087596Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508171968510039:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:53.087660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 28259, MsgBus: 28803
2025-04-04T17:16:46.512290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508162195465615:2067];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:46.512334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d1a/r3tmp/tmpyAsoTP/pdisk_1.dat
2025-04-04T17:16:46.830154Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28259, node 1
2025-04-04T17:16:46.901673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:46.901695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:46.901703Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:46.901819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:16:46.908873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:46.908995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:46.910718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:28803
TClient is connected to server localhost:28803
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:47.387186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.400356Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.412848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-04-04T17:16:47.549246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.684766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.758800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:49.147901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508175080369258:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.148041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.396020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.425299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.454655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.479011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.511809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.540505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.616968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508175080369773:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.617193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.617436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508175080369778:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.621247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:49.629462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508175080369781:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:16:49.732825Z node 1 :TX_PROXY ERROR: Actor# [1:7489508175080369837:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:50.552369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 21790, MsgBus: 29417
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d1a/r3tmp/tmpwAyTs8/pdisk_1.dat
2025-04-04T17:16:52.218386Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:16:52.271444Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:16:52.284605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:52.284692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:52.285998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 21790, node 2
2025-04-04T17:16:52.366842Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:52.366863Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:52.366869Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:52.366968Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29417
TClient is connected to server localhost:29417
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:52.764352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:52.778930Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:52.789076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-04T17:16:52.862620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:53.014198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:53.077388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:55.082431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201508801528:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.082565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.116365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.192129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.220054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.285824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.321354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.387817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.463998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201508802056:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.464076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.464110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201508802061:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.466574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:55.473816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508201508802063:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:16:55.558451Z node 2 :TX_PROXY ERROR: Actor# [2:7489508201508802119:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:56.463204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
>> KqpTypes::UnsafeTimestampCastV0 [GOOD]
>> KqpTypes::UnsafeTimestampCastV1
>> KqpQuery::QueryResultsTruncated
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD]
Test command err: Trying to start YDB, gRPC: 26176, MsgBus: 15700
2025-04-04T17:16:46.082331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508164641440064:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:46.082437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d26/r3tmp/tmp5TSEKE/pdisk_1.dat
2025-04-04T17:16:46.375873Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26176, node 1
2025-04-04T17:16:46.440831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:46.442340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:46.451050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:16:46.490511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:46.490538Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:46.490547Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:46.490683Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15700
TClient is connected to server localhost:15700
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:46.967976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:46.985642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.106904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.262573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.337040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:49.003267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508173231376449:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.003383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.269788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.299726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.330718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.398835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.427686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.458914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.496103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508177526344259:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.496175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.496246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508177526344264:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.499186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:49.507629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508177526344266:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:16:49.598384Z node 1 :TX_PROXY ERROR: Actor# [1:7489508177526344319:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:50.559950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:16:51.090342Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508164641440064:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:51.090417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 27182, MsgBus: 17684
2025-04-04T17:16:52.123172Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508191598268957:2081];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:52.138435Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d26/r3tmp/tmpSAOgvd/pdisk_1.dat
2025-04-04T17:16:52.213407Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:16:52.232948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:52.233022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:52.234062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27182, node 2
2025-04-04T17:16:52.288790Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:52.288815Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:52.288821Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:52.288927Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17684
TClient is connected to server localhost:17684
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:52.662575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:52.667877Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:16:52.680657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:52.754870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:52.917066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:52.984815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:55.042182Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508204483172576:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.042261Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.089262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.158272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.186471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.219876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.247206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.276647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:16:55.313879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508204483173088:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.313956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.313985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508204483173093:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:55.317761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:55.325599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508204483173095:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:16:55.398234Z node 2 :TX_PROXY ERROR: Actor# [2:7489508204483173149:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:56.327545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:16:57.151071Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508191598268957:2081];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:57.153409Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 8391, MsgBus: 14253
2025-04-04T17:16:46.241101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508166091198557:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:46.241227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d23/r3tmp/tmp4zvhr4/pdisk_1.dat
2025-04-04T17:16:46.606469Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 8391, node 1
2025-04-04T17:16:46.623754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:46.625085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:46.633798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:16:46.675347Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:46.675372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:46.675397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:46.675524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14253
TClient is connected to server localhost:14253
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:47.165659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.201873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.323992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:16:47.478058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:16:47.541603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.043729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508178976102236:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.043840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.276123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.376688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.407100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.434045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.461135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.488337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:16:49.566379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508178976102753:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.566434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.566578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508178976102758:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:16:49.569802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:16:49.578967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508178976102760:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:16:49.650873Z node 1 :TX_PROXY ERROR: Actor# [1:7489508178976102814:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:16:50.610338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:16:51.242551Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508166091198557:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:16:51.242653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 25408, MsgBus: 3876
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d23/r3tmp/tmpizw1rD/pdisk_1.dat
2025-04-04T17:16:52.347463Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:16:52.364564Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:16:52.368957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:16:52.369030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:16:52.370854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25408, node 2
2025-04-04T17:16:52.434770Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:16:52.434801Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:16:52.434808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:16:52.434917Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3876
TClient is connected to server localhost:3876
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:16:52.868419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:52.876811Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:52.887493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:52.956400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.111075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.199554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:55.100764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201788260518:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.100871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.145311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.173114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.207654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.235881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.260627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.290884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.367874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201788261029:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.367938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508201788261034:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.368078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.372037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:55.383974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508201788261036:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:55.460726Z node 2 :TX_PROXY ERROR: Actor# [2:7489508201788261094:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:56.347657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.970858Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508206083228844:2515], TxId: 281474976710675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjMxMTI3MGMtMzdkODk0ZGEtMmNkMzhlY2ItN2QwYjA4ZWM=. CustomerSuppliedId : . TraceId : 01jr0wwgd831rt9713ggk1wn51. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-04T17:16:56.971358Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508206083228845:2516], TxId: 281474976710675, task: 2. Ctx: { TraceId : 01jr0wwgd831rt9713ggk1wn51. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjMxMTI3MGMtMzdkODk0ZGEtMmNkMzhlY2ItN2QwYjA4ZWM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489508206083228841:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:56.971765Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjMxMTI3MGMtMzdkODk0ZGEtMmNkMzhlY2ItN2QwYjA4ZWM=, ActorId: [2:7489508206083228641:2487], ActorState: ExecuteState, TraceId: 01jr0wwgd831rt9713ggk1wn51, Create QueryResponse for error on request, msg: >> KqpParams::Decimal-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8752, MsgBus: 21369 2025-04-04T17:16:47.462906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508170253804116:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:47.462968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d17/r3tmp/tmpF4pxmE/pdisk_1.dat 2025-04-04T17:16:47.771196Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:47.787708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:47.787807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:47.789836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8752, node 1 2025-04-04T17:16:47.851222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:47.851270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:47.851279Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:47.851366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21369 TClient is connected to server localhost:21369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:48.268716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:48.295996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
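[Editor's note] The KQP_COMPUTE errors recorded above ("Duplicated keys found., code: 2012", status PRECONDITION_FAILED) are raised when an insert batch carries two rows with the same primary key; the first failing task then aborts its peer tasks ("Terminate execution") and the session turns the abort into a query error response. A self-contained sketch of the same pre-commit uniqueness check follows, under the simplifying assumption of a single integer key column; it is an illustration of the violated constraint, not the engine's code path.

```cpp
#include <cstdint>
#include <cstdio>
#include <unordered_set>
#include <vector>

struct TRow { std::int64_t Key; const char* Value; };

// Detects a batch whose rows collide on the primary key: the condition
// the engine reports as KIKIMR_CONSTRAINT_VIOLATION (issue code 2012).
static bool HasDuplicatedKeys(const std::vector<TRow>& batch) {
    std::unordered_set<std::int64_t> seen;
    for (const TRow& row : batch) {
        if (!seen.insert(row.Key).second) {
            return true; // second occurrence of the same key
        }
    }
    return false;
}

int main() {
    std::vector<TRow> batch = {{1, "a"}, {2, "b"}, {1, "c"}};
    if (HasDuplicatedKeys(batch)) {
        std::printf("Error: Duplicated keys found., code: 2012\n");
        return 1;
    }
    return 0;
}
```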
2025-04-04T17:16:48.420057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:48.564732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:48.611803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:50.036777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508183138707568:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:50.036869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:50.284941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:50.321521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:50.357994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:50.422699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:50.492512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:50.568716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:50.665164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508183138708089:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:50.665243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:50.665482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508183138708094:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:50.669310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:50.685446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508183138708096:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:50.757687Z node 1 :TX_PROXY ERROR: Actor# [1:7489508183138708151:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:51.685406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.461546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508170253804116:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:52.461630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25084, MsgBus: 26297 2025-04-04T17:16:53.129785Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508195551079096:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:53.129833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001d17/r3tmp/tmpV7qwhJ/pdisk_1.dat 2025-04-04T17:16:53.238288Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:53.255881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:53.255962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:53.257062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25084, node 2 2025-04-04T17:16:53.330121Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:53.330138Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:53.330144Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:53.330226Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26297 TClient is connected to server localhost:26297 WaitRootIsUp 'Root'... 
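[Editor's note] The METADATA_PROVIDER errors above (table_exists.cpp, event=timeout, then "cannot detect path existence ... error=timeout") show an existence probe for //Root/.metadata/initialization/migrations giving up at a deadline while the cluster is still bootstrapping. A generic deadline-bounded existence check can be sketched with std::future as below; the probe function is a hypothetical stand-in, not TTableExistsActor's real interface.

```cpp
#include <chrono>
#include <cstdio>
#include <future>
#include <optional>
#include <thread>

// Stand-in for an asynchronous "does this path exist?" probe.
static bool ProbePathExists(const char* /*path*/) {
    std::this_thread::sleep_for(std::chrono::seconds(2)); // slow bootstrap
    return true;
}

// Returns nullopt on timeout, mirroring "cannot detect path existence".
static std::optional<bool> PathExistsWithin(const char* path,
                                            std::chrono::milliseconds deadline) {
    auto fut = std::async(std::launch::async, ProbePathExists, path);
    if (fut.wait_for(deadline) == std::future_status::timeout) {
        // NB: fut's destructor still waits for the probe to finish;
        // only the caller-visible answer is bounded by the deadline.
        return std::nullopt;
    }
    return fut.get();
}

int main() {
    auto res = PathExistsWithin("//Root/.metadata/initialization/migrations",
                                std::chrono::milliseconds(100));
    if (!res) {
        std::printf("cannot detect path existence; error=timeout\n");
    }
}
```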
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:53.775891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.781423Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.789227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.865723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:54.006206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:54.090385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:55.976828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508204141015435:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.976907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.018591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.057841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.091889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.131863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.168979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.199545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.249507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508208435983240:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.249578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508208435983245:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.249588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.252501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:56.263047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508208435983247:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:56.356605Z node 2 :TX_PROXY ERROR: Actor# [2:7489508208435983301:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:57.308228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.129965Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508195551079096:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:58.130104Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle >> KqpQuery::Now >> KqpQuery::RewriteIfPresentToMap >> KqpQuery::OlapCreateAsSelect_Simple |90.6%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> KqpQuery::SelectWhereInSubquery >> KqpLimits::LargeParametersAndMkqlFailure >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpStats::OneShardLocalExec+UseSink >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpStats::StatsProfile [GOOD] >> KqpStats::SelfJoin >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SysViewCancelled >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::Predicates >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit >> KqpLimits::BigParameter [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> 
DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> KqpStats::JoinNoStatsYql [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> KqpQuery::Now [GOOD] >> KqpQuery::NoEvaluate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 4770, MsgBus: 21217 2025-04-04T17:16:49.736111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508177374024253:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.736935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001623/r3tmp/tmp0hsvwz/pdisk_1.dat 2025-04-04T17:16:50.171236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.187504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.187597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.190476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4770, node 1 2025-04-04T17:16:50.407139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.407161Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.407170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.407296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21217 TClient is connected to server localhost:21217 WaitRootIsUp 'Root'... 
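[Editor's note] Every node start in this log emits the same NET_CLASSIFIER cascade: the distributable config is "empty, broken or outdated", the file fallback is also empty, and the classifier concludes "got bad distributable configuration". The pattern is a source-priority fallback chain; here is a compact sketch of that shape, with both loaders as hypothetical stand-ins that fail the way the log shows.

```cpp
#include <cstdio>
#include <optional>
#include <string>

// Hypothetical loaders for the two sources tried in the log.
static std::optional<std::string> LoadDistributableConfig() { return std::nullopt; }
static std::optional<std::string> LoadConfigFromFile()      { return std::nullopt; }

// Try sources in priority order; report failure only when all are empty.
static std::optional<std::string> ResolveNetClassifierConfig() {
    if (auto cfg = LoadDistributableConfig()) return cfg;
    std::puts("WARN: distributable config is empty, broken or outdated, will use file");
    if (auto cfg = LoadConfigFromFile()) return cfg;
    std::puts("WARN: failed to initialize from file");
    return std::nullopt;
}

int main() {
    if (!ResolveNetClassifierConfig()) {
        std::puts("ERROR: got bad distributable configuration");
    }
}
```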
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.103573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.118960Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.133099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.273875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.435650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.568146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.242263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194553895231:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.242359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.536052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.596127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.628984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.660934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.700956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.751166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.853496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194553895750:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.853565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.853975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194553895755:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.857157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.866107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508194553895757:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.923768Z node 1 :TX_PROXY ERROR: Actor# [1:7489508194553895811:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.765385Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508177374024253:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.765451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27082, MsgBus: 9421 2025-04-04T17:16:55.770612Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508204751774642:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.770719Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001623/r3tmp/tmpmeP5sV/pdisk_1.dat 2025-04-04T17:16:55.867939Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27082, node 2 2025-04-04T17:16:55.931117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:55.931198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:55.938033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:55.957403Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:55.957429Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:55.957436Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:55.957544Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9421 TClient is connected to server localhost:9421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
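[Editor's note] Each node registration above walks the same Hive progression, Unknown -> Disconnected -> Connecting -> Connected, logging one WARN per transition. The states and their linear order are taken from the log; the encoding below as a tiny state machine is purely illustrative.

```cpp
#include <cstdio>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

static const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// Advance one step along the registration path seen in the log.
static EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected;
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s;
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    while (s != EVolatileState::Connected) {
        EVolatileState n = Next(s);
        std::printf("VolatileState: %s -> %s\n", Name(s), Name(n));
        s = n;
    }
}
```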
2025-04-04T17:16:56.447535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.455218Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:56.464380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.545146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.708842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.798716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:58.841151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508217636678316:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.841300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.877938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.912439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.943768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.976829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.002608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.029504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.070232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508221931646120:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.070365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.070600Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508221931646125:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.073608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:59.082428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508221931646127:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:59.167012Z node 2 :TX_PROXY ERROR: Actor# [2:7489508221931646180:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1297, MsgBus: 24674 2025-04-04T17:17:00.882533Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508225357649796:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.882609Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001623/r3tmp/tmpPmCm85/pdisk_1.dat 2025-04-04T17:17:01.031622Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.051864Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.051951Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.053415Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1297, node 3 2025-04-04T17:17:01.138224Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.138264Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.138271Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.138391Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24674 TClient is connected to server localhost:24674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:01.636763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:01.644273Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:04.368044Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508242537519631:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.368123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.385395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.446925Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508242537519733:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.447031Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.447334Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508242537519738:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.450324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.461118Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508242537519740:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:04.529729Z node 3 :TX_PROXY ERROR: Actor# [3:7489508242537519791:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpStats::RequestUnitForExecute [GOOD] >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> KqpStats::SelfJoin [GOOD] >> KqpQuery::RandomUuid [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::ManyPartitions >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink >> KqpQuery::DictJoin [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 9950, MsgBus: 16393 2025-04-04T17:16:49.867562Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508176377035089:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.867928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001624/r3tmp/tmpkPVyzL/pdisk_1.dat 2025-04-04T17:16:50.349929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.350043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.355597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:50.375579Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9950, node 1 2025-04-04T17:16:50.480767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.480787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.480792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.480880Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16393 TClient is connected to server localhost:16393 WaitRootIsUp 'Root'... 
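[Editor's note] The DataShardReadIterator tests named in the status lines above (e.g. ShouldLimitRead10RangesChunk100Limit1000/1001) exercise a reader that scans several key ranges in fixed-size chunks and must stop exactly at a row quota, including quotas that do not align with a chunk boundary. The accounting can be sketched self-contained as below, with synthetic ranges in place of a datashard; this is a model of the behavior under test, not the iterator's implementation.

```cpp
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

// Read up to `limit` rows across `ranges`, fetching at most `chunk`
// rows per request; returns how many rows were actually produced.
static std::size_t LimitedRangeRead(
        const std::vector<std::pair<std::uint64_t, std::uint64_t>>& ranges,
        std::size_t chunk, std::size_t limit) {
    std::size_t total = 0;
    for (const auto& [begin, end] : ranges) {
        std::uint64_t key = begin;
        while (key < end && total < limit) {
            // One chunked request, bounded by chunk size, range end, and quota.
            std::size_t take = std::min<std::size_t>(
                {chunk, static_cast<std::size_t>(end - key), limit - total});
            key += take;
            total += take;
        }
        if (total >= limit) break;
    }
    return total;
}

int main() {
    // Ten ranges of 200 keys each, chunk 100, quota 1001 rows.
    std::vector<std::pair<std::uint64_t, std::uint64_t>> ranges;
    for (std::uint64_t i = 0; i < 10; ++i) {
        ranges.push_back({i * 1000, i * 1000 + 200});
    }
    std::printf("rows read: %zu\n", LimitedRangeRead(ranges, 100, 1001));
}
```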
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.205597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.223761Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.247710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:16:51.411171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.625233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.712859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.279052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193556905891:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.279175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.580193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.620529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.661201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.693303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.724095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.793028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.880205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193556906411:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.880300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.880637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193556906416:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.884807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.895077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508193556906418:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.964114Z node 1 :TX_PROXY ERROR: Actor# [1:7489508193556906475:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.860939Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508176377035089:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.860990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:16:54.878281Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508197851874032:2493], status: GENERIC_ERROR, issues:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2025-04-04T17:16:54.879496Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEzZWE0MzgtODM1NDgxOTQtOTBlNjUxNTYtNTc0OGUzMDM=, ActorId: [1:7489508197851874022:2488], ActorState: ExecuteState, TraceId: 01jr0wweprdyjza2gbmpcc95rr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... Trying to start YDB, gRPC: 16791, MsgBus: 4035 2025-04-04T17:16:55.726135Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508202125123426:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.726208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001624/r3tmp/tmpR1GtN6/pdisk_1.dat 2025-04-04T17:16:55.817123Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16791, node 2 2025-04-04T17:16:55.860640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:55.860737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:55.879009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:55.885553Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:55.885574Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:55.885584Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:55.885695Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4035 TClient is connected to server localhost:4035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:56.364662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.373869Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:56.381133Z node 2 :FLAT_TX_SCHEMESHARD WARN ... 
lt not found or you don't have access permissions } 2025-04-04T17:16:58.799002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.828648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.858310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.892564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.930364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.966149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.028893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508219304994901:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.028985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.029260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508219304994906:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.032910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:59.044755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508219304994908:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:59.123928Z node 2 :TX_PROXY ERROR: Actor# [2:7489508219304994961:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:00.022322Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489508223599962523:2493], status: GENERIC_ERROR, issues:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2025-04-04T17:17:00.022913Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2VjMTAyOTUtYzkxMzFkMzAtYWJiNjgyNTMtZWEzODcxNjY=, ActorId: [2:7489508219304995217:2488], ActorState: ExecuteState, TraceId: 01jr0wwkqgb8h02xmwzk3neh4e, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... Trying to start YDB, gRPC: 30035, MsgBus: 23851 2025-04-04T17:17:00.881456Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508222745179114:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.892645Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001624/r3tmp/tmpqlfZ78/pdisk_1.dat 2025-04-04T17:17:00.999887Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.007358Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.007442Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.009352Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30035, node 3 2025-04-04T17:17:01.054758Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.054777Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.054783Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.054885Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23851 TClient is connected to server localhost:23851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:01.506959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.513393Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:01.527592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.609141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:01.762514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.834271Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.200857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508239925049897:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.200935Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.226137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.275073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.307734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.341642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.375457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.431035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.479092Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508239925050407:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.479190Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.479414Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508239925050412:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.482901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.492871Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508239925050414:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:04.591440Z node 3 :TX_PROXY ERROR: Actor# [3:7489508239925050469:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:05.880259Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508222745179114:2218];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:05.880323Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 262 Consumed units: 6 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardLocalExec+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1243, MsgBus: 61371 2025-04-04T17:16:50.217633Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508181831668857:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:50.219413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161d/r3tmp/tmptxlRsJ/pdisk_1.dat 2025-04-04T17:16:50.790865Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.792519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.792615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.805283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1243, node 1 2025-04-04T17:16:50.962753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.962774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.962779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.962881Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61371 TClient is connected to server localhost:61371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.725188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.747169Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.761686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.928548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:52.074762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:52.165558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.696318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194716572325:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.696518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.927161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.952222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.986360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:54.021640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:54.058931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:54.128988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:54.203845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508199011540139:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.203942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.204748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508199011540144:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.208860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:54.226095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508199011540146:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:54.293112Z node 1 :TX_PROXY ERROR: Actor# [1:7489508199011540199:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:55.216282Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508181831668857:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.216337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:16:55.349200Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787015362, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 13572, MsgBus: 25612 2025-04-04T17:16:56.039782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508206469370126:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:56.040460Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161d/r3tmp/tmpdqdjpi/pdisk_1.dat 2025-04-04T17:16:56.123139Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13572, node 2 2025-04-04T17:16:56.195363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:56.195481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:56.219845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:56.252302Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:56.252324Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:56.252331Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:56.252449Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25612 TClient is connected to server localhost:25612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:56.857820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.863879Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:56.877186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.961071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.149516Z node 2 :FLAT_TX_SCHEMESHARD WA ... oolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.037979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.075647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.140690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.171923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.200176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.227767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.271095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.327520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508219354274129:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.327598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.327800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508219354274134:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.330823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:59.343598Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T17:16:59.344051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508219354274136:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:59.422631Z node 2 :TX_PROXY ERROR: Actor# [2:7489508219354274190:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:00.413815Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787020430, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 2855, MsgBus: 29270 2025-04-04T17:17:01.291841Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508227742747774:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:01.291910Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161d/r3tmp/tmpvgRj7T/pdisk_1.dat 2025-04-04T17:17:01.400947Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.439439Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.439519Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.443758Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2855, node 3 2025-04-04T17:17:01.530732Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.530753Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.530760Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.530864Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29270 TClient is connected to server localhost:29270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:01.997331Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:02.006622Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:02.021025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.087294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.270780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.341872Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.513377Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508240627651427:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.513458Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.572682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.608667Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.641243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.672307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.702274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.732538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.776716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508240627651935:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.776796Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.777101Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508240627651940:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.780591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.791821Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508240627651942:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:04.881600Z node 3 :TX_PROXY ERROR: Actor# [3:7489508240627651996:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.292841Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508227742747774:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.292914Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> KqpStats::DataQueryMulti >> KqpExplain::Predicates [GOOD] >> KqpExplain::MultiJoinCteLinks >> KqpLimits::TooBigQuery+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::RandomUuid [GOOD] Test command err: Trying to start YDB, gRPC: 21248, MsgBus: 14162 2025-04-04T17:16:49.720999Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508176290608841:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.731061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001620/r3tmp/tmpTUBu1U/pdisk_1.dat 2025-04-04T17:16:50.174346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.175423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.175502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21248, node 1 2025-04-04T17:16:50.182337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:50.330987Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.331010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.331016Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.331131Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14162 TClient is connected to server localhost:14162 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.220321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.238235Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.254136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.424300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.645603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.773063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.117467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193470479793:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.117617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.536575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.603689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.634729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.674408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.706936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.754625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.798195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193470480308:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.798334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.798807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193470480313:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.824950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.833348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508193470480315:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.913759Z node 1 :TX_PROXY ERROR: Actor# [1:7489508193470480369:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.751420Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508176290608841:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.751689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:16:54.920410Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTI4Y2JjMDUtYzVhOTMzMmYtYmFlYWU1NzAtZjY1MDAzNjI=, ActorId: [1:7489508197765447921:2489], ActorState: ExecuteState, TraceId: 01jr0wwepn4bywa4q807hwqgwb, Create QueryResponse for error on request, msg:
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 50ms during compilation Trying to start YDB, gRPC: 28940, MsgBus: 8181 2025-04-04T17:16:55.779377Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508203787329019:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.779463Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001620/r3tmp/tmpJ0WF72/pdisk_1.dat 2025-04-04T17:16:55.917468Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:55.929796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:55.929875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:55.931075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28940, node 2 2025-04-04T17:16:56.022707Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:56.022729Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:56.022735Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:56.022837Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8181 TClient is connected to server localhost:8181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:56.483989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.496302Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:56.504769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.560861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part ... Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:59.076531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508220967199943:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.076612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.126165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.161409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.190259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.230746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.262150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.300154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.349845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508220967200450:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.349927Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.354151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508220967200455:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.357891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:59.367853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508220967200457:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:59.425226Z node 2 :TX_PROXY ERROR: Actor# [2:7489508220967200511:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24339, MsgBus: 18518 2025-04-04T17:17:01.275878Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508227225851072:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:01.275927Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001620/r3tmp/tmpZ4veG4/pdisk_1.dat 2025-04-04T17:17:01.415491Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.429107Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.429190Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.434661Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24339, node 3 2025-04-04T17:17:01.562787Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.562808Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.562816Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.562930Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18518 TClient is connected to server localhost:18518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.379657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:02.387181Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:02.408336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.509130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.669823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.754570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.871916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508240110754708:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.871995Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.912945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.980931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.046703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.076976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.143227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.201589Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.249472Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508244405722527:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.249569Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.249802Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508244405722532:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.252855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:05.262586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508244405722534:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:05.356488Z node 3 :TX_PROXY ERROR: Actor# [3:7489508244405722589:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.278340Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508227225851072:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.278403Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: Trying to start YDB, gRPC: 17636, MsgBus: 64340 2025-04-04T17:16:49.740526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508176981374091:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.740884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001629/r3tmp/tmpn9IQ1X/pdisk_1.dat 2025-04-04T17:16:50.369235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.369794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.369932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.372200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17636, node 1 2025-04-04T17:16:50.556971Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.556992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.556999Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.557107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64340 TClient is connected to server localhost:64340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
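The warning cascade repeated above is the workload service lazily bootstrapping the default resource pool: TPoolFetcherActor reports NOT_FOUND, TPoolCreatorActor issues the create, a concurrent creator loses the race and TX_PROXY answers "path exist, request accepts it", and the scheduled retry then succeeds. Below is a minimal Python sketch of the same tolerant create-if-missing pattern using the ydb SDK; the endpoint, database, and DDL statement are illustrative placeholders, not values from this run.

```python
import ydb

def ensure_exists(session, ddl):
    # Create-if-missing: swallow "already exists", mirroring the
    # "path exist, request accepts it" outcome TX_PROXY reports above.
    try:
        session.execute_scheme(ddl)
    except ydb.Error as e:
        if "exist" not in str(e).lower():  # crude match; sketch only
            raise

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
with ydb.SessionPool(driver).checkout() as session:
    # Hypothetical object; the pool in the log lives at
    # /Root/.metadata/workload_manager/pools/default.
    ensure_exists(session, "CREATE TABLE bootstrap_probe (k Uint64, PRIMARY KEY (k));")
```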
2025-04-04T17:16:51.197359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.212538Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.224632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.384603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.557638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.650331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.303433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194161244835:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.303573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.536228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.576428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.625076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.655896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.689559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.731190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.789753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194161245346:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.789833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.789881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194161245351:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.793879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.803251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508194161245353:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.896140Z node 1 :TX_PROXY ERROR: Actor# [1:7489508194161245408:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.738990Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508176981374091:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.739044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15280, MsgBus: 61474 2025-04-04T17:16:56.015269Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508207484400358:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:56.015376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001629/r3tmp/tmpr0LOGx/pdisk_1.dat 2025-04-04T17:16:56.114822Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:56.137726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:56.137797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:56.139709Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15280, node 2 2025-04-04T17:16:56.210904Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:56.210928Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:56.210935Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:56.211022Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61474 TClient is connected to server localhost:61474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
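The TClient::Ls request/response pairs in this output are the harness describing `/Root` and waiting until the root path is up. A rough equivalent with the ydb Python SDK is sketched below; the connection parameters are placeholders, and the attribute names assume the SDK's scheme client surface.

```python
import ydb

# Sketch of the same check the harness does with TClient::Ls.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
root = driver.scheme_client.describe_path("/Root")
print(root.name, root.is_directory())  # WaitRootIsUp succeeds once this resolves
```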
2025-04-04T17:16:56.783269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.797795Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:56.812817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.924673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.077628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... nt":2,"Sum":3,"Max":2,"Min":1},"FirstMessageMs":{"Count":2,"Sum":3,"Max":2,"Min":1},"ActiveMessageMs":{"Count":2,"Max":2,"Min":1},"WaitTimeUs":{"Count":2,"Sum":2887,"Max":1497,"Min":1390,"History":[4,1390,5,2887]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[4,1048576,5,2097152]},"ResultRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Tasks":2,"ResultBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":3000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":48,"Max":24,"Min":24}}],"BaseTimeMs":1743787026283,"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":963,"Max":530,"Min":433,"History":[4,433,5,963]},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":3,"Max":2,"Min":1},"ActiveMessageMs":{"Count":2,"Max":2,"Min":1},"FirstMessageMs":{"Count":2,"Sum":3,"Max":2,"Min":1},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[4,48,5,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"FirstMessageMs":{"Count":2,"Sum":2,"Max":1,"Min":1},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[4,48,5,96]},"WaitTimeUs":{"Count":2,"Sum":2618,"Max":1512,"Min":1106,"History":[4,1106,5,2618]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":43,"Max":35,"Min":8},"ActiveMessageMs":{"Count":2,"Max":35,"Min":8},"FirstMessageMs":{"Count":2,"Sum":43,"Max":35,"Min":8},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[9,12,36,48]}},"Name":"6","Push":{"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":43,"Max":35,"Min":8},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":42,"Max":34,"Min":8},"FirstMessageMs":{"Count":2,"Sum":42,"Max":34,"Min":8},"ActiveMessageMs":{"Count":2,"Max":35,"Min":8},"PauseMessageMs":{"Count":2,"Sum":40,"Max":33,"Min":7},"ActiveTimeUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"WaitTimeUs":{"Count":2,"Sum":37716,"Max":31608,"Min":6108,"History":[9,6108,36,37716]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":34,"Min":7}}}],"DurationUs":{"Count":2,"Sum":41000,"Max":34000,"Min":7000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[9,1048576,36,2097152]},"InputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"Tasks":2,"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"InputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":1,"StageDurationUs":34000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":24,"Max":12,"Min":12}}],"BaseTimeMs":1743787026283,"WaitInputTimeUs":{"Count":2,"Sum":36989,"Max":31368,"Min":5621,"History":[9,5621,36,36989]},"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":1734,"Max":1244,"Min":490,"History":[9,490,36,1734]},"Input":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[9,12,36,48]}},"Name":"2","Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[9,12,36,48]},"PauseMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"WaitTimeUs":{"Count":2,"Sum":357,"Max":192,"Min":165,"History":[9,165,36,357]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":35,"Max":35,"Min":35},"ActiveMessageMs":{"Count":1,"Max":35,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[36,27]},"ActiveTimeUs":{"Count":1,"Sum":26000,"Max":26000,"Min":26000}},"Name":"8","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":35,"Max":35,"Min":35},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":35,"Max":35,"Min":35},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"ActiveMessageMs":{"Count":1,"Max":35,"Min":9},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":26000,"Max":26000,"Min":26000},"WaitTimeUs":{"Count":1,"Sum":34622,"Max":34622,"Min":34622,"History":[36,34622]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":35,"Min":1}}}],"DurationUs":{"Count":1,"Sum":27000,"Max":27000,"Min":27000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[36,1048576]},"InputBytes":{"Count":1,"Sum":48,"Max":48,"Min":48},"Tasks":1,"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"StageDurationUs":27000,"BaseTimeMs":1743787026283,"WaitInputTimeUs":{"Count":1,"Sum":26481,"Max":26481,"Min":26481,"History":[36,26481]},"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"CpuTimeUs":{"Count":1,"Sum":1239,"Max":1239,"Min":1239,"History":[36,1239]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":35,"Max":35,"Min":35},"ActiveMessageMs":{"Count":1,"Max":35,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[36,48]},"ActiveTimeUs":{"Count":1,"Sum":26000,"Max":26000,"Min":26000}},"Name":"4","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":35,"Max":35,"Min":35},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":35,"Max":35,"Min":35},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"ActiveMessageMs":{"Count":1,"Max":35,"Min":9},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[36,48]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":26000,"Max":26000,"Min":26000},"WaitTimeUs":{"Count":1,"Sum":21262,"Max":21262,"Min":21262,"History":[36,21262]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":35,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":36,"Max":36,"Min":36},"ActiveMessageMs":{"Count":1,"Max":36,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[37,27]},"ActiveTimeUs":{"Count":1,"Sum":27000,"Max":27000,"Min":27000}},"Name":"RESULT","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":36,"Max":36,"Min":36},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":36,"Max":36,"Min":36},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"ActiveMessageMs":{"Count":1,"Max":36,"Min":9},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":27000,"Max":27000,"Min":27000},"WaitTimeUs":{"Count":1,"Sum":34715,"Max":34715,"Min":34715,"History":[37,34715]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":36,"Min":1}}}],"DurationUs":{"Count":1,"Sum":27000,"Max":27000,"Min":27000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[37,1048576]},"InputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1,"ResultBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"StageDurationUs":27000,"BaseTimeMs":1743787026283,"WaitInputTimeUs":{"Count":1,"Sum":26295,"Max":26295,"Min":26295,"History":[37,26295]},"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"CpuTimeUs":{"Count":1,"Sum":527,"Max":527,"Min":527,"History":[37,527]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":36,"Max":36,"Min":36},"ActiveMessageMs":{"Count":1,"Max":36,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[37,27]},"ActiveTimeUs":{"Count":1,"Sum":27000,"Max":27000,"Min":27000}},"Name":"6","Push":{"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":36,"Max":36,"Min":36},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":36,"Max":36,"Min":36},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"ActiveMessageMs":{"Count":1,"Max":36,"Min":9},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[37,27]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"ActiveTimeUs":{"Count":1,"Sum":27000,"Max":27000,"Min":27000},"WaitTimeUs":{"Count":1,"Sum":34722,"Max":34722,"Min":34722,"History":[37,34722]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":36,"Min":1}}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":216291,"CpuTimeUs":213335},"ProcessCpuTimeUs":279,"TotalDurationUs":271176,"ResourcePoolId":"default","QueuedTimeUs":554},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key"],"E-Cost":"0"}],"Node 
Type":"TableFullScan"},{"Operators":[{"E-Rows":"0","Columns":["Key"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-Rows":6,"A-SelfCpu":1.239,"A-Cpu":1.239,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.527,"A-Cpu":1.766,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} 2025-04-04T17:17:06.514655Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508227815885971:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.514717Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 17339, MsgBus: 63824 2025-04-04T17:16:49.712151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508178236963247:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.714530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162d/r3tmp/tmpuH14zP/pdisk_1.dat 2025-04-04T17:16:50.168235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.185792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.185897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.192853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17339, node 1 2025-04-04T17:16:50.334629Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.334647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.334651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.334722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63824 TClient is connected to server localhost:63824 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.075466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.098976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.320388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.494043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.574141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.101179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195416834055:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.101265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.533099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.604361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.682708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.726431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.757697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.792405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.846453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195416834574:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.846562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.847154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195416834579:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.850413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.859871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508195416834581:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.945274Z node 1 :TX_PROXY ERROR: Actor# [1:7489508195416834635:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.711527Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508178236963247:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.711590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:16:54.904619Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508199711802197:2494], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-04-04T17:16:54.905985Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFiMmJkYjgtOGVlNWNiNDgtOGZhYTYxNDQtMmY0OTQ3NzI=, ActorId: [1:7489508199711802189:2489], ActorState: ExecuteState, TraceId: 01jr0wwepvbfqnwmmtb9g1dhm0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-04-04T17:16:54.924237Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508199711802210:2497], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-04-04T17:16:54.925367Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFiMmJkYjgtOGVlNWNiNDgtOGZhYTYxNDQtMmY0OTQ3NzI=, ActorId: [1:7489508199711802189:2489], ActorState: ExecuteState, TraceId: 01jr0wwer03a4wdkyv5cpap9sj, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-04-04T17:16:54.944622Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508199711802219:2501], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2025-04-04T17:16:54.945835Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFiMmJkYjgtOGVlNWNiNDgtOGZhYTYxNDQtMmY0OTQ3NzI=, ActorId: [1:7489508199711802189:2489], ActorState: ExecuteState, TraceId: 01jr0wwerm0nsxkdnxf2jkx9y1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 Trying to start YDB, gRPC: 18927, MsgBus: 19968 2025-04-04T17:16:55.765029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508203787378476:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.765097Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162d/r3tmp/tmpkS0nYS/pdisk_1.dat 2025-04-04T17:16:55.859256Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18927, node 2 2025-04-04T17:16:55.902772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:55.902876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:55.911027Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:55.942294Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:55.942314Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:55.942320Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:55.942419Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19968 TClient is connected to server localhost:19968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathTyp ... N: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:58.910429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508216672282131:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.910522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.944111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.984499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.053833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.120495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.149621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.223013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.295643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508220967249949:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.295727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.295967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508220967249954:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.299680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:59.312037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508220967249956:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:59.404737Z node 2 :TX_PROXY ERROR: Actor# [2:7489508220967250012:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 65438, MsgBus: 9335 2025-04-04T17:17:01.376018Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508227362455023:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162d/r3tmp/tmpH4BwZW/pdisk_1.dat 2025-04-04T17:17:01.487565Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:01.589833Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.607467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.607563Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.610866Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65438, node 3 2025-04-04T17:17:01.730789Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.730818Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.730826Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.730937Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9335 TClient is connected to server localhost:9335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.310441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
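The GENERIC_ERROR / code 2008 compile failures earlier in this test ("Operation 'CreateTable' / 'DropTable' / 'AlterTable' can't be performed in data query") come from sending DDL down the data-query path, which only accepts DML. A sketch of the distinction with the ydb Python SDK is below; the endpoint, database, and table name are placeholders.

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
with ydb.SessionPool(driver).checkout() as session:
    # DDL goes through the scheme path; this succeeds:
    session.execute_scheme("CREATE TABLE t2008 (k Uint64, PRIMARY KEY (k));")
    # The same kind of statement through the data-query path is
    # rejected at compile time, like the code 2008 errors in the log:
    try:
        session.transaction().execute("DROP TABLE t2008;", commit_tx=True)
    except ydb.Error as e:
        print("rejected as in the log:", e)
```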
2025-04-04T17:17:02.317111Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:02.328981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.442685Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.609888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.689494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.821079Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508240247358484:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.821199Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.848362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.890096Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.922432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.956572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.035716Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.105373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.147837Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508244542326295:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.147918Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.148001Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508244542326300:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.151159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:05.159818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508244542326302:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:05.244845Z node 3 :TX_PROXY ERROR: Actor# [3:7489508244542326355:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.372198Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508227362455023:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.372276Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 3563, MsgBus: 17782 2025-04-04T17:16:49.728441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508178816095704:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.730499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162c/r3tmp/tmpLyKkpv/pdisk_1.dat 2025-04-04T17:16:50.203000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.203117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.206817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:50.232977Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3563, node 1 2025-04-04T17:16:50.335208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.335229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.335235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.335345Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17782 TClient is connected to server localhost:17782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:51.151466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.206953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.337618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.549132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.640668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.193158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195995966698:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.193290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.533852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.615825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.652656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.725248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.763268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.828989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.884604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195995967218:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.884672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.884728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195995967223:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.887742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.901397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508195995967225:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.971967Z node 1 :TX_PROXY ERROR: Actor# [1:7489508195995967279:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.734363Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508178816095704:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.734426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62981, MsgBus: 20372 2025-04-04T17:16:56.285676Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508208516978053:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:56.333442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162c/r3tmp/tmpq2God0/pdisk_1.dat 2025-04-04T17:16:56.483431Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:56.484757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:56.484835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:56.488693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62981, node 2 2025-04-04T17:16:56.650293Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:56.650320Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:56.650327Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:56.650464Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20372 TClient is connected to server localhost:20372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:57.136693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.144486Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:16:57.154843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.246421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.432667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.512808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:59.560224Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508221401881590:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.560328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.602741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.647027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.676306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.705878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.734419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.806015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.852031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508221401882106:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.852117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.852309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508221401882111:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.855044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:59.864431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508221401882113:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:16:59.934033Z node 2 :TX_PROXY ERROR: Actor# [2:7489508221401882166:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17480, MsgBus: 29182 2025-04-04T17:17:01.906533Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508229407645445:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162c/r3tmp/tmp8ojGPL/pdisk_1.dat 2025-04-04T17:17:01.977961Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:02.072815Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:02.086737Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:02.086993Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:02.088283Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17480, node 3 2025-04-04T17:17:02.126836Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:02.126853Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:02.126861Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:02.126972Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29182 TClient is connected to server localhost:29182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.635840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:17:02.662566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.732379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.900350Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.964554Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.991334Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508242292548910:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.991419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.054670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.087670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.116390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.190990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.256835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.296826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.387559Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508246587516728:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.387649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.387874Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508246587516733:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.391215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:05.403498Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508246587516735:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:05.496346Z node 3 :TX_PROXY ERROR: Actor# [3:7489508246587516791:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.890404Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508229407645445:2220];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.890487Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> KqpStats::JoinNoStatsScan >> KqpExplain::LimitOffset >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 20561, MsgBus: 1887 2025-04-04T17:16:53.341946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508193242498188:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:53.342626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001616/r3tmp/tmpx9f5d9/pdisk_1.dat 2025-04-04T17:16:53.726980Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20561, node 1 2025-04-04T17:16:53.765314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:53.765396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:53.772879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:53.851081Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:53.851101Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:53.851108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:53.851228Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1887 TClient is connected to server localhost:1887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:54.325356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:54.339294Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:54.353146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:54.512659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:54.689823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:54.761357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.335993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508206127401704:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.336084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.572341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.620132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.656870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.690775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.763810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.824106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.886269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508206127402220:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.886370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.886628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508206127402225:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.890653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:56.904602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508206127402227:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:56.970105Z node 1 :TX_PROXY ERROR: Actor# [1:7489508206127402280:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:57.916300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 2025-04-04T17:16:58.342399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508193242498188:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:58.342494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16354, MsgBus: 16651 2025-04-04T17:16:59.199651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508221350816671:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:59.201163Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001616/r3tmp/tmpdwN49t/pdisk_1.dat 2025-04-04T17:16:59.368941Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:59.375029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:59.375111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:59.377165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16354, node 2 2025-04-04T17:16:59.450780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:59.450802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:59.450808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:59.450906Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16651 TClient is connected to server localhost:16651 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:59.858870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:59.875465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:59.956329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.136869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:00.207426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.127190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508234235720255:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.127255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.168331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.205128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.246733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.284525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.319131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.395686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.467723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508234235720773:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.467793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.468145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508234235720778:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.471317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:02.485959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508234235720780:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:02.544604Z node 2 :TX_PROXY ERROR: Actor# [2:7489508234235720833:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:03.575206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.638097Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489508238530688465:2499], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2025-04-04T17:17:03.638498Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yzk2YjZlYzctYmI3OGM1MmYtMWUzNWNjZDMtZmY4YWEwYTU=, ActorId: [2:7489508238530688387:2488], ActorState: ExecuteState, TraceId: 01jr0wwq873bg8rx3r685pzdb6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. Trying to start YDB, gRPC: 30231, MsgBus: 23151 2025-04-04T17:17:04.536118Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508242968594691:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:04.536165Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001616/r3tmp/tmpohq7j2/pdisk_1.dat 2025-04-04T17:17:04.642747Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30231, node 3 2025-04-04T17:17:04.674925Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:04.675014Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:04.676493Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:04.694167Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:04.694187Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:04.694198Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:04.694335Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23151 TClient is connected to server localhost:23151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:05.161773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.169872Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:07.926024Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508255853497237:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.926115Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.957165Z node 3 :TX_PROXY ERROR: Actor# [3:7489508255853497259:2307] txid# 281474976715658, issues: { message: "Type \'Datetime64\' specified for column \'DatetimePK\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } >> KqpParams::ImplicitParameterTypes >> KqpParams::RowsList >> KqpParams::MissingParameter >> KqpStats::SysViewClientLost >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] Test command err: Trying to start YDB, gRPC: 64255, MsgBus: 4557 2025-04-04T17:16:50.280359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508181953102011:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:50.280399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161c/r3tmp/tmpNndDP6/pdisk_1.dat 2025-04-04T17:16:50.691965Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.708519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.708632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.711819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64255, node 1 2025-04-04T17:16:50.782477Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.782498Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.782505Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.782630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4557 TClient is connected to server localhost:4557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:51.317259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.347054Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.370344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.529679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.798576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.892043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.596058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508194838005448:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.596148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.827259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.853251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.879386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.909565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.935471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.961980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:54.019606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508199132973253:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.019691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.019861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508199132973259:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.023181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:54.033426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508199132973261:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:54.121138Z node 1 :TX_PROXY ERROR: Actor# [1:7489508199132973315:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:55.009152Z node 1 :GRPC_SERVER DEBUG: [0x51b0002d0280] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=MmU0Yjg1YmUtMzhmNzI2MmYtNTk4NjA1NS0zMDBhOWNlNw==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:44470 2025-04-04T17:16:55.009223Z node 1 :GRPC_SERVER DEBUG: [0x51b0002d1080] created request Name# ExecuteDataQuery 2025-04-04T17:16:55.009318Z node 1 :GRPC_SERVER DEBUG: [0x51b0002d0280] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=MmU0Yjg1YmUtMzhmNzI2MmYtNTk4NjA1NS0zMDBhOWNlNw==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:44470 database# /Root 2025-04-04T17:16:55.009526Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jr0wwev1f99adh6r6webbh9m, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:44470, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.985747s 2025-04-04T17:16:55.280245Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508181953102011:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.280320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64255 2025-04-04T17:16:57.994507Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508203427940890:2488] TxId: 281474976710671. Ctx: { TraceId: 01jr0wwev1f99adh6r6webbh9m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU0Yjg1YmUtMzhmNzI2MmYtNTk4NjA1NS0zMDBhOWNlNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:16:57.995126Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508203427940901:2497], TxId: 281474976710671, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MmU0Yjg1YmUtMzhmNzI2MmYtNTk4NjA1NS0zMDBhOWNlNw==. TraceId : 01jr0wwev1f99adh6r6webbh9m. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508203427940890:2488], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:57.995786Z node 1 :GRPC_SERVER DEBUG: [0x51b0002d0280] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:44470 2025-04-04T17:16:57.996288Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508203427940903:2498], TxId: 281474976710671, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wwev1f99adh6r6webbh9m. SessionId : ydb://session/3?node_id=1&id=MmU0Yjg1YmUtMzhmNzI2MmYtNTk4NjA1NS0zMDBhOWNlNw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508203427940890:2488], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:16:57.996393Z node 1 :GRPC_SERVER DEBUG: [0x51b0002d0280] finished request Name# ExecuteDataQuery ok# false peer# unknown 2025-04-04T17:16:57.997107Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmU0Yjg1YmUtMzhmNzI2MmYtNTk4NjA1NS0zMDBhOWNlNw==, ActorId: [1:7489508199132973573:2488], ActorState: ExecuteState, TraceId: 01jr0wwev1f99adh6r6webbh9m, Create QueryResponse for error on request, msg: 2025-04-04T17:16:58.008157Z node 1 :GRPC_SERVER DEBUG: [0x51b0002d1080] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=MmU0Yjg1YmUtMzhmNzI2MmYtNTk4NjA1NS0zMDBhOWNlNw==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query ... ion type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:59.832760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:59.893806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.839185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508229199002411:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:01.839303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:01.877352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:01.922659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.002708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.049059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.090300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.138273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.216035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508233493970221:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.216123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.216391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508233493970226:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.219948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:02.242661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508233493970228:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:02.300905Z node 2 :TX_PROXY ERROR: Actor# [2:7489508233493970282:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12419, MsgBus: 3528 2025-04-04T17:17:04.230791Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508242560341766:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:04.230850Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161c/r3tmp/tmp0aOH46/pdisk_1.dat 2025-04-04T17:17:04.374920Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:04.382486Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:04.382572Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:04.384881Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12419, node 3 2025-04-04T17:17:04.442775Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:04.442798Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:04.442806Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:04.442926Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3528 TClient is connected to server localhost:3528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:04.843966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:04.850804Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:04.864591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.911006Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.072314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.131517Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:07.201398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508255445245414:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.201465Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.235754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.283983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.320818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.358330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.424159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.463262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.507878Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508255445245926:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.507959Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.508358Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508255445245931:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.511886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:07.521256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508255445245933:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:07.605104Z node 3 :TX_PROXY ERROR: Actor# [3:7489508255445245986:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting >> KqpExplain::SortStage >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite >> KqpQuery::QueryClientTimeoutPrecompiled >> TPersQueueTest::DisableWrongSettings [GOOD] >> TPersQueueTest::DisableDeduplication >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> KqpStats::DataQueryMulti [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpExplain::MultiJoinCteLinks [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCachePermissionsLoss [GOOD] Test command err: Trying to start YDB, gRPC: 27168, MsgBus: 7060 2025-04-04T17:16:49.780738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508176059220402:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.785662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001630/r3tmp/tmpHBS8iM/pdisk_1.dat 2025-04-04T17:16:50.230387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.231301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.231391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.238530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27168, node 1 2025-04-04T17:16:50.336238Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.336261Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.336265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.336367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7060 
TClient is connected to server localhost:7060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.057009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.098307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.304478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.485597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.564514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.055308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193239091182:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.055414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.536080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.594202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.664611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.702813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.750887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.803316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.848576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193239091698:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.848642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193239091703:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.848663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.851902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.877734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508193239091705:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.957370Z node 1 :TX_PROXY ERROR: Actor# [1:7489508193239091759:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.777528Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508176059220402:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.780041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65253, MsgBus: 4918 2025-04-04T17:17:00.252306Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508223118127944:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.252353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001630/r3tmp/tmpEoIu5F/pdisk_1.dat 2025-04-04T17:17:00.358602Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65253, node 2 2025-04-04T17:17:00.398380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:00.402050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:00.404189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:00.426746Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:00.426769Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:00.426775Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:00.426881Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4918 TClient is connected to server localhost:4918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
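[annotation] Each run repeats the same readiness handshake: the client connects, then WaitRootIsUp 'Root' issues TClient::Ls against the root until schemeshard describes it. A rough client-side equivalent using the SDK's scheme client follows, with an arbitrary polling interval and timeout chosen for the sketch:

import time
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:65253", database="/Root")  # port from the run above
driver.wait(timeout=5)

def wait_root_is_up(driver, deadline_s=30.0):
    # Poll DescribePath("/Root") until the scheme entry comes back as a
    # directory, the condition the Ls response above satisfies.
    deadline = time.monotonic() + deadline_s
    while time.monotonic() < deadline:
        try:
            entry = driver.scheme_client.describe_path("/Root")
            if entry.is_directory():
                return entry
        except ydb.issues.Error:
            pass  # scheme cache not warmed up yet
        time.sleep(0.5)
    raise TimeoutError("Root did not come up")

print(wait_root_is_up(driver).name)
driver.stop()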
2025-04-04T17:17:00.907653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.922882Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:00.927538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.014899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.192168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.284103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTab ... for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T17:17:11.847979Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MWYzZjNjYzYtNmQ2YWU1NTEtNzZkOTJmZTYtNjU0ZTBjYjY=, ActorId: [3:7489508272932068025:2566], ActorState: ExecuteState, TraceId: 01jr0wwz5g7n0hys1xdy6vy3xx, Create QueryResponse for error on request, msg: 2025-04-04T17:17:11.848533Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0wwz5g7n0hys1xdy6vy3xx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWYzZjNjYzYtNmQ2YWU1NTEtNzZkOTJmZTYtNjU0ZTBjYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:11.865103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.050404Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035386:3845], for# user0@builtin, access# SelectRow 2025-04-04T17:17:12.050680Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715686. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T17:17:12.050963Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGM5YjE4MmQtNjg4YWUwNWUtYmM4NjU2NTQtYjQ0NDI2Mzc=, ActorId: [3:7489508272932068072:2577], ActorState: ExecuteState, TraceId: 01jr0wwzbf24ty49e06xfy0wz5, Create QueryResponse for error on request, msg: 2025-04-04T17:17:12.051510Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jr0wwzbf24ty49e06xfy0wz5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGM5YjE4MmQtNjg4YWUwNWUtYmM4NjU2NTQtYjQ0NDI2Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:12.355331Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715688. 
Ctx: { TraceId: 01jr0wwzhs007mrxc5cp4abhrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDY2YzRmODQtYzg1NGU4Zi1kZTg0YmM1MC00Yjg2ZmYxNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:12.361422Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jr0wwzhs007mrxc5cp4abhrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDY2YzRmODQtYzg1NGU4Zi1kZTg0YmM1MC00Yjg2ZmYxNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:12.363769Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035452:3865], for# user0@builtin, access# UpdateRow 2025-04-04T17:17:12.363931Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715690. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T17:17:12.364073Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDY2YzRmODQtYzg1NGU4Zi1kZTg0YmM1MC00Yjg2ZmYxNw==, ActorId: [3:7489508277227035413:2590], ActorState: ExecuteState, TraceId: 01jr0wwzhs007mrxc5cp4abhrw, Create QueryResponse for error on request, msg: 2025-04-04T17:17:12.364377Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715691. Ctx: { TraceId: 01jr0wwzhs007mrxc5cp4abhrw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDY2YzRmODQtYzg1NGU4Zi1kZTg0YmM1MC00Yjg2ZmYxNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:12.500934Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035479:3874], for# user0@builtin, access# EraseRow 2025-04-04T17:17:12.501089Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715692. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T17:17:12.501363Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjFmODk3NDgtZTY2N2VhNDItNzdlZWMyMS1kYzM2ZjA0MQ==, ActorId: [3:7489508277227035465:2606], ActorState: ExecuteState, TraceId: 01jr0wwzt8d7mb6h4vp0w8zg0g, Create QueryResponse for error on request, msg: 2025-04-04T17:17:12.501875Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715693. Ctx: { TraceId: 01jr0wwzt8d7mb6h4vp0w8zg0g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjFmODk3NDgtZTY2N2VhNDItNzdlZWMyMS1kYzM2ZjA0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:12.518324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.613853Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035509:3887], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.613873Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035509:3887], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.615485Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489508277227035506:2619], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:17:12.615712Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWFlOGJjYS1hM2Y0ZGI3OC1lMmRiNzc4Mi1kMzU2ZTIxNQ==, ActorId: [3:7489508277227035502:2617], ActorState: ExecuteState, TraceId: 01jr0wx00w0c43shr4k1g8tznx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:17:12.666797Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035526:3891], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.666827Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035526:3891], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.669151Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489508277227035523:2628], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:17:12.674940Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmJiOTE3ZGEtMzAzNGVkNzktZWExM2M1MWMtMWE3YTBlYzI=, ActorId: [3:7489508277227035519:2626], ActorState: ExecuteState, TraceId: 01jr0wx02c6rcbrk5pqhfe012y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:17:12.729272Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035544:3896], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.729299Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035544:3896], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.731652Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489508277227035541:2637], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:17:12.733262Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjJkYjcwMjktODU0ZmVhNjctNThmNjQ4YzYtZThkOTkxMjM=, ActorId: [3:7489508277227035537:2635], ActorState: ExecuteState, TraceId: 01jr0wx04d59wk5msd9z8bp2mf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:17:12.751734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.814768Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035570:3907], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.814808Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035570:3907], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.817664Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489508277227035567:2647], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:17:12.819135Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzM3MWQyZTktYTQwN2VjNzUtZjgyNTJlODQtYTY3YTc4YTA=, ActorId: [3:7489508277227035563:2645], ActorState: ExecuteState, TraceId: 01jr0wx06y0d45xvtxxvkhxxgj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:17:12.886754Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035588:3912], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.886784Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035588:3912], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.888974Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489508277227035585:2656], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:17:12.890467Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGI2NDBkZjAtZjc4ZWNkZmItMTE2YzhiNDQtMzkyOWNmOTU=, ActorId: [3:7489508277227035580:2654], ActorState: ExecuteState, TraceId: 01jr0wx0965123jmbat6sntpsg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:17:12.930628Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035606:3917], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.930656Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489508277227035606:3917], for# user0@builtin, access# DescribeSchema 2025-04-04T17:17:12.933083Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489508277227035603:2665], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:17:12.933429Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODJkMjc0ZDEtMzk4YWM1NDEtY2FhNTY0MGItYzFjZDNjMWY=, ActorId: [3:7489508277227035598:2663], ActorState: ExecuteState, TraceId: 01jr0wx0amejshx961me0xxb35, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::FullOuterJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD] Test command err: Trying to start YDB, gRPC: 19598, MsgBus: 23149 2025-04-04T17:16:57.198635Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508211634847875:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:57.198678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001610/r3tmp/tmpsJVfea/pdisk_1.dat 2025-04-04T17:16:57.634303Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:57.656161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:57.656255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:57.657959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19598, node 1 2025-04-04T17:16:57.822765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:57.822788Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:57.822793Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:57.822888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23149 TClient is connected to server localhost:23149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:58.385530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
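[annotation] The "Access denied ... access# SelectRow/UpdateRow/EraseRow/DescribeSchema" lines in the KqpQuery::QueryCachePermissionsLoss output above follow ESchemeOpModifyACL operations that strip user0@builtin's rights, after which compilation fails with "Cannot find table 'db.[/Root/Test]' ... or you do not have access permissions". Restoring access would be a scheme-client grant along these lines; the settings builder and the 'ydb.generic.*' permission names are assumed from the SDK's scheme API rather than taken from this log:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:27168", database="/Root")  # port from the run above
driver.wait(timeout=5)

# Grant coarse read/write back to the subject the test locked out.
settings = ydb.ModifyPermissionsSettings().grant_permissions(
    "user0@builtin", ("ydb.generic.read", "ydb.generic.write")
)
driver.scheme_client.modify_permissions("/Root/Test", settings)
driver.stop()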
2025-04-04T17:16:58.403557Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:58.419294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:58.555725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:58.738332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.837542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.320351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508224519751549:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.320476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.535939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.567324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.598033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.669399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.693630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.735734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.797231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508224519752063:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.797343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.797630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508224519752068:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.801354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:00.811573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508224519752070:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:00.872208Z node 1 :TX_PROXY ERROR: Actor# [1:7489508224519752123:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14839, MsgBus: 5046 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001610/r3tmp/tmpbACDFa/pdisk_1.dat 2025-04-04T17:17:02.907373Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508230931916047:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:02.988788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:03.094454Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:03.107596Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:03.107681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:03.110486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14839, node 2 2025-04-04T17:17:03.194400Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:03.194417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:03.194423Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:03.194528Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5046 TClient is connected to server localhost:5046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:03.569380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
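[annotation] This section belongs to KqpStats::DataQueryMulti, which exercises a data query made of several statements; the server returns one result set per statement. A minimal sketch of that shape, with the endpoint taken from the run above:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:19598", database="/Root")  # ephemeral test port
driver.wait(timeout=5)

def data_query_multi(session):
    # Three statements in one data query yield three result sets in the reply.
    return session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT 1 AS a; SELECT 2 AS b; SELECT 3 AS c;", commit_tx=True
    )

with ydb.SessionPool(driver) as pool:
    result_sets = pool.retry_operation_sync(data_query_multi)
    assert len(result_sets) == 3
driver.stop()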
2025-04-04T17:17:03.575392Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:03.586168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.644443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.824360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.900618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:06.004798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508248111786953:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:06.004878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:06.040490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:06.070029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:06.097772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:06.124126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:06.187762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:06.218056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:06.261269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508248111787466:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:06.261342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:06.261492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508248111787471:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:06.264371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:06.273796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508248111787473:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:06.338068Z node 2 :TX_PROXY ERROR: Actor# [2:7489508248111787525:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64188, MsgBus: 13631 2025-04-04T17:17:08.169677Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508258219514018:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:08.169776Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001610/r3tmp/tmpV5NLOk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64188, node 3 2025-04-04T17:17:08.315283Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:08.315370Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:08.319356Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:08.347078Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:08.366829Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:08.366851Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:08.366857Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:08.366943Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13631 TClient is connected to server localhost:13631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:08.920353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:08.926152Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:08.937860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.015349Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.155239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.245535Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.682147Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508271104417664:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:11.682297Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:11.723983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.763148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.800045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.876475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.927022Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.982500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.081517Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508275399385479:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.081612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.081971Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508275399385484:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.086093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:12.097589Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508275399385486:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:12.198431Z node 3 :TX_PROXY ERROR: Actor# [3:7489508275399385542:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:13.172698Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508258219514018:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:13.172764Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 7211, MsgBus: 22978 2025-04-04T17:16:49.727650Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508178819319882:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.728875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001622/r3tmp/tmpc97505/pdisk_1.dat 2025-04-04T17:16:50.382725Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.384386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.384490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.388644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7211, node 1 2025-04-04T17:16:50.490876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.490897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.490903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.491002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22978 TClient is connected to server localhost:22978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:16:51.350783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.375223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.389910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.564895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.764585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.866831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.419588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195999190832:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.419703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.635699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.706521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.771995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.798641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.822420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.854417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.901108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195999191347:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.901195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.901390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508195999191352:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.904132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.912796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508195999191354:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:54.004983Z node 1 :TX_PROXY ERROR: Actor# [1:7489508200294158704:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.727559Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508178819319882:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.727615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28750, MsgBus: 7565 2025-04-04T17:16:55.968011Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508204285419911:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.968065Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001622/r3tmp/tmp52Mbm2/pdisk_1.dat 2025-04-04T17:16:56.138727Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:56.151110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:56.151197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:56.152547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28750, node 2 2025-04-04T17:16:56.278666Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:56.278687Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:56.278701Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:56.278795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7565 TClient is connected to server localhost:7565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
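The records in this section all share one shape: an ISO timestamp, "node <id>", then ":<COMPONENT> <LEVEL>:" and the message (e.g. "... node 1 :KQP_WORKLOAD_SERVICE WARN: ..."). A minimal triage sketch in Python, assuming only that shape (the regex and the summarize helper are illustrative, not part of the ya/ydb tooling), tallies records per component and level so recurring noise such as the KQP_WORKLOAD_SERVICE WARN / TX_PROXY ERROR pairs around /Root/.metadata/workload_manager/pools/default stands out:

    import re
    import sys
    from collections import Counter

    # Matches the record shape seen in this log, e.g.
    # "2025-04-04T17:16:53.419588Z node 1 :KQP_WORKLOAD_SERVICE WARN: ..."
    RECORD = re.compile(
        r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node (?P<node>\d+) "
        r":(?P<component>[A-Z_]+) (?P<level>[A-Z]+):"
    )

    def summarize(text: str) -> Counter:
        """Count (component, level) pairs across every record in the blob."""
        return Counter((m["component"], m["level"]) for m in RECORD.finditer(text))

    if __name__ == "__main__":
        for (component, level), n in summarize(sys.stdin.read()).most_common():
            print(f"{n:6d}  {component:24s} {level}")

Fed this section, the top rows would be the FLAT_TX_SCHEMESHARD WARN and GRPC_SERVER DEBUG floods, which makes the isolated ERROR records easier to spot.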
2025-04-04T17:16:56.812672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.825848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.922153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.093101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.172819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... false data# peer# 2025-04-04T17:17:13.199169Z node 4 :GRPC_SERVER DEBUG: [0x51b000359880] received request Name# Coordination/CreateNode ok# false data# peer# 2025-04-04T17:17:13.199250Z node 4 :GRPC_SERVER DEBUG: [0x51b000359180] received request Name# Coordination/AlterNode ok# false data# peer# 2025-04-04T17:17:13.199356Z node 4 :GRPC_SERVER DEBUG: [0x51b000359f80] received request Name# Coordination/DropNode ok# false data# peer# 2025-04-04T17:17:13.199444Z node 4 :GRPC_SERVER DEBUG: [0x51b000035d80] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-04-04T17:17:13.199568Z node 4 :GRPC_SERVER DEBUG: [0x51b000031080] received request Name# CreateDatabase ok# false data# peer# 2025-04-04T17:17:13.199630Z node 4 :GRPC_SERVER DEBUG: [0x51b000037280] received request Name# GetDatabaseStatus ok# false data# peer# 2025-04-04T17:17:13.199760Z node 4 :GRPC_SERVER DEBUG: [0x51b000036480] received request Name# AlterDatabase ok# false data# peer# 2025-04-04T17:17:13.199804Z node 4 :GRPC_SERVER DEBUG: [0x51b000036b80] received request Name# ListDatabases ok# false data# peer# 2025-04-04T17:17:13.199931Z node 4 :GRPC_SERVER DEBUG: [0x51b000038080] received request Name# RemoveDatabase ok# false data# peer# 2025-04-04T17:17:13.199956Z node 4 :GRPC_SERVER DEBUG: [0x51b000038780] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-04-04T17:17:13.200082Z node 4 :GRPC_SERVER DEBUG: [0x51b000037980] received request Name# GetScaleRecommendation ok# false data# peer# 2025-04-04T17:17:13.200118Z node 4 :GRPC_SERVER DEBUG: [0x51b0004f0d80] received request Name# ListEndpoints ok# false data# peer# 2025-04-04T17:17:13.200191Z node 4 :GRPC_SERVER DEBUG: [0x51b000360180] received request Name# WhoAmI ok# false data# peer# 2025-04-04T17:17:13.200232Z node 4 :GRPC_SERVER DEBUG: [0x51b000035680] received request Name# NodeRegistration ok# false data# peer# 2025-04-04T17:17:13.200350Z node 4 :GRPC_SERVER DEBUG: [0x51b000357c80] received request Name# Scan ok# false data# peer# 2025-04-04T17:17:13.200424Z node 4 :GRPC_SERVER DEBUG: [0x51b000356080] received request Name# GetShardLocations ok# false data# peer# 2025-04-04T17:17:13.200536Z node 4 :GRPC_SERVER DEBUG: [0x51b000354480] received request Name# DescribeTable ok# false data# peer# 2025-04-04T17:17:13.200604Z node 4 :GRPC_SERVER DEBUG: [0x51b000368680] received request Name# CreateSnapshot ok# false data# peer# 
2025-04-04T17:17:13.200718Z node 4 :GRPC_SERVER DEBUG: [0x51b000364080] received request Name# RefreshSnapshot ok# false data# peer# 2025-04-04T17:17:13.200805Z node 4 :GRPC_SERVER DEBUG: [0x51b0003bdb80] received request Name# DiscardSnapshot ok# false data# peer# 2025-04-04T17:17:13.200900Z node 4 :GRPC_SERVER DEBUG: [0x51b0003be280] received request Name# List ok# false data# peer# 2025-04-04T17:17:13.200987Z node 4 :GRPC_SERVER DEBUG: [0x51b0003bfe80] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-04-04T17:17:13.201072Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c0580] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-04-04T17:17:13.201145Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c0c80] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-04-04T17:17:13.201232Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c1380] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-04-04T17:17:13.201327Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c1a80] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-04-04T17:17:13.201392Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c2180] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-04-04T17:17:13.201596Z node 4 :GRPC_SERVER DEBUG: [0x51b00026d480] received request Name# CreateStream ok# false data# peer# 2025-04-04T17:17:13.201678Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c2880] received request Name# ListStreams ok# false data# peer# 2025-04-04T17:17:13.201795Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c2f80] received request Name# DeleteStream ok# false data# peer# 2025-04-04T17:17:13.201847Z node 4 :GRPC_SERVER DEBUG: [0x51b00026cd80] received request Name# DescribeStream ok# false data# peer# 2025-04-04T17:17:13.201959Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c3680] received request Name# ListShards ok# false data# peer# 2025-04-04T17:17:13.201996Z node 4 :GRPC_SERVER DEBUG: [0x51b000336f80] received request Name# SetWriteQuota ok# false data# peer# 2025-04-04T17:17:13.202127Z node 4 :GRPC_SERVER DEBUG: [0x51b000337680] received request Name# UpdateStream ok# false data# peer# 2025-04-04T17:17:13.202155Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c4480] received request Name# PutRecord ok# false data# peer# 2025-04-04T17:17:13.202639Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c5980] received request Name# GetRecords ok# false data# peer# 2025-04-04T17:17:13.202783Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c5280] received request Name# PutRecords ok# false data# peer# 2025-04-04T17:17:13.202982Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c6080] received request Name# GetShardIterator ok# false data# peer# 2025-04-04T17:17:13.203151Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c6780] received request Name# SubscribeToShard ok# false data# peer# 2025-04-04T17:17:13.203305Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c7c80] received request Name# DescribeLimits ok# false data# peer# 2025-04-04T17:17:13.203487Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c8380] received request Name# DescribeStreamSummary ok# false data# peer# 2025-04-04T17:17:13.203647Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c9180] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-04-04T17:17:13.203795Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c9880] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-04-04T17:17:13.203939Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c9f80] received request Name# UpdateShardCount ok# false data# peer# 2025-04-04T17:17:13.204084Z node 4 
:GRPC_SERVER DEBUG: [0x51b0003ca680] received request Name# UpdateStreamMode ok# false data# peer# 2025-04-04T17:17:13.204237Z node 4 :GRPC_SERVER DEBUG: [0x51b0003da980] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-04-04T17:17:13.204394Z node 4 :GRPC_SERVER DEBUG: [0x51b0003db080] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-04-04T17:17:13.204539Z node 4 :GRPC_SERVER DEBUG: [0x51b00025ae80] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-04-04T17:17:13.204699Z node 4 :GRPC_SERVER DEBUG: [0x51b0003dbe80] received request Name# ListStreamConsumers ok# false data# peer# 2025-04-04T17:17:13.204847Z node 4 :GRPC_SERVER DEBUG: [0x51b0003dc580] received request Name# AddTagsToStream ok# false data# peer# 2025-04-04T17:17:13.205011Z node 4 :GRPC_SERVER DEBUG: [0x51b0003dcc80] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-04-04T17:17:13.205155Z node 4 :GRPC_SERVER DEBUG: [0x51b0003dd380] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-04-04T17:17:13.205305Z node 4 :GRPC_SERVER DEBUG: [0x51b0003dda80] received request Name# ListTagsForStream ok# false data# peer# 2025-04-04T17:17:13.205455Z node 4 :GRPC_SERVER DEBUG: [0x51b0003de180] received request Name# MergeShards ok# false data# peer# 2025-04-04T17:17:13.205605Z node 4 :GRPC_SERVER DEBUG: [0x51b0003de880] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-04-04T17:17:13.205748Z node 4 :GRPC_SERVER DEBUG: [0x51b0003df680] received request Name# SplitShard ok# false data# peer# 2025-04-04T17:17:13.205899Z node 4 :GRPC_SERVER DEBUG: [0x51b0003def80] received request Name# StartStreamEncryption ok# false data# peer# 2025-04-04T17:17:13.206043Z node 4 :GRPC_SERVER DEBUG: [0x51b00016ce80] received request Name# StopStreamEncryption ok# false data# peer# 2025-04-04T17:17:13.206202Z node 4 :GRPC_SERVER DEBUG: [0x51b0003fd280] received request Name# SelfCheck ok# false data# peer# 2025-04-04T17:17:13.206541Z node 4 :GRPC_SERVER DEBUG: [0x51b0003fd980] received request Name# NodeCheck ok# false data# peer# 2025-04-04T17:17:13.206741Z node 4 :GRPC_SERVER DEBUG: [0x51b0003b8e80] received request Name# DeleteSession ok# false data# peer# 2025-04-04T17:17:13.206909Z node 4 :GRPC_SERVER DEBUG: [0x51b0003b8780] received request Name# AttachSession ok# false data# peer# 2025-04-04T17:17:13.207075Z node 4 :GRPC_SERVER DEBUG: [0x51b0003af480] received request Name# BeginTransaction ok# false data# peer# 2025-04-04T17:17:13.207241Z node 4 :GRPC_SERVER DEBUG: [0x51b0003afb80] received request Name# CommitTransaction ok# false data# peer# 2025-04-04T17:17:13.207402Z node 4 :GRPC_SERVER DEBUG: [0x51b0003aed80] received request Name# RollbackTransaction ok# false data# peer# 2025-04-04T17:17:13.207549Z node 4 :GRPC_SERVER DEBUG: [0x51b0003b1780] received request Name# ExecuteQuery ok# false data# peer# 2025-04-04T17:17:13.207700Z node 4 :GRPC_SERVER DEBUG: [0x51b0003b3380] received request Name# ExecuteScript ok# false data# peer# 2025-04-04T17:17:13.207875Z node 4 :GRPC_SERVER DEBUG: [0x51b0003b1e80] received request Name# FetchScriptResults ok# false data# peer# 2025-04-04T17:17:13.208026Z node 4 :GRPC_SERVER DEBUG: [0x51b0003ae680] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-04-04T17:17:13.208174Z node 4 :GRPC_SERVER DEBUG: [0x51b0003adf80] received request Name# ChangeTabletSchema ok# false data# peer# 2025-04-04T17:17:13.208321Z node 4 :GRPC_SERVER DEBUG: [0x51b0003ad880] 
received request Name# RestartTablet ok# false data# peer# 2025-04-04T17:17:13.208475Z node 4 :GRPC_SERVER DEBUG: [0x51b0003ad180] received request Name# CreateLogStore ok# false data# peer# 2025-04-04T17:17:13.208626Z node 4 :GRPC_SERVER DEBUG: [0x51b0003aca80] received request Name# DescribeLogStore ok# false data# peer# 2025-04-04T17:17:13.208783Z node 4 :GRPC_SERVER DEBUG: [0x51b0003aae80] received request Name# DropLogStore ok# false data# peer# 2025-04-04T17:17:13.208934Z node 4 :GRPC_SERVER DEBUG: [0x51b0003ac380] received request Name# AlterLogStore ok# false data# peer# 2025-04-04T17:17:13.209090Z node 4 :GRPC_SERVER DEBUG: [0x51b0003abc80] received request Name# CreateLogTable ok# false data# peer# 2025-04-04T17:17:13.209239Z node 4 :GRPC_SERVER DEBUG: [0x51b0003ab580] received request Name# DescribeLogTable ok# false data# peer# 2025-04-04T17:17:13.209386Z node 4 :GRPC_SERVER DEBUG: [0x51b0003aa780] received request Name# DropLogTable ok# false data# peer# 2025-04-04T17:17:13.209539Z node 4 :GRPC_SERVER DEBUG: [0x51b000034f80] received request Name# AlterLogTable ok# false data# peer# 2025-04-04T17:17:13.209694Z node 4 :GRPC_SERVER DEBUG: [0x51b000034880] received request Name# Login ok# false data# peer# 2025-04-04T17:17:13.209841Z node 4 :GRPC_SERVER DEBUG: [0x51b000031780] received request Name# DescribeReplication ok# false data# peer# 2025-04-04T17:17:13.210003Z node 4 :GRPC_SERVER DEBUG: [0x51b000033a80] received request Name# DescribeView ok# false data# peer# 2025-04-04T17:17:13.211522Z node 4 :GRPC_SERVER DEBUG: [0x51b0003b2580] received request Name# CreateSession ok# false data# peer# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] Test command err: Trying to start YDB, gRPC: 7914, MsgBus: 14304 2025-04-04T17:16:56.377864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508206300783932:2235];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:56.378373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001611/r3tmp/tmpT0RWX6/pdisk_1.dat 2025-04-04T17:16:56.845658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:56.845746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:56.847141Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:56.860796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7914, node 1 2025-04-04T17:16:56.986782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:56.986804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:56.986826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:56.986923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14304 TClient is connected to server localhost:14304 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:57.597442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.622608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:57.636906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.767834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:57.945321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:58.035327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.544933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508219185687427:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.545069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:59.758040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.804769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.833411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.861069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.889929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:59.970664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.039312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508223480655237:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.039415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.039633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508223480655242:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.043961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:00.056843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508223480655244:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:00.137628Z node 1 :TX_PROXY ERROR: Actor# [1:7489508223480655298:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:00.993191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:01.378744Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508206300783932:2235];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:01.378793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18823, MsgBus: 1153 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001611/r3tmp/tmp1szMET/pdisk_1.dat 2025-04-04T17:17:02.134737Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:02.209031Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:02.234987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:02.235084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:02.240295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18823, node 2 2025-04-04T17:17:02.393996Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:02.394027Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:02.394033Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:02.394134Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1153 TClient is connected to server localhost:1153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:03.059433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.075129Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:03.100397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.167385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:03.331289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.390283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.180792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508245118391876:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.180833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.215562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.240067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.270602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.297105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.320700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.351052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.391800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508245118392383:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.391897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.392477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508245118392388:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.396368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:05.405526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508245118392390:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:05.496399Z node 2 :TX_PROXY ERROR: Actor# [2:7489508245118392445:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18810, MsgBus: 14574 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001611/r3tmp/tmpykeP0L/pdisk_1.dat 2025-04-04T17:17:07.391640Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:07.433063Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18810, node 3 2025-04-04T17:17:07.499342Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:07.499439Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:07.500971Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:07.519047Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:07.519073Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:07.519082Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:07.519217Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14574 TClient is connected to server localhost:14574 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:17:07.971428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.996431Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:08.053528Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:08.217722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:08.309728Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:10.470369Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508265261574933:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.470567Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.501502Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.546393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.590560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.625975Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.663418Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.704430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.789048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508265261575447:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.789132Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.789352Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508265261575452:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.793079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:10.808704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508265261575454:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:10.907417Z node 3 :TX_PROXY ERROR: Actor# [3:7489508265261575510:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:11.980641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions >> KqpExplain::FewEffects-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 63730, MsgBus: 14963 2025-04-04T17:16:49.713240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508178886139772:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.713643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001631/r3tmp/tmpGDbone/pdisk_1.dat 2025-04-04T17:16:50.171023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.171126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.173436Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.176373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63730, node 1 2025-04-04T17:16:50.328935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.328955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.328962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.329109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14963 TClient is connected to server localhost:14963 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.056114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.079525Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.100826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.275528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.454329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.575653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.064496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508196066010731:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.064641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.537033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.606314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.643774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.724075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.759167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.829652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.889242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508196066011252:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.889307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.889559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508196066011257:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.893042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.902756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508196066011259:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.961392Z node 1 :TX_PROXY ERROR: Actor# [1:7489508196066011312:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.723612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508178886139772:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.735527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 12502, MsgBus: 20059 2025-04-04T17:16:55.918904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508201766467488:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.918945Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001631/r3tmp/tmpm7FNSp/pdisk_1.dat 2025-04-04T17:16:56.014567Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:56.029408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:56.029506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:56.030828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 12502, node 2 2025-04-04T17:16:56.134745Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:56.134768Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:56.134775Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:56.134884Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20059 TClient is connected to server localhost:20059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttr ... 976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.608967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:06.894418Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508229075469600:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.894485Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20249, MsgBus: 65139 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001631/r3tmp/tmpmpZ2T0/pdisk_1.dat 2025-04-04T17:17:08.430450Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:08.456882Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:08.484924Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:08.485024Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:08.486740Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20249, node 4 2025-04-04T17:17:08.538747Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:08.538768Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:08.538775Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:08.538887Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65139 TClient is connected to server localhost:65139 WaitRootIsUp 'Root'... 
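The EXPLAIN output captured above is a nested JSON document: the top-level "Plan" node, like every node under its "Plans" children, carries a "Node Type" and optionally an "Operators" list whose entries have a "Name" (the "SimplifiedPlan" mirrors the same shape). A short sketch in the same illustrative Python tooling (show_plan is a hypothetical helper, not a YDB API) prints the operator tree of such a blob:

    import json
    import sys

    def show_plan(node: dict, depth: int = 0) -> None:
        """Print the 'Node Type' (plus operator names) of a plan node, then recurse."""
        ops = ", ".join(op.get("Name", "?") for op in node.get("Operators", []))
        print("  " * depth + node.get("Node Type", "?") + (f" [{ops}]" if ops else ""))
        for child in node.get("Plans", []):
            show_plan(child, depth + 1)

    if __name__ == "__main__":
        plan = json.load(sys.stdin)   # feed it one {"Plan": ...} blob from the log
        show_plan(plan["Plan"])

On the TopSort plan above this prints Query > ResultSet > Limit > Merge > TopSort-TableRangeScan, which matches the chain visible in the raw JSON.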
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:09.014476Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.020807Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:09.025040Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.113217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.286611Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.373203Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.886543Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508270996572224:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:11.886658Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:11.932571Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.976512Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.024929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.103689Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.145959Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.184209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.279595Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508275291540043:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.279687Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.279865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508275291540048:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.284587Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:12.299986Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508275291540050:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:12.381050Z node 4 :TX_PROXY ERROR: Actor# [4:7489508275291540106:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"0","PlanNodeId":8,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"0","Table":"EightShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"0","Columns":["Data","Key","Text"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node 
Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpParams::RowsList [GOOD] >> KqpQuery::CurrentUtcTimestamp >> KqpParams::MissingParameter [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> KqpQuery::PreparedQueryInvalidate >> KqpStats::OneShardLocalExec-UseSink >> KqpStats::MultiTxStatsFullExpYql >> KqpLimits::WaitCAsStateOnAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FewEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16772, MsgBus: 21620 2025-04-04T17:16:51.746270Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508183821179294:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:51.749438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161a/r3tmp/tmp92BZcZ/pdisk_1.dat 2025-04-04T17:16:52.226801Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:52.231261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:52.231360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:52.233495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16772, node 1 2025-04-04T17:16:52.354782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:52.354806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:52.354814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:52.354916Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21620 TClient is connected to server localhost:21620 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:52.858069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:52.876656Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:52.888888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.023314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.171103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.268871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:54.863862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508196706082883:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:54.863988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.076842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.102187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.168577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.195482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.229126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.259266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:55.301358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508201001050693:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.301466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.301503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508201001050698:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:55.304985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:55.315386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508201001050700:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:55.403582Z node 1 :TX_PROXY ERROR: Actor# [1:7489508201001050754:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:56.742624Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508183821179294:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:56.742695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"0","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"0","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"0","E-Cost":"0"},{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"0","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"0"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"0","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"0"}],"Node 
Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 29536, MsgBus: 30526 2025-04-04T17:16:57.875895Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;a ... l try to initialize from file: (empty maybe) 2025-04-04T17:17:09.766841Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:09.766953Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17185 TClient is connected to server localhost:17185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:10.283472Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:10.297959Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:10.314612Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.389586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:10.601610Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:10.680540Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.023942Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508278444274536:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.024019Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.065233Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.097693Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.129195Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.165502Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.201192Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.238085Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.287562Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508278444275044:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.287658Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.287917Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508278444275049:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.292015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:13.301110Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508278444275051:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:13.356705Z node 4 :TX_PROXY ERROR: Actor# [4:7489508278444275104:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:14.508271Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508261264403681:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:14.508349Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":26,"Plans":[{"Tables":["EightShard"],"PlanNodeId":25,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_5_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_5_0"}],"Node Type":"Effect"},{"PlanNodeId":23,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":20,"Plans":[{"Tables":["EightShard"],"PlanNodeId":19,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_5_0","Node Type":"Precompute_5","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":17,"Plans":[{"Tables":["EightShard"],"PlanNodeId":16,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_3_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_3_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"0","ReadRangesPointPrefixLen":"1","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_3_0","Node Type":"Precompute_3","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, 
+∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"0","ReadRangesPointPrefixLen":"0","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"},{"columns":["Data","Key"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"1","E-Rows":"0","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":18,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":24,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} >> KqpExplain::SortStage [GOOD] >> KqpExplain::SqlIn >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink >> KqpExplain::Explain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5097, MsgBus: 64148 2025-04-04T17:16:59.382193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508218408817987:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:59.382359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001609/r3tmp/tmpXigr5s/pdisk_1.dat 2025-04-04T17:16:59.758612Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5097, node 1 
2025-04-04T17:16:59.817698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:59.822721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:59.828751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:59.854186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:59.854208Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:59.854214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:59.854379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64148 TClient is connected to server localhost:64148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:00.382662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.439212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.581349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.752302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.826059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.458647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508231293721507:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.458750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.722616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.757508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.791932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.835182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.863127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.933571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.020905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508235588689324:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.020980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.021610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508235588689329:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.024777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:03.042693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508235588689331:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:03.132979Z node 1 :TX_PROXY ERROR: Actor# [1:7489508235588689387:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:04.072245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.376627Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508218408817987:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:04.376705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14577, MsgBus: 1122 2025-04-04T17:17:05.606843Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508246999768015:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:05.606956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001609/r3tmp/tmpkpUSPp/pdisk_1.dat 2025-04-04T17:17:05.693503Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14577, node 2 2025-04-04T17:17:05.742157Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:05.746452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:05.763012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:05.778768Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:05.778788Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:05.778794Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:05.778895Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1122 TClient is connected to server localhost:1122 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:06.187312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:06.194572Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:06.199924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:06.271242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:06.441712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:08.643621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.721266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.758064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.788983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.818726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.849952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.901778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508259884672175:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:08.901875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:08.902022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508259884672180:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:08.905483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:08.920568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508259884672182:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:09.004032Z node 2 :TX_PROXY ERROR: Actor# [2:7489508264179639532:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 9810 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3630 affected_shards: 2 } compilation { duration_us: 185528 cpu_time_us: 182383 } process_cpu_time_us: 466 total_duration_us: 199468 total_cpu_time_us: 186479 Trying to start YDB, gRPC: 32452, MsgBus: 62070 2025-04-04T17:17:10.932681Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508265670102377:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:10.933870Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001609/r3tmp/tmpnAVxMZ/pdisk_1.dat 2025-04-04T17:17:11.067038Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:11.082188Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:11.082374Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:11.084396Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32452, node 3 2025-04-04T17:17:11.140325Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:11.140344Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:11.140353Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:11.140458Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62070 TClient is connected to server localhost:62070 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:11.633780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.674472Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:11.680783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.732846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.911520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.978553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:14.107704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508282849973303:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.107771Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.144870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.179395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.208748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.239895Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.270093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.343842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.387557Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508282849973818:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.387631Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.387643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508282849973823:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.390856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:14.403688Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508282849973825:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:14.473069Z node 3 :TX_PROXY ERROR: Actor# [3:7489508282849973878:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 4471 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3545 affected_shards: 1 } query_phases { duration_us: 6268 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 1951 affected_shards: 2 } compilation { duration_us: 178749 cpu_time_us: 175865 } process_cpu_time_us: 683 total_duration_us: 194041 total_cpu_time_us: 182044 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 1896, MsgBus: 7333 2025-04-04T17:17:00.834845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508224407347405:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.835646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001604/r3tmp/tmpKePYiy/pdisk_1.dat 2025-04-04T17:17:01.232081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.232339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.236361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1896, node 1 2025-04-04T17:17:01.273623Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:17:01.282478Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:17:01.335955Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.398705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.398726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.398733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.398818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7333 TClient is connected to server localhost:7333 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.079876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.114438Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:04.020634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508241587217237:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.020785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508237292249918:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.020883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.025626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.033562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508241587217250:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:17:04.092140Z node 1 :TX_PROXY ERROR: Actor# [1:7489508241587217301:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:04.294003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.414879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:17:04.418893Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:17:04.445352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:17:04.445392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:17:04.445519Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037895 2025-04-04T17:17:04.445754Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037893 2025-04-04T17:17:04.454166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:17:04.454544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:17:04.454803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:17:04.454941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:17:04.455043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:17:04.455152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:17:04.455267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:17:04.455396Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:17:04.455572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:17:04.455674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:17:04.455713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:17:04.455739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:17:04.455780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:17:04.455870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:17:04.455898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489508241587217468:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:17:04.455958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:17:04.456073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:17:04.456166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:17:04.456258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:17:04.456374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:17:04.456473Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7489508241587217469:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:17:04.456582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751 ... nt=skip_flush_writing; 2025-04-04T17:17:07.218323Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037905;parent=[1:7489508245882185708:2618];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-04T17:17:07.219989Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037904;parent=[1:7489508245882185717:2619];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-04T17:17:07.225016Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037903;parent=[1:7489508245882185718:2620];fline=actor.cpp:33;event=skip_flush_writing; 2025-04-04T17:17:07.225081Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037902;parent=[1:7489508245882185726:2621];fline=actor.cpp:33;event=skip_flush_writing; Trying to start YDB, gRPC: 26925, MsgBus: 15107 2025-04-04T17:17:07.874442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508255097382936:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:07.874485Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001604/r3tmp/tmpqh7mRB/pdisk_1.dat 2025-04-04T17:17:08.045711Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:08.059171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:08.059284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:08.062315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26925, node 2 2025-04-04T17:17:08.137644Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:08.137663Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:08.137668Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:08.137799Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15107 TClient is connected to server localhost:15107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:08.571539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:08.593496Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:08.611737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.640573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.972983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508267982285476:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.973185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.976425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508267982285513:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.980042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:17:10.988554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508267982285515:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:17:11.090792Z node 2 :TX_PROXY ERROR: Actor# [2:7489508272277252862:2346] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:11.110779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.376535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8836, MsgBus: 2548 2025-04-04T17:17:12.396486Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508276813362283:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:12.419549Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001604/r3tmp/tmpT4NstE/pdisk_1.dat 2025-04-04T17:17:12.522191Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8836, node 3 2025-04-04T17:17:12.562338Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:12.562435Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:12.564240Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:12.594491Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:12.594516Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:12.594523Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:12.594651Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2548 TClient is connected to server localhost:2548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:13.023541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:15.455274Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508289698264648:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:15.455353Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508289698264662:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:15.455405Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:15.459399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:17:15.468922Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508289698264680:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:17:15.547616Z node 3 :TX_PROXY ERROR: Actor# [3:7489508289698264736:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:15.568478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:17:15.784398Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489508289698264871:2356], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2025-04-04T17:17:15.784617Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2YwNmY3YTAtYWU4OWRiYjYtZTdmMTA1NDQtYjYxODllOTA=, ActorId: [3:7489508289698264869:2355], ActorState: ExecuteState, TraceId: 01jr0wx32wayv0jptqgxfy0kby, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
>> KqpQuery::ReadOverloaded+StreamLookup [GOOD]
>> KqpQuery::ReadOverloaded-StreamLookup
>> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD]
>> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink
>> KqpQuery::OlapCreateAsSelect_Complex [GOOD]
>> DataShardReadIterator::TryWriteManyRows+Commit [GOOD]
>> DataShardReadIterator::TryWriteManyRows-Commit
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite
>> KqpStats::JoinStatsBasicScan [GOOD]
>> KqpExplain::SelfJoin3xSameLabels
>> KqpLimits::KqpMkqlMemoryLimitException
>> TPersQueueTest::TestWriteStat [GOOD]
>> TPersQueueTest::TestWriteSessionsConflicts
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 16882, MsgBus: 4327 2025-04-04T17:17:01.227538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508226979524324:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:01.227579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001603/r3tmp/tmpNnuDs1/pdisk_1.dat 2025-04-04T17:17:01.681404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.681536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.687842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:01.692843Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16882, node 1 2025-04-04T17:17:01.776671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.776693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.776705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.776813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4327 TClient is connected to server localhost:4327 WaitRootIsUp 'Root'...
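For context on the KQP_COMPILE_ACTOR failure logged above ("Creating table with data is not supported", code 1020, test KqpQuery::OltpCreateAsSelect_Disable): the test name suggests this is the CREATE TABLE ... AS SELECT path being rejected for row-oriented (OLTP) tables while the feature is disabled. A minimal sketch of the statement class involved; the table and column names below are illustrative assumptions, not taken from the log:

    -- Hypothetical YQL of the kind such a test would compile; names invented.
    -- With OLTP CTAS disabled, compilation fails with the error shown above.
    CREATE TABLE `/Root/Destination` (
        PRIMARY KEY (Key)
    )
    AS SELECT Key, Value FROM `/Root/Source`;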
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.436479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.458933Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:02.473015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.632743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.797100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.871544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.377137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508239864427913:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.377243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.587364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.615569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.646680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.674887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.700356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.727835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.821185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508239864428428:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.821304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.821392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508239864428433:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.824684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.832572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508239864428435:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:04.934332Z node 1 :TX_PROXY ERROR: Actor# [1:7489508239864428490:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.227886Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508226979524324:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.227971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21128, MsgBus: 26599 2025-04-04T17:17:06.888227Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508251874700313:2156];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001603/r3tmp/tmp96NKbn/pdisk_1.dat 2025-04-04T17:17:06.908981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:06.976531Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21128, node 2 2025-04-04T17:17:07.006961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:07.010623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:07.030743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:07.064818Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:07.064838Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:07.064844Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:07.064944Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26599 TClient is connected to server localhost:26599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:07.445775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:07.454981Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:09.786354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508264759602738:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:09.786437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:09.806009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:09.947350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:17:10.116632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508269054571437:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.116684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.116799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508269054571442:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:10.119359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:17:10.125611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508269054571444:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:17:10.213330Z node 2 :TX_PROXY ERROR: Actor# [2:7489508269054571495:3216] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:11.885168Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508251874700313:2156];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:11.885247Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20417, MsgBus: 19796 2025-04-04T17:17:12.909509Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508276495611824:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:12.916876Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001603/r3tmp/tmpYaaRke/pdisk_1.dat 2025-04-04T17:17:13.008551Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20417, node 3 2025-04-04T17:17:13.039439Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:13.039534Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:13.057804Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:13.084360Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:13.084404Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:13.084412Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:13.084519Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19796 TClient is connected to server localhost:19796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:13.475021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.481604Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:15.829841Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508289380514287:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:15.829931Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:15.857113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:16.016918Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:17:16.180539Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508293675482931:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:16.180624Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:16.180685Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508293675482936:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:16.184013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:17:16.193428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508293675482938:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:17:16.255348Z node 3 :TX_PROXY ERROR: Actor# [3:7489508293675482989:3210] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:17.916780Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508276495611824:2140];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:17.928923Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD]
Test command err: Trying to start YDB, gRPC: 9879, MsgBus: 9179 2025-04-04T17:17:00.745605Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508226168128875:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.745655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001606/r3tmp/tmpCEsuUF/pdisk_1.dat 2025-04-04T17:17:01.135181Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9879, node 1 2025-04-04T17:17:01.174390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.180834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.190065Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:17:01.194820Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:17:01.215920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:01.315945Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.315972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.315979Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.316099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9179 TClient is connected to server localhost:9179 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:01.935580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.949737Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:01.954704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.128477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.320732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.404122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.091034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508243347999822:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.091361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.362570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.388877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.420547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.452494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.483567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.514577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.575591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508243348000333:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.575693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.575954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508243348000338:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.581883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.591705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508243348000340:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:04.664433Z node 1 :TX_PROXY ERROR: Actor# [1:7489508243348000394:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:05.748686Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508226168128875:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:05.748752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14098, MsgBus: 8287 2025-04-04T17:17:06.533505Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508252080341386:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.533545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001606/r3tmp/tmpcAv4G8/pdisk_1.dat 2025-04-04T17:17:06.675288Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:06.690078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:06.690150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:06.692890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14098, node 2 2025-04-04T17:17:06.741639Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:06.741660Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:06.741667Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:06.741759Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8287 TClient is connected to server localhost:8287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:17:07.166502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:07.183782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.259069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:07.416977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 w ... apshotFromChunks;id=13; 2025-04-04T17:17:16.350672Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:17:16.350860Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:17:16.350895Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:17:16.351014Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:17:16.351038Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:17:16.352064Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:17:16.352107Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:17:16.352189Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:17:16.352216Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:17:16.352378Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:17:16.352409Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:17:16.352512Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:17:16.352550Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:17:16.352626Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:17:16.352650Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:17:16.352688Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:17:16.352711Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:17:16.353203Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:17:16.353234Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:17:16.353425Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:17:16.353469Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:17:16.353594Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:17:16.353620Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:17:16.353822Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:17:16.353850Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:17:16.353943Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:17:16.353962Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:17:16.357448Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:17:16.357503Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:17:16.357611Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:17:16.357673Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:17:16.357846Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:17:16.357872Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:17:16.357991Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:17:16.358041Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:17:16.358128Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:17:16.358157Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:17:16.358209Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:17:16.358259Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:17:16.358830Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:17:16.358888Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:17:16.359121Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:17:16.359154Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:17:16.359313Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:17:16.359340Z node 3 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:17:16.359546Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:17:16.359571Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:17:16.359679Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:17:16.359702Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:17:16.376698Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-04-04T17:17:16.377683Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-04-04T17:17:16.383896Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-04-04T17:17:16.384111Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-04-04T17:17:16.754349Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508272254598006:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:16.754430Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 3716, MsgBus: 15550 2025-04-04T17:16:58.542769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508213993220549:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:58.542839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160a/r3tmp/tmpLZXCEd/pdisk_1.dat 2025-04-04T17:16:58.935043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:58.975119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:58.975200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:58.979768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3716, node 1 2025-04-04T17:16:59.026806Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:59.026835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:59.026843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:59.034798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15550 TClient is connected to server localhost:15550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:59.665275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:59.686304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:59.815101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:59.972373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:00.035796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.683734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508226878123984:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:01.683860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:01.934975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.033360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.078857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.117294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.156324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.240710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.306535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508231173091800:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.306640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.306992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508231173091805:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:02.310536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:02.325378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508231173091807:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:02.386378Z node 1 :TX_PROXY ERROR: Actor# [1:7489508231173091860:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:03.541347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508213993220549:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:03.541406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8621, MsgBus: 1913 2025-04-04T17:17:04.750899Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508241825630141:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:04.750950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160a/r3tmp/tmpFTZ3b2/pdisk_1.dat 2025-04-04T17:17:04.823402Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8621, node 2 2025-04-04T17:17:04.877331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:04.877430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:04.878854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:04.905871Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:04.905891Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:04.905897Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:04.906000Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1913 TClient is connected to server localhost:1913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:05.286643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.302147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.376341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.536511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.613342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:07.684951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoo ... Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:07.723589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.760856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.815152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.881985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.911598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:07.961602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:08.016620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508259005501606:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:08.016712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:08.018018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508259005501611:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:08.022889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:08.039466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508259005501613:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:08.105950Z node 2 :TX_PROXY ERROR: Actor# [2:7489508259005501667:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:09.750368Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508241825630141:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:09.750437Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16730, MsgBus: 15167 2025-04-04T17:17:10.840164Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508268559032732:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:10.840297Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160a/r3tmp/tmp9rSVp6/pdisk_1.dat 2025-04-04T17:17:11.022469Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16730, node 3 2025-04-04T17:17:11.049355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:11.049446Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:11.051130Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:11.077020Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:11.077035Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:11.077040Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:11.077129Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15167 TClient is connected to server localhost:15167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:11.588273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.595977Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:11.614007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.695949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.859769Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.947693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:14.083612Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508285738903617:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.083709Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.116124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.146852Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.178288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.210003Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.289616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.361934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.409616Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508285738904134:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.409693Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508285738904139:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.409705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.412346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:14.420164Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508285738904141:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:14.484233Z node 3 :TX_PROXY ERROR: Actor# [3:7489508285738904194:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:15.840126Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508268559032732:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:15.840202Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:18.421270Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787036166, txId: 281474976715671] shutting down >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryExplain >> KqpExplain::FullOuterJoin [GOOD] >> KqpExplain::MergeConnection >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> KqpQuery::YqlSyntaxV0 >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ParameterTypes >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::QueryCache >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink >> KqpLimits::QueryReplySize >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> TPersQueueTest::SetMeteringMode [GOOD] >> TPersQueueTest::TClusterTrackerTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 21399, MsgBus: 30036 2025-04-04T17:17:01.195585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508230017896801:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:01.203131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001601/r3tmp/tmpvQRB38/pdisk_1.dat 2025-04-04T17:17:01.813004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.813115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.822808Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.827093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21399, node 1 2025-04-04T17:17:02.028977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:02.028992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:02.028999Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:02.029089Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30036 TClient is connected to server localhost:30036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.716286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.731309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:02.753007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.979960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.155321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.232600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.774979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508242902800441:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.775080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.991287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.053921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.083215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.113352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.140526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.215238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.274908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508247197768255:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.274977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.275284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508247197768260:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.278841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:05.288656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508247197768262:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:05.361179Z node 1 :TX_PROXY ERROR: Actor# [1:7489508247197768315:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.195543Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508230017896801:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.195586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:06.228621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:06.447560Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDVkMDU1ODUtYzMzYzQ3YjYtMjIxN2U3YjItZGVlZmFjMTk=, ActorId: [1:7489508251492736195:2514], ActorState: ExecuteState, TraceId: 01jr0wwsxz8cqh5y03ckrhpga3, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes.
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 16820, MsgBus: 21170 2025-04-04T17:17:07.231482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508256323684658:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:07.231541Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001601/r3tmp/tmp1aYBWF/pdisk_1.dat 2025-04-04T17:17:07.361816Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:07.367218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:07.367273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:07.371266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 2 2025-04-04T17:17:07.480010Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:07.480029Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:07.480033Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:07.480121Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:07.862854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:07.889819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation par ... 
3,\"Min\":23},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":72,\"Max\":72,\"Min\":72},\"ActiveMessageMs\":{\"Count\":1,\"Max\":72,\"Min\":5},\"FirstMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"Bytes\":{\"Count\":1,\"Sum\":7707,\"Max\":7707,\"Min\":7707,\"History\":[3,0,25,1006,46,1421,66,1587,74,7707]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":67000,\"Max\":67000,\"Min\":67000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":72,\"Max\":72,\"Min\":72},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":69,\"Max\":69,\"Min\":69},\"FirstMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"ActiveMessageMs\":{\"Count\":1,\"Max\":72,\"Min\":5},\"PauseMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":67000,\"Max\":67000,\"Min\":67000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":62647,\"Max\":62647,\"Min\":62647,\"History\":[3,0,25,19863,46,39818,66,60101,74,62647]},\"WaitPeriods\":{\"Count\":1,\"Sum\":22,\"Max\":22,\"Min\":22},\"WaitMessageMs\":{\"Count\":1,\"Max\":69,\"Min\":4}}}],\"DurationUs\":{\"Count\":1,\"Sum\":68000,\"Max\":68000,\"Min\":68000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576,74,1048576]},\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7707,\"Max\":7707,\"Min\":7707},\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":68000,\"BaseTimeMs\":1743787035449,\"OutputBytes\":{\"Count\":1,\"Sum\":7707,\"Max\":7707,\"Min\":7707},\"CpuTimeUs\":{\"Count\":1,\"Sum\":8688,\"Max\":8688,\"Min\":8688,\"History\":[3,386,25,2694,46,3604,66,4047,74,8688]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":29,\"Max\":29,\"Min\":29},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":72,\"Max\":72,\"Min\":72},\"ActiveMessageMs\":{\"Count\":1,\"Max\":72,\"Min\":4},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[3,0,25,1267,46,1682,66,1848,74,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":68000,\"Max\":68000,\"Min\":68000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":72,\"Max\":72,\"Min\":72},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":69,\"Max\":69,\"Min\":69},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveMessageMs\":{\"Count\":1,\"Max\":72,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[3,0,25,1267,46,1682,66,1848,74,8168]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":68000,\"Max\":68000,\"Min\":68000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":15637,\"Max\":15637,\"Min\":15637,\"History\":[3,0,25,4917,46,9916,66,14984,74,15637]},\"WaitPeriods\":{\"Count\":1,\"Sum\":23,\"Max\":23,\"Min\":23},\"WaitMessageMs\":{\"Count\":1,\"Max\":69,\"Min\":3}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":97406,\"CpuTimeUs\":94528},\"ProcessCpuTimeUs\":255,\"TotalDurationUs\":295294,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":112262},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'367) \'(\'\"_id\" \'\"ed1fbe2-af18a79f-47c8e71-701ef9c4\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'379) \'(\'\"_id\" \'\"4ef35f72-7a40b507-9bbcff62-eb561492\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 295294 total_cpu_time_us: 184348 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":2},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743787035\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"23c31542-71b6c03e-6dea655b-390d5e55\",\"version\":\"1.0\"}" Trying to start YDB, gRPC: 
16820, MsgBus: 21172 2025-04-04T17:17:16.658205Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508294978390469:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:16.689845Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001601/r3tmp/tmpiYBCYt/pdisk_1.dat 2025-04-04T17:17:16.780966Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:16.808285Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:16.808360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:16.809368Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 4 2025-04-04T17:17:16.866590Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:16.866622Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:16.866632Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:16.866789Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21172 TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:17.430653Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:17.449790Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:20.204769Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508312158264793:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.204844Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.205069Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508312158264808:2638], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.209897Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:20.223282Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508312158264810:2639], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:20.320591Z node 4 :TX_PROXY ERROR: Actor# [4:7489508312158264880:5612] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink >> KqpExplain::Explain [GOOD] >> KqpExplain::CompoundKeyRange >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::SimpleFeatureFlags ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2025-04-04T17:16:06.384637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.385062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.385110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a67/r3tmp/tmpZDZ3we/pdisk_1.dat 2025-04-04T17:16:06.749307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.786399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.823841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.823965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.836924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.921350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.958267Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.959334Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.959813Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.960076Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.969552Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.996853Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.996959Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.998475Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.998541Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.998600Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.998907Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.999016Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.999099Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:07.009814Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:07.031870Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:07.032067Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:07.032167Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:07.032200Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:07.032231Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:07.032280Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.032480Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.032527Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.032826Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:07.032911Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:07.033137Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:07.033173Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:07.033215Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:07.033246Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:07.033281Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:07.033310Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:07.033363Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:07.033460Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.033506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.033563Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:07.033919Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:07.033967Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:07.034052Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:07.034302Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:07.034352Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:07.034432Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:07.034488Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:07.034529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:07.034566Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:07.034608Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:07.034875Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:07.034913Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:07.034945Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:07.034975Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:07.035018Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:07.035058Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:07.035107Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:07.035151Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.035183Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:07.036528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:07.036573Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:07.047212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:07.047289Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:07.047339Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:07.047399Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:07.047452Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.188241Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.188342Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.188432Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.188733Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.188764Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.188843Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.188879Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.188922Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.188951Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.191824Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.191866Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.192343Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.192369Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.192395Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... 474976715665] at 72075186224037889 executing on unit WaitForPlan 2025-04-04T17:17:22.555818Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit PlanQueue 2025-04-04T17:17:22.555964Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715665 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T17:17:22.555994Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:22.556183Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:878:2711], Recipient [13:878:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:22.556216Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:22.556253Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:17:22.556284Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:22.556311Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:22.556341Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-04-04T17:17:22.556370Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2025-04-04T17:17:22.556401Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-04T17:17:22.556429Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2025-04-04T17:17:22.556455Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2025-04-04T17:17:22.556481Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2025-04-04T17:17:22.556586Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-04-04T17:17:22.556619Z node 13 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-04T17:17:22.556643Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2025-04-04T17:17:22.556666Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:22.556688Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:17:22.556726Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2025-04-04T17:17:22.556755Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2025-04-04T17:17:22.556789Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2025-04-04T17:17:22.556827Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-04T17:17:22.556850Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:22.556873Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-04T17:17:22.556896Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-04T17:17:22.556975Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-04T17:17:22.557000Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-04T17:17:22.557033Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-04T17:17:22.557065Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-04T17:17:22.557089Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-04T17:17:22.557113Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-04T17:17:22.557137Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2025-04-04T17:17:22.557162Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:22.557277Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2025-04-04T17:17:22.557307Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2025-04-04T17:17:22.557338Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:17:22.557373Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:17:22.557403Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-04-04T17:17:22.557428Z node 13 :TX_DATASHARD TRACE: Advance execution plan 
for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:17:22.557451Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2025-04-04T17:17:22.557485Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:22.557521Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:22.557555Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T17:17:22.557588Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T17:17:22.570046Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-04T17:17:22.570168Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:22.570259Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:17:22.570358Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1072:2868], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:22.570441Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:22.570763Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-04T17:17:22.570825Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:17:22.570854Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:22.570895Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1072:2868], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:22.570931Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:22.572441Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:593:2518], Recipient [13:665:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-04T17:17:22.572575Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:22.572662Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:22.572784Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:22.572844Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:22.572902Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:22.572950Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:22.572995Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-04T17:17:22.573040Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
2025-04-04T17:17:22.573066Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:22.573088Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:22.573110Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:22.573244Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2025-04-04T17:17:22.573593Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2025-04-04T17:17:22.573661Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:593:2518], 1} after executionsCount# 1 2025-04-04T17:17:22.573728Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:22.573947Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} finished in read 2025-04-04T17:17:22.574020Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:22.574047Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:22.574071Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:22.574099Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:22.574144Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:22.574165Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:22.574195Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-04T17:17:22.574269Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:22.574420Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::PureExpr >> TTxAllocatorClientTest::InitiatingRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] Test command err: 2025-04-04T17:16:06.597441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.597887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.597938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a69/r3tmp/tmpC4bGnF/pdisk_1.dat 2025-04-04T17:16:06.964039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:07.002019Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:07.039541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:07.039645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:07.050696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:07.131085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:07.166496Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:07.167420Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:07.167850Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:07.168091Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:07.177829Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:07.212182Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:07.212304Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:07.213947Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:07.214021Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:07.214083Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:07.214459Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:07.214583Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:07.214670Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:07.225331Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:07.247477Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:07.247698Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:07.247847Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:07.247886Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:07.247918Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:07.247968Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.248178Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.248219Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.248518Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:07.248610Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:07.248731Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:07.248782Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:07.248834Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:07.248867Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:07.248900Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:07.248943Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:07.248984Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:07.249080Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.249111Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.249166Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:07.249543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:07.249590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:07.249683Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:07.249936Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:07.249984Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:07.250079Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:07.250124Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:07.250192Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:07.250229Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:07.250282Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:07.250544Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:07.250580Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:07.250629Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:07.250657Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:07.250700Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:07.250754Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:07.250791Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:07.250822Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.250847Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:07.252276Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:07.252330Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:07.262998Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:07.263077Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:07.263112Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:07.263175Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:07.263232Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.401495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.401543Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.401618Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.401931Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.401968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.402087Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.402131Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.402187Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.402220Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.411437Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.411506Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.412307Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.412358Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.412410Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... : TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:23.575795Z node 13 :TX_DATASHARD TRACE: TxId: 281474976715665, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\002\000\004\000\000\000\002\000\000\000\004\000\000\000\000\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "index" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 3 Name: "value" Type: 1 } MaxValueSizeBytes: 4 } } 2025-04-04T17:17:23.575915Z node 13 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Int32 : 2, Int32 : 0) 2025-04-04T17:17:23.576018Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 2, Int32 : 0) table: [72057594046644480:2:1] 2025-04-04T17:17:23.576216Z node 13 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-04T17:17:23.576340Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-04T17:17:23.576742Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:17:23.576852Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:17:23.576926Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:17:23.576986Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:23.577040Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:23.577103Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2025-04-04T17:17:23.577213Z node 13 :TX_DATASHARD TRACE: Activated operation 
[0:281474976715665] at 72075186224037888 2025-04-04T17:17:23.577277Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:17:23.577308Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:23.577333Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:17:23.577360Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:17:23.577422Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2025-04-04T17:17:23.577497Z node 13 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715663 not found 2025-04-04T17:17:23.577561Z node 13 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715663 expected 2:3 found 0:0 2025-04-04T17:17:23.577699Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 3 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-04T17:17:23.577806Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:17:23.577833Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:17:23.577857Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:23.577880Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:23.578002Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:23.578094Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:23.578161Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:23.578214Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:23.578283Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:17:23.578310Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:23.578347Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-04T17:17:23.579099Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:23.579220Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:23.579300Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-04-04T17:17:23.579414Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:23.580236Z node 13 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=13&id=YTFiNzU5MWQtOWMwZDJiMDQtNWYzNmZjOWItYmNiOWQzYzE=, ActorId: [13:949:2764], ActorState: ExecuteState, TraceId: 01jr0wxak46ej0948c9c5vfy4g, Create QueryResponse for error on request, msg: 2025-04-04T17:17:23.581248Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0wxak46ej0948c9c5vfy4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTFiNzU5MWQtOWMwZDJiMDQtNWYzNmZjOWItYmNiOWQzYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:23.581612Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:999:2764], Recipient [13:872:2706]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 999 RawX2: 55834577612 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\002 \003)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-04-04T17:17:23.581655Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:23.581854Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:872:2706], Recipient [13:872:2706]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:17:23.581889Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:17:23.581942Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:23.582098Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-04T17:17:23.582219Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:17:23.582282Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:17:23.582313Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:17:23.582339Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:23.582367Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:23.582407Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/18446744073709551615 ImmediateWriteEdgeReplied# v3501/18446744073709551615 2025-04-04T17:17:23.582453Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2025-04-04T17:17:23.582483Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:17:23.582510Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:23.582531Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:17:23.582558Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:17:23.582635Z node 13 
:TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-04-04T17:17:23.582756Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 3 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-04T17:17:23.582840Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:17:23.582909Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:17:23.582935Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:17:23.582960Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:23.582986Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:23.583040Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:17:23.583140Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-04-04T17:17:23.583182Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:23.583236Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:23.583288Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:23.583327Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-04-04T17:17:23.583352Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:23.583379Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-04-04T17:17:23.583437Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:23.583501Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:23.583566Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpQuery::QueryExplain [GOOD] >> KqpQuery::QueryFromSqs >> TTxAllocatorClientTest::InitiatingRequest [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-04-04T17:17:25.895536Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T17:17:25.896791Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T17:17:25.898682Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 
TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T17:17:25.914964Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:25.920743Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T17:17:25.938709Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:25.938863Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:25.938928Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:25.938994Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T17:17:25.939202Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:25.939322Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T17:17:25.939454Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T17:17:25.941557Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#5000 2025-04-04T17:17:25.945033Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:25.945134Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:25.945264Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-04T17:17:25.945319Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 0 to# 5000 >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue >> KqpLimits::TooBigColumn-useSink >> KqpQuery::QueryCache [GOOD] >> KqpQuery::Pure >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite >> KqpParams::ParameterTypes [GOOD] >> KqpParams::InvalidJson >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> TTxAllocatorClientTest::ZeroRange >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> TPersQueueTest::InflightLimit [GOOD] >> TPersQueueTest::DisableDeduplication [GOOD] >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> KqpExplain::CompoundKeyRange [GOOD] >> KqpExplain::ExplainDataQuery >> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD] >> 
DataShardReadIterator::ShouldReadRangePrefix5 >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19093, MsgBus: 10766 2025-04-04T17:17:00.147889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508222659159758:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.148006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001608/r3tmp/tmpZj1UIU/pdisk_1.dat 2025-04-04T17:17:00.563252Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19093, node 1 2025-04-04T17:17:00.600539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:00.600638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:00.617783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:00.694441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:00.694462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:00.694469Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:00.694603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10766 TClient is connected to server localhost:10766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:01.285548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.318660Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:01.335050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:01.515840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.666468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:01.735229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.451280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508235544063417:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.451396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.662339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.693728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.730642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.765906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.810034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.880426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:03.973493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508235544063938:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.973574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.973900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508235544063943:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:03.977480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:03.987443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508235544063945:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:04.086088Z node 1 :TX_PROXY ERROR: Actor# [1:7489508239839031296:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:04.927235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.150276Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508222659159758:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:05.150332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:05.914868Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508244133999133:2531], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
<main>:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-04-04T17:17:05.915202Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODFiYjBkMGMtNzM1NDZmNGYtNDc5ZjdkMDEtMmRlNDgzM2Q=, ActorId: [1:7489508239839031549:2488], ActorState: ExecuteState, TraceId: 01jr0wwses5zmbeexsn0djzp2r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:17:06.015234Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODFiYjBkMGMtNzM1NDZmNGYtNDc5ZjdkMDEtMmRlNDgzM2Q=, ActorId: [1:7489508239839031549:2488], ActorState: ExecuteState, TraceId: 01jr0wwsg739cny7p815nc84tr, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-04-04T17:17:06.037521Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508248428966446:2537], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:7:29: Error: At function: KiWriteTable!
<main>:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:4:25: Error: Implicit decimal cast would lose precision
<main>:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:17:06.037770Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODFiYjBkMGMtNzM1NDZmNGYtNDc5ZjdkMDEtMmRlNDgzM2Q=, ActorId: [1:7489508239839031549:2488], ActorState: ExecuteState, TraceId: 01jr0wwsk95cm5g2rr8vpdm9qg, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:17:06.057798Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508248428966456:2541], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:29: Error: At function: KiWriteTable!
<main>:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:0:14: Error: Implicit decimal cast would lose precision
<main>:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:17:06.059167Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODFiYjBkMGMtNzM1NDZmNGYtNDc5ZjdkMDEtMmRlNDgzM2Q=, ActorId: [1:7489508239839031549:2488], ActorState: ExecuteState, TraceId: 01jr0wwskw6wqqfmt6fy76b4mw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 16891, MsgBus: 15576 2025-04-04T17:17:07.338262Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=und ... orId: [3:7489508301198908202:2496], ActorState: ExecuteState, TraceId: 01jr0wx71kdgpzvhxn6qe8pa1j, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 61085, MsgBus: 10534 2025-04-04T17:17:21.279997Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508313130413714:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001608/r3tmp/tmpyQr5j3/pdisk_1.dat 2025-04-04T17:17:21.300975Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:21.397443Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:21.408663Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:21.408762Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:21.411491Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61085, node 4 2025-04-04T17:17:21.506871Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:21.506898Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:21.506905Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:21.507067Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10534 TClient is connected to server localhost:10534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:21.991710Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
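The compile errors above (invalid precision, lossy struct conversion, lossy column conversion) and the $value22 parameter mismatch all exercise YQL's decimal typing rules. A minimal YQL sketch of the failing statements follows, assuming a hypothetical table test_decimal with columns Key Int32 and Value22 Decimal(22,9); only the types and error texts are taken from the log:

    -- "Invalid decimal precision: 99": YQL caps decimal precision at 35,
    -- so this DECLARE is rejected at compile time.
    DECLARE $bad AS Decimal(99, 9);

    -- "Parameter $value22 type mismatch": the query declares Decimal(22,9)
    -- while the client binds a Decimal(35,10) value.
    DECLARE $value22 AS Decimal(22, 9);

    -- "Implicit decimal cast would lose precision": writing the wider
    -- Decimal(35,10) into the Decimal(22,9) column fails instead of truncating.
    UPSERT INTO test_decimal (Key, Value22)
    VALUES (1, CAST("123.4567890123" AS Decimal(35, 10)));

Each statement fails independently; the same error sequence repeats below for node 4 because the test replays the scenario against a second server configuration.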
2025-04-04T17:17:21.996559Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.010889Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.092524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.259717Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.340474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:24.846858Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508326015317198:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.846989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.895110Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.935932Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.971901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.011653Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.043552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.080905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.143619Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508330310285002:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.143727Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.144093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508330310285007:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.147942Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:25.158127Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508330310285009:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:25.257840Z node 4 :TX_PROXY ERROR: Actor# [4:7489508330310285064:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:26.271096Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508313130413714:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:26.271185Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:26.310597Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:27.293956Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7489508338900220279:2547], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
<main>:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-04-04T17:17:27.295953Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTVmMjYxMTEtOGMxZTBmYzktMTliY2U3MjUtOTY3ZWM1N2Y=, ActorId: [4:7489508338900220277:2546], ActorState: ExecuteState, TraceId: 01jr0wxebb8bc9jnsywm6y68w5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:17:27.390986Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmFmYjliMjctNWM5ZjY2NjYtOTcxZDZlNDQtY2MyZWYzYw==, ActorId: [4:7489508338900220283:2549], ActorState: ExecuteState, TraceId: 01jr0wxecfbcqwtqfhfrcnd1pk, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-04-04T17:17:27.440936Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7489508338900220304:2556], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:7:29: Error: At function: KiWriteTable!
<main>:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:4:25: Error: Implicit decimal cast would lose precision
<main>:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:17:27.443153Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YzFkNDJhMjItNjVmYjI2ZGEtM2QwN2VjYTMtZDA2MjJkOTE=, ActorId: [4:7489508338900220302:2555], ActorState: ExecuteState, TraceId: 01jr0wxefv96xfhypvkgne7f2z, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:17:27.488926Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7489508338900220315:2561], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:29: Error: At function: KiWriteTable!
<main>:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
<main>:0:14: Error: Implicit decimal cast would lose precision
<main>:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional<Decimal(22,9)>
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:17:27.489947Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2E5YzE0MDYtOTgwMTgzYzMtOTBiZDI2MzktYzJiMDgwZDE=, ActorId: [4:7489508338900220313:2560], ActorState: ExecuteState, TraceId: 01jr0wxeh2agkjdtd7725ttwqt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpStats::DeferredEffects-UseSink [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2025-04-04T17:13:06.653614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507220705376897:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.653651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.756457Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507218796142648:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.772979Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.987367Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:13:06.990802Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e42/r3tmp/tmpeyhv4p/pdisk_1.dat 2025-04-04T17:13:07.362744Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:07.370834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.370918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.374736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.374805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.376489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:07.378989Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:13:07.381316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13628, node 1 2025-04-04T17:13:07.629648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e42/r3tmp/yandexCF6C9b.tmp 2025-04-04T17:13:07.629668Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e42/r3tmp/yandexCF6C9b.tmp 2025-04-04T17:13:07.629789Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e42/r3tmp/yandexCF6C9b.tmp 2025-04-04T17:13:07.629912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:07.740392Z INFO: 
TTestServer started on Port 10846 GrpcPort 13628 TClient is connected to server localhost:10846 PQClient connected to localhost:13628 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:08.155657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:08.217648Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:13:08.230593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:13:10.450294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507235976012206:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.452858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507235976012230:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.454112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.472354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:13:10.503956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507235976012234:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:13:10.599103Z node 2 :TX_PROXY ERROR: Actor# [2:7489507235976012262:2177] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:10.835114Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507237885247263:2344], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:10.835347Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjI5YThjODAtMzE0MDVlYjgtODcwMmIzNmYtOTQ5NjBmZDc=, ActorId: [1:7489507237885247213:2336], ActorState: ExecuteState, TraceId: 01jr0wnkq97p3gsdjper8nb7be, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:10.836721Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507235976012269:2321], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:10.836923Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjQyNDdhNDYtYWJiZDEzYTYtZWM0M2NiMWEtMjIyMzQ0NjM=, ActorId: [2:7489507235976012204:2312], ActorState: ExecuteState, TraceId: 01jr0wnkh4a4xwwdwx1pdnnqe0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:10.837317Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:10.837304Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:10.861259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.921150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:11.047867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:13:11.322473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0wnm6r0yb172pjakphfnj8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjhlN2EzMzYtYjE4NjQ3MzQtNzg1ZGU3MGItYmY3YmE2ZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489507242180214995:3067] 2025-04-04T17:13:11.654025Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489507220705376897:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:11.654097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:13:11.750740Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489507218796142648:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:11.750819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:13628 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: 2025-04-04T17:13:17.254379Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2025-04-04T17:13:17.254425Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2025-04-04T17:13:17.254431Z node 1 :PQ_METACACHE DEBUG: Describe all topics - send empty response 2025-04-04T17:13:17.254452Z node 1 :PQ_METACACHE DEBUG: Send describe all topics response with 0 ... _READ_PROXY INFO: session cookie 3 consumer session _27_3_17426091083582694661_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-04-04T17:17:16.347344Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-04-04T17:17:16.347445Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1743787032084, sizeLag# 82536 2025-04-04T17:17:16.347470Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1TEvPartitionReady. 
Aval parts: 1 2025-04-04T17:17:16.347616Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2025-04-04T17:17:16.347754Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 got read request: guid# 7011f8e2-dd1316b9-3c973e5a-fc1bba08 2025-04-04T17:17:16.347813Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 performing read request: guid# 5f63ad6f-c96dd3d0-4a47eb78-53f2e7ee, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-04T17:17:16.347930Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 5f63ad6f-c96dd3d0-4a47eb78-53f2e7ee 2025-04-04T17:17:16.349101Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:17:16.349173Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-04T17:17:19.722149Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-04T17:17:19.722218Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-04T17:17:19.722359Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-04T17:17:19.722390Z node 28 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:17:19.722568Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:17:19.723653Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_4381976386850541346_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1743787032084 CreateTimestampMS: 1743787032076 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1743787032124 CreateTimestampMS: 1743787032120 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1743787032149 CreateTimestampMS: 1743787032137 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1743787032189 CreateTimestampMS: 1743787032186 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3380 } Cookie: 0 } 2025-04-04T17:17:19.723954Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_4381976386850541346_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-04T17:17:19.724163Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_4381976386850541346_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid a89396a3-37cc7f74-f907e044-c3c074e4 has messages 1 2025-04-04T17:17:19.724359Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_4381976386850541346_v1 read done: guid# a89396a3-37cc7f74-f907e044-c3c074e4, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82608 2025-04-04T17:17:19.724402Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_4381976386850541346_v1 response to read: guid# a89396a3-37cc7f74-f907e044-c3c074e4 2025-04-04T17:17:19.724890Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_4381976386850541346_v1 Process answer. Aval parts: 0 Bytes readed: 82608 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-04T17:17:19.728090Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_4381976386850541346_v1 grpc read done: success# 0, data# { } 2025-04-04T17:17:19.728123Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_4381976386850541346_v1 grpc read failed 2025-04-04T17:17:19.728145Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_4381976386850541346_v1 grpc closed 2025-04-04T17:17:19.728170Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_4381976386850541346_v1 is DEAD 2025-04-04T17:17:19.729401Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_2_4381976386850541346_v1 2025-04-04T17:17:19.729463Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7489508294463514004:2580] destroyed 2025-04-04T17:17:19.729526Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_2_4381976386850541346_v1 2025-04-04T17:17:23.728156Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-04-04T17:17:23.728251Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-04-04T17:17:23.728419Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 
2025-04-04T17:17:23.728457Z node 28 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:17:23.728669Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:17:23.735020Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1743787032084 CreateTimestampMS: 1743787032076 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1743787032124 CreateTimestampMS: 1743787032120 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1743787032149 CreateTimestampMS: 1743787032137 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1743787032189 CreateTimestampMS: 1743787032186 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7379 } Cookie: 0 } 2025-04-04T17:17:23.735475Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-04T17:17:23.735545Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 5f63ad6f-c96dd3d0-4a47eb78-53f2e7ee has messages 1 2025-04-04T17:17:23.735721Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 read done: guid# 5f63ad6f-c96dd3d0-4a47eb78-53f2e7ee, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82608 2025-04-04T17:17:23.735764Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 response to read: guid# 5f63ad6f-c96dd3d0-4a47eb78-53f2e7ee 2025-04-04T17:17:23.736181Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 Process answer. 
Aval parts: 0 Bytes readed: 82608 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-04-04T17:17:23.742326Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_17426091083582694661_v1 grpc read done: success# 0, data# { } 2025-04-04T17:17:23.742347Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_17426091083582694661_v1 grpc read failed 2025-04-04T17:17:23.742372Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_17426091083582694661_v1 grpc closed 2025-04-04T17:17:23.742406Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_17426091083582694661_v1 is DEAD 2025-04-04T17:17:23.746698Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_3_17426091083582694661_v1 2025-04-04T17:17:23.746781Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7489508294463514009:2584] destroyed 2025-04-04T17:17:23.746867Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_3_17426091083582694661_v1 2025-04-04T17:17:24.164312Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 1048584 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10240 BurstSize: 10240 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-04-04T17:17:24.162689Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] TPersQueueReadBalancer::HandleWakeup 2025-04-04T17:17:24.162773Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-04-04T17:17:24.166867Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-04-04T17:17:24.167474Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] ProcessPendingStats. 
PendingUpdates size 1 >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRangesFullScan >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 65263, MsgBus: 22760 2025-04-04T17:17:00.769001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508224825767615:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.769136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001605/r3tmp/tmp5POanN/pdisk_1.dat 2025-04-04T17:17:01.326820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.349632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.349728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.352552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65263, node 1 2025-04-04T17:17:01.470521Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:01.470547Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:01.470553Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:01.470660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22760 TClient is connected to server localhost:22760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.199045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.255029Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:02.276530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:02.442176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.598559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.673493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.208809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508242005638572:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.208945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.408830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.477442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.544251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.573585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.601760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.631749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:04.708928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508242005639090:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.708990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.709349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508242005639095:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.712753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.722837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508242005639097:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:04.821006Z node 1 :TX_PROXY ERROR: Actor# [1:7489508242005639152:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:05.770359Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508224825767615:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:05.770423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23616, MsgBus: 10790 2025-04-04T17:17:09.623815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:09.623954Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:09.624186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001605/r3tmp/tmpklQtkh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23616, node 2 2025-04-04T17:17:10.075714Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:10.076926Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:10.076975Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:10.077011Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:10.077525Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:17:10.113501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:10.113621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:10.124893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10790 TClient is connected to server localhost:10790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:10.390005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:10.501729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:10.814261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.241461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose ... . }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys/Index/indexImplTable' retry limit exceeded. }. 2025-04-04T17:17:17.265378Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3249:4547], TxId: 281474976715674, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wx3ayepv1t7rntdg84hk5. SessionId : ydb://session/3?node_id=2&id=ODhmYmNlMjEtYTY2ZTQwNjEtZmI1NzQ3NjYtYjViMTg3ODc=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:3242:4079], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:17.265627Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3250:4548], TxId: 281474976715674, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=ODhmYmNlMjEtYTY2ZTQwNjEtZmI1NzQ3NjYtYjViMTg3ODc=. CustomerSuppliedId : . TraceId : 01jr0wx3ayepv1t7rntdg84hk5. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:3242:4079], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:17.265891Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3251:4549], TxId: 281474976715674, task: 4. Ctx: { TraceId : 01jr0wx3ayepv1t7rntdg84hk5. SessionId : ydb://session/3?node_id=2&id=ODhmYmNlMjEtYTY2ZTQwNjEtZmI1NzQ3NjYtYjViMTg3ODc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:3242:4079], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:17.276080Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODhmYmNlMjEtYTY2ZTQwNjEtZmI1NzQ3NjYtYjViMTg3ODc=, ActorId: [2:2665:4079], ActorState: ExecuteState, TraceId: 01jr0wx3ayepv1t7rntdg84hk5, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 12566, MsgBus: 21795 2025-04-04T17:17:21.045481Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:99:2145], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:21.045836Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:21.045920Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001605/r3tmp/tmpxdBt8O/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12566, node 3 2025-04-04T17:17:21.470698Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:21.471546Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:21.471596Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:21.471640Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:21.472028Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:17:21.507720Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:21.507860Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:21.519237Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21795 TClient is connected to server localhost:21795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:21.784390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:21.797650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.077792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
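Note on the OVERLOADED / "retry limit exceeded" failures above: OVERLOADED is a retryable status, and the ydb-cpp-sdk referenced in this log ships a retry wrapper that backs off and reissues the operation. The sketch below is illustrative and not taken from the test source; the table columns (Key, Fk, Value), the retry budget, and the include path are assumptions.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>  // include path varies by SDK layout

using namespace NYdb;
using namespace NYdb::NTable;

// Reissues the query on retryable statuses (OVERLOADED, UNAVAILABLE, ...)
// using the SDK's built-in backoff; MaxRetries(10) is an illustrative budget.
TStatus UpsertWithRetries(TTableClient& client) {
    auto retrySettings = TRetryOperationSettings().MaxRetries(10);
    return client.RetryOperationSync([](TSession session) -> TStatus {
        return session.ExecuteDataQuery(
            "UPSERT INTO `/Root/SecondaryKeys` (Key, Fk, Value) VALUES (1, 1, \"one\");",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
    }, retrySettings);
}
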
2025-04-04T17:17:22.497467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.799752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.365280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1804:3401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:23.365635Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:23.385910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.594584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.869457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.125936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.407311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.742885Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.126111Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2390:3852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.126295Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.126654Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2395:3857], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.132651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:25.284625Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2397:3859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:25.352945Z node 3 :TX_PROXY ERROR: Actor# [3:2462:3905] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:26.401090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.610850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.965436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:17:28.763811Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3244:4544], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jr0wxefj7weczjbbqmch9trm. SessionId : ydb://session/3?node_id=3&id=NjA3YzJjODAtMjFiYjMzZWQtNmIxYTFlY2MtYWY1MWZmMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-04-04T17:17:28.763951Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3244:4544], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jr0wxefj7weczjbbqmch9trm. SessionId : ydb://session/3?node_id=3&id=NjA3YzJjODAtMjFiYjMzZWQtNmIxYTFlY2MtYWY1MWZmMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-04-04T17:17:28.764795Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3245:4545], TxId: 281474976715674, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wxefj7weczjbbqmch9trm. SessionId : ydb://session/3?node_id=3&id=NjA3YzJjODAtMjFiYjMzZWQtNmIxYTFlY2MtYWY1MWZmMDQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:3238:4095], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:28.765436Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjA3YzJjODAtMjFiYjMzZWQtNmIxYTFlY2MtYWY1MWZmMDQ=, ActorId: [3:2680:4095], ActorState: ExecuteState, TraceId: 01jr0wxefj7weczjbbqmch9trm, Create QueryResponse for error on request, msg: >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpLimits::AffectedShardsLimit >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DeferredEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15262, MsgBus: 24369 2025-04-04T17:17:09.878174Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508262230642476:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:09.878212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fc/r3tmp/tmpmbpvNU/pdisk_1.dat 2025-04-04T17:17:10.253754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:10.285989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:10.286098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15262, node 1 2025-04-04T17:17:10.287747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:10.328876Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:10.328905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:10.328916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:10.329020Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24369 TClient is connected to server localhost:24369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:10.924900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:10.954551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.104329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.271668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.349987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:12.924015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508275115546131:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.924157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.116660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.143200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.170429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.201091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.232422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.265575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.310877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508279410513940:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.310973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.311311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508279410513945:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.314171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:13.328107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508279410513947:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:13.414771Z node 1 :TX_PROXY ERROR: Actor# [1:7489508279410514001:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:14.878568Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508262230642476:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:14.878638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:16.803335Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787034927, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 7324, MsgBus: 19430 2025-04-04T17:17:17.498702Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508295234883891:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:17.499771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fc/r3tmp/tmpaIjHTy/pdisk_1.dat 2025-04-04T17:17:17.639573Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7324, node 2 2025-04-04T17:17:17.650187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:17.650282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:17.651505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:17.686271Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:17.686294Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:17.686301Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:17.686405Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19430 TClient is connected to server localhost:19430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:18.118163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.127196Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:18.144437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.207997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.345999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 7205759404 ... rt proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.564978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.591133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.619457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.653459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.732520Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508308119788052:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.732598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.732601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508308119788057:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.736690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:20.745899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508308119788059:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:20.835556Z node 2 :TX_PROXY ERROR: Actor# [2:7489508308119788114:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:22.495106Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508295234883891:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.495162Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 6078, MsgBus: 9167 2025-04-04T17:17:23.479933Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508322537667320:2152];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fc/r3tmp/tmpWAnhYv/pdisk_1.dat 2025-04-04T17:17:23.523490Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:23.618054Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:23.633262Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:23.633347Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:23.635367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6078, node 3 2025-04-04T17:17:23.706351Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:23.706370Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:23.706380Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:23.706497Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9167 TClient is connected to server localhost:9167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:24.150351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:24.156203Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:24.175018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
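Note on warning code 1108 above: the analyzer names the fix itself. A minimal sketch of applying that pragma through the C++ SDK follows; it is not taken from the test source, and the table path, column, and literals are illustrative assumptions.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>  // include path varies by SDK layout

using namespace NYdb;
using namespace NYdb::NTable;

// The pragma gives IN ANSI semantics over empty or nullable item collections,
// which is exactly what warning 1108 asks for.
TStatus RunInQueryWithAnsiPragma(TTableClient& client) {
    return client.RetryOperationSync([](TSession session) -> TStatus {
        const TString query = R"(
            PRAGMA AnsiInForEmptyOrNullableItemsCollections;
            SELECT * FROM `/Root/Test` WHERE Amount IN (100ul, 200ul);
        )";
        return session.ExecuteDataQuery(
            query,
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
    });
}
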
2025-04-04T17:17:24.234873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.400840Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:24.488197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:26.855556Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508335422570850:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.855641Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.897356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.934841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.965519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.996157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:27.028991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:27.069141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:27.154903Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508339717538662:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:27.155009Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:27.155299Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508339717538667:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:27.158800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:27.168618Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508339717538669:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:27.268205Z node 3 :TX_PROXY ERROR: Actor# [3:7489508339717538725:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:28.482342Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508322537667320:2152];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:28.482423Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryExecTimeout >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::ReadsetCountLimit >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> KqpQuery::QueryFromSqs [GOOD] >> DataShardOutOfOrder::UncommittedReadSetAck >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery >> DataShardTxOrder::ZigZag >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> Viewer::SimpleFeatureFlags [GOOD] >> KqpParams::DefaultParameterValue [GOOD] >> KqpQuery::Pure [GOOD] >> KqpLimits::TooBigColumn-useSink [GOOD] >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> KqpExplain::IdxFullscan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 3007, MsgBus: 1646 2025-04-04T17:17:12.784322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508277621072249:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:12.784376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015ef/r3tmp/tmpURGAJh/pdisk_1.dat 2025-04-04T17:17:13.126350Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3007, node 1 2025-04-04T17:17:13.181852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:13.181951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:13.196670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:13.231560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:13.231583Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:13.231593Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:13.231691Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1646 TClient is connected to server localhost:1646 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:13.748012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.765092Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:13.777416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.904549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:14.052757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:14.133945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:15.613140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508290505975780:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:15.613229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:15.794197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:15.833170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:15.863280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:15.885911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:15.914997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:15.951573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:16.032811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508294800943593:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:16.032882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:16.033083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508294800943598:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:16.036872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:16.047986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508294800943600:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:16.122971Z node 1 :TX_PROXY ERROR: Actor# [1:7489508294800943655:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:16.931765Z node 1 :GRPC_SERVER DEBUG: [0x51b0000c7f80] received request Name# PrepareDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=OGY3YmUyMmMtNzBhOThmZGQtODA4OTZkNDYtM2Y2ODk2Mw==" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:39394 2025-04-04T17:17:16.931836Z node 1 :GRPC_SERVER DEBUG: [0x51b000210f80] created request Name# PrepareDataQuery 2025-04-04T17:17:16.932145Z node 1 :GRPC_SERVER DEBUG: [0x51b0000c7f80] received request without user token Name# PrepareDataQuery data# session_id: "ydb://session/3?node_id=1&id=OGY3YmUyMmMtNzBhOThmZGQtODA4OTZkNDYtM2Y2ODk2Mw==" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:39394 database# /Root 2025-04-04T17:17:16.932429Z node 1 :GRPC_SERVER DEBUG: Got grpc request# PrepareDataQueryRequest, traceId# 01jr0wx484cat170f6x2ef3wzq, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:39394, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:17:17.023573Z node 1 :GRPC_SERVER DEBUG: [0x51b0000c7f80] issuing response Name# PrepareDataQuery data# operation { ready: true status: SUCCESS result { type_url: "type.googleapis.com/Ydb.Table.PrepareQueryResult" value: "\n>ydb://preparedqueryid/4?id=c9e5fb91-8215e6fe-13901652-845aeeae" } } peer# ipv6:%5B::1%5D:39394 2025-04-04T17:17:17.026386Z node 1 :GRPC_SERVER DEBUG: [0x51b0000c7f80] finished request Name# PrepareDataQuery ok# true peer# ipv6:%5B::1%5D:39394 2025-04-04T17:17:17.033148Z node 1 :GRPC_SERVER DEBUG: [0x51b000210180] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=OGY3YmUyMmMtNzBhOThmZGQtODA4OTZkNDYtM2Y2ODk2Mw==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=c9e5fb91-8215e6fe-13901652-845aeeae" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:39410 2025-04-04T17:17:17.033204Z node 1 :GRPC_SERVER DEBUG: [0x51b0002d1780] created request Name# ExecuteDataQuery 2025-04-04T17:17:17.033352Z node 1 :GRPC_SERVER DEBUG: [0x51b000210180] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=OGY3YmUyMmMtNzBhOThmZGQtODA4OTZkNDYtM2Y2ODk2Mw==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=c9e5fb91-8215e6fe-13901652-845aeeae" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:39410 database# /Root 2025-04-04T17:17:17.033537Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jr0wx4b9akd05bv5ctgy9xsj, sdkBuildInfo# ydb-cpp-sdk/3.1.0, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:39410, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.998735s 2025-04-04T17:17:17.784806Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508277621072249:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:17.784900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3007 2025-04-04T17:17:20.034535Z node 1 :GRPC_SERVER DEBUG: [0x51b000210180] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:39410 2025-04-04T17:17:20.035152Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508299095911225:2488] TxId: 281474976710671. Ctx: { TraceId: 01jr0wx4b9akd05bv5ctgy9xsj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY3YmUyMmMtNzBhOThmZGQtODA4OTZkNDYtM2Y2ODk2Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:17:20.035610Z node 1 :GRPC_SERVER DEBUG: [0x51b000210180] finished request Name ... {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:24.158728Z node 2 :TX_PROXY ERROR: Actor# [2:7489508327597242263:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '710) '('"_id" '"7292ce1-84ca454b-5581604b-cab5282b") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '723) '('"_id" '"8d65566b-4ef85bcd-c4fa8fa-f7fad0af")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","Path":"\/Root\/Test","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 4289, MsgBus: 61819 2025-04-04T17:17:26.043033Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508334493847133:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:26.043072Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015ef/r3tmp/tmp2XlpgA/pdisk_1.dat 2025-04-04T17:17:26.166996Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:26.178555Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:26.178651Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:26.180200Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4289, node 3 2025-04-04T17:17:26.270762Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:26.270781Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:26.270788Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:26.270894Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61819 TClient is connected to server localhost:61819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:26.715604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:26.730776Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:26.748487Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:26.838167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:26.971161Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:27.098284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:29.143269Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508347378750779:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:29.143378Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:29.182974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:29.211933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:29.243658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:29.274766Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:29.343638Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:29.377328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:29.424953Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508347378751291:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:29.425048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:29.425277Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508347378751296:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:29.429087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:29.440454Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508347378751298:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:29.530496Z node 3 :TX_PROXY ERROR: Actor# [3:7489508347378751351:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:30.405053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-04T17:17:31.050328Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508334493847133:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:31.050470Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> KqpParams::InvalidJson [GOOD] >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::DefaultParameterValue [GOOD] Test command err: Trying to start YDB, gRPC: 64450, MsgBus: 11291 2025-04-04T17:17:10.305236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508269136076228:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:10.482017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f9/r3tmp/tmpwo1bru/pdisk_1.dat 2025-04-04T17:17:10.675421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:10.675517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:10.677098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:10.700772Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64450, node 1 2025-04-04T17:17:10.814323Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:10.814344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:10.814350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:10.814473Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11291 TClient is connected to server localhost:11291 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:11.464028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.490219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.639349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.808720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.904495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.524394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508282020979749:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.524526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.760871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.784257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.811653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.847984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.877087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.910451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.987212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508282020980260:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.987292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.987470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508282020980265:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.991161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:14.002341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508282020980267:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:14.078054Z node 1 :TX_PROXY ERROR: Actor# [1:7489508286315947618:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7061, MsgBus: 16878 2025-04-04T17:17:15.805625Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508287694459030:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:15.806396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f9/r3tmp/tmpUQAMvw/pdisk_1.dat 2025-04-04T17:17:15.938260Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:15.962824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:15.962900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:15.964411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7061, node 2 2025-04-04T17:17:16.013481Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:16.013503Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:16.013511Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:16.013613Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16878 TClient is connected to server localhost:16878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:17:16.391996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:16.402591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:16.484797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:16.621350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:16.710044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.884675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508300579362707:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:18.884759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { < ... ype: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.318906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:24.464557Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508327222966586:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.464642Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.515474Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.563447Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.601005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.638378Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.680390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.753278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.800581Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508327222967101:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.800671Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.800793Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508327222967106:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.804513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:24.818865Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508327222967108:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:24.875420Z node 3 :TX_PROXY ERROR: Actor# [3:7489508327222967160:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:26.272795Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508314338062918:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:26.272848Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15001, MsgBus: 17785 2025-04-04T17:17:27.058622Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508339535184230:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.058679Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f9/r3tmp/tmpEQcN26/pdisk_1.dat 2025-04-04T17:17:27.178006Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15001, node 4 2025-04-04T17:17:27.219066Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:27.219151Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:27.220575Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:27.275519Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:27.275538Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:27.275545Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:27.275702Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17785 TClient is connected to server localhost:17785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:27.822734Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:27.844137Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:27.930616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.082390Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.161721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:30.358511Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508352420087857:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.358608Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.397952Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.424567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.451596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.477932Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.508607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.542779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.618878Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508352420088371:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.618979Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.619089Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508352420088376:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.622516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:30.636970Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508352420088378:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:30.720783Z node 4 :TX_PROXY ERROR: Actor# [4:7489508352420088433:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 29507, MsgBus: 28418 2025-04-04T17:16:58.011049Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508209324059661:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:58.011267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160c/r3tmp/tmp0hK5U2/pdisk_1.dat 2025-04-04T17:16:58.483876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:58.484005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:58.487269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:58.488442Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29507, node 1 2025-04-04T17:16:58.573973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:58.574004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:58.574011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:58.574124Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28418 TClient is connected to server localhost:28418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:59.156364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:00.938376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508222208962077:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.938447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508222208962064:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.938490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:00.941633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:17:00.952141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508222208962088:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:17:01.048755Z node 1 :TX_PROXY ERROR: Actor# [1:7489508226503929437:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:01.255789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29932, MsgBus: 16135 2025-04-04T17:17:02.311776Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508231779060925:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:02.383209Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160c/r3tmp/tmpMCStwv/pdisk_1.dat 2025-04-04T17:17:02.539558Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:02.543961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:02.544038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:02.545914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29932, node 2 2025-04-04T17:17:02.706754Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:02.706778Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:02.706785Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:02.706889Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16135 TClient is connected to server localhost:16135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:03.291781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:03.300063Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:05.451590Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508244663963293:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.451677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.451810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508244663963326:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.455383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:17:05.463207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508244663963328:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:17:05.516264Z node 2 :TX_PROXY ERROR: Actor# [2:7489508244663963379:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:05.536122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.667352Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508244663963540:2349], SessionActorId: [2:7489508244663963526:2349], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[2:7489508244663963526:2349]. isRollback=0 2025-04-04T17:17:05.667650Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTE3MTJkN2MtZjc5MDAwM2UtY2ExMWYyYjMtNWRiMTI2MzM=, ActorId: [2:7489508244663963526:2349], ActorState: ExecuteState, TraceId: 01jr0wws5p11cnp3dhtjyxc361, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7489508244663963541:2349] from: [2:7489508244663963540:2349] 2025-04-04T17:17:05.667743Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489508244663963541:2349] TxId: 281474976715661. Ctx: { TraceId: 01jr0wws5p11cnp3dhtjyxc361, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTE3MTJkN2MtZjc5MDAwM2UtY2ExMWYyYjMtNWRiMTI2MzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-04-04T17:17:05.667891Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508244663963544:2349], TxId: 281474976715661, task: 1. Ctx: { TraceId : 01jr0wws5p11cnp3dhtjyxc361. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YTE3MTJkN2MtZjc5MDAwM2UtY2ExMWYyYjMtNWRiMTI2MzM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508244663963541:2349], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:05.669204Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTE3MTJkN2MtZjc5MDAwM2UtY2ExMWYyYjMtNWRiMTI2MzM=, ActorId: [2:7489508244663963526:2349], ActorState: ExecuteState, TraceId: 01jr0wws5p11cnp3dhtjyxc361, Create QueryResponse for error on request, msg: ... 02], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:10.830081Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODFjYTEzMGYtZWM1ZGViYWItNDgwMThmZmQtNDZiZjQ2MjQ=, ActorId: [3:7489508261291948377:2402], ActorState: ExecuteState, TraceId: 01jr0wwxg03gg1mb9p52c4qy0a, Create QueryResponse for error on request, msg: VERIFY failed (2025-04-04T17:17:10.838506Z): assertion failed in non-unittest thread with message: assertion failed at ydb/core/kqp/ut/common/kqp_ut_common.h:366, void NKikimr::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Error: Stream write queries aren't allowed., code: 2029 library/cpp/testing/unittest/registar.cpp:37 RaiseError(): requirement UnittestThread failed 2025-04-04T17:17:11.430371Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508248407044756:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:11.430487Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:21.504538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:21.504566Z node 3 :IMPORT WARN: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1870B438 1. /-S/util/system/yassert.cpp:55: Panic @ 0x186FC3FA 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:37: RaiseError @ 0x18B8D3D1 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:366: AssertSuccessResult @ 0x17F1AD76 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:346: CreateSampleTables @ 0x45A7D840 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:542: operator() @ 0x45AB9E43 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x45AB9E43 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x45AB9E43 8. /-S/util/thread/pool.h:71: Process @ 0x45AB9E43 9. /-S/util/thread/pool.cpp:405: DoExecute @ 0x18721615 10. /-S/util/thread/factory.h:15: Execute @ 0x1871E1CC 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x1871E1CC 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x18717574 13. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x183C8BF8 14. ??:0: ?? @ 0x7F4961D9CAC2 15. ??:0: ?? @ 0x7F4961E2E84F Trying to start YDB, gRPC: 64208, MsgBus: 3694 2025-04-04T17:17:27.183430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508339077829853:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.185058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160c/r3tmp/tmpT7i03a/pdisk_1.dat 2025-04-04T17:17:27.555571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:27.558640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:27.558706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:27.560857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64208, node 1 2025-04-04T17:17:27.645440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:27.645458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:27.645464Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:27.645563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3694 TClient is connected to server localhost:3694 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:28.201510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.220094Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:28.240677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.382177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.504448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:28.579689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.180608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508351962733457:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.180753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.403670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.427234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.456385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.478329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.502815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.577049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.621453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508351962733973:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.621542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.621602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508351962733978:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.624057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:30.632472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508351962733980:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:30.705672Z node 1 :TX_PROXY ERROR: Actor# [1:7489508351962734033:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:32.059966Z node 1 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-04-04T17:17:32.060086Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710671 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2025-04-04T17:17:32.062369Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508356257701635:2488] TxId: 281474976710671. Ctx: { TraceId: 01jr0wxjms0zer68xg439jvb35, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYxODdlZjYtMWU0ODM5NjEtYjFkZDU4MWMtZTEzM2I3ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2025-04-04T17:17:32.071535Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjYxODdlZjYtMWU0ODM5NjEtYjFkZDU4MWMtZTEzM2I3ZjE=, ActorId: [1:7489508356257701586:2488], ActorState: ExecuteState, TraceId: 01jr0wxjms0zer68xg439jvb35, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-04-04T17:17:32.187239Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508339077829853:2127];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:32.187336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2025-04-04T17:17:31.448920Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:31.552737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:31.552792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:31.563630Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:31.564078Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:31.564340Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:31.576785Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:31.619472Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:31.619604Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:31.622395Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:31.622478Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:31.622539Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:31.623833Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:31.624225Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:31.624321Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:31.703121Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:31.736571Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:31.738066Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:31.738237Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:31.738320Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:31.738363Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:31.738400Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:31.738588Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:31.739158Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:31.740122Z node 1 
:TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:31.740224Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:31.740362Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:31.740409Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:31.740453Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:31.740491Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:31.740524Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:31.740574Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:31.740611Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:31.740708Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:31.740745Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:31.740800Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:31.749042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:31.749121Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:31.749212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:31.749450Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:31.749510Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:31.749562Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:31.749667Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:31.749704Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:31.749742Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:31.749775Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:31.750100Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:31.750168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:31.750204Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:31.750236Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:31.750312Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:31.750355Z node 1 :TX_DATASHARD 
TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:31.750391Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:31.750437Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:31.750465Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:31.764484Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:31.764580Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:31.764624Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:31.764663Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:31.764719Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:31.766947Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:31.767003Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:31.767051Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:31.767203Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:31.767240Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:31.767384Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:31.767428Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:31.767465Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:31.767502Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:31.776130Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:31.776224Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:31.776487Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:31.776529Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:31.776584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:31.776627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:31.776662Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:31.776706Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:31.776741Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit 
PlanQueue 2025-04-04T17:17:31.776787Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:31.776823Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:31.776865Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:31.776914Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:31.777128Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:31.777165Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:31.777190Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:31.777215Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:31.777236Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:31.777311Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:31.777335Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:31.777369Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:31.777406Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:31.777475Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:31.777508Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:31.777543Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:31.777582Z node 1 :TX_DATA ... 
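
The trace above shows the datashard advancing an operation through a fixed chain of execution units (WaitForPlan, PlanQueue, LoadTxDetails, ProtectSchemeEchoes, BuildAndWaitDependencies, ...), parking it whenever a unit reports it is not ready. Below is a minimal self-contained sketch of that scheduling idea; the unit names are taken from the log, but the loop itself is an illustrative assumption, not the actual NKikimr::NDataShard code.

    // Sketch of an execution-unit pipeline, assuming a simplified status set.
    #include <cstdio>
    #include <deque>
    #include <string>

    enum class EStatus { Executed, NotReady };

    struct TOperation {
        std::string Id;                    // e.g. "[1000001:1]"
        std::deque<std::string> Units;     // remaining units, in order
    };

    // Pretend-execute one unit: everything succeeds immediately except
    // WaitForPlan, which only passes once a plan step has arrived.
    EStatus ExecuteUnit(const std::string& unit, bool planArrived) {
        if (unit == "WaitForPlan" && !planArrived)
            return EStatus::NotReady;      // park until TEvPlanStep
        return EStatus::Executed;
    }

    void Advance(TOperation& op, bool planArrived) {
        while (!op.Units.empty()) {
            const std::string unit = op.Units.front();
            std::printf("Trying to execute %s on unit %s\n", op.Id.c_str(), unit.c_str());
            if (ExecuteUnit(unit, planArrived) != EStatus::Executed) {
                std::printf("Operation %s is not ready to execute on unit %s\n",
                            op.Id.c_str(), unit.c_str());
                return;                    // stay parked on this unit
            }
            std::printf("Advance execution plan for %s executing on unit %s\n",
                        op.Id.c_str(), unit.c_str());
            op.Units.pop_front();
        }
    }

    int main() {
        TOperation op{"[1000001:1]", {"WaitForPlan", "PlanQueue", "LoadTxDetails",
                                      "ProtectSchemeEchoes", "BuildAndWaitDependencies"}};
        Advance(op, /*planArrived=*/false);  // parks on WaitForPlan
        Advance(op, /*planArrived=*/true);   // runs through the remaining units
    }
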
2025-04-04T17:17:32.830973Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.831033Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.831081Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.831138Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-04T17:17:32.831170Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.831194Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:17:32.831221Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.831242Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:17:32.831258Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:17:32.831384Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.831406Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.831448Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.831520Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-04T17:17:32.831561Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.831583Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:17:32.831600Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:17:32.831618Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:17:32.831718Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.831742Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.831772Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.831807Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-04T17:17:32.831826Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.831933Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.831950Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.831974Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.832003Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-04T17:17:32.832020Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.832117Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.832152Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.832181Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.832220Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-04T17:17:32.832240Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.832255Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.832273Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2025-04-04T17:17:32.832310Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-04T17:17:32.832379Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.832556Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.832579Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.832617Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.832669Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-04T17:17:32.832690Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.832765Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.832780Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.832800Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.832819Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-04T17:17:32.832832Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.832912Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.833128Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.833181Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:32.833205Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.833229Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:17:32.833316Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.833335Z 
node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-04-04T17:17:32.833376Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:32.833414Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-04T17:17:32.833436Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.833609Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-04T17:17:32.833672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:32.833718Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2025-04-04T17:17:32.833811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-04T17:17:32.833831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:32.833850Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-04T17:17:32.833883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-04T17:17:32.833895Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:32.833907Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-04T17:17:32.833956Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-04T17:17:32.833980Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:32.833994Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-04T17:17:32.834041Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-04T17:17:32.834069Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:32.834085Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-04T17:17:32.834118Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-04T17:17:32.834133Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
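
The "Send delayed Ack RS Ack" / "Receive RS Ack" pairs above are the TEvReadSet acknowledgement protocol: the source tablet keeps each sent readset until the destination acks its Seqno, and acks can arrive out of transaction order (txId 5, Seqno 2 is acked last here). A hedged sketch of that bookkeeping follows; it models the idea only, not the real datashard state machine.

    // Outstanding readsets keyed by Seqno, erased when the ack arrives.
    #include <cstdint>
    #include <cstdio>
    #include <map>
    #include <utility>
    #include <vector>

    struct TReadSetKey {
        uint64_t Step, TxId, Seqno;
    };

    class TPendingReadSets {
        std::map<uint64_t, TReadSetKey> BySeqno;   // outstanding, keyed by Seqno
    public:
        void Sent(const TReadSetKey& rs) { BySeqno[rs.Seqno] = rs; }
        void OnAck(uint64_t seqno) {
            auto it = BySeqno.find(seqno);
            if (it == BySeqno.end())
                return;                            // duplicate or stale ack
            std::printf("Receive RS Ack txId %llu (seqno %llu)\n",
                        (unsigned long long)it->second.TxId,
                        (unsigned long long)seqno);
            BySeqno.erase(it);
        }
        bool Empty() const { return BySeqno.empty(); }
    };

    int main() {
        TPendingReadSets pending;
        // (txId, Seqno) pairs as they appear in the log above.
        std::vector<std::pair<uint64_t, uint64_t>> sent = {
            {4, 1}, {6, 3}, {7, 4}, {8, 5}, {9, 6}, {10, 7}, {12, 8}, {5, 2}};
        for (const auto& [txid, seqno] : sent)
            pending.Sent({1000004, txid, seqno});
        // Acks arrive out of tx order; Seqno 2 (txId 5) comes last.
        for (uint64_t seqno : {1, 3, 4, 5, 6, 7, 8, 2})
            pending.OnAck(seqno);
        std::printf("all acked: %s\n", pending.Empty() ? "yes" : "no");
    }
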
2025-04-04T17:17:32.834144Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-04T17:17:32.834199Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-04T17:17:32.834224Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:32.834319Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-04T17:17:32.834393Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-04T17:17:32.834409Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:32.834420Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 65280, MsgBus: 26979 2025-04-04T17:17:16.814801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508293287615483:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:16.816646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015ee/r3tmp/tmp6e52Mx/pdisk_1.dat 2025-04-04T17:17:17.159785Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65280, node 1 2025-04-04T17:17:17.213326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:17.213897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:17.215700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:17.281597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:17.281619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:17.281624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:17.281736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26979 TClient is connected to server localhost:26979 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:17.817393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:17.831090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:17.845715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:17.969310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.117430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.187285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:19.754449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508306172519141:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:19.754660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.056438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.085526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.115870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.146375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.213816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.247935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.296784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508310467486954:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.296859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.296905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508310467486959:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.300095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:20.312581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508310467486961:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:20.373585Z node 1 :TX_PROXY ERROR: Actor# [1:7489508310467487013:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:21.313264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18395, MsgBus: 16633 2025-04-04T17:17:22.182619Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508317297828313:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.182707Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015ee/r3tmp/tmppIUfdB/pdisk_1.dat 2025-04-04T17:17:22.278921Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18395, node 2 2025-04-04T17:17:22.328080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:22.328211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:22.332489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:22.363666Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:22.363684Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:22.363691Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:22.363795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16633 TClient is connected to server localhost:16633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:22.785796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
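
The repeated NOT_FOUND warnings followed by "Scheduled retry", and the later "path exist, request accepts it" issue, show the workload service racing the creation of the default resource pool: lookups before the pool exists are retried, and a create that loses the race to a concurrent creator is treated as success. Below is a sketch of that retry pattern; the status names and backoff policy are assumptions for illustration, not the actual KQP_WORKLOAD_SERVICE logic.

    // Retry-with-backoff sketch: NOT_FOUND is retryable, AlreadyExists is success.
    #include <chrono>
    #include <cstdio>
    #include <thread>

    enum class EStatus { Success, NotFound, AlreadyExists };

    // Stand-in for the pool lookup; flips to Success after a few attempts,
    // the way the real pool appears once its creation transaction completes.
    EStatus FetchPoolDefault(int attempt) {
        return attempt < 3 ? EStatus::NotFound : EStatus::Success;
    }

    bool FetchWithRetry(int maxAttempts) {
        auto delay = std::chrono::milliseconds(50);
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            switch (FetchPoolDefault(attempt)) {
                case EStatus::Success:
                case EStatus::AlreadyExists:   // concurrent creation won the race
                    return true;
                case EStatus::NotFound:
                    std::printf("attempt %d: NOT_FOUND, scheduled retry in %lld ms\n",
                                attempt, (long long)delay.count());
                    std::this_thread::sleep_for(delay);
                    delay *= 2;                // exponential backoff
            }
        }
        return false;
    }

    int main() { std::printf("fetched: %s\n", FetchWithRetry(5) ? "yes" : "no"); }
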
2025-04-04T17:17:22.791441Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:22.799497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.878480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.046225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.119726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:25.087076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508330182731965:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.087148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.113376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.144331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.176700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.208021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.237744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.280351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.356968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508330182732484:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.357049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.357255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508330182732489:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.361218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:25.377197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508330182732491:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:25.433139Z node 2 :TX_PROXY ERROR: Actor# [2:7489508330182732544:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30055, MsgBus: 62652 2025-04-04T17:17:27.279666Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508338942754748:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.279734Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015ee/r3tmp/tmprld9gY/pdisk_1.dat 2025-04-04T17:17:27.422841Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30055, node 3 2025-04-04T17:17:27.443037Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:27.446593Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:27.477572Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:27.502818Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:27.502845Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:27.502857Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:27.502981Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62652 TClient is connected to server localhost:62652 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:28.033551Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:17:28.049965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:28.113548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.258466Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:28.333765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.589123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508351827658402:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.589247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.639710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.670262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.700355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.728406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.756520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.795228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.848013Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508351827658911:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.848093Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.848157Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508351827658916:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.852093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:30.862293Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508351827658918:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:30.942740Z node 3 :TX_PROXY ERROR: Actor# [3:7489508351827658972:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: 2025-04-04T17:16:07.929508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:07.929884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:07.929952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 16913, node 1 TClient is connected to server localhost:31832 2025-04-04T17:16:15.251183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:336:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:15.251334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:15.251510Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 22046, node 2 TClient is connected to server localhost:20090 2025-04-04T17:16:22.902149Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:22.902344Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:22.902561Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 1262, node 3 TClient is connected to server localhost:13855 2025-04-04T17:16:32.092917Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:310:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:32.093416Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:32.093653Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 26578, node 4 TClient is connected to server localhost:29946 2025-04-04T17:16:41.988256Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:336:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:41.988438Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:41.988654Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6914, node 5 TClient is connected to server localhost:22853 2025-04-04T17:16:52.717897Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:335:2377], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:52.718115Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:52.718419Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6903, node 6 TClient is connected to server localhost:29079 2025-04-04T17:17:05.121564Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:246:2292], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:05.122000Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:05.122394Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 28043, node 7 TClient is connected to server localhost:13534 2025-04-04T17:17:18.660855Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:536:2426], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:18.661352Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:18.661461Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T17:17:19.100079Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:19.260558Z node 8 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T17:17:19.296291Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T17:17:20.034939Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 6307, node 8 TClient is connected to server localhost:28032 2025-04-04T17:17:20.643194Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:20.643306Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:20.643391Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:20.644506Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:17:24.586890Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489508328418329401:2227];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T17:17:24.715652Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:24.829025Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:24.854478Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:24.856335Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:24.861148Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15347, node 11 2025-04-04T17:17:24.936926Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:24.936958Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:24.936974Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:24.937203Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4832 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 65049, MsgBus: 31516 2025-04-04T17:17:10.090992Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508267297607991:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:10.091039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fa/r3tmp/tmpfiCfqx/pdisk_1.dat 2025-04-04T17:17:10.471460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:10.471572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:10.473110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:10.518720Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65049, node 1 2025-04-04T17:17:10.566695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:10.566711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:10.566733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:10.566833Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31516 TClient is connected to server localhost:31516 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:11.075601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.099835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:11.103971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.209003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:11.373047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.468909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.000534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508275887544153:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.000661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.196110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.228872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.264119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.306918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.329371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.358332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.406375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508280182511959:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.406447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.406629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508280182511964:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.410615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:13.422700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508280182511966:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:13.493872Z node 1 :TX_PROXY ERROR: Actor# [1:7489508280182512019:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"0","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"0","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 25206, MsgBus: 20849 2025-04-04T17:17:15.265858Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508288379063367:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fa/r3tmp/tmprqlj2m/pdisk_1.dat 2025-04-04T17:17:15.292156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:15.358123Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25206, node 2 2025-04-04T17:17:15.393125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:15.393204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-04-04T17:17:15.394952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:15.398697Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:15.398708Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:15.398719Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:15.398780Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20849 TClient is connected to server localhost:20849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPath ... h=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fa/r3tmp/tmpnq2Bo1/pdisk_1.dat 2025-04-04T17:17:26.893016Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17345, node 4 2025-04-04T17:17:26.920137Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:26.920218Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:26.922642Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:26.949255Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:26.949275Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:26.949283Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:26.949417Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25853 TClient is connected to server localhost:25853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:27.407814Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:27.437507Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:27.522836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:27.724849Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:27.826912Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:30.114047Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508352188773918:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.114168Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.143010Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.175436Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.211765Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.242812Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.276149Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.352364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.403944Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508352188774433:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.404040Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.404304Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508352188774438:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.407394Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:30.415779Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508352188774440:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:30.476782Z node 4 :TX_PROXY ERROR: Actor# [4:7489508352188774493:3434] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:31.364769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:31.609217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.650147Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.748957Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508335008902980:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:31.749014Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"0","PlanNodeId":8,"LookupKeyColumns":["id"],"Node Type":"TableLookup","Path":"\/Root\/test_table_idx","Columns":["Value","complex_field","id","str_field"],"E-Rows":"1","Table":"test_table_idx","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0.2","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"4.5"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"1","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/test_table_idx_idx","E-Rows":"1","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"1","Columns":["Value","complex_field","id","str_field"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"1","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"0","Name":"TableRangeScan","Path":"\/Root\/test_table_idx_idx","E-Rows":"1","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"0.2","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"4.5"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 1821, MsgBus: 9847 2025-04-04T17:17:10.715388Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508268116485566:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:10.729805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f4/r3tmp/tmpC9qdo2/pdisk_1.dat 2025-04-04T17:17:11.182876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:11.182985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:11.183108Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:11.186692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1821, node 1 2025-04-04T17:17:11.274487Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:11.274503Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:11.274511Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:11.274613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9847 TClient is connected to server localhost:9847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:11.953091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.974939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:12.123886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:12.262461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:12.343108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.989690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508281001389109:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.989830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.251334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.284380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.316235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.344666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.380436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.445791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.496216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508285296356920:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.496285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.496344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508285296356925:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.499448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:14.508220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508285296356927:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:14.588727Z node 1 :TX_PROXY ERROR: Actor# [1:7489508285296356980:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:15.429918Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ0MTM3Mi1kZDkzNWJhMy1jNzVhOTY3Ni05ODU1OWMyNg==, ActorId: [1:7489508289591324531:2488], ActorState: ExecuteState, TraceId: 01jr0wx2p189w7dyy3580px2zp, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Missing value for parameter: $group Trying to start YDB, gRPC: 21315, MsgBus: 10688 2025-04-04T17:17:16.221811Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508294888648659:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f4/r3tmp/tmpu66Uxs/pdisk_1.dat 2025-04-04T17:17:16.253546Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:16.332810Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:16.351017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:16.351110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:16.352477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21315, node 2 2025-04-04T17:17:16.409601Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:16.409623Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:16.409630Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:16.409732Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10688 TClient is connected to server localhost:10688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:16.868162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:16.906573Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:16.920821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:16.984628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:17.125529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:17.207323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... d: [3:7489508328436978938:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.811100Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.856190Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.889897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.920251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.946812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.974481Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.017681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.059233Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508332731946745:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.059326Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.059438Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508332731946750:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.062282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:25.070273Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508332731946752:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:25.166933Z node 3 :TX_PROXY ERROR: Actor# [3:7489508332731946806:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15751, MsgBus: 32743 2025-04-04T17:17:27.636502Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508342031491274:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.636556Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f4/r3tmp/tmpmW84x7/pdisk_1.dat 2025-04-04T17:17:27.769679Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:27.783344Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:27.783438Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:27.784494Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15751, node 4 2025-04-04T17:17:27.850794Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:27.850814Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:27.850821Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:27.850951Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32743 TClient is connected to server localhost:32743 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:28.343257Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:28.348621Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:28.358043Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.414707Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.563223Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:28.630154Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.828305Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508354916394912:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.828421Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.872128Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.908642Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.943004Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.978802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.047275Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.089100Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.142570Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508359211362721:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.142660Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.142867Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508359211362726:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.147980Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:31.158077Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508359211362728:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:31.243877Z node 4 :TX_PROXY ERROR: Actor# [4:7489508359211362781:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:32.281347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:32.465058Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjM4M2FlNDYtYjk5OTMxOWUtNjRmY2MxM2UtZTgxY2UxYzU=, ActorId: [4:7489508363506330336:2488], ActorState: ExecuteState, TraceId: 01jr0wxkah9vfp4x8zn1tgyddt, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value |90.7%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> DataShardOutOfOrder::TestReadTableWriteConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 7571, MsgBus: 17511 2025-04-04T17:17:17.215484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508298385803161:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:17.217369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e9/r3tmp/tmplcdM8p/pdisk_1.dat 2025-04-04T17:17:17.590210Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7571, node 1 2025-04-04T17:17:17.644866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:17.644975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:17.651589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:17.692061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:17.692086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:17.692095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:17.692244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17511 TClient is connected to server localhost:17511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:18.339371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:18.375951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.519726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.686159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:18.756472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.391315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508311270706827:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.391439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.596933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.624821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.648217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.674314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.699506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.726175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.768611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508311270707333:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.768691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.768813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508311270707338:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.771922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:20.779136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508311270707340:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:20.836987Z node 1 :TX_PROXY ERROR: Actor# [1:7489508311270707393:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:21.962723Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787041962, txId: 281474976710671] shutting down 2025-04-04T17:17:22.215547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508298385803161:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.215638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14919, MsgBus: 26276 2025-04-04T17:17:22.843810Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508317507662490:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.843844Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e9/r3tmp/tmpA1JdLy/pdisk_1.dat 2025-04-04T17:17:22.981898Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:22.983138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:22.983215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:22.986952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14919, node 2 2025-04-04T17:17:23.063677Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:23.063698Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:23.063705Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:23.063801Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26276 TClient is connected to server localhost:26276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:23.452083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.472795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.557738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.731695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.804051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:25.838515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508330392566127:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.838604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.879224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.910423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.941714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.981851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.015525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.051703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.106813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508334687533933:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.106893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.107971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508334687533938:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.111662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:26.122951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508334687533940:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:26.180681Z node 2 :TX_PROXY ERROR: Actor# [2:7489508334687533993:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:27.238977Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787047254, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 5937, MsgBus: 10561 2025-04-04T17:17:28.139082Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508344272010691:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:28.139135Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e9/r3tmp/tmpthOJA7/pdisk_1.dat 2025-04-04T17:17:28.295766Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:28.308799Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:28.308896Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:28.311130Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5937, node 3 2025-04-04T17:17:28.376354Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:28.376375Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:28.376383Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:28.376492Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10561 TClient is connected to server localhost:10561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:28.811008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:28.824536Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.899237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:29.049969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:29.166425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:31.356085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508357156914286:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.356159Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.395858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.421628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.501590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.537530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.570011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.611439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:31.658266Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508357156914798:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.658348Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.658799Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508357156914803:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:31.663875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:31.677247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508357156914806:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:31.767063Z node 3 :TX_PROXY ERROR: Actor# [3:7489508357156914860:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:33.139583Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508344272010691:2092];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:33.139638Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardOutOfOrder::TestSnapshotReadPriority >> TxOrderInternals::OperationOrder [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2025-04-04T17:16:06.291959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.292322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.292371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001aa7/r3tmp/tmpkhcXd4/pdisk_1.dat 2025-04-04T17:16:06.634996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.686513Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.725263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.725362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.736646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.820486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.877555Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.878638Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.879115Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.879357Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.888615Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.909048Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.909133Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.910441Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.910503Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.910538Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.911963Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.912077Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.912171Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:06.922728Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:06.948038Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:06.951071Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:06.951256Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:06.951294Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:06.951330Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:06.951367Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:06.951604Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.951669Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.952822Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:06.952934Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:06.953070Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:06.953118Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:06.953210Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:06.953248Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:06.953295Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:06.953332Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:06.953376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:06.953478Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.953531Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.953580Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:06.954962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:06.955016Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:06.955123Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:06.955437Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:06.955511Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:06.955601Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:06.955719Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:06.955757Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:06.955790Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:06.955838Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.956102Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:06.956136Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:06.956169Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:06.956199Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.956244Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:06.956265Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:06.956293Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:06.956319Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:06.956346Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:06.957586Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:06.957645Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:06.968361Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:06.968432Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.968486Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.968547Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:06.968607Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.107959Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.108036Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.108076Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.108453Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.108499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.108660Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.108721Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.108762Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.108803Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.113429Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.113506Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.118370Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.118440Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.118520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... de 14 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715666 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T17:17:33.425245Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:33.425447Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:880:2711], Recipient [14:880:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:33.425484Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:33.425525Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:17:33.425558Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:33.425594Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:33.425635Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-04T17:17:33.425669Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-04T17:17:33.425701Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:33.425731Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-04T17:17:33.425761Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-04T17:17:33.425793Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-04T17:17:33.425928Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-04T17:17:33.425973Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:33.426001Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-04T17:17:33.426029Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:33.426056Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:17:33.426095Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-04T17:17:33.426135Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-04T17:17:33.426173Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-04T17:17:33.426217Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:33.426262Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:33.426287Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-04T17:17:33.426312Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-04T17:17:33.426398Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-04T17:17:33.426425Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-04T17:17:33.426512Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-04T17:17:33.426553Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-04T17:17:33.426590Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:33.426614Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-04T17:17:33.426642Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-04T17:17:33.426670Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:33.426819Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-04T17:17:33.426857Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-04T17:17:33.426900Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:17:33.426941Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:17:33.426980Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:33.427008Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:17:33.427039Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-04-04T17:17:33.427076Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:33.427112Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:33.427147Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T17:17:33.427186Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T17:17:33.443587Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-04T17:17:33.443760Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:33.443839Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:17:33.449298Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1076:2868], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:33.449446Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:33.450080Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-04T17:17:33.450150Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:17:33.450203Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:33.450284Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1076:2868], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:33.450334Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:33.455096Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:592:2517], Recipient [14:665:2569]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-04T17:17:33.455296Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:33.455443Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:33.455628Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:33.455716Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:33.455783Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:33.455838Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:33.455896Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-04T17:17:33.455957Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:33.455987Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:33.456015Z node 14 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:33.456041Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:33.456201Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-04T17:17:33.456643Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:17:33.456738Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-04T17:17:33.456813Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:592:2517], 10} after executionsCount# 1 2025-04-04T17:17:33.456889Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:592:2517], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:33.457171Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:592:2517], 10} finished in read 2025-04-04T17:17:33.457278Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:33.457312Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:33.457342Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:33.457374Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:33.457432Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:33.457463Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:33.458146Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-04T17:17:33.458230Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:33.458470Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelect_BadCases [GOOD] Test command err: Trying to start YDB, gRPC: 12765, MsgBus: 24688 2025-04-04T17:17:10.446606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508265875245268:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:10.450424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f6/r3tmp/tmpVKCBVT/pdisk_1.dat 2025-04-04T17:17:10.839959Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:10.882976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:10.883095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12765, node 1 2025-04-04T17:17:10.884772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:10.957503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:10.957553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:10.957562Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:10.957687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24688 TClient is connected to server localhost:24688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:11.577075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.601798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.761229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.955498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:12.047252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:13.683233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508278760148792:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.683323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.942598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:13.975350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.004515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.033474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.059028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.094151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.153764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508283055116598:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.153855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.154101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508283055116603:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.157934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:14.170269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508283055116605:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:14.262632Z node 1 :TX_PROXY ERROR: Actor# [1:7489508283055116661:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:15.441755Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508265875245268:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:15.441825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6557, MsgBus: 12960 2025-04-04T17:17:16.105117Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508292641819029:2084];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:16.105226Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f6/r3tmp/tmpPmmbSq/pdisk_1.dat 2025-04-04T17:17:16.261165Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:16.295611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:16.295691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:16.299662Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6557, node 2 2025-04-04T17:17:16.372733Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:16.372754Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:16.372761Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:16.372866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12960 TClient is connected to server localhost:12960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:17:16.765068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:16.794668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:16.871358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:17.009061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:17.085571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:19.215813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... 508348508105624:3310];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.065258Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[3:7489508348508105890:3382];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.067877Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[3:7489508348508105613:3306];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.069722Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037965 not found 2025-04-04T17:17:32.069761Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037982 not found 2025-04-04T17:17:32.069776Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037990 not found 2025-04-04T17:17:32.069791Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037988 not found 2025-04-04T17:17:32.070501Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;self_id=[3:7489508348508105816:3336];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.072871Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[3:7489508348508105467:3298];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.075681Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[3:7489508348508105630:3312];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.077924Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[3:7489508348508105663:3318];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.078699Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[3:7489508348508105607:3303];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.079933Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038017;self_id=[3:7489508348508105635:3315];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.081248Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[3:7489508348508105824:3341];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.081888Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[3:7489508348508105612:3305];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.083959Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[3:7489508348508105337:3291];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.084276Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[3:7489508348508105384:3295];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.086589Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[3:7489508348508105794:3323];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.086729Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[3:7489508348508105616:3308];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.089064Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;self_id=[3:7489508348508105844:3357];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.089446Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[3:7489508348508105821:3339];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.090688Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;self_id=[3:7489508348508105862:3368];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.092277Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[3:7489508348508105872:3373];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.093331Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[3:7489508348508105615:3307];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.094680Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[3:7489508348508105311:3290];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.095925Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[3:7489508348508105627:3311];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.096877Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[3:7489508348508105608:3304];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.098652Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[3:7489508348508105632:3314];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.099035Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[3:7489508348508105339:3293];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.101316Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038028;self_id=[3:7489508348508105466:3297];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.101538Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[3:7489508348508105681:3320];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.104128Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[3:7489508348508105470:3300];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.106088Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[3:7489508348508105775:3322];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.108901Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[3:7489508348508105837:3352];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.111585Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[3:7489508348508105823:3340];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.116331Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[3:7489508348508105468:3299];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.118473Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[3:7489508348508105852:3361];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.126425Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[3:7489508348508105853:3362];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.129733Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[3:7489508348508105868:3371];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.133024Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[3:7489508348508105805:3330];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.136194Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;self_id=[3:7489508348508105830:3346];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.139323Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;self_id=[3:7489508348508105338:3292];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.141987Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[3:7489508348508105861:3367];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.144216Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[3:7489508348508105622:3309];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.145441Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[3:7489508348508105867:3370];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.147305Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[3:7489508348508105848:3360];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.148276Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037988;self_id=[3:7489508348508105886:3379];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.150068Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[3:7489508348508105815:3335];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:17:32.229400Z node 3 :TX_PROXY ERROR: Actor# [3:7489508361393011224:7655] txid# 281474976715687, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:32.238546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:1, at schemeshard: 72057594046644480 2025-04-04T17:17:32.340677Z node 3 :TX_PROXY ERROR: Actor# [3:7489508361393011361:7740] txid# 281474976715691, issues: { message: "Check failed: path: \'/Root/RowSrc\', error: path exist, request doesn\'t accept it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:32.341036Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjZhZjAyNjEtMWM3NjNhYjgtZDk2ZWUxZjgtNGI1YmZhYTc=, ActorId: [3:7489508361393011199:4137], ActorState: ExecuteState, TraceId: 01jr0wxk4z3h6v7bhf0bsyj7jg, Create QueryResponse for error on request, msg: 2025-04-04T17:17:32.525870Z node 3 :TX_PROXY ERROR: Actor# [3:7489508361393011432:7771] txid# 281474976715693, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:32.538100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:1, at schemeshard: 72057594046644480 2025-04-04T17:17:33.293738Z node 3 :TX_PROXY ERROR: Actor# [3:7489508365687979466:7991] txid# 281474976715699, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:33.305578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:1, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2025-04-04T17:16:06.257678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.258104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.258154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a8a/r3tmp/tmp6qpRfJ/pdisk_1.dat 2025-04-04T17:16:06.635224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.675272Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.716883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.717912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.730289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.820439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.863231Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.864183Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.864612Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.864823Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.880176Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.914957Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.915060Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.916557Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.916629Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.916674Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.916957Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.917066Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.917148Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:06.927816Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:06.956917Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:06.957076Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:06.957175Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:06.957208Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:06.957240Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:06.957280Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:06.957454Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.957510Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.957734Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:06.957805Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:06.957910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:06.957945Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:06.957994Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:06.958026Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:06.958085Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:06.958120Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:06.958156Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:06.958232Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.958293Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.958344Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:06.958684Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:06.958726Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:06.958801Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:06.958985Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:06.959027Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:06.959094Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:06.959145Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:06.959183Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:06.959221Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:06.959264Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.959508Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:06.959540Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:06.959572Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:06.959599Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.959635Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:06.959670Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:06.959709Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:06.959738Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:06.959759Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:06.961017Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:06.961059Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:06.971617Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:06.971680Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.971709Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.971762Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:06.971817Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.109782Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.109847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.109890Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.110221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.110281Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.110403Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.110450Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.110507Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.110543Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.114759Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.114819Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.118269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.118315Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.118363Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... planned 0 immediate 0 planned 0 2025-04-04T17:17:34.168485Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:17:34.168543Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:34.168604Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:34.168828Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:880:2711], Recipient [14:880:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:34.168863Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:34.168907Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:17:34.168943Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:34.168981Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:34.169019Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-04-04T17:17:34.169073Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-04-04T17:17:34.169109Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:34.169141Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-04-04T17:17:34.169172Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-04-04T17:17:34.169203Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-04-04T17:17:34.169336Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-04-04T17:17:34.169375Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:34.169405Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-04-04T17:17:34.169431Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:34.169460Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:17:34.169503Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-04-04T17:17:34.169541Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-04-04T17:17:34.169579Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-04-04T17:17:34.169624Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:34.169649Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:34.169672Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-04T17:17:34.169698Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-04T17:17:34.169809Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-04T17:17:34.169850Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-04T17:17:34.169888Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-04T17:17:34.169923Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-04T17:17:34.169948Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:34.169974Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-04T17:17:34.169999Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-04-04T17:17:34.170025Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:34.170164Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-04-04T17:17:34.170192Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-04-04T17:17:34.170228Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:17:34.170281Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:17:34.170313Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-04-04T17:17:34.170336Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:17:34.170366Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-04-04T17:17:34.170418Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:34.170453Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:34.170488Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T17:17:34.170522Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T17:17:34.181637Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-04T17:17:34.181810Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:34.181902Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:17:34.182009Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1076:2868], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:34.182111Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:34.182505Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-04-04T17:17:34.182552Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:17:34.182592Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:34.182634Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1076:2868], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:34.182672Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:34.184331Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:592:2517], Recipient [14:665:2569]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-04T17:17:34.184483Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:34.184599Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:34.184745Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:34.184817Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:34.184885Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:34.184942Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:34.184987Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-04T17:17:34.185043Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:34.185072Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:34.185116Z node 14 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:34.185145Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:34.185307Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-04-04T17:17:34.185744Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:17:34.185834Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-04-04T17:17:34.185906Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:592:2517], 3} after executionsCount# 1 2025-04-04T17:17:34.186000Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:592:2517], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:34.186330Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:592:2517], 3} finished in read 2025-04-04T17:17:34.186436Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:34.186471Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:34.186501Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:34.186532Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:34.186603Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:34.186627Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:34.186667Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-04T17:17:34.186734Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:34.186935Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardTxOrder::RandomPointsAndRanges >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] >> KqpExplain::ReadTableRangesFullScan [GOOD] >> KqpExplain::ReadTableRanges >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> DataShardScan::ScanFollowedByUpdate >> TPersQueueTest::TestWriteSessionsConflicts [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword >> DataShardTxOrder::RandomDotRanges_DelayRS >> KqpQuery::UpdateWhereInSubquery [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2025-04-04T17:16:06.231666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.232209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.232272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a5e/r3tmp/tmpU0Q783/pdisk_1.dat 2025-04-04T17:16:06.635127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.686480Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.728447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.728545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.739596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.820410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.865086Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.866120Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.866663Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.866850Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.880214Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.914228Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.914384Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.916086Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.916164Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.916220Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.916566Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.916702Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.916787Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:06.927444Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:06.955021Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:06.955227Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:06.955333Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:06.955366Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:06.955400Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:06.955446Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:06.955671Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.955719Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.956018Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:06.956119Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:06.956242Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:06.956296Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:06.956361Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:06.956397Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:06.956447Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:06.956483Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:06.956531Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:06.956645Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.956693Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.956752Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:06.957130Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:06.957184Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:06.957279Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:06.957528Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:06.957584Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:06.957677Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:06.957750Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:06.957798Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:06.957839Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:06.957883Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.958139Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:06.958168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:06.958202Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:06.958233Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.958298Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:06.958338Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:06.958388Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:06.958423Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:06.958443Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:06.959684Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:06.959730Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:06.970434Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:06.970507Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.970544Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.970618Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:06.970701Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.109524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.109585Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.109690Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.110000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.110034Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.110151Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.110219Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.110310Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.110348Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.123526Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.123594Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.124346Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.124390Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.124431Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... 4976715664] at 72075186224037889 executing on unit WaitForPlan 2025-04-04T17:17:35.759788Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit PlanQueue 2025-04-04T17:17:35.759951Z node 15 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715664 at step 3000 at tablet 72075186224037889 { Transactions { TxId: 281474976715664 AckTo { RawX1: 0 RawX2: 0 } } Step: 3000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T17:17:35.759984Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:35.760186Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:880:2711], Recipient [15:880:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:35.760227Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:35.760264Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:17:35.760298Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:35.760328Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:35.760363Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-04-04T17:17:35.760395Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-04-04T17:17:35.760424Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:35.760452Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-04-04T17:17:35.760481Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-04-04T17:17:35.760510Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-04-04T17:17:35.760660Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-04T17:17:35.760706Z node 15 :TX_DATASHARD TRACE: 
Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:35.760732Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-04-04T17:17:35.760755Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:35.760781Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:17:35.760821Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-04-04T17:17:35.760858Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-04-04T17:17:35.760895Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-04-04T17:17:35.760939Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:35.760961Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:35.760985Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-04T17:17:35.761010Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-04T17:17:35.761106Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-04T17:17:35.761146Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-04T17:17:35.761193Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-04T17:17:35.761230Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-04T17:17:35.761255Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:35.761280Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-04T17:17:35.761302Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-04-04T17:17:35.761325Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:35.761462Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-04-04T17:17:35.761490Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-04-04T17:17:35.761526Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:17:35.761581Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:17:35.761622Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:35.761644Z node 15 :TX_DATASHARD TRACE: Advance execution plan 
for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:17:35.761671Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-04-04T17:17:35.761706Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:35.761739Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:35.761771Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T17:17:35.761808Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T17:17:35.785759Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-04T17:17:35.785921Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:35.786004Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:17:35.786115Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1039:2836], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:35.786207Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:35.786436Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-04T17:17:35.786481Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:17:35.786512Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:35.786553Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1039:2836], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:35.786610Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:35.788539Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:592:2517], Recipient [15:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2025-04-04T17:17:35.788706Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:35.788809Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:35.788975Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:35.789047Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:35.789110Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:35.789168Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:35.789217Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-04T17:17:35.789300Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2025-04-04T17:17:35.789332Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:35.789359Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:35.789386Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:35.789560Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2025-04-04T17:17:35.790011Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-04T17:17:35.790093Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:592:2517], 1} after executionsCount# 1 2025-04-04T17:17:35.790167Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:35.790569Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 1} finished in read 2025-04-04T17:17:35.790691Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:35.790739Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:35.790773Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:35.790813Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:35.790868Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:35.790890Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:35.790925Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-04T17:17:35.790986Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:35.791177Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpLimits::AffectedShardsLimit [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 16241, MsgBus: 22891 2025-04-04T17:17:21.670305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508314887753309:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:21.671068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015d6/r3tmp/tmpSMhPSL/pdisk_1.dat 2025-04-04T17:17:22.019346Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:17:22.023068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:22.023160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:22.027172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16241, node 1 2025-04-04T17:17:22.092694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:22.092715Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:22.092722Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:22.092817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22891 TClient is connected to server localhost:22891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:22.581299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.599330Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:22.605697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.732624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.895012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:22.981511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.691706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508327772656894:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.691851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.889771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.914865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.944215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.970708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.004316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.074325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:25.168170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508332067624713:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.168269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.168527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508332067624718:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:25.171775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:25.180790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508332067624720:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:25.264693Z node 1 :TX_PROXY ERROR: Actor# [1:7489508332067624774:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:26.411240Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508336362592367:2497], status: GENERIC_ERROR, issues:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... 2025-04-04T17:17:26.412367Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI4ZjAzYzUtZTNlM2NhNjItZDY0NmE2MDUtNTc0MmI2NDU=, ActorId: [1:7489508336362592326:2488], ActorState: ExecuteState, TraceId: 01jr0wxdg581jwfhkgjykjns3k, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... 2025-04-04T17:17:26.668772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508314887753309:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:26.669357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18031, MsgBus: 11194 2025-04-04T17:17:27.294606Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508338212623312:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.294687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015d6/r3tmp/tmpTnxYxE/pdisk_1.dat 2025-04-04T17:17:27.442299Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:27.453441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:27.453520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:27.454678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18031, node 2 2025-04-04T17:17:27.550825Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:27.550849Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:27.550856Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:27.550955Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11194 TClient is connected to server localhost:11194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:28.005849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:28.025246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part ... art proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:30.309629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508351097526963:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.309692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.351638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.417771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.440086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.464628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.492429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.527627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:30.573737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508351097527476:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.573818Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.574055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508351097527481:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:30.577654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:30.586307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508351097527483:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:30.647995Z node 2 :TX_PROXY ERROR: Actor# [2:7489508351097527537:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:31.484741Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489508355392495098:2493], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030
2025-04-04T17:17:31.484965Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmE3MDc3YjktN2Q3OGQzMTgtMmU0OGJmMTQtM2JlNzFlNTg=, ActorId: [2:7489508355392495090:2488], ActorState: ExecuteState, TraceId: 01jr0wxje71vq5z5ybycrp4wp6, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id:
Trying to start YDB, gRPC: 4772, MsgBus: 28518
2025-04-04T17:17:32.408245Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508361920345050:2067];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:32.408306Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015d6/r3tmp/tmpETVSor/pdisk_1.dat
2025-04-04T17:17:32.535218Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:17:32.547063Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:17:32.547155Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:17:32.548948Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 4772, node 3
2025-04-04T17:17:32.613795Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:17:32.613817Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:17:32.613826Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:17:32.613944Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28518
TClient is connected to server localhost:28518
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:17:33.029889Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:33.045494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:33.121453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:33.258120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:33.366941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:35.474120Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508374805248709:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:35.474194Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:35.512532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:17:35.549965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:17:35.617292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:17:35.649489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:17:35.681858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:17:35.724853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:17:35.765067Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508374805249224:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:35.765148Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:35.765343Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508374805249229:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:35.768647Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:17:35.777183Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508374805249231:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:17:35.878758Z node 3 :TX_PROXY ERROR: Actor# [3:7489508374805249285:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpLimits::DatashardProgramSize-useSink [GOOD]
>> KqpLimits::DatashardReplySize
>> KqpStats::SysViewCancelled [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD]
Test command err:
Trying to start YDB, gRPC: 17943, MsgBus: 26885
2025-04-04T17:17:12.008284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508269762199240:2071];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:12.008458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f1/r3tmp/tmp1AXVPU/pdisk_1.dat
2025-04-04T17:17:12.388856Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17943, node 1
2025-04-04T17:17:12.447749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:17:12.448140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:17:12.450522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:17:12.482790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:17:12.482816Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:17:12.482822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:17:12.482931Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26885
TClient is connected to server localhost:26885
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:17:13.027647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:13.046966Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:17:13.057989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:13.173088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:13.322193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:13.385953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:15.020628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508286942070190:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:15.020916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:15.278974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:17:15.303340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:17:15.327598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:17:15.352238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:17:15.376019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:17:15.406132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:17:15.451619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508286942070698:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:15.451690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:15.451900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508286942070703:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:15.455342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:17:15.463060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508286942070705:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:15.556088Z node 1 :TX_PROXY ERROR: Actor# [1:7489508286942070760:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 1519, MsgBus: 4527 2025-04-04T17:17:17.241648Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508296029958650:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:17.241692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f1/r3tmp/tmpbJLQ42/pdisk_1.dat 2025-04-04T17:17:17.352397Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1519, node 2 2025-04-04T17:17:17.397467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:17.397598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:17.406454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:17.419527Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:17.419548Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:17.419554Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:17.419658Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4527 TClient is connected to server localhost:4527 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:17:17.890812Z node 2 :FLAT_TX_SCHEMESHARD W ... xecute;tx_current=0;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"0","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"0","E-Cost":"0"},{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/OlapTable","E-Rows":"0","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"0"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/OlapTable","E-Rows":"0","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 31581, MsgBus: 62419 2025-04-04T17:17:31.024551Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508357193328953:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:31.024595Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f1/r3tmp/tmp5IXrL4/pdisk_1.dat 2025-04-04T17:17:31.131037Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:31.156607Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:31.156699Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:31.161752Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31581, node 4 2025-04-04T17:17:31.205477Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:31.205501Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:31.205510Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:31.205638Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62419 TClient is connected to server localhost:62419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:31.716256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:31.722164Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:31.732379Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:31.804138Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:32.037453Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:32.107302Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:34.583802Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508370078232622:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:34.583926Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:34.608899Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:17:34.643153Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:17:34.676871Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:17:34.710670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:17:34.752629Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:17:34.788080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:17:34.830035Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508370078233129:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:34.830126Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:34.830210Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508370078233134:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:34.833235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:17:34.842089Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508370078233136:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:17:34.914338Z node 4 :TX_PROXY ERROR: Actor# [4:7489508370078233189:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:17:35.907484Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:36.062236Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508357193328953:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:36.067752Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:17:36.127078Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:37.187465Z node 4 :KQP_EXECUTER WARN: ActorId: [4:7489508382963137453:2614] TxId: 281474976715674. Ctx: { TraceId: 01jr0wxqnjd127hsdjgdkahrz8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjQ2ZjYyMTktODVhYjBiNGQtYzE1MDhmNzktZjdlZWFiNzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20
2025-04-04T17:17:37.187695Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjQ2ZjYyMTktODVhYjBiNGQtYzE1MDhmNzktZjdlZWFiNzU=, ActorId: [4:7489508378668169723:2614], ActorState: ExecuteState, TraceId: 01jr0wxqnjd127hsdjgdkahrz8, Create QueryResponse for error on request, msg:
: Error: Affected too many shards: 0, code: 2029
>> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD]
>> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD]
>> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD]
>> DataShardOutOfOrder::UncommittedReads
>> DataShardReadIteratorBatchMode::RangeFromNonInclusive
>> DataShardTxOrder::ZigZag [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD]
Test command err:
2025-04-04T17:16:06.279350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.279683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.279744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a7d/r3tmp/tmptkDA4t/pdisk_1.dat 2025-04-04T17:16:06.635276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.676401Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.719536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.719667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.730976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.822417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.873651Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.874782Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.875284Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.875593Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.886536Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.922642Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.922749Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.924411Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.924485Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.924537Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.924933Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.925063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.925154Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:06.935780Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:06.962388Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:06.962612Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:06.962731Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:06.962769Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:06.962805Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:06.962864Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:06.963091Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.963143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.963471Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:06.963560Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:06.963694Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:06.963740Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:06.963798Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:06.963832Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:06.963867Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:06.963901Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:06.963942Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:06.964040Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.964089Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.964151Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:06.964543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:06.964596Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:06.964700Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:06.964959Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:06.965032Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:06.965131Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:06.965185Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:06.965285Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:06.965326Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:06.965367Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.965641Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:06.965684Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:06.965719Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:06.965748Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.965791Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:06.965851Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:06.965894Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:06.965927Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:06.965950Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:06.967413Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:06.967465Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:06.978085Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:06.978159Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.978198Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.978303Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:06.978359Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.117240Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.117298Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.117386Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.117732Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.117764Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.117900Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.117955Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.118018Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.118057Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.122326Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.122397Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.123264Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.123305Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.123353Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... lan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:37.923478Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-04T17:17:37.923543Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:17:37.923613Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2025-04-04T17:17:37.923922Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1061:2854]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND 2025-04-04T17:17:37.924064Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=OTY1MzcxNjMtY2YxZjU3ZWEtZjIyZDZhMDEtZTY5OGU3NTM=, workerId: [15:1132:2908], local sessions count: 1 2025-04-04T17:17:37.924175Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:664:2569], Recipient [15:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.924206Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.924246Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:37.924278Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:17:37.924308Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2025-04-04T17:17:37.924336Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:37.924436Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-04T17:17:37.924805Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-04T17:17:37.924850Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:592:2517], 3} after 
executionsCount# 2 2025-04-04T17:17:37.924888Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:37.925018Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 3} finished in read 2025-04-04T17:17:37.925075Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:37.925104Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:37.925131Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:37.925158Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:37.925198Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:37.925222Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:37.925248Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-04T17:17:37.925276Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:37.925314Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:17:37.925377Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:37.925438Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:37.925947Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=MjgwMzViODAtZmExZmZlYTUtNjZiMDQ2YTYtNDFhMWQyMg==, workerId: [15:1160:2928], local sessions count: 0 2025-04-04T17:17:37.927386Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:592:2517], Recipient [15:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-04T17:17:37.927527Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:37.927633Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:37.927760Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-04T17:17:37.927824Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:37.927881Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:37.927935Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:37.927986Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2025-04-04T17:17:37.928048Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-04T17:17:37.928079Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2025-04-04T17:17:37.928106Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:37.928130Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:37.928258Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-04T17:17:37.928612Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-04T17:17:37.928684Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:592:2517], 4} after executionsCount# 1 2025-04-04T17:17:37.928761Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:37.928966Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 4} finished in read 2025-04-04T17:17:37.929056Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-04T17:17:37.929087Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:37.929116Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:37.929147Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:37.929193Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-04-04T17:17:37.929219Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:37.929258Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2025-04-04T17:17:37.929318Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:37.930195Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:592:2517], Recipient [15:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-04-04T17:17:37.930329Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:37.930446Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:37.930566Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-04T17:17:37.930641Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:37.930694Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:37.930748Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:37.930796Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
2025-04-04T17:17:37.930848Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-04T17:17:37.930875Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:37.930897Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:37.930920Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:37.931049Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-04-04T17:17:37.931387Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-04-04T17:17:37.931455Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:592:2517], 5} after executionsCount# 1 2025-04-04T17:17:37.931527Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:37.931708Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 5} finished in read 2025-04-04T17:17:37.931785Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-04T17:17:37.931813Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:37.931838Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:37.931863Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:37.931903Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-04-04T17:17:37.931924Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:37.931959Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2025-04-04T17:17:37.932012Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] >> DataShardScan::ScanFollowedByUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] Test command err: 2025-04-04T17:16:06.393434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.393811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.393869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a8b/r3tmp/tmpF09w73/pdisk_1.dat 2025-04-04T17:16:06.739592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.772011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.809444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.809580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.821918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.903955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.943188Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.944166Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.944620Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.944875Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.955058Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.989749Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.989863Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.991523Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.991596Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.991667Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.992024Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.992156Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.992230Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:07.002981Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:07.030774Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:07.030994Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:07.031106Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:07.031141Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:07.031176Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:07.031246Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.031453Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.031499Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.031784Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:07.031885Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:07.032024Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:07.032065Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:07.032112Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:07.032146Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:07.032182Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:07.032208Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:07.032268Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:07.032362Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.032406Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.032461Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:07.032845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:07.032894Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:07.033001Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:07.033220Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:07.033262Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:07.033370Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:07.033425Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:07.033475Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:07.033513Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
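The "quota rows left" / "quota bytes left" counters printed by this test's read iterator (earlier and later in this output) follow a simple pattern: an effectively unlimited quota starts at the maximum 64-bit value, 18446744073709551615, and is decremented by the rows and bytes of every chunk sent, so one 16-byte row leaves 18446744073709551614 rows and 18446744073709551599 bytes, and each 99-row ReadContinue chunk of 6336 bytes (99 x 64) subtracts 99 and 6336. A minimal sketch of that bookkeeping, using invented names (TReadQuota, Charge) rather than YDB's actual types:

#include <cassert>
#include <cstdint>

// Illustrative model of the quota counters in the read-iterator
// traces; the type and method names are assumptions, not YDB source.
struct TReadQuota {
    uint64_t RowsLeft  = UINT64_MAX;  // 18446744073709551615
    uint64_t BytesLeft = UINT64_MAX;

    void Charge(uint64_t rows, uint64_t bytes) {
        RowsLeft  -= rows;   // "quota rows left#" in the trace
        BytesLeft -= bytes;  // "quota bytes left#" in the trace
    }
};

int main() {
    TReadQuota q;
    q.Charge(1, 16);  // "sends rowCount# 1, bytes# 16" above
    assert(q.RowsLeft  == 18446744073709551614ull);
    assert(q.BytesLeft == 18446744073709551599ull);

    TReadQuota chunked;           // the ReadContinue chunks below:
    chunked.Charge(99, 99 * 64);  // 99 rows of 64 bytes = 6336 bytes
    assert(chunked.BytesLeft == UINT64_MAX - 6336);
    return 0;
}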
2025-04-04T17:16:07.033546Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:07.033804Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:07.033844Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:07.033875Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:07.033905Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:07.033954Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:07.034006Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:07.034043Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:07.034076Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.034100Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:07.035649Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:07.035701Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:07.046417Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:07.046497Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:07.046534Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:07.046615Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:07.046698Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.185723Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.185782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.185872Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.186305Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.186346Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.186462Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.186515Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.186572Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.186626Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.190944Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.191037Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.191858Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.191912Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.191959Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... :2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.287726Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.288143Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709543002, quota bytes left# 18446744073709000383, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.288309Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.288357Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.288400Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.288787Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542903, quota bytes left# 18446744073708994047, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.288941Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.288991Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.289033Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.289418Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542804, quota bytes left# 18446744073708987711, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.289594Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.289642Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.289683Z 
node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.290044Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542705, quota bytes left# 18446744073708981375, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.290204Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.291380Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.291449Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.291872Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542606, quota bytes left# 18446744073708975039, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.292038Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.292087Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.292134Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.292494Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542507, quota bytes left# 18446744073708968703, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.292654Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.292702Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.292747Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.293073Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542408, quota bytes left# 18446744073708962367, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.293238Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.293281Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.293318Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.293660Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542309, quota bytes left# 18446744073708956031, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.293808Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender 
[15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.293848Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.293887Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.294256Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542210, quota bytes left# 18446744073708949695, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.294407Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.294451Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.294490Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.294867Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542111, quota bytes left# 18446744073708943359, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.294998Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.295040Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.295080Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.295446Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542012, quota bytes left# 18446744073708937023, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.295595Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.295636Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.295675Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.296030Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541913, quota bytes left# 18446744073708930687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.296198Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.296246Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.296290Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.296651Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 
6336, quota rows left# 18446744073709541814, quota bytes left# 18446744073708924351, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.296799Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.296847Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.296887Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.297274Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541715, quota bytes left# 18446744073708918015, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.297429Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.297473Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.297511Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.297848Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541616, quota bytes left# 18446744073708911679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.297965Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2788], Recipient [15:977:2788]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.298005Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 0 2025-04-04T17:17:38.298042Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 0 2025-04-04T17:17:38.298165Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:592:2517], 1} sends rowCount# 1, bytes# 64, quota rows left# 18446744073709541615, quota bytes left# 18446744073708911615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:38.298236Z node 15 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[15:592:2517], 1} finished in ReadContinue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2025-04-04T17:17:32.715304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:32.715367Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:32.718836Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:32.724689Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:32.725074Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:32.725352Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:32.734901Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient 
[1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:32.778199Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:32.778425Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:32.779857Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:32.779921Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:32.779995Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:32.780340Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:32.780451Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:32.780513Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:196:2153] in generation 2 2025-04-04T17:17:32.842943Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:32.880155Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:32.880322Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:32.880417Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:32.880450Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:32.880480Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:32.880515Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.880766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.880804Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.881029Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:32.881106Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:32.881202Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:32.881247Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:32.881331Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:32.881365Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:32.881393Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:32.881425Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:32.881460Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.881549Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.881586Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.881630Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:32.884045Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: 
NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:32.884096Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:32.884167Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:32.884320Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:32.884364Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:32.884405Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:32.884442Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:32.884474Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:32.884516Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:32.884551Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:32.884817Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:32.884850Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:32.884882Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:32.884908Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:32.884952Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:32.884975Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:32.885004Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:32.885039Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:32.885071Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:32.898969Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:32.899053Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:32.899096Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:32.899134Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:32.899187Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:32.899741Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.899787Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.899827Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# 
[1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:32.899954Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:32.899984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:32.900105Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:32.900142Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:32.900174Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:32.900232Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:32.903744Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:32.903811Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.904006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.904038Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.904083Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:32.904118Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:32.904144Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:32.904182Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:32.904212Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:32.904253Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:32.904282Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:32.904330Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:32.904365Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:32.904533Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:32.904587Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:32.904615Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:32.904635Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:32.904659Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:32.904715Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:32.904736Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:32.904767Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 
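Throughout these traces an operation is identified as [step:txId]: immediate operations print step 0 (for example [0:10] earlier), while planned ones carry the mediator plan step delivered by TEvPlanStep (here [1000001:1], later [1000016:45]), and shards drain the PlanQueue in plan-step order. A minimal model of that identifier, with the ordering inferred from the trace rather than taken from YDB source:

#include <cstdint>
#include <tuple>

// Assumed shape of the "[step:txId]" ids in the TRACE lines; not the
// real NKikimr type.
struct TOperationId {
    uint64_t Step;  // 0 for immediate operations, plan step otherwise
    uint64_t TxId;

    bool operator<(const TOperationId& rhs) const {
        return std::tie(Step, TxId) < std::tie(rhs.Step, rhs.TxId);
    }
};

int main() {
    TOperationId schemeTx{1000001, 1};  // [1000001:1] in this trace
    TOperationId dataTx{1000016, 45};   // [1000016:45] further down
    return schemeTx < dataTx ? 0 : 1;   // earlier plan step runs first
}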
2025-04-04T17:17:32.904796Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:32.904849Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:32.904878Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:32.904920Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:32.904964Z node 1 :TX_DATA ... 48347Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-04T17:17:39.248382Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-04T17:17:39.248416Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-04T17:17:39.248450Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:39.248466Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-04T17:17:39.248493Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-04T17:17:39.248525Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:39.248554Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:17:39.248586Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-04T17:17:39.248620Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-04T17:17:39.248814Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1115:3044], Recipient [2:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:39.248848Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:39.248891Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [2:1112:3041], serverId# [2:1115:3044], sessionId# [0:0:0] 2025-04-04T17:17:39.249031Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [2:98:2133], Recipient [2:232:2225]: {TEvPlanStep step# 1000016 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:39.249058Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:39.249131Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit WaitForPlan 2025-04-04T17:17:39.249169Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.249205Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:39.249229Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:39.249352Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 45 at step 1000016 at tablet 9437184 { Transactions { TxId: 45 AckTo { RawX1: 98 RawX2: 8589936725 } } Step: 1000016 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:39.249382Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:39.249564Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:232:2225], Recipient [2:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:17:39.249594Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.249668Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:39.249698Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:39.249721Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:39.249750Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-04-04T17:17:39.249774Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-04-04T17:17:39.249799Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.249820Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:39.249842Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:39.249869Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:39.250471Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-04-04T17:17:39.250528Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.250553Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:39.250590Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-04T17:17:39.250612Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-04-04T17:17:39.250643Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.250663Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-04T17:17:39.250682Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:39.250705Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:39.250750Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-04-04T17:17:39.250774Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-04-04T17:17:39.250795Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-04-04T17:17:39.250823Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.250860Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:39.250892Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-04-04T17:17:39.250927Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-04-04T17:17:39.250977Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.250998Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-04-04T17:17:39.251019Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 
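The TRACE lines around [1000016:45] spell out the per-operation execution pipeline: the shard tries the operation on a unit, the unit reports a status, Executed advances it to the next unit, and DelayComplete defers work to the later "Complete execution for ... on unit ..." phase. A rough sketch of that loop, reusing the unit and status names from this trace but with an otherwise assumed code shape:

#include <iostream>
#include <string>
#include <vector>

// Sketch of the dispatch loop narrated by the TRACE lines; unit names
// and statuses come from the log, the structure is an assumption.
enum class EStatus { Executed, DelayComplete, NotReady };

int main() {
    const std::vector<std::string> units = {
        "PlanQueue", "LoadTxDetails", "FinalizeDataTxPlan",
        "BuildAndWaitDependencies", "BuildDataTxOutRS",
        "StoreAndSendOutRS", "PrepareDataTxInRS", "LoadAndWaitInRS",
        "ExecuteDataTx", "CompleteOperation", "CompletedOperations",
    };
    for (const auto& unit : units) {
        std::cout << "Trying to execute [1000016:45] on unit " << unit << "\n";
        EStatus status = (unit == "CompleteOperation")
            ? EStatus::DelayComplete  // flushed in the Complete phase
            : EStatus::Executed;
        if (status == EStatus::NotReady) {
            break;  // e.g. WaitForPlan parks until TEvPlanStep arrives
        }
        std::cout << "Advance execution plan, executing on unit " << unit << "\n";
    }
    return 0;
}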
2025-04-04T17:17:39.251040Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-04-04T17:17:39.251062Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.251090Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-04-04T17:17:39.251113Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-04-04T17:17:39.251181Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-04-04T17:17:39.251211Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.251229Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-04-04T17:17:39.251246Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-04-04T17:17:39.251264Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-04-04T17:17:39.251289Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.251306Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:17:39.251331Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:17:39.251358Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-04T17:17:39.251676Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-04T17:17:39.251727Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:39.251790Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.251815Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:17:39.251850Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-04T17:17:39.251874Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-04T17:17:39.252034Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-04T17:17:39.252063Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-04T17:17:39.252097Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-04T17:17:39.252132Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-04T17:17:39.252164Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:39.252185Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-04T17:17:39.252209Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-04T17:17:39.252254Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) 
active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:39.252281Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:39.252307Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:39.252332Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:39.265578Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-04T17:17:39.265644Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-04T17:17:39.265705Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:17:39.265759Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-04T17:17:39.265822Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:39.265874Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-04T17:17:39.266196Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-04T17:17:39.266233Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-04T17:17:39.266307Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:39.266346Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-04T17:17:39.266386Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:39.266416Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewCancelled [GOOD] Test command err: Trying to start YDB, gRPC: 5536, MsgBus: 4783 2025-04-04T17:16:49.715386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508175950320190:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.715442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001628/r3tmp/tmpVAlapa/pdisk_1.dat 2025-04-04T17:16:50.110973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.167327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.168850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:50.170591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5536, node 1 2025-04-04T17:16:50.330688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.330711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.330716Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.330820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:4783 TClient is connected to server localhost:4783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.114062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.130461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:51.138901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.316844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.481800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.581628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.049442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193130191160:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.049553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.532375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.571490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.606708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.642896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.675946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.713238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.789764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193130191672:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.789882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.789908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508193130191677:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.794024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.803167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508193130191679:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.892725Z node 1 :TX_PROXY ERROR: Actor# [1:7489508193130191734:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.715474Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508175950320190:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.715556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"E-Size":"0","PlanNodeId":5,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/TwoShard","Columns":["Key","Value1","Value2"],"E-Rows":"0","Table":"TwoShard","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","DurationUs":{"Count":1,"Sum":6000,"Max":6000,"Min":6000},"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[9,5]}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":2050,"Max":2050,"Min":2050,"History":[9,2050]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[9,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":6000,"Table":[{"Path":"\/Root\/KeyValue","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":16,"Max":16,"Min":16}}],"BaseTimeMs":1743787015253,"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":868,"Max":868,"Min":868,"History":[9,868]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[9,32]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"FirstMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[9,32]},"WaitTimeUs":{"Count":1,"Sum":2057,"Max":2057,"Min":2057,"History":[9,2057]},"WaitPer
iods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node Type":"Aggregate","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[11,5]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"PauseMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"WaitTimeUs":{"Count":1,"Sum":2487,"Max":2487,"Min":2487,"History":[11,2487]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":11,"Min":8}}}],"DurationUs":{"Count":1,"Sum":3000,"Max":3000,"Min":3000},"M ... ws":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.629,"A-Cpu":0.629,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.426,"A-Cpu":1.055,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 13395, MsgBus: 4054 2025-04-04T17:17:01.770896Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508228483964719:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:01.770947Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001628/r3tmp/tmpKet6gU/pdisk_1.dat 2025-04-04T17:17:01.919707Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:01.937601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:01.937687Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:01.939544Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13395, node 3 2025-04-04T17:17:02.014705Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:02.014730Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:02.014737Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:02.014846Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4054 TClient is connected to server localhost:4054 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:17:02.494349Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:02.500339Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:02.511055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.593424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.820585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:02.909275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.303304Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508245663835655:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.303396Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.340841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.378791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.459227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.487248Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.514778Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.549696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:05.642151Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508245663836170:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.642260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.642439Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508245663836175:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:05.645666Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:05.658417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508245663836177:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:05.754234Z node 3 :TX_PROXY ERROR: Actor# [3:7489508245663836233:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:06.662508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:06.774362Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508228483964719:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:06.774439Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:11.078297Z node 3 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 70000 DataSize: 11643080 IndexSize: 56302 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T17:17:16.904471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:16.904504Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:21.767622Z node 3 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 210000 DataSize: 34929240 IndexSize: 168906 ByKeyFilterSize: 0 RowCountHistogram: 2 DataSizeHistogram: 3 2025-04-04T17:17:31.780081Z node 3 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 415000 DataSize: 69342120 IndexSize: 335236 ByKeyFilterSize: 0 RowCountHistogram: 4 DataSizeHistogram: 6 2025-04-04T17:17:37.258551Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787057241, txId: 281474976715672] shutting down 2025-04-04T17:17:37.891126Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489508383102791459:2744] TxId: 281474976715674. Ctx: { TraceId: 01jr0wxrky311hm47cwfcp7y3f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGEzMDk1YmYtNGY5YjExYzUtNTFlNzc1MGItMTYzOGJjYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 100ms during execution } ] 2025-04-04T17:17:37.891923Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489508383102791487:2770], TxId: 281474976715674, task: 7. Ctx: { SessionId : ydb://session/3?node_id=3&id=OGEzMDk1YmYtNGY5YjExYzUtNTFlNzc1MGItMTYzOGJjYjI=. TraceId : 01jr0wxrky311hm47cwfcp7y3f. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7489508383102791459:2744], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:37.892357Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489508383102791489:2772], TxId: 281474976715674, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=OGEzMDk1YmYtNGY5YjExYzUtNTFlNzc1MGItMTYzOGJjYjI=. TraceId : 01jr0wxrky311hm47cwfcp7y3f. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7489508383102791459:2744], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:37.892897Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGEzMDk1YmYtNGY5YjExYzUtNTFlNzc1MGItMTYzOGJjYjI=, ActorId: [3:7489508383102791408:2744], ActorState: ExecuteState, TraceId: 01jr0wxrky311hm47cwfcp7y3f, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 100ms during execution 2025-04-04T17:17:38.093166Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787058085, txId: 281474976715676] shutting down >> DataShardTxOrder::ImmediateBetweenOnline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2025-04-04T17:17:38.004002Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:38.067690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:38.067730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:38.072968Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:38.073471Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:38.073684Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:38.081063Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:38.110691Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:38.110758Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:38.112029Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:38.112097Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:38.112140Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:38.112490Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:38.112764Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:38.112834Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:38.173923Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:38.198899Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:38.199027Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:38.199097Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:38.199135Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:38.199165Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:38.199193Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:38.199312Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.199350Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.199582Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:38.199663Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:38.199741Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:38.199783Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:38.199819Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:38.199843Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:38.199864Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:38.199895Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:38.199922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:38.199981Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.200004Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.200039Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:38.202083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:38.202140Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:38.202212Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:38.202366Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:38.202411Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:38.202455Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:38.202496Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:38.202529Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:38.202559Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:38.202598Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:38.202891Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:38.202927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:38.202961Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:38.202994Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:38.203056Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:38.203093Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:38.203126Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:38.203153Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:38.203185Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:38.215276Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:38.215350Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:38.215385Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:38.215417Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:38.215475Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:38.215941Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.215989Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.216024Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:38.216160Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:38.216185Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:38.216280Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:38.216318Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:38.216352Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:38.216384Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:38.219754Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:38.219814Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:38.219989Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.220023Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.220063Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:38.220099Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:38.220129Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:38.220169Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:38.220203Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:38.220235Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:38.220266Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing 
on unit PlanQueue 2025-04-04T17:17:38.220310Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:38.220355Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:38.220543Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:38.220578Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:38.220613Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:38.220639Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:38.220660Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:38.220730Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:38.220756Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:38.220789Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:38.220824Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:38.220882Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:38.220915Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:38.220945Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:38.220984Z node 1 :TX_D ... 
essageQuota: 9 2025-04-04T17:17:39.955833Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437186, TxId: 36, MessageQuota: 10 2025-04-04T17:17:39.956082Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437186, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-04-04T17:17:39.956223Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437184, TxId: 36, MessageQuota: 10 2025-04-04T17:17:39.956598Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437184, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-04-04T17:17:39.956717Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437185, TxId: 36, PendingAcks: 0 2025-04-04T17:17:39.956764Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437185, TxId: 36, MessageQuota: 9 2025-04-04T17:17:39.957095Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437186, TxId: 36, PendingAcks: 0 2025-04-04T17:17:39.957145Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437186, TxId: 36, MessageQuota: 9 2025-04-04T17:17:39.957438Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437184, TxId: 36, PendingAcks: 0 2025-04-04T17:17:39.957480Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437184, TxId: 36, MessageQuota: 9 2025-04-04T17:17:39.957816Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437186 2025-04-04T17:17:39.957848Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437186 2025-04-04T17:17:39.957906Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437184 2025-04-04T17:17:39.957926Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437184 2025-04-04T17:17:39.958013Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437185 2025-04-04T17:17:39.958034Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437185 2025-04-04T17:17:39.958142Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:454:2396], Recipient [1:454:2396]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.958175Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.958216Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-04T17:17:39.958270Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:17:39.958304Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2025-04-04T17:17:39.958335Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2025-04-04T17:17:39.958368Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2025-04-04T17:17:39.958410Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-04-04T17:17:39.958439Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2025-04-04T17:17:39.958499Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2025-04-04T17:17:39.958535Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2025-04-04T17:17:39.958751Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is DelayComplete 2025-04-04T17:17:39.958783Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2025-04-04T17:17:39.958817Z node 1 :TX_DATASHARD 
TRACE: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2025-04-04T17:17:39.958848Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2025-04-04T17:17:39.958877Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-04-04T17:17:39.958901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2025-04-04T17:17:39.958935Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437186 has finished 2025-04-04T17:17:39.958971Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:39.958997Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-04T17:17:39.959064Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-04T17:17:39.959103Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-04T17:17:39.959262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:232:2225], Recipient [1:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.959293Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.959352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:39.959385Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:17:39.959414Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2025-04-04T17:17:39.959437Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2025-04-04T17:17:39.959463Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2025-04-04T17:17:39.959505Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-04-04T17:17:39.959530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2025-04-04T17:17:39.959552Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2025-04-04T17:17:39.959575Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2025-04-04T17:17:39.959712Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is DelayComplete 2025-04-04T17:17:39.959735Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2025-04-04T17:17:39.959775Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2025-04-04T17:17:39.959803Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2025-04-04T17:17:39.959831Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-04-04T17:17:39.959851Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2025-04-04T17:17:39.959873Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437184 has finished 2025-04-04T17:17:39.959896Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:39.959919Z node 1 :TX_DATASHARD TRACE: Check candidate 
unit PlanQueue at 9437184 2025-04-04T17:17:39.959941Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:39.959963Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:39.960119Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:343:2311], Recipient [1:343:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.960149Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.960186Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-04T17:17:39.960227Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:17:39.960255Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2025-04-04T17:17:39.960290Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2025-04-04T17:17:39.960320Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2025-04-04T17:17:39.960407Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-04-04T17:17:39.960438Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2025-04-04T17:17:39.960460Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2025-04-04T17:17:39.960481Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2025-04-04T17:17:39.960604Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is DelayComplete 2025-04-04T17:17:39.960626Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2025-04-04T17:17:39.960653Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2025-04-04T17:17:39.960675Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2025-04-04T17:17:39.960700Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-04-04T17:17:39.960720Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2025-04-04T17:17:39.960742Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437185 has finished 2025-04-04T17:17:39.960785Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:39.960824Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:17:39.960851Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-04T17:17:39.960871Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-04T17:17:39.986596Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:17:39.986663Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:17:39.986717Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2025-04-04T17:17:39.986787Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:98:2133], exec latency: 3 ms, propose latency: 5 ms 2025-04-04T17:17:39.986841Z 
node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-04T17:17:39.987625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:39.987667Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:39.987696Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2025-04-04T17:17:39.987736Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 3 ms, propose latency: 5 ms 2025-04-04T17:17:39.987770Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:39.987882Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:39.987914Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:39.987959Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2025-04-04T17:17:39.988008Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 3 ms, propose latency: 5 ms 2025-04-04T17:17:39.988035Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] Test command err: 2025-04-04T17:16:06.231628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.232057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.232103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a75/r3tmp/tmptctFu8/pdisk_1.dat 2025-04-04T17:16:06.635225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.684950Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.728435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.728590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.739873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.820925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.865560Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.866719Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.867270Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.867516Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.880190Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.918316Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.918439Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.920079Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.920166Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.920223Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.920613Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.920743Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.920832Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:06.931627Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:06.966937Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:06.967177Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:06.967379Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:06.967426Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:06.967463Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:06.967516Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:06.967788Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.967847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.968203Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:06.968307Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:06.968519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:06.968583Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:06.968649Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:06.968689Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:06.968727Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:06.968765Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:06.968819Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:06.968962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.969024Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.969086Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:06.969531Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:06.969587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:06.969718Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:06.969973Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:06.970037Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:06.970145Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:06.970213Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:06.970281Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:06.970387Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:06.970427Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.970724Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:06.970770Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:06.970811Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:06.970848Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.970895Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:06.970947Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:06.970994Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:06.971026Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:06.971056Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:06.972700Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:06.973018Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:06.985747Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:06.985843Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.985886Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.985969Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:06.986042Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.125335Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.125391Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.125540Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.125914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.126004Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.126130Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.126182Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.126256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.126296Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.141094Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.141172Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.142006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.142057Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.142116Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... lMDUtNWJlNmU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, datashard 72075186224037888 not finished yet: Executing 2025-04-04T17:17:38.899851Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2908] TxId: 281474976715667. Ctx: { TraceId: 01jr0wxsh2fyeegd83crfg8tn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MzMyNjlhZGEtZDIwMTkxYjktM2ZkNWFlMDUtNWJlNmU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-04T17:17:38.900315Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [15:1165:2933], Recipient [15:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.900346Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.900368Z node 15 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [15:1164:2932], serverId# [15:1165:2933], sessionId# [0:0:0] 2025-04-04T17:17:38.900906Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:592:2517], Recipient [15:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2025-04-04T17:17:38.901014Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:38.901077Z node 15 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-04T17:17:38.901128Z node 15 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2025-04-04T17:17:38.901203Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:38.901350Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:38.901398Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 
executing on unit CheckRead 2025-04-04T17:17:38.901442Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:38.901483Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:38.901540Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-04T17:17:38.901578Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:38.901599Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:38.901615Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:38.901634Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:38.901730Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-04T17:17:38.902045Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2025-04-04T17:17:38.902077Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2025-04-04T17:17:38.902136Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:38.925048Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [15:1061:2854], Recipient [15:664:2569]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:17:38.925148Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:38.925203Z node 15 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2025-04-04T17:17:38.925326Z node 15 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:17:38.925532Z node 15 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1154:2908], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:38.925639Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:17:38.925703Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2025-04-04T17:17:38.926039Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2908] TxId: 281474976715667. Ctx: { TraceId: 01jr0wxsh2fyeegd83crfg8tn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MzMyNjlhZGEtZDIwMTkxYjktM2ZkNWFlMDUtNWJlNmU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-04T17:17:38.926279Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2908] TxId: 281474976715667. 
Ctx: { TraceId: 01jr0wxsh2fyeegd83crfg8tn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MzMyNjlhZGEtZDIwMTkxYjktM2ZkNWFlMDUtNWJlNmU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:38.926366Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2908] TxId: 281474976715667. Ctx: { TraceId: 01jr0wxsh2fyeegd83crfg8tn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MzMyNjlhZGEtZDIwMTkxYjktM2ZkNWFlMDUtNWJlNmU5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:17:38.926521Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:664:2569], Recipient [15:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.926605Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.926825Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:38.927882Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [15:592:2517], selfId: [15:57:2104], source: [15:1132:2908] 2025-04-04T17:17:38.928192Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:38.928259Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:17:38.928324Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2025-04-04T17:17:38.928383Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:38.928522Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-04-04T17:17:38.928965Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-04-04T17:17:38.929033Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:592:2517], 1} after executionsCount# 2 2025-04-04T17:17:38.929097Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:592:2517], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2025-04-04T17:17:38.929301Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:38.929350Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:38.929390Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:38.929428Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:38.929468Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:38.929490Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:38.929524Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished 2025-04-04T17:17:38.929582Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:38.929640Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:17:38.929693Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:38.929743Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:38.930038Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:664:2569], Recipient [15:664:2569]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.930132Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 2 2025-04-04T17:17:38.930378Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 2 2025-04-04T17:17:38.930534Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:592:2517], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2025-04-04T17:17:38.931317Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:664:2569], Recipient [15:664:2569]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:17:38.931372Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:592:2517], 1}, firstUnprocessedQuery# 4 2025-04-04T17:17:38.931557Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:592:2517], 1}, FirstUnprocessedQuery# 4 2025-04-04T17:17:38.931662Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:592:2517], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2025-04-04T17:17:38.931749Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[15:592:2517], 1} finished in ReadContinue 2025-04-04T17:17:38.931935Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=MzMyNjlhZGEtZDIwMTkxYjktM2ZkNWFlMDUtNWJlNmU5Yw==, workerId: [15:1132:2908], local sessions count: 0 2025-04-04T17:17:38.932095Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1061:2854]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31104, MsgBus: 20009 2025-04-04T17:17:17.051737Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508295862321238:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:17.052190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015eb/r3tmp/tmp2hYW7B/pdisk_1.dat 2025-04-04T17:17:17.480709Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:17.483223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:17.483315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:17.487133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31104, node 1 2025-04-04T17:17:17.564725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:17.564746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:17.564752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:17.564863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20009 TClient is connected to server localhost:20009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:18.038751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.070790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:18.081807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.226054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.393105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.472333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:19.992218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508304452257456:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:19.992350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.234820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.316172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.348620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.379391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.458973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.496081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:20.575089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508308747225275:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.575155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.575322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508308747225280:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:20.578338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:20.588201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508308747225282:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:20.643427Z node 1 :TX_PROXY ERROR: Actor# [1:7489508308747225334:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:22.051146Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508295862321238:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.051227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21491, MsgBus: 2371 2025-04-04T17:17:22.984865Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508319497340817:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.985015Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015eb/r3tmp/tmpCFjo7N/pdisk_1.dat 2025-04-04T17:17:23.184766Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:23.272129Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:23.309221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:23.309295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:23.310229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:23.310294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:23.314127Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:17:23.314220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:23.316790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21491, node 2 2025-04-04T17:17:23.413665Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:23.413691Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:23.413698Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:23.413821Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2371 TClient is connected to server localhost:2371 WaitRootIsUp 'Root'... 
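[editor's note] The recurring sequence above — "Resource pool default not found" (NOT_FOUND) from the pool fetcher, a scheduled retry on "Transaction ... completed, doublechecking", and finally a TX_PROXY error saying "path exist, request accepts it" — is the shape of a create-if-missing race: several actors observe the default pool is absent, one creation wins, and the losers' create attempts land on the now-existing path and are accepted as success. A minimal generic sketch of that pattern, with an invented Catalog type standing in for the scheme board (these names are illustrative, not YDB's actual actors or API):

    #include <iostream>
    #include <mutex>
    #include <string>
    #include <thread>
    #include <unordered_set>
    #include <vector>

    // Hypothetical catalog standing in for the scheme board; not YDB code.
    class Catalog {
        std::mutex M;
        std::unordered_set<std::string> Paths;
    public:
        // Returns true if the path was created, false if it already existed.
        bool CreateIfMissing(const std::string& path) {
            std::lock_guard<std::mutex> lock(M);
            return Paths.insert(path).second;
        }
        bool Exists(const std::string& path) {
            std::lock_guard<std::mutex> lock(M);
            return Paths.count(path) > 0;
        }
    };

    int main() {
        Catalog catalog;
        const std::string pool = "/Root/.metadata/workload_manager/pools/default";
        std::vector<std::thread> actors;
        for (int i = 0; i < 4; ++i) {
            actors.emplace_back([&, i] {
                if (!catalog.Exists(pool)) {                 // "Failed to fetch pool ... NOT_FOUND"
                    if (catalog.CreateIfMissing(pool)) {
                        std::cout << "actor " << i << ": created pool\n";
                    } else {
                        // "path exist, request accepts it": losing the race is fine.
                        std::cout << "actor " << i << ": pool already exists, accepting\n";
                    }
                }
            });
        }
        for (auto& t : actors) t.join();
        return 0;
    }

This is why the TX_PROXY ERROR lines above do not fail the tests: the creation request is idempotent by design, and an already-existing default pool is the desired end state.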
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:23.867426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.926388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, subop ... , opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.621153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.708299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.827764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508336677212987:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.827844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.828330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508336677212992:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.831908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:26.888603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508336677212994:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2025-04-04T17:17:26.956110Z node 2 :TX_PROXY ERROR: Actor# [2:7489508336677213065:4299] txid# 281474976720669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:27.986530Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508319497340817:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.986606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16670, MsgBus: 24191 2025-04-04T17:17:31.426542Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508359357446419:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:31.426674Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:31.449839Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489508358489325259:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:31.453582Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015eb/r3tmp/tmpR4hqlR/pdisk_1.dat 2025-04-04T17:17:31.659105Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:31.714100Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:31.714215Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:31.716545Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:31.716629Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:31.719632Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-04T17:17:31.719764Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:31.720864Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16670, node 4 2025-04-04T17:17:31.802813Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:31.802837Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:31.802845Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:31.802994Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24191 TClient is connected to server localhost:24191 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:32.296237Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:32.332102Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:32.462184Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:32.644028Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:32.738192Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:34.966895Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508372242350531:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:34.966986Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:34.990320Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:35.040532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:35.087197Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:35.132220Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:35.182868Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:35.251906Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:35.317088Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508376537318552:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:35.317201Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:35.317435Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508376537318557:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:35.321520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:35.339742Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508376537318559:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:35.395635Z node 4 :TX_PROXY ERROR: Actor# [4:7489508376537318631:4306] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:36.430872Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508359357446419:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:36.431025Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:36.443986Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489508358489325259:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:36.444065Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2025-04-04T17:17:36.248816Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:36.328228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:36.328301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:36.335411Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:36.335885Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:36.336179Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:36.346069Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:36.388592Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:36.388692Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:36.390282Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:36.390414Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:36.390469Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:36.390837Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:36.391152Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:36.391227Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 
persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:36.452720Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:36.489093Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:36.489254Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:36.489356Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:36.489395Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:36.489427Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:36.489478Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:36.489621Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.489665Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.489965Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:36.490069Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:36.490206Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:36.490270Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:36.490309Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:36.490341Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:36.490368Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:36.490411Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:36.490456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:36.490639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.490677Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.490725Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:36.493245Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:36.493297Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:36.493365Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:36.493515Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:36.493556Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 
ssId 4200 seqNo 0:0 2025-04-04T17:17:36.493616Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:36.493652Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:36.493686Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:36.493716Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:36.493746Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:36.494030Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:36.494072Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:36.494102Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:36.494136Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:36.494181Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:36.494213Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:36.494271Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:36.494319Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:36.494343Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:36.511232Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:36.511312Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:36.511364Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:36.511403Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:36.511457Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:36.511947Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.511984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.512014Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:36.512136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:36.512178Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:36.512308Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:36.512350Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:17:36.512380Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:36.512423Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 
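[editor's note] The trace above walks a scheme transaction through the datashard's execution-unit pipeline (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue and LoadTxDetails once the plan step arrives), with each unit returning a status such as Executed, ExecutedNoMoreRestarts, DelayComplete, or DelayCompleteNoMoreRestarts that tells the pipeline whether to advance, park the operation, or defer completion work to the transaction's Complete phase. A simplified sketch of that dispatch loop, using invented names and a reduced status set rather than the actual NKikimr::NDataShard types:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Reduced status set modeled on the statuses visible in the trace above.
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;
    };

    int main() {
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }}, // completion deferred
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},      // parked until TEvPlanStep
        };
        std::vector<std::string> delayedComplete;
        for (const auto& unit : plan) {
            EStatus st = unit.Execute();
            if (st == EStatus::NotReady) {
                // "Operation ... is not ready to execute on unit WaitForPlan"
                std::cout << "operation parked on unit " << unit.Name << "\n";
                break; // resumes when the plan step arrives
            }
            if (st == EStatus::DelayComplete)
                delayedComplete.push_back(unit.Name);
            std::cout << "advance execution plan past unit " << unit.Name << "\n";
        }
        // Runs in the Complete phase: "Complete execution for ... on unit X".
        for (const auto& name : delayedComplete)
            std::cout << "complete execution on unit " << name << "\n";
        return 0;
    }

Read against the trace: the "DelayComplete" statuses for StoreSchemeTx and FinishPropose are exactly why TTxProposeTransactionBase::Complete later logs "Complete execution for [0:1] ... on unit StoreSchemeTx" and "... on unit FinishPropose" before the PREPARED status is sent to the client.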
2025-04-04T17:17:36.515946Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:36.515994Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:36.516141Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.516175Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.516209Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:36.516237Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:36.516261Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:36.516285Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-04T17:17:36.516309Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:36.516335Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:17:36.516361Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:36.516386Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:36.516412Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:36.516556Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-04T17:17:36.516584Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:17:36.516599Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:36.516612Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:36.516628Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:36.516667Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:36.516688Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:36.516714Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:36.516742Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:36.516790Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-04T17:17:36.516835Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:36.516862Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-04-04T17:17:36.516895Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:17:36.516914Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1 ... 
7186 2025-04-04T17:17:40.907249Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2025-04-04T17:17:40.907279Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:40.907375Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:40.907688Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-04T17:17:40.907739Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.907771Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-04T17:17:40.907874Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-04T17:17:40.907895Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.907914Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-04T17:17:40.907984Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-04T17:17:40.908005Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908023Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-04T17:17:40.908076Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-04T17:17:40.908095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908119Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-04T17:17:40.908222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-04T17:17:40.908249Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908269Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-04T17:17:40.908327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-04T17:17:40.908367Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908392Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-04T17:17:40.908474Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:40.908496Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908528Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-04T17:17:40.908604Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-04T17:17:40.908627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908651Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-04T17:17:40.908739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-04T17:17:40.908762Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908782Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-04T17:17:40.908832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-04T17:17:40.908853Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908871Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-04T17:17:40.908925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-04T17:17:40.908946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.908963Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-04T17:17:40.909012Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-04T17:17:40.909032Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.909050Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-04T17:17:40.909144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-04T17:17:40.909168Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.909187Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-04T17:17:40.909250Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:40.909296Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2025-04-04T17:17:40.909333Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:40.909371Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-04T17:17:40.909400Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:40.909515Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:40.909542Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2025-04-04T17:17:40.909578Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:40.909614Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-04T17:17:40.909646Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:40.909772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:40.909797Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2025-04-04T17:17:40.909827Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:40.909863Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:40.909886Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:40.909966Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:40.909996Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2025-04-04T17:17:40.910030Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:40.910066Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:40.910091Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:40.910265Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-04T17:17:40.910294Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.910317Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-04T17:17:40.910436Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 6 txid# 151 
TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-04T17:17:40.910472Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.910496Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-04T17:17:40.910552Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:40.910587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.910606Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-04T17:17:40.910673Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:40.910695Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:40.910723Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> KqpExplain::ReadTableRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2025-04-04T17:17:36.768259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:36.768743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:36.768978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:36.770601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:36.770805Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:36.771053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f94/r3tmp/tmpnkIHNI/pdisk_1.dat 2025-04-04T17:17:37.092511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:37.257627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:37.360281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:37.360472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:37.369676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:37.369759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:37.383719Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:17:37.384192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:37.384522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:37.652247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:37.724880Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1257:2377], Recipient [2:1282:2389]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:37.728756Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1257:2377], Recipient [2:1282:2389]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:37.729308Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2389] 2025-04-04T17:17:37.729632Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:37.771416Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1257:2377], Recipient [2:1282:2389]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:37.775751Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:37.776056Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:37.777390Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:37.777450Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:37.777490Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:37.777763Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:37.777940Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:37.778016Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1306:2389] in generation 1 2025-04-04T17:17:37.780809Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:37.803699Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:37.803876Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:37.804024Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2406] 2025-04-04T17:17:37.804086Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:37.804124Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:37.804164Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:37.804426Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2389], Recipient [2:1282:2389]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.804484Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.804761Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:37.804849Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:37.804948Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:37.805002Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:37.805053Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:37.805088Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:37.805141Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:37.805178Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:37.805239Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:37.872977Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1314:2407], Recipient [2:1282:2389]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:37.873036Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:37.873084Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1265:2775], serverId# [2:1314:2407], sessionId# [0:0:0] 2025-04-04T17:17:37.873488Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:836:2464], Recipient [2:1314:2407] 2025-04-04T17:17:37.873539Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:37.873658Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:37.873863Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:37.874003Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:37.874111Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:37.874159Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:37.874209Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:37.874265Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:17:37.874304Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:37.874588Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:37.874628Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:37.874671Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:37.874704Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:37.874753Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:37.874786Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:37.874826Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:37.874868Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:37.874892Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:37.879259Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:37.879313Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:37.879346Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:37.879467Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:37.879535Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:37.880088Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1315:2408], Recipient [2:1282:2389]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:37.880140Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:38.229842Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1347:2417], Recipient [2:1282:2389]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.229892Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.229925Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1345:2798], serverId# [2:1347:2417], sessionId# [0:0:0] 2025-04-04T17:17:38.232776Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1061:2623], Recipient [2:1347:2417] 2025-04-04T17:17:38.232831Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:38.233008Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:38.233050Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... :2453], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:40.448444Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1709:2453], 0} finished in read 2025-04-04T17:17:40.448495Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-04T17:17:40.448519Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:40.448541Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:40.448562Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:40.448594Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-04-04T17:17:40.448613Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:40.448637Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-04-04T17:17:40.448669Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:40.448734Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:40.449480Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1709:2453], Recipient [2:1282:2389]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:40.449531Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-04T17:17:40.533781Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wxv758g08nfyfv5bzth1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE2YmMyNzUtY2VmMTU4Y2YtMjY0MTJiNjctMmExZDE1ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:17:40.536027Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1727:2454], Recipient [2:1282:2389]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-04T17:17:40.536130Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:40.536192Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:40.536263Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:40.536300Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:40.536335Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:40.536368Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:40.536412Z node 2 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-04T17:17:40.536448Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:40.536470Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:40.536491Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:40.536514Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:40.536609Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-04T17:17:40.536907Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:17:40.537004Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-04T17:17:40.537041Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1727:2454], 0} after executionsCount# 1 2025-04-04T17:17:40.537081Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1727:2454], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:40.537150Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1727:2454], 0} finished in read 2025-04-04T17:17:40.537207Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:40.537231Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:40.537254Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 
2025-04-04T17:17:40.537279Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:40.537318Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:40.537338Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:40.537361Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-04T17:17:40.537393Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:40.537473Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:40.538204Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1727:2454], Recipient [2:1282:2389]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:40.538269Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-04T17:17:40.662478Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0wxva9fj6d04g28jcf57wt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NhOTkxMzctNDQ1MjRjYmMtNzU1NzAwYjAtNjFmZGI2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:40.664757Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1751:2455], Recipient [2:1282:2389]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-04T17:17:40.664852Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:40.664912Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:40.665007Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:40.665043Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:40.665076Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:40.665106Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:40.665152Z node 2 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-04-04T17:17:40.665190Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:40.665209Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:40.665224Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:40.665239Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:40.665312Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } 
LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-04T17:17:40.665534Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:17:40.665583Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-04-04T17:17:40.665625Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1751:2455], 0} after executionsCount# 1 2025-04-04T17:17:40.665659Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1751:2455], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:40.665733Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1751:2455], 0} finished in read 2025-04-04T17:17:40.665793Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:40.665812Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:40.665829Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:40.665855Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:40.665890Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-04-04T17:17:40.665910Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:40.665935Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-04-04T17:17:40.665985Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:40.666058Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:40.666928Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1751:2455], Recipient [2:1282:2389]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:40.666981Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-04T17:17:40.667632Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:238:2130], Recipient [2:1282:2389]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 12662, MsgBus: 12759 2025-04-04T17:17:17.859804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508298631491634:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:17.874492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/0015e6/r3tmp/tmp98BJiT/pdisk_1.dat 2025-04-04T17:17:18.307044Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12662, node 1 2025-04-04T17:17:18.350990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:18.366460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:18.392620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:18.433178Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:18.433221Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:18.433252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:18.433365Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12759 TClient is connected to server localhost:12759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:18.904356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:18.918818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:18.928849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:19.067510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:19.221767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:19.281771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:21.029368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508315811362583:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:21.029501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:21.270978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:21.301859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:21.334293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:21.371234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:21.399350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:21.464888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:21.520068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508315811363096:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:21.520175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:21.521626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508315811363101:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:21.526037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:21.538479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508315811363103:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:21.601665Z node 1 :TX_PROXY ERROR: Actor# [1:7489508315811363156:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:22.862470Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508298631491634:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.862539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"0","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"0","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"0","E-Cost":"0"},{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"0","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"0"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"0","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"0"}],"Node 
Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 18392, MsgBus: 15565 2025-04-04T17:17:23.832010Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;a ... :"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_0","PlanNodeType":"ResultSet"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Sequential","E-Size":"0","ReadRanges":"%kqp%tx_result_binding_0_2","ReadLimit":"1001","Name":"TableRangeScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"1","E-Rows":"0","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"0","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":19,"Plans":[{"PlanNodeId":20,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":23,"Operators":[{"Scan":"Parallel","ReadRange":["Key (20, 120]"],"E-Size":"0","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"1","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":24,"Plans":[{"PlanNodeId":28,"Plans":[{"PlanNodeId":29,"Plans":[{"PlanNodeId":30,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":36,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node 
Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":37,"Plans":[{"PlanNodeId":41,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Plans":[{"PlanNodeId":45,"Plans":[{"PlanNodeId":46,"Plans":[{"PlanNodeId":48,"Plans":[{"PlanNodeId":49,"Operators":[{"Scan":"Parallel","ReadRange":["Key [10, +∞)"],"E-Size":"0","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"1","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 24700, MsgBus: 18671 2025-04-04T17:17:35.978382Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508372454470616:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:35.978482Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e6/r3tmp/tmpvG0Kdd/pdisk_1.dat 2025-04-04T17:17:36.103558Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24700, node 4 2025-04-04T17:17:36.134772Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:36.134905Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:36.136440Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:36.164540Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:36.164564Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:36.164574Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:36.164698Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18671 TClient is connected to server localhost:18671 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:36.686703Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:36.692905Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:36.701500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:36.760746Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:36.940568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:17:37.014634Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:39.354148Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508389634341565:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.354212Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.377591Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.407342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.439235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.478105Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.508660Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.545032Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.587491Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508389634342072:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.587591Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.587819Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508389634342077:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.592207Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:39.601256Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508389634342079:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:39.676390Z node 4 :TX_PROXY ERROR: Actor# [4:7489508389634342133:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:40.978933Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508372454470616:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:40.979009Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-04-04T17:17:36.154928Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:36.235226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:36.235285Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:36.243324Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:36.243793Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:36.244103Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:36.255107Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:36.300451Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:36.300585Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:36.302221Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:36.302323Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:36.302382Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:36.302809Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:36.303203Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:36.303298Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:36.380559Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:36.413948Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:36.414123Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:36.414220Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:36.414272Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:36.414302Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:36.414329Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:36.414455Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.414505Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.414770Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:36.414867Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:36.414963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:36.415000Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:36.415033Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:36.415059Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:36.415084Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:36.415120Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:36.415160Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:36.415256Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.415284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.415332Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:36.417323Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:36.417369Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:36.417468Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:36.417598Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:36.417629Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:36.417662Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:36.417694Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:36.417717Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:36.417739Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:36.417759Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:36.417988Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:36.418018Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:36.418044Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 
9437184 to execution unit FinishPropose 2025-04-04T17:17:36.418067Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:36.418101Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:36.418119Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:36.418147Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:36.418171Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:36.418192Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:36.432645Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:36.432716Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:36.432747Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:36.432774Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:36.432819Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:36.433210Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.433255Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.433286Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:36.433380Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:36.433400Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:36.433486Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:36.433513Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:36.433546Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:36.433570Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:36.442505Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:36.442586Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:36.442793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.442822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.442859Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:36.442891Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 
2025-04-04T17:17:36.442920Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:36.442952Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:36.442980Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:36.443008Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:36.443034Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:36.443065Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:36.443098Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:36.443279Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:36.443309Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:36.443327Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:36.443346Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:36.443361Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:36.443403Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:36.443419Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:36.443444Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:36.443470Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:36.443514Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:36.443536Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:36.443560Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:36.443589Z node 1 :TX_DATA ... 
04T17:17:41.982849Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-04-04T17:17:41.982882Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:41.982945Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:41.982971Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:41.983093Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:41.983139Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2025-04-04T17:17:41.983188Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:41.983224Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:41.983309Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:41.983329Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-04-04T17:17:41.983355Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:41.983391Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:41.983418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:41.983593Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:341:2309]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-04T17:17:41.983633Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:41.983664Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-04T17:17:41.983828Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:41.983857Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-04-04T17:17:41.983907Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:41.983947Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-04T17:17:41.983971Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:41.983998Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-04T17:17:41.984097Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:41.984120Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-04T17:17:41.984158Z node 1 :TX_DATASHARD 
DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms
2025-04-04T17:17:41.984198Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46}
2025-04-04T17:17:41.984220Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186
2025-04-04T17:17:41.984241Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186
2025-04-04T17:17:41.984268Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186
2025-04-04T17:17:41.984358Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186
2025-04-04T17:17:41.984379Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation
2025-04-04T17:17:41.984416Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms
2025-04-04T17:17:41.984455Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47}
2025-04-04T17:17:41.984477Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186
2025-04-04T17:17:41.984511Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186
2025-04-04T17:17:41.984597Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186
2025-04-04T17:17:41.984624Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation
2025-04-04T17:17:41.984651Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms
2025-04-04T17:17:41.984685Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48}
2025-04-04T17:17:41.984706Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186
2025-04-04T17:17:41.984728Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186
2025-04-04T17:17:41.984811Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186
2025-04-04T17:17:41.984832Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation
2025-04-04T17:17:41.984876Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms
2025-04-04T17:17:41.984924Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49}
2025-04-04T17:17:41.984949Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186
2025-04-04T17:17:41.985030Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186
2025-04-04T17:17:41.985049Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation
2025-04-04T17:17:41.985075Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms
2025-04-04T17:17:41.985104Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50}
2025-04-04T17:17:41.985122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186
2025-04-04T17:17:41.985423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45}
2025-04-04T17:17:41.985456Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.985499Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137
2025-04-04T17:17:41.985563Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:341:2309]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98}
2025-04-04T17:17:41.985590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.985611Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151
2025-04-04T17:17:41.985689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46}
2025-04-04T17:17:41.985710Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.985731Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140
2025-04-04T17:17:41.985770Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:341:2309]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99}
2025-04-04T17:17:41.985790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.985843Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152
2025-04-04T17:17:41.985929Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47}
2025-04-04T17:17:41.985952Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.985974Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143
2025-04-04T17:17:41.986017Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:341:2309]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100}
2025-04-04T17:17:41.986037Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.986056Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154
2025-04-04T17:17:41.986144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48}
2025-04-04T17:17:41.986189Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.986212Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146
2025-04-04T17:17:41.986300Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49}
2025-04-04T17:17:41.986322Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.986340Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149
2025-04-04T17:17:41.986391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:453:2395], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50}
2025-04-04T17:17:41.986412Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:41.986432Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152
>> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD]
>> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD]
>> DataShardTxOrder::RandomPoints_DelayData
>> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD]
>> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink
>> DataShardOutOfOrder::UncommittedReadSetAck [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRanges [GOOD]
Test command err:
Trying to start YDB, gRPC: 25121, MsgBus: 63948
2025-04-04T17:17:19.564884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508304530749820:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:19.564920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e1/r3tmp/tmpFXjWxP/pdisk_1.dat
2025-04-04T17:17:19.930519Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25121, node 1
2025-04-04T17:17:19.968629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:17:19.968983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:17:19.980706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:17:20.005845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:17:20.005869Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:17:20.005878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:17:20.006022Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:63948
TClient is connected to server localhost:63948
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:17:20.511801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:20.532279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:20.668871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:20.826328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:20.902736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:22.520510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508317415653498:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:22.520630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:22.781600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:17:22.812216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:17:22.847632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:17:22.875910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:17:22.941457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:17:23.010706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:17:23.102610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508321710621316:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:23.102689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:23.102739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508321710621321:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:23.106261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:17:23.115567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508321710621323:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:17:23.172147Z node 1 :TX_PROXY ERROR: Actor# [1:7489508321710621376:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:17:24.570340Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508304530749820:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:24.570400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
{"Plan":{"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":20,"Plans":[{"PlanNodeId":19,"Plans":[{"PlanNodeId":18,"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.t1.Key","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":10}],"E-Rows":"0","Condition":"t1.Key = t2.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[{"ExternalPlanNodeId":13}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter"}],"Node Type":"HashShuffle","KeyColumns":["t1.Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":15}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Key","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":5}],"E-Rows":"0","Condition":"Foo.t1.Key = t1.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[{"ExternalPlanNodeId":17}],"E-Rows":"0","Predicate":"Exist(item.t1.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Sort-InnerJoin (MapJoin)-Filter"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name" ... 764123Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
{"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"0","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"0","E-Cost":"0"},{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoKeys","E-Rows":"0","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"0"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoKeys","E-Rows":"0","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}}
Trying to start YDB, gRPC: 22455, MsgBus: 19308
2025-04-04T17:17:36.705380Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508379857245217:2062];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:36.705440Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e1/r3tmp/tmpTgzZBs/pdisk_1.dat
2025-04-04T17:17:36.833541Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:17:36.844301Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:17:36.844398Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:17:36.846050Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22455, node 4
2025-04-04T17:17:36.901185Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:17:36.901208Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:17:36.901220Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:17:36.901347Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19308
TClient is connected to server localhost:19308
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:17:37.371584Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:37.379753Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:17:37.393015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:37.470307Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:37.657934Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:17:37.724902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-04-04T17:17:40.075738Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508397037116164:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:40.075846Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:40.095303Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:17:40.127034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:17:40.156884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:17:40.195411Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:17:40.227542Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:17:40.259640Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:17:40.341128Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508397037116678:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:40.341248Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:40.341520Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508397037116683:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:40.345769Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:17:40.354524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508397037116685:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:17:40.455862Z node 4 :TX_PROXY ERROR: Actor# [4:7489508397037116740:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:17:41.373028Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:17:41.680637Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-04T17:17:41.705816Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508379857245217:2062];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:41.705878Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:17:41.715419Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480
{"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"0","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"0","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"0","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"0","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD]
Test command err:
2025-04-04T17:17:31.452408Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot
2025-04-04T17:17:31.555762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:17:31.555821Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:17:31.563368Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored
2025-04-04T17:17:31.563774Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153]
2025-04-04T17:17:31.564064Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:17:31.576821Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-04T17:17:31.630402Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:17:31.630500Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-04T17:17:31.631886Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-04-04T17:17:31.631949Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-04-04T17:17:31.631997Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-04-04T17:17:31.632347Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-04T17:17:31.632669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-04T17:17:31.632742Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2
2025-04-04T17:17:31.711013Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-04T17:17:31.747950Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-04-04T17:17:31.748131Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-04T17:17:31.748232Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211]
2025-04-04T17:17:31.748275Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-04-04T17:17:31.748311Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-04-04T17:17:31.748341Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:31.748485Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-04T17:17:31.748562Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-04T17:17:31.748819Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184
2025-04-04T17:17:31.748892Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184
2025-04-04T17:17:31.749003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-04T17:17:31.749052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0
2025-04-04T17:17:31.749093Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-04T17:17:31.749142Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations
2025-04-04T17:17:31.749177Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184
2025-04-04T17:17:31.749218Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0
2025-04-04T17:17:31.749254Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:31.749339Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T17:17:31.749370Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T17:17:31.749422Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0]
2025-04-04T17:17:31.751955Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { }
2025-04-04T17:17:31.752003Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-04T17:17:31.752077Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184
2025-04-04T17:17:31.752256Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx
2025-04-04T17:17:31.752298Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0
2025-04-04T17:17:31.752339Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184
2025-04-04T17:17:31.752405Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-04T17:17:31.752435Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx
2025-04-04T17:17:31.752471Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx
2025-04-04T17:17:31.752501Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx
2025-04-04T17:17:31.752783Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts
2025-04-04T17:17:31.752814Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx
2025-04-04T17:17:31.752842Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose
2025-04-04T17:17:31.752868Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose
2025-04-04T17:17:31.752920Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete
2025-04-04T17:17:31.752959Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose
2025-04-04T17:17:31.752989Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan
2025-04-04T17:17:31.753038Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan
2025-04-04T17:17:31.753065Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan
2025-04-04T17:17:31.765086Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184
2025-04-04T17:17:31.765172Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx
2025-04-04T17:17:31.765211Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose
2025-04-04T17:17:31.765245Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED
2025-04-04T17:17:31.765295Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme
2025-04-04T17:17:31.766922Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T17:17:31.766981Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T17:17:31.767021Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0]
2025-04-04T17:17:31.767175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184}
2025-04-04T17:17:31.767215Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-04-04T17:17:31.767347Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan
2025-04-04T17:17:31.767387Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-04T17:17:31.767423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan
2025-04-04T17:17:31.767457Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue
2025-04-04T17:17:31.771150Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 }
2025-04-04T17:17:31.771209Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:31.771437Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-04T17:17:31.771474Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-04T17:17:31.771520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184
2025-04-04T17:17:31.771561Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1
2025-04-04T17:17:31.771589Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184
2025-04-04T17:17:31.771626Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184
2025-04-04T17:17:31.771656Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue
2025-04-04T17:17:31.771692Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-04T17:17:31.771728Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue
2025-04-04T17:17:31.771772Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails
2025-04-04T17:17:31.771818Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails
2025-04-04T17:17:31.772008Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0
2025-04-04T17:17:31.772047Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed
2025-04-04T17:17:31.772069Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails
2025-04-04T17:17:31.772092Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes
2025-04-04T17:17:31.772114Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes
2025-04-04T17:17:31.772175Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts
2025-04-04T17:17:31.772195Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes
2025-04-04T17:17:31.772220Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies
2025-04-04T17:17:31.772271Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies
2025-04-04T17:17:31.772340Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184
2025-04-04T17:17:31.772367Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184
2025-04-04T17:17:31.772394Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184
2025-04-04T17:17:31.772428Z node 1 :TX_D ... eady operations at 9437184
2025-04-04T17:17:42.844498Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.844556Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.844619Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms
2025-04-04T17:17:42.844703Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1}
2025-04-04T17:17:42.844750Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.844786Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:17:42.844968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.844993Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.845078Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T17:17:42.845125Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2}
2025-04-04T17:17:42.845149Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.845184Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:17:42.845208Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:17:42.845308Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.845330Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.845363Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T17:17:42.845399Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3}
2025-04-04T17:17:42.845434Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.845456Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:17:42.845475Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:17:42.845499Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:17:42.845593Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.845614Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.845641Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T17:17:42.845692Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4}
2025-04-04T17:17:42.845721Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.845746Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:17:42.845844Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.845894Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.845949Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T17:17:42.845999Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5}
2025-04-04T17:17:42.846028Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.846126Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.846150Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.846180Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T17:17:42.846235Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6}
2025-04-04T17:17:42.846278Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.846375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.846398Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.846441Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T17:17:42.846489Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7}
2025-04-04T17:17:42.846516Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.846542Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.846584Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose
2025-04-04T17:17:42.846642Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE
2025-04-04T17:17:42.846731Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.846949Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.846974Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.847011Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms
2025-04-04T17:17:42.847049Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8}
2025-04-04T17:17:42.847070Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.847165Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184
2025-04-04T17:17:42.847199Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation
2025-04-04T17:17:42.847231Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms
2025-04-04T17:17:42.847269Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
2025-04-04T17:17:42.847633Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1}
2025-04-04T17:17:42.847681Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.847723Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506
2025-04-04T17:17:42.847855Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2}
2025-04-04T17:17:42.847882Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.847904Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507
2025-04-04T17:17:42.848041Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3}
2025-04-04T17:17:42.848069Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.848096Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508
2025-04-04T17:17:42.848221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4}
2025-04-04T17:17:42.848259Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.848283Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509
2025-04-04T17:17:42.848369Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5}
2025-04-04T17:17:42.848395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.848416Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510
2025-04-04T17:17:42.848532Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6}
2025-04-04T17:17:42.848561Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.848588Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511
2025-04-04T17:17:42.848685Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7}
2025-04-04T17:17:42.848713Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.848735Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512
2025-04-04T17:17:42.848883Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8}
2025-04-04T17:17:42.848913Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck
2025-04-04T17:17:42.848937Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514
expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - -
actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - -
interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - -
>> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier
>> DataShardTxOrder::ZigZag_oo
>> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW
>> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD]
Test command err:
2025-04-04T17:17:38.343228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:17:38.343654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:17:38.343721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f75/r3tmp/tmpwqVyzF/pdisk_1.dat
2025-04-04T17:17:38.647281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T17:17:38.654041Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0
2025-04-04T17:17:38.654102Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:408:2403] Proxy marker# C1
2025-04-04T17:17:38.681656Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:17:38.719343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:17:38.719493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:17:38.730880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:17:38.803859Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned
2025-04-04T17:17:38.803974Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480
2025-04-04T17:17:38.804192Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s
2025-04-04T17:17:38.804481Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2
2025-04-04T17:17:38.804537Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:408:2403] Proxy
2025-04-04T17:17:38.806531Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081
2025-04-04T17:17:38.806648Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1
2025-04-04T17:17:38.806687Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged
2025-04-04T17:17:38.806720Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged
2025-04-04T17:17:38.811006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T17:17:38.852413Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:655:2563], Recipient [1:663:2569]: NKikimr::TEvTablet::TEvBoot
2025-04-04T17:17:38.853201Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:655:2563], Recipient [1:663:2569]: NKikimr::TEvTablet::TEvRestored
2025-04-04T17:17:38.853554Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:663:2569]
2025-04-04T17:17:38.853747Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:17:38.863083Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:655:2563], Recipient [1:663:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-04-04T17:17:38.886085Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:17:38.886167Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-04-04T17:17:38.887461Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-04-04T17:17:38.887524Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-04-04T17:17:38.887568Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-04-04T17:17:38.887900Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-04-04T17:17:38.888023Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-04-04T17:17:38.888088Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:679:2569] in generation 1
2025-04-04T17:17:38.888518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-04-04T17:17:38.903644Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-04-04T17:17:38.903788Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-04-04T17:17:38.903878Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:681:2579]
2025-04-04T17:17:38.903933Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-04-04T17:17:38.903966Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-04-04T17:17:38.904014Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T17:17:38.904182Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:663:2569], Recipient [1:663:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-04-04T17:17:38.904229Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-04-04T17:17:38.904461Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-04-04T17:17:38.904539Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-04-04T17:17:38.904606Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-04-04T17:17:38.904651Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-04-04T17:17:38.904700Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-04-04T17:17:38.904731Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-04-04T17:17:38.904770Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-04-04T17:17:38.904803Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-04-04T17:17:38.904837Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-04-04T17:17:38.904948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:670:2573], Recipient [1:663:2569]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-04-04T17:17:38.904976Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-04-04T17:17:38.905034Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2568], serverId# [1:670:2573], sessionId# [0:0:0]
2025-04-04T17:17:38.905097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:670:2573]
2025-04-04T17:17:38.905131Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-04-04T17:17:38.905215Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-04-04T17:17:38.905383Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-04-04T17:17:38.905441Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-04-04T17:17:38.905517Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-04-04T17:17:38.905560Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-04-04T17:17:38.905595Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-04-04T17:17:38.905630Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-04-04T17:17:38.905663Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-04T17:17:38.905931Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-04-04T17:17:38.905983Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-04-04T17:17:38.906020Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-04-04T17:17:38.906055Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-04T17:17:38.906116Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-04-04T17:17:38.906148Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-04-04T17:17:38.906178Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-04-04T17:17:38.906207Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-04-04T17:17:38.906234Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-04-04T17:17:38.907267Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-04-04T17:17:38.907315Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-04-04T17:17:38.907343Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-04-04T17:17:38.907399Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-04-04T17:17:38.907455Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-04-04T17:17:38.909313Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0
2025-04-04T17:17:38.909366Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 1000 Status# 16 SEND to# [1:408:2403] Proxy marker# C1
2025-04-04T17:17:38.909658Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:682:2580], Recipient [1:663:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-04-04T17:17:38.909702Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-04-04T17:17:39.044627Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned
2025-04-04T17:17:39.044703Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480
2025-04-04T17:17:39.044754Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for ... 000
2025-04-04T17:17:42.962877Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000}
2025-04-04T17:17:42.962962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [1:24:2071], Recipient [1:1353:3058]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 4000}
2025-04-04T17:17:42.962999Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep
2025-04-04T17:17:42.963055Z node 1 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 4000 at tablet 72075186224037888
2025-04-04T17:17:42.963112Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-04-04T17:17:42.963800Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 1 SubscriptionId: 2 LatestStep: 4000
2025-04-04T17:17:42.964092Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 1 TimeBarrier# 4000}
2025-04-04T17:17:43.058713Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jr0wxxnmf6vafhbkcs0j6s56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM1NTg1NWUtNjZmMTMxZTMtNDZlOWE5ZDYtMTMzZDkyMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:17:43.060252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1412:3098], Recipient [1:1353:3058]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-04T17:17:43.060338Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:43.060394Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:43.060485Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-04T17:17:43.060530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:43.060564Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:43.060588Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:43.060623Z node 1 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-04-04T17:17:43.060650Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-04T17:17:43.060670Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:43.060685Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:43.060699Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:43.060803Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-04T17:17:43.061021Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715683, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:17:43.061069Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-04-04T17:17:43.061106Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1412:3098], 0} after executionsCount# 1 2025-04-04T17:17:43.061143Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1412:3098], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:43.061199Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1412:3098], 0} finished in read 2025-04-04T17:17:43.061247Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-04T17:17:43.061266Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:43.061282Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 
2025-04-04T17:17:43.061298Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:43.061328Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-04-04T17:17:43.061342Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:43.061381Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-04-04T17:17:43.061406Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:43.061482Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:43.062261Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1412:3098], Recipient [1:1353:3058]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:43.062319Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2025-04-04T17:17:43.173688Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2025-04-04T17:17:43.173767Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4500 in 0.499900s at 4.450000s 2025-04-04T17:17:43.174702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jr0wxxrwd73js8k0h78xd667, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQyODlhMzItZDViZGUzY2MtZjUyZmVjYTAtZGY0ZWEyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:17:43.176123Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1436:3115], Recipient [1:1353:3058]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-04-04T17:17:43.176230Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:43.176293Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:43.176363Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:17:43.176395Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:43.176427Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:43.176454Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:43.176482Z node 1 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-04T17:17:43.176521Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:17:43.176539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:43.176553Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:43.176570Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:43.176696Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-04T17:17:43.176913Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715686, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:17:43.176954Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-04T17:17:43.176985Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1436:3115], 0} after executionsCount# 1 2025-04-04T17:17:43.177022Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1436:3115], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:43.177084Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1436:3115], 0} finished in read 2025-04-04T17:17:43.177156Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:17:43.177181Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:43.177211Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:43.177230Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:43.177263Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:17:43.177278Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:43.177297Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-04T17:17:43.177321Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:43.177391Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:43.177610Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:1353:3058]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715686 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-04T17:17:43.178128Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1436:3115], Recipient [1:1353:3058]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:43.178183Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } >> DataShardOutOfOrder::UncommittedReads [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: 2025-04-04T17:17:37.518964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:37.519502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:37.519706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:37.521038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:37.521190Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:37.521348Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f88/r3tmp/tmprGYrWD/pdisk_1.dat 2025-04-04T17:17:37.908110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:38.064814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:38.160299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:38.160462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:38.164659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:38.164750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:38.178160Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:17:38.178689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:38.179035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:38.449808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:38.518936Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1258:2377], Recipient [2:1284:2389]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:38.529479Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1258:2377], Recipient [2:1284:2389]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:38.529945Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1284:2389] 2025-04-04T17:17:38.530209Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:38.540431Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1258:2377], Recipient [2:1284:2389]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:38.581763Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:38.581996Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:38.583642Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:38.583725Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:38.583786Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:38.584160Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:38.584397Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:38.584473Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1308:2389] in generation 1 2025-04-04T17:17:38.587242Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:38.618092Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:38.618296Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:38.618421Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1312:2406] 2025-04-04T17:17:38.618477Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:38.618513Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:38.618554Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:38.618799Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1284:2389], Recipient [2:1284:2389]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.618848Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.619106Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:38.619197Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:38.619353Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:38.619403Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:38.619467Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:38.619522Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:38.619559Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:38.619595Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:38.619654Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:38.674203Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1316:2407], Recipient [2:1284:2389]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.674274Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.674335Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1268:2777], serverId# [2:1316:2407], sessionId# [0:0:0] 2025-04-04T17:17:38.674758Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:836:2464], Recipient [2:1316:2407] 2025-04-04T17:17:38.674801Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:38.674916Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:38.675119Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:38.675224Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:38.675300Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:38.675357Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:38.675410Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:38.675447Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:17:38.675476Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:38.675745Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:38.675779Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:38.675826Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:38.675855Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:38.675920Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:38.675946Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:38.675981Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:38.676009Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:38.676031Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:38.679501Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1317:2408], Recipient [2:1284:2389]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:38.679564Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:38.679737Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:38.679775Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:38.679808Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:38.679845Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:38.679908Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:39.027887Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1349:2417], Recipient [2:1284:2389]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:39.027945Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:39.027981Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1347:2800], serverId# [2:1349:2417], sessionId# [0:0:0] 2025-04-04T17:17:39.029176Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1061:2623], Recipient [2:1349:2417] 2025-04-04T17:17:39.029222Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:39.029360Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:39.029410Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... ing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:42.599067Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976715669 2025-04-04T17:17:42.633331Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2307:2586], Recipient [2:2189:2544]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.633388Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.633424Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:2183:3337], serverId# [2:2307:2586], sessionId# [0:0:0] 2025-04-04T17:17:42.634334Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2308:2587], Recipient [2:2189:2544]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.634371Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.634399Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [2:2181:2542], serverId# [2:2308:2587], sessionId# [0:0:0] 2025-04-04T17:17:42.634527Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2064:2514], Recipient [2:2189:2544]: {TEvReadSet step# 2544 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:17:42.634559Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.634619Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715667 2025-04-04T17:17:42.634701Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2544 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:17:42.635152Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:17:42.635515Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2064:2514], Recipient [2:2189:2544]: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:17:42.635549Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.635580Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-04-04T17:17:42.635642Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 
72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:17:42.635829Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2064:2514], Recipient [2:2189:2544]: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-04T17:17:42.635856Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.635880Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-04-04T17:17:42.635914Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-04T17:17:42.636035Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2122:3291], Recipient [2:2307:2586] 2025-04-04T17:17:42.636065Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.636107Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715668 2025-04-04T17:17:42.636149Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2545 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:17:42.636386Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2122:3291], Recipient [2:2307:2586] 2025-04-04T17:17:42.636414Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.636441Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-04-04T17:17:42.636566Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:17:42.636916Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2122:3291], Recipient [2:2307:2586] 2025-04-04T17:17:42.636948Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.636978Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-04-04T17:17:42.637458Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:17:42.637594Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-04-04T17:17:42.637736Z node 2 :TX_DATASHARD DEBUG: Complete [2695 : 281474976715669] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2259:3355], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:42.638018Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:17:42.642767Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:17:42.644183Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2189:2544], Recipient [2:2064:2514]: {TEvReadSet step# 2544 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 1} 2025-04-04T17:17:42.644226Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:42.644266Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715667 2025-04-04T17:17:42.644365Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:17:42.644417Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:17:42.644462Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:17:42.644762Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2189:2544], Recipient [2:2064:2514]: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-04T17:17:42.644797Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.644825Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976715669 2025-04-04T17:17:42.644893Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-04T17:17:42.644922Z node 2 :TX_DATASHARD NOTICE: Outdated readset for 2695:281474976715669 at 72075186224037888 2025-04-04T17:17:42.644959Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-04T17:17:42.646779Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:17:42.646913Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:17:42.646963Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:17:42.647131Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2189:2544], Recipient [1:2122:3291] 2025-04-04T17:17:42.647156Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:42.647184Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715668 2025-04-04T17:17:42.647239Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-04-04T17:17:42.647452Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2189:2544], Recipient [2:2064:2514]: {TEvReadSet step# 2695 txid# 
281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2025-04-04T17:17:42.647477Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:42.647502Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 2025-04-04T17:17:42.647698Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2189:2544], Recipient [1:2122:3291] 2025-04-04T17:17:42.647721Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:42.647745Z node 1 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976715669 2025-04-04T17:17:42.647790Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2695 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-04T17:17:42.647819Z node 1 :TX_DATASHARD NOTICE: Outdated readset for 2695:281474976715669 at 72075186224037889 2025-04-04T17:17:42.647856Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-04T17:17:42.647970Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2189:2544], Recipient [1:2122:3291] 2025-04-04T17:17:42.647995Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:42.648026Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::SrcIdCompatibility >> DataShardTxOrder::DelayData >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: 2025-04-04T17:17:42.172185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:42.172471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:42.172515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f3b/r3tmp/tmp6rCIy9/pdisk_1.dat 2025-04-04T17:17:42.519290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.555247Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:42.592095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:42.592214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:42.603585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:42.683289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.714177Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:42.715017Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:42.715346Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:17:42.715580Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:42.723546Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:42.754709Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:42.754814Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:42.756323Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:42.756403Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:42.756452Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:42.756774Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:42.756886Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:42.756944Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:17:42.757318Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:42.783228Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:42.783392Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:42.783503Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:17:42.783535Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:42.783565Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:42.783596Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:42.783774Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.783817Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.784065Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:42.784149Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:42.784209Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:42.784297Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:42.784347Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:42.784390Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:42.784425Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:42.784470Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:42.784507Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:42.784625Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.784657Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.784707Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:17:42.784776Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:17:42.784806Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:42.784901Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:42.785089Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:42.785133Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:42.785217Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:42.785255Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:42.785288Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:42.785324Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:17:42.785354Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:42.785627Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:42.785704Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:42.785737Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:42.785772Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:42.785829Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:42.785862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:42.785895Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:42.785929Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:42.785956Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:42.787056Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:42.787108Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:42.787136Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:42.787169Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:17:42.787221Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:42.789268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:42.789330Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:42.926474Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.926520Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.926545Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:17:42.927404Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:17:42.927437Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:42.927525Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:42.927552Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:17:42.927579Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:17:42.927607Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:17:42.930568Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:17:42.930624Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:42.931179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.931211Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.931241Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:4 ... \022\024\n\022\t\255\003\000\000\000\000\000\000\021\277\n\000\000\001\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\0 2025-04-04T17:17:43.939324Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:43.939419Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:17:43.939456Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:17:43.939513Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:43.939821Z node 1 :TX_DATASHARD TRACE: TxId: 281474976715664, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-04-04T17:17:43.939870Z node 1 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2025-04-04T17:17:43.939918Z node 1 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-04T17:17:43.940129Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:17:43.940176Z node 1 :TX_DATASHARD TRACE: Execution status for 
[0:281474976715664] at 72075186224037888 is Executed 2025-04-04T17:17:43.940211Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:17:43.940236Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:43.940260Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:43.940291Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-04T17:17:43.940336Z node 1 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2025-04-04T17:17:43.940365Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-04T17:17:43.940433Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:43.940454Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:17:43.940472Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:17:43.940511Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-04T17:17:43.940558Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191926 2025-04-04T17:17:43.940704Z node 1 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:17:43.940756Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.940790Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:17:43.940820Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:43.940843Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:43.940878Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-04-04T17:17:43.940900Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:43.940927Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:43.940950Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:43.940984Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-04-04T17:17:43.941001Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:43.941032Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 
has finished ... blocked commit for tablet 72075186224037888 2025-04-04T17:17:44.026929Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wxym5aek3b23g94g6tg07, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU4NTdmNTUtMWY1ZDAyZDgtOTg2ODgxNmUtYTE1YjFmZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:44.028357Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:961:2776], Recipient [1:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T17:17:44.028453Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:44.028512Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-04-04T17:17:44.028550Z node 1 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2025-04-04T17:17:44.028599Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:44.028670Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:44.028726Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:44.028761Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:44.028791Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:44.028856Z node 1 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-04-04T17:17:44.028889Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:44.028908Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:44.028927Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:44.028948Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:44.029038Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:17:44.029255Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-04-04T17:17:44.029301Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:44.029352Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:44.029380Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:44.029419Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-04-04T17:17:44.029439Z node 1 :TX_DATASHARD 
TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:44.029462Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-04-04T17:17:44.029495Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:44.102882Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2025-04-04T17:17:44.102960Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2025-04-04T17:17:44.238756Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:44.238815Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:44.238862Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1000 ms, status: COMPLETE 2025-04-04T17:17:44.238937Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:44.238977Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:44.239014Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:44.239048Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:961:2776], 0} after executionsCount# 1 2025-04-04T17:17:44.239108Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:961:2776], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:44.239218Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:961:2776], 0} finished in read 2025-04-04T17:17:44.241164Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:961:2776], Recipient [1:664:2569]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:44.241250Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: 2025-04-04T17:17:35.766895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:35.767458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:35.767512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f81/r3tmp/tmpoT0l4k/pdisk_1.dat 2025-04-04T17:17:36.174954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:36.227655Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:36.271883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:36.276228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:36.289933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:36.382204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:36.426200Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:36.427322Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:36.427756Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:17:36.428025Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:36.439548Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:36.474095Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:36.474215Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:36.475976Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:36.476065Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:36.476126Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:36.476498Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:36.476633Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:36.476720Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:17:36.490828Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:36.532750Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:36.532941Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:36.533088Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:17:36.533126Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:36.533159Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:36.533194Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:36.533395Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.533439Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.533709Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:36.533815Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:36.533963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:36.534012Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:36.534082Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:36.534123Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:36.534159Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:36.534187Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:36.534226Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:36.534334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.534385Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.534425Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:17:36.536355Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:17:36.536466Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:36.536616Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:36.536898Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:36.536957Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:36.537047Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:36.537102Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:36.537142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:36.537189Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:17:36.537229Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:36.537504Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:36.537547Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:36.537588Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:36.537633Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:36.537688Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:36.537721Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:36.537769Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:36.537803Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:36.537845Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:36.539509Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:36.539568Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:36.550336Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:36.550431Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:36.550470Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:36.550522Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:17:36.550612Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:36.693090Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.693129Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.693978Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:17:36.694410Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:17:36.694462Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:36.694611Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:36.694661Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:17:36.694710Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:17:36.694744Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:17:36.698904Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:17:36.698967Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:36.700662Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.700703Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.700745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:3 ... actorId: [2:1186:2938] 2025-04-04T17:17:43.625870Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-04T17:17:43.625930Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-04T17:17:43.625972Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:17:43.626219Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1026:2823], Recipient [2:755:2633]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-04-04T17:17:43.626288Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-04-04T17:17:43.626665Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1181:2933], Recipient [2:755:2633]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1181:2933] ServerId: [2:1183:2935] } 2025-04-04T17:17:43.626705Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:17:43.626809Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1026:2823], Recipient [2:1026:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.626841Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.627313Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1026:2823]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-04-04T17:17:43.627348Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:17:43.627393Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-04T17:17:43.627431Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:17:43.627563Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1183:2935], Recipient [2:1026:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:43.627593Z node 2 :TX_DATASHARD TRACE: StateWork, processing 
event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:43.627642Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1181:2933], serverId# [2:1183:2935], sessionId# [0:0:0] 2025-04-04T17:17:43.627726Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T17:17:43.627771Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:43.627807Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-04-04T17:17:43.627848Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-04T17:17:43.627886Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-04T17:17:43.627922Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-04T17:17:43.627975Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-04T17:17:43.628275Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1026:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-04T17:17:43.628313Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-04T17:17:43.628354Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-04-04T17:17:43.628408Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-04T17:17:43.628464Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-04T17:17:43.639474Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-04-04T17:17:43.639554Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-04-04T17:17:43.639649Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:17:43.639724Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-04T17:17:43.639779Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1190:2942] 2025-04-04T17:17:43.639811Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-04-04T17:17:43.639849Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-04-04T17:17:43.639880Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-04T17:17:43.640153Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1029:2825], Recipient [2:755:2633]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-04-04T17:17:43.640214Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-04-04T17:17:43.640471Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1029:2825], Recipient [2:1029:2825]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:17:43.640505Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.640702Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1182:2934], Recipient [2:755:2633]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1182:2934] ServerId: [2:1184:2936] } 2025-04-04T17:17:43.640734Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:17:43.641093Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1029:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-04-04T17:17:43.641130Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:17:43.641158Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-04-04T17:17:43.641187Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-04T17:17:43.641300Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-04T17:17:43.641330Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:43.641357Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-04-04T17:17:43.641385Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-04-04T17:17:43.641411Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-04-04T17:17:43.641441Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-04-04T17:17:43.641478Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-04T17:17:43.641580Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1184:2936], Recipient [2:1029:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:43.641609Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:43.641646Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1182:2934], serverId# [2:1184:2936], sessionId# [0:0:0] 2025-04-04T17:17:43.641908Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1029:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-04T17:17:43.641969Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-04T17:17:43.642002Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-04-04T17:17:43.642043Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-04T17:17:43.642089Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-04T17:17:43.652930Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-04-04T17:17:43.655597Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:417:2411], Recipient [2:762:2637] 
2025-04-04T17:17:43.655662Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-04T17:17:43.657303Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-04-04T17:17:43.657360Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:17:43.657802Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:747:2628], Recipient [2:755:2633]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:17:44.179198Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:977:2680], Recipient [2:665:2569]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937272 } TxBody: " \0008\000`\200\200\200\005j\324\006\010\001\022\225\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021x\n\000\000\002\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2025-04-04T17:17:44.179280Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:44.179388Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-04T17:17:44.179836Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-04-04T17:17:44.180329Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges >> DataShardTxOrder::RandomPoints_DelayRS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2025-04-04T17:17:37.547013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:37.547467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:37.547526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f7b/r3tmp/tmpJDkOdM/pdisk_1.dat 2025-04-04T17:17:37.869796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:37.907906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:37.945357Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:17:37.946169Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:17:37.946512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:37.946658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:37.958031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:38.037698Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:17:38.037766Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:17:38.037934Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:17:38.177548Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:17:38.177644Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:17:38.178268Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:17:38.178359Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:17:38.178717Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:17:38.178918Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:17:38.179034Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:17:38.179262Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:17:38.180747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:38.181609Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:17:38.181692Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:17:38.212866Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:670:2572]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:38.214040Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:670:2572]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:38.214487Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2572] 2025-04-04T17:17:38.214821Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:38.258026Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:670:2572]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:38.258469Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:38.259744Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:38.259939Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:38.261607Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:38.261677Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:38.261731Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:38.262088Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:38.262185Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:38.262920Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:673:2574] 2025-04-04T17:17:38.263112Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:38.271395Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:38.271482Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2572] in generation 1 2025-04-04T17:17:38.271661Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:38.272357Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:38.272442Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:38.273674Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:17:38.273739Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-04-04T17:17:38.273784Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:17:38.274036Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:38.274157Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:38.274206Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:705:2574] in generation 1 2025-04-04T17:17:38.285151Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:38.325071Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:38.325259Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:38.325392Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:708:2593] 2025-04-04T17:17:38.325450Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:38.325496Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:38.325554Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:38.325822Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2572], Recipient [1:670:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.325871Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.325940Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:38.325976Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:17:38.326051Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:38.326106Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:709:2594] 2025-04-04T17:17:38.326128Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:17:38.326149Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:17:38.326171Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:38.326452Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:673:2574], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.326486Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.326655Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:38.326740Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:38.327266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:38.327330Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:38.327387Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:38.327439Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-04-04T17:17:38.327480Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:38.327515Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:38.327582Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:38.327636Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:17:38.327695Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:17:38.327773Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:691:2585], Recipient [1:670:2572]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.327807Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.327848Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:664:2568], serverId# [1:691:2585], sessionId# [0:0:0] 2025-04-04T17:17:38.327923Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... "ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=" } RequestContext { key: "TraceId" value: "01jr0wxzaq6qpqtr581wbcw7vd" } EnableSpilling: false DisableMetering: true 2025-04-04T17:17:44.737807Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2025-04-04T17:17:44.737914Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-04-04T17:17:44.738027Z node 2 :KQP_EXECUTER INFO: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:44.738080Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-04-04T17:17:44.738139Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-04-04T17:17:44.738198Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-04-04T17:17:44.738262Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-04-04T17:17:44.738663Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:972:2781], Recipient [2:926:2747]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.738721Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.738769Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:971:2780], serverId# [2:972:2781], sessionId# [0:0:0] 2025-04-04T17:17:44.738982Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:968:2764], Recipient [2:926:2747]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 968 RawX2: 8589937356 } TxBody: " \0018\000`\200\200\200\005j\246\006\010\001\022\225\006\010\001\022\024\n\022\t\310\003\000\000\000\000\000\000\021\314\n\000\000\002\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? 
\004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\0 2025-04-04T17:17:44.739022Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:44.739149Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:926:2747], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:17:44.739182Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:17:44.739244Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:44.739616Z node 2 :TX_DATASHARD TRACE: TxId: 281474976715662, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-04-04T17:17:44.739687Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2025-04-04T17:17:44.739749Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-04T17:17:44.740031Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:17:44.740094Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-04T17:17:44.740146Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:17:44.740190Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:44.740233Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:44.740273Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-04T17:17:44.740327Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715662] at 72075186224037888 2025-04-04T17:17:44.740362Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-04T17:17:44.740386Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:44.740425Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:17:44.740453Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:17:44.740500Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# 
v1500/18446744073709551615 2025-04-04T17:17:44.740551Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715662] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191926 2025-04-04T17:17:44.740737Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:17:44.740796Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:44.740828Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:17:44.740863Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:44.740896Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:44.740961Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:44.741012Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:44.741050Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:44.741088Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:44.741136Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-04T17:17:44.741161Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:44.741186Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-04-04T17:17:44.752133Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:44.752224Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:44.752307Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:17:44.752410Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:44.752736Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-04-04T17:17:44.752902Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:44.752973Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState 2025-04-04T17:17:44.753034Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:968:2764] TxId: 281474976715662. Ctx: { TraceId: 01jr0wxzaq6qpqtr581wbcw7vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM3MzZiOTAtZjJjNzM3NzYtOTJmNDI4MzYtYTE3NmJmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] >> DataShardTxOrder::ReadWriteReorder >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2025-04-04T17:17:40.804950Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:40.884032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:40.884087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:40.891311Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:40.891740Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:40.892042Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:40.901632Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:40.944154Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:40.944238Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:40.945670Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:40.945730Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:40.945773Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:40.946140Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:40.946447Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:40.946511Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:41.013182Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:41.044274Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:41.044439Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:41.044547Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:41.044588Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:41.044621Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:41.044654Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:41.044799Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:41.044845Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:41.045107Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:41.045194Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:41.045298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:41.045345Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:41.045394Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:41.045438Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:41.045468Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:41.045509Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:41.045552Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:41.045634Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:41.045665Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:41.045708Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:41.051835Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:41.051907Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:41.051992Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:41.052156Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:41.052203Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:41.052248Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:41.052288Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:41.052322Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:41.052356Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:41.052385Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:41.052673Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:41.052708Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:41.052755Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to 
execution unit FinishPropose 2025-04-04T17:17:41.052784Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:41.052843Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:41.052889Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:41.052922Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:41.052966Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:41.052989Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:41.065203Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:41.065293Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:41.065335Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:41.065376Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:41.065431Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:41.065934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:41.065986Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:41.066032Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:41.066192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:41.066224Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:41.066373Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:41.066414Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:41.066454Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:41.066494Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:41.070431Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:41.070513Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:41.070786Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:41.070830Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:41.070878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:41.070920Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 
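[editor's note] The plan-step handling traced above (TEvPlanStep -> PlanQueue -> LoadTxDetails) depends on planned operations becoming ready in global (step, txId) order. Below is a minimal illustrative sketch of that ordering only -- the container and names are assumptions for the example, not the actual YDB PlanQueue classes:

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <utility>

int main() {
    // Key: (plan step, txId). std::map iterates in ascending key order,
    // which models the global order the coordinator assigns to planned txs.
    std::map<std::pair<uint64_t, uint64_t>, std::string> planQueue;
    planQueue[{1000001, 1}]   = "create table1";
    planQueue[{1000005, 140}] = "online write";
    planQueue[{1000005, 143}] = "online write";

    while (!planQueue.empty()) {
        auto it = planQueue.begin();  // "Found ready operation in PlanQueue unit"
        std::cout << "execute [" << it->first.first << ":" << it->first.second
                  << "] " << it->second << "\n";
        planQueue.erase(it);          // operation leaves the queue once picked up
    }
}

Because the key compares step first and txId second, two transactions planned at the same step (as with the 1000005 batch in this log) still drain in a deterministic order.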
2025-04-04T17:17:41.070952Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:41.071002Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:41.071038Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:41.071080Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:41.071117Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:41.071163Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:41.071212Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:41.071414Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:41.071455Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:41.071479Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:41.071503Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:41.071524Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:41.071581Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:41.071612Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:41.071648Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:41.071689Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:41.071744Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:41.071779Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:41.071814Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:41.071854Z node 1 :TX_DATA ... 
lt to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:45.462108Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:45.462198Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:45.462217Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-04T17:17:45.462274Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:45.462300Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:45.462383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:45.462404Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-04T17:17:45.462437Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:45.462467Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:45.462584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:45.462611Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-04T17:17:45.462644Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:45.462667Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:45.462773Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:45.462795Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-04T17:17:45.462822Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:45.462844Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:45.462864Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:45.462899Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-04T17:17:45.463088Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-04-04T17:17:45.463116Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.463141Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-04-04T17:17:45.463244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-04-04T17:17:45.463280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.463303Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-04-04T17:17:45.463409Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-04-04T17:17:45.463436Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.463456Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-04-04T17:17:45.463540Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-04T17:17:45.463574Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.463599Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-04T17:17:45.463683Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-04T17:17:45.463709Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.463731Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-04T17:17:45.463784Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-04T17:17:45.463803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.463821Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-04T17:17:45.463911Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-04T17:17:45.463940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.463963Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-04T17:17:45.464032Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-04T17:17:45.464051Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464070Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-04T17:17:45.464115Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-04T17:17:45.464143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464171Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-04-04T17:17:45.464248Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-04T17:17:45.464270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464290Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-04T17:17:45.464362Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-04T17:17:45.464388Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464408Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-04T17:17:45.464489Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-04T17:17:45.464528Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464549Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-04T17:17:45.464611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-04T17:17:45.464648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464672Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-04T17:17:45.464728Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-04T17:17:45.464747Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464768Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-04T17:17:45.464836Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-04T17:17:45.464860Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.464880Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-04T17:17:45.464961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-04T17:17:45.464982Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.465007Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-04T17:17:45.479351Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:45.479403Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-04T17:17:45.479452Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 4 ms 2025-04-04T17:17:45.479517Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:45.479557Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:45.479810Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:45.479847Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:45.479882Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] Test command err: 2025-04-04T17:17:38.278116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:38.278481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:38.278518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f6b/r3tmp/tmp3U3m52/pdisk_1.dat 2025-04-04T17:17:38.611500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:38.643490Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:38.680904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:38.681017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:38.692384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:38.772092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:38.807254Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:38.808152Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:38.808545Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:17:38.808781Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:38.818346Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:38.846967Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:38.847090Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:38.848767Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:38.848853Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:38.848915Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:38.849302Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:38.849436Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:38.849526Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:17:38.860119Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:38.884865Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:38.885058Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:38.885177Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:17:38.885215Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:38.885248Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:38.885283Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:38.885439Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.885477Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.885669Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:38.885770Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:38.886035Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:38.886076Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:38.886130Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:38.886162Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:38.886194Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:38.886218Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:38.886266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:38.886356Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.886385Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.886416Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:17:38.886693Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:17:38.886723Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:38.886813Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:38.886954Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:38.886999Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:38.887052Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:38.887084Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:38.887109Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:38.887140Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
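[editor's note] The unit statuses in the propose pipeline around this point (Executed, ExecutedNoMoreRestarts, DelayComplete, DelayCompleteNoMoreRestarts) reflect a two-phase pattern: units run during the transaction's Execute phase, and any unit answering a DelayComplete-style status has its side effects deferred to the Complete phase. A minimal, self-contained sketch of that pattern follows; the type names and the reduced status set are simplifications for illustration, not the YDB sources:

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Reduced status set: real datashards distinguish more cases (restarts,
// not-ready), but the defer-to-Complete behavior is the part sketched here.
enum class EStatus { Executed, DelayComplete };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;  // runs during the Execute phase
    std::function<void()> Complete;    // runs during Complete for deferred units
};

int main() {
    std::vector<TUnit> plan = {
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; },
                          [] { std::cout << "persist scheme tx\n"; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; },
                          [] { std::cout << "send PREPARED to client\n"; }},
        {"WaitForPlan",   [] { return EStatus::Executed; }, {}},
    };

    std::vector<const TUnit*> delayed;
    for (const auto& unit : plan) {           // Execute phase
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        if (unit.Execute() == EStatus::DelayComplete)
            delayed.push_back(&unit);         // completion deferred, as in the log
        std::cout << "Advance execution plan past unit " << unit.Name << "\n";
    }
    for (const auto* unit : delayed) {        // Complete phase
        std::cout << "Complete execution on unit " << unit->Name << "\n";
        if (unit->Complete) unit->Complete();
    }
}

This matches the shape of the trace: "Complete execution for ... on unit StoreSchemeTx" and "... on unit FinishPropose" appear only after TTxProposeTransactionBase::Complete, while units that returned Executed never reappear there.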
2025-04-04T17:17:38.887169Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:38.887492Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:38.887522Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:38.887549Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:38.887571Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:38.887610Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:38.887631Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:38.887662Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:38.887692Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:38.887726Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:38.888754Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:38.888796Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:38.899391Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:38.899455Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:38.899490Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:38.899527Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:17:38.899601Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:39.038048Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:39.038097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:39.038127Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:17:39.038415Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:17:39.038447Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:39.038590Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:39.038631Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:17:39.038678Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:17:39.038709Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:17:39.042173Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:17:39.042235Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:39.043006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.043045Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:39.043092Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:3 ... SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2846], CA [2:1051:2847], 2025-04-04T17:17:45.647378Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2846], CA [2:1051:2847], 2025-04-04T17:17:45.647493Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2846], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 306 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 137 FinishTimeMs: 1743787065647 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 85 BuildCpuTimeUs: 52 HostName: "ghrun-mup5cka3jq" NodeId: 2 CreateTimeMs: 1743787065639 } MaxMemoryUsage: 1048576 } 2025-04-04T17:17:45.647532Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2846] 2025-04-04T17:17:45.647571Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1051:2847], 2025-04-04T17:17:45.647589Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. 
Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2847], 2025-04-04T17:17:45.647797Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2847], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 267 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 119 FinishTimeMs: 1743787065647 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 87 BuildCpuTimeUs: 32 HostName: "ghrun-mup5cka3jq" NodeId: 2 StartTimeMs: 1743787065646 CreateTimeMs: 1743787065639 } MaxMemoryUsage: 1048576 } 2025-04-04T17:17:45.647845Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2847] 2025-04-04T17:17:45.650013Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 7615 DurationUs: 1743787063635614 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } ExecuterCpuTimeUs: 4705 StartTimeMs: 2012 FinishTimeMs: 1743787065647 Stages { StageId: 5 StageGuid: "ab71b06-7d0c0023-668521ff-d713506b" Program: "(\n(return (lambda \'($1) (FromFlow (Take (ToFlow $1) (Uint64 \'\"1001\")))))\n)\n" ComputeActors { CpuTimeUs: 306 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 137 FinishTimeMs: 1743787065647 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 85 BuildCpuTimeUs: 52 HostName: "ghrun-mup5cka3jq" NodeId: 2 CreateTimeMs: 1743787065639 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743787065643 } Stages { StageGuid: "e12ad3db-3c6adbd0-61611e53-eb83cfa9" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743787065643 } Stages { StageId: 3 StageGuid: "3884b71e-3df0c8b7-91361e0b-95bb2775" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743787065643 } Stages { StageId: 2 StageGuid: "a9d52f3b-62fcb3bc-b102cf3f-46b3be73" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743787065643 } Stages { StageId: 4 StageGuid: "47da7bf5-39c165a9-a54c8bd7-97fe2126" Program: "(\n(return (lambda \'($1 $2) (block \'(\n (let $3 (lambda \'($6 $7) (AsStruct \'(\'\"key\" $6) \'(\'\"value\" $7))))\n (let $4 (Sort (Extend (NarrowMap (ToFlow $1) $3) (NarrowMap (ToFlow $2) $3)) (Bool \'true) (lambda \'($8) (Member $8 \'\"key\"))))\n (let $5 (lambda \'($9) (Member $9 \'\"key\") (Member $9 \'\"value\")))\n (return (FromFlow (ExpandMap $4 $5)))\n))))\n)\n" BaseTimeMs: 1743787065643 } Stages { StageId: 6 StageGuid: "28bd8a98-8277396c-3ba5a031-3e5e6a12" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" BaseTimeMs: 1743787065643 } Stages { StageId: 1 StageGuid: "41258ccc-44dc9b5f-de752a10-4d79fe92" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743787065643 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":17,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":16,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":14}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":15,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":14,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":12}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":13,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":12,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Union\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"row.key\"},{\"Inputs\":[{\"ExternalPlanNodeId\":10},{\"ExternalPlanNodeId\":5}],\"Name\":\"Union\"}],\"PlanNodeId\":11,\"Plans\":[{\"Node 
Type\":\"UnionAll\",\"PlanNodeId\":10,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":9,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":8,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"0\",\"E-Rows\":\"1\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (3)\"],\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":6,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"e12ad3db-3c6adbd0-61611e53-eb83cfa9\",\"Stats\":{\"BaseTimeMs\":1743787065643,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"41258ccc-44dc9b5f-de752a10-4d79fe92\",\"Stats\":{\"BaseTimeMs\":1743787065643,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"},{\"Node Type\":\"UnionAll\",\"PlanNodeId\":5,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"0\",\"E-Rows\":\"1\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-2\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (4)\"],\"Scan\":\"Sequential\",\"Table\":\"table-2\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-2\"]}],\"StageGuid\":\"a9d52f3b-62fcb3bc-b102cf3f-46b3be73\",\"Stats\":{\"BaseTimeMs\":1743787065643,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"3884b71e-3df0c8b7-91361e0b-95bb2775\",\"Stats\":{\"BaseTimeMs\":1743787065643,\"PhysicalStageId\":3,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"47da7bf5-39c165a9-a54c8bd7-97fe2126\",\"Stats\":{\"BaseTimeMs\":1743787065643,\"PhysicalStageId\":4,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"ab71b06-7d0c0023-668521ff-d713506b\",\"Stats\":{\"BaseTimeMs\":1743787065643,\"ComputeNodes\":[{\"CpuTimeUs\":306,\"Tasks\":[{\"ComputeTimeUs\":85,\"FinishTimeMs\":1743787065647,\"Host\":\"ghrun-mup5cka3jq\",\"InputBytes\":7,\"InputRows\":2,\"NodeId\":2,\"OutputBytes\":7,\"OutputRows\":2,\"TaskId\":6}]}],\"PhysicalStageId\":5,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"28bd8a98-8277396c-3ba5a031-3e5e6a12\",\"Stats\":{\"BaseTimeMs\":1743787065643,\"PhysicalStageId\":6,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3706 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: 
"\010\002\022\013\010\213\002\020\347\005\030\336\026 \007" } } 2025-04-04T17:17:45.650091Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:45.650135Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2824] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxzzfbd7nszk3c8jv4pph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzI0OWIxOWItOWFkZWY1N2YtMmYzN2I5N2YtNjFmMDdjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002910s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2025-04-04T17:17:41.847789Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:41.921439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:41.921491Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:41.928633Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:41.929153Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:41.929479Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:41.939343Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:41.972506Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:41.972596Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:41.974272Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:41.974350Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:41.974402Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:41.974840Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:41.975171Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:41.975259Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:42.036287Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:42.065740Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:42.065883Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:42.065973Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:42.066002Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:42.066028Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 
2025-04-04T17:17:42.066057Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:42.066169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.066214Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.066475Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:42.066551Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:42.066651Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:42.066695Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:42.066735Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:42.066765Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:42.066788Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:42.066835Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:42.066864Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:42.066928Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.066951Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.066990Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:42.072582Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:42.072659Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:42.072756Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:42.072916Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:42.072957Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:42.073006Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:42.073055Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:42.073091Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:42.073122Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:42.073154Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:42.073449Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:42.073489Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:42.073523Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:42.073582Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:42.073634Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:42.073665Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:42.073705Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:42.073744Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:42.073768Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:42.085786Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:42.085864Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:42.085907Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:42.085948Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:42.086011Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:42.086514Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.086566Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.086642Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:42.086813Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:42.086845Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:42.086959Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:42.087001Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:42.087040Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:42.087077Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:42.090851Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:42.090918Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:42.091110Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.091147Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.091194Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:42.091233Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:42.091274Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:42.091319Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:42.091356Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:42.091394Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:42.091431Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:42.091473Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:42.091515Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:42.091657Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:42.091684Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:42.091699Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:42.091714Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:42.091728Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:42.091764Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:42.091786Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:42.091813Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:42.091849Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:42.091895Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:42.091924Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:42.091952Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:42.091982Z node 1 :TX_DATA ... 
lt to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:46.260791Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:46.260861Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:46.260892Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-04-04T17:17:46.260918Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:46.260935Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:46.261027Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:46.261054Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-04-04T17:17:46.261083Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:46.261125Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:46.261223Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:46.261251Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-04-04T17:17:46.261274Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:46.261301Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:46.261388Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:46.261409Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-04-04T17:17:46.261437Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:17:46.261452Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:46.261473Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:46.261497Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-04T17:17:46.261727Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-04-04T17:17:46.261760Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.261789Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-04-04T17:17:46.261907Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-04-04T17:17:46.261943Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.261965Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-04-04T17:17:46.262068Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-04-04T17:17:46.262097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.262119Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-04-04T17:17:46.262222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-04-04T17:17:46.262290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.262321Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-04-04T17:17:46.262558Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-04T17:17:46.262593Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.262612Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-04T17:17:46.262669Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-04T17:17:46.262683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.262709Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-04T17:17:46.262772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-04T17:17:46.262964Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.262997Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-04T17:17:46.263071Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-04T17:17:46.263095Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.263110Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-04T17:17:46.263335Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-04T17:17:46.263361Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.263387Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-04-04T17:17:46.263448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-04T17:17:46.263461Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.263473Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-04T17:17:46.263528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-04T17:17:46.263544Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.263559Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-04T17:17:46.263711Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-04T17:17:46.263735Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.263750Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-04T17:17:46.263815Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-04T17:17:46.263832Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.263846Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-04T17:17:46.263895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-04T17:17:46.263909Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.263922Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-04T17:17:46.264183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-04T17:17:46.264206Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.264220Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-04T17:17:46.264269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-04T17:17:46.264435Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.264469Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-04T17:17:46.279017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:46.279094Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-04T17:17:46.279153Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 4 ms 2025-04-04T17:17:46.279223Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:46.279273Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:46.279538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:46.279575Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:46.279610Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:13:06.444967Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507219061021962:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.445197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.615562Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3c/r3tmp/tmprXqZGf/pdisk_1.dat 2025-04-04T17:13:06.819757Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12058, node 1 2025-04-04T17:13:06.828175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:06.828285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:06.841328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:06.842468Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:13:06.966755Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e3c/r3tmp/yandexxNytCy.tmp 2025-04-04T17:13:06.966777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e3c/r3tmp/yandexxNytCy.tmp 2025-04-04T17:13:06.966905Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e3c/r3tmp/yandexxNytCy.tmp 2025-04-04T17:13:06.967003Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:07.027002Z INFO: TTestServer started on Port 13374 GrpcPort 12058 TClient is connected to server localhost:13374 PQClient connected to localhost:12058 === TenantModeEnabled() = 0 === Init PQ - start server on port 12058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:07.398415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:07.398592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.398766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:13:07.399159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:13:07.399184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.399683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:07.399767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:13:07.399877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.399900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:13:07.399915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:13:07.399924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-04T17:13:07.400528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.400548Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:13:07.400560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:13:07.400843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.400868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.400894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:07.400911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:07.404565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:13:07.404766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:07.404778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:13:07.404793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:07.404973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:13:07.405066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:13:07.406968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786787449, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:07.407060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786787449 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:13:07.407086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:07.407280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:13:07.407308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:07.407474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:07.407516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:13:07.407913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 
2025-04-04T17:13:07.407939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:13:07.408082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:13:07.408107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489507219061022491:2249], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:13:07.408147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:07.408166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:13:07.408227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:13:07.408237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:07.408253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:13:07.408261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:07.408278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:13:07.408307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:07.408320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:13:07.408332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-04T17:13:07.408361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T17:13:07.408372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-04-04T17:13:07.408380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-04T17:13:07.411364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-04T17:13:07.411468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-04-04T17:13:07.411481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemes ... 
rpc_core::ChannelArgTypeTraits, void>::VTable()::'lambda'(void*)::__invoke(void*) /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.h:141:9 #3 0x19396f79 in Pointer /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.cc:50:10 #4 0x19396f79 in __alt /-S/contrib/libs/cxxsupp/libcxx/include/variant:666:9 #5 0x19396f79 in __construct_alt<2UL, grpc_core::ChannelArgs::Pointer, const grpc_core::ChannelArgs::Pointer &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:807:40 #6 0x19396f79 in operator(), const std::__y1::__variant_detail::__alt<2UL, grpc_core::ChannelArgs::Pointer> &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:819:13 #7 0x19396f79 in __invoke<(lambda at /-S/contrib/libs/cxxsupp/libcxx/include/variant:818:11), std::__y1::__variant_detail::__alt<2UL, grpc_core::ChannelArgs::Pointer> &, const std::__y1::__variant_detail::__alt<2UL, grpc_core::ChannelArgs::Pointer> &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #8 0x19396f79 in decltype(auto) std::__y1::__variant_detail::__visitation::__base::__dispatcher<2ul, 2ul>::__dispatch[abi:fe190000]>, grpc_core::ChannelArgs::Pointer>>::__generic_construct[abi:fe190000]>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&>(std::__y1::__variant_detail::__ctor>, grpc_core::ChannelArgs::Pointer>>&, std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&)::'lambda'(std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&, auto&&)&&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer>&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer> const&>(std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer>&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer> const&) /-S/contrib/libs/cxxsupp/libcxx/include/variant:537:14 #9 0x19392946 in __visit_alt_at<(lambda at /-S/contrib/libs/cxxsupp/libcxx/include/variant:818:11), std::__y1::__variant_detail::__ctor >, grpc_core::ChannelArgs::Pointer> > &, const std::__y1::__variant_detail::__copy_constructor >, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:499:12 #10 0x19392946 in __generic_construct >, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:816:7 #11 0x19392946 in __copy_constructor /-S/contrib/libs/cxxsupp/libcxx/include/variant:881:1 #12 0x19392946 in __assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:890:28 #13 0x19392946 in __move_assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:973:1 #14 0x19392946 in __copy_assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:999:1 #15 0x19392946 in __impl /-S/contrib/libs/cxxsupp/libcxx/include/variant:1015:25 #16 0x19392946 in variant /-S/contrib/libs/cxxsupp/libcxx/include/variant:1159:35 #17 0x19392946 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Rebalance(TBasicString>, std::__y1::variant>, 
grpc_core::ChannelArgs::Pointer>, std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:252:30 #18 0x19391ad5 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::AddKey(std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:265:14 #19 0x19391aa4 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::AddKey(std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:266:24 #20 0x19389a76 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Add(TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) const /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:36:16 #21 0x19389497 in grpc_core::ChannelArgs::Set(std::__y1::basic_string_view>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) const /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.cc:158:28 #22 0x19388b75 in grpc_core::ChannelArgs::Set(std::__y1::basic_string_view>, grpc_core::ChannelArgs::Pointer) const /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.cc:150:10 #23 0x193eeac1 in grpc_core::Channel::Create(char const*, grpc_core::ChannelArgs, grpc_channel_stack_type, grpc_transport*) /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:218:19 #24 0x19770aa1 in CreateChannel /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:323:10 #25 0x19770aa1 in grpc_channel_create /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:365:14 #26 0x19fc0e03 in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelWithInterceptors(TBasicString> const&, grpc::ChannelArguments const&, std::__y1::vector>, std::__y1::allocator>>>) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:55:13 #27 0x19fc0bdb in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelImpl(TBasicString> const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:40:12 #28 0x19fb9551 in grpc::CreateCustomChannel(TBasicString> const&, std::__y1::shared_ptr const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/create_channel.cc:50:25 #29 0x17f52277 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::Connect(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:824:23 #30 0x17c70f92 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::TDirectReadTestSetup(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:806:13 #31 0x17ca821b in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadRestartTablet::Execute_(NUnitTest::TTestContext&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1397:30 #32 0x17f322a7 in operator() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #33 0x17f322a7 in __invoke<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #34 0x17f322a7 in __call<(lambda at 
/-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #35 0x17f322a7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #36 0x17f322a7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311:10 #37 0x189ab095 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12 #38 0x189ab095 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10 #39 0x189ab095 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #40 0x1897bdb8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #41 0x17f31295 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #42 0x1897d685 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #43 0x189a59cc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #44 0x7fb47b0c3d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 7085174 byte(s) leaked in 1615 allocation(s). >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> DataShardTxOrder::ReadWriteReorder [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] Test command err: 2025-04-04T17:17:38.591469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:38.591875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:38.591920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f60/r3tmp/tmpfU3jou/pdisk_1.dat 2025-04-04T17:17:38.910052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:38.944700Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:38.982328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:38.982449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:38.993727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:39.073176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.367772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:39.610755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.610860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:837:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.610945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:39.615648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:39.769857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:841:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:39.840932Z node 1 :TX_PROXY ERROR: Actor# [1:901:2730] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:40.112940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wxtcr3a7q87818cmts9hj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM4MzdiMzQtNGIyOGI0MzgtZmZmYmQwMWYtMjVjZTY2MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:40.194005Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wxtx7avf9keb547s2w4y2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQxMzkyMGUtM2JjNmVjMjItYzFmNWZhMi1iYmI3YzlkOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:40.792545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wxv6p843agr21khb6rc22, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMwZjU1NTEtZDVlN2RjMDMtYzU1NGZjZmEtOTgwNWRmYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-04T17:17:41.135329Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wxvtta301j34bsy300hd9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDcxODBmYWMtYTdjNDJiOTMtZmJjZDA3NDItZDIzOTBiODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:41.209453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wxvww1xkg6shv3hr56d08, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMwZjU1NTEtZDVlN2RjMDMtYzU1NGZjZmEtOTgwNWRmYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:41.284924Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0wxvz6ar3zx9w36pnyrz8n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMwZjU1NTEtZDVlN2RjMDMtYzU1NGZjZmEtOTgwNWRmYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:41.337034Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWMwZjU1NTEtZDVlN2RjMDMtYzU1NGZjZmEtOTgwNWRmYWM=, ActorId: [1:964:2780], ActorState: ExecuteState, TraceId: 01jr0wxw17cq6fdj43rztrs1w8, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T17:17:41.348355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0wxw17cq6fdj43rztrs1w8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMwZjU1NTEtZDVlN2RjMDMtYzU1NGZjZmEtOTgwNWRmYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:44.216397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:44.216476Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:44.216634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f60/r3tmp/tmpsJCZWR/pdisk_1.dat 2025-04-04T17:17:44.427975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:44.452403Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:44.488080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:44.488208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:44.499249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:44.578344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:44.846679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:45.097557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:829:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:45.097664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:839:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:45.097726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:45.101833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:45.251895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:843:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:45.287618Z node 2 :TX_PROXY ERROR: Actor# [2:903:2730] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:45.347589Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wxzr8ab2g458w3axw9ztq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjg1YTg5OWYtODU5NzNiYTgtMTcyOTA2OTktMzZlNDYzM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:45.431287Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wy00kcvzdx2c71w7d1wqe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzc0MGQ5MWEtNDM4ZGI3MWUtODc0ZDgxNi0xNzI3NjdmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2025-04-04T17:17:45.904059Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wy08v47jhy4g6chc30hjk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzQ3ZDYwOTItZjA3Njk3NmQtMzIwOTkwOWItMmM0NTFlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-04T17:17:46.259671Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wy0j31v5xqzbe51na2zdk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODU0NmRkMzAtOTFiNDY5MjItNGI4YWM4ZTctNjY4NDBmZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2025-04-04T17:17:46.364112Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wy0x2a7bdh0evbbybb4t9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODU0NmRkMzAtOTFiNDY5MjItNGI4YWM4ZTctNjY4NDBmZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2025-04-04T17:17:46.753882Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0wy19vb0x59qe3ak7qnbk7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjYzZTZlYjItMThiMDFkYTgtZTY0M2MzMTctMjZmYWRmYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2025-04-04T17:17:46.829566Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0wy1cf4vg9xkqd3fh2exmn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzQ3ZDYwOTItZjA3Njk3NmQtMzIwOTkwOWItMmM0NTFlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2025-04-04T17:17:46.900136Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0wy1eg55k2a5fpnxa22h1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzQ3ZDYwOTItZjA3Njk3NmQtMzIwOTkwOWItMmM0NTFlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2025-04-04T17:17:46.985862Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzQ3ZDYwOTItZjA3Njk3NmQtMzIwOTkwOWItMmM0NTFlYjI=, ActorId: [2:966:2780], ActorState: ExecuteState, TraceId: 01jr0wy1gqbxwj74s6vxqkhzhd, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T17:17:46.997147Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jr0wy1gqbxwj74s6vxqkhzhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzQ3ZDYwOTItZjA3Njk3NmQtMzIwOTkwOWItMmM0NTFlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2025-04-04T17:17:39.839468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:39.839960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:39.840028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f57/r3tmp/tmpbUcNZ7/pdisk_1.dat 2025-04-04T17:17:40.174779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:40.201475Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:40.243794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:40.243951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:40.255418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:40.338054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:40.670873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:40.929988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:924:2735], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:40.930101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:935:2740], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:40.930179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:40.934904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:41.088456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:938:2743], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:41.165370Z node 1 :TX_PROXY ERROR: Actor# [1:999:2785] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:41.440180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wxvp05ga48b4yedt1shma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNhNTY5YWUtMmRlYzJjOWMtMTE2MjE3YTktNTkxODZlMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:41.538747Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wxw74eqpds99fkbjxmr50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkzMGVjYzMtODE1NjI1MzgtMzFjYjA4NzktMTU5Y2FhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:41.858201Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wxwa01kad6c98c299pw7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThjYjQwZWItODcwYjYwMzUtZWRiYTg0MzctODg4MTNjNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-04-04T17:17:41.970046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wxwkj5yq462djg02ywnfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThjYjQwZWItODcwYjYwMzUtZWRiYTg0MzctODg4MTNjNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-04-04T17:17:42.047549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wxwq3b312c3c1hap3a9cg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTgyYjYxYzgtN2NmMDNlNy1kYzFjMWVkNy02ZWFmNjY5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-04-04T17:17:42.049266Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1174:2841] TxId: 281474976715665. Ctx: { TraceId: 01jr0wxwq3b312c3c1hap3a9cg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTgyYjYxYzgtN2NmMDNlNy1kYzFjMWVkNy02ZWFmNjY5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2025-04-04T17:17:42.057455Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTgyYjYxYzgtN2NmMDNlNy1kYzFjMWVkNy02ZWFmNjY5NA==, ActorId: [1:1069:2841], ActorState: ExecuteState, TraceId: 01jr0wxwq3b312c3c1hap3a9cg, Create QueryResponse for error on request, msg: 2025-04-04T17:17:42.059183Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OThjYjQwZWItODcwYjYwMzUtZWRiYTg0MzctODg4MTNjNjk=, ActorId: [1:1071:2843], ActorState: ExecuteState, TraceId: 01jr0wxwkj5yq462djg02ywnfc, Create QueryResponse for error on request, msg: 2025-04-04T17:17:42.060348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. 
Ctx: { TraceId: 01jr0wxwq3b312c3c1hap3a9cg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTgyYjYxYzgtN2NmMDNlNy1kYzFjMWVkNy02ZWFmNjY5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:42.072310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0wxwkj5yq462djg02ywnfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OThjYjQwZWItODcwYjYwMzUtZWRiYTg0MzctODg4MTNjNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:42.380746Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0wxwz00j9tpqbfkr8yxrm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRkYzhiZC05MzMyYTRiYi1hNzc4MTM3ZS1lZmMxYmIz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-04-04T17:17:45.231623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:45.231710Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:45.231879Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f57/r3tmp/tmp83fjj1/pdisk_1.dat 2025-04-04T17:17:45.478566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:45.504257Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:45.539707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:45.539821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:45.551080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:45.631035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:45.884091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:46.142046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:831:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:46.142141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:842:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:46.142210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:46.146639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:46.299661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:845:2691], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:46.336122Z node 2 :TX_PROXY ERROR: Actor# [2:905:2732] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:46.404525Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wy0rwcgsj184sshs6g4fh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjRiZGRkYmYtZGYzNzAwZjAtZTUwZDQyZGEtN2U0NDI0ODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:46.484924Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wy11n87cwep4k4ggddz31, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzMyNDUwMDAtMzRiMzljZWUtMTE3OGE4NzUtZmU3OWViYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2025-04-04T17:17:47.019502Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:1002:2811]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-04-04T17:17:47.019933Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTJjYTViYWQtODhmYWQ4ODAtNmFiZWI2YTgtMzViM2JlYzY=, ActorId: [2:959:2773], ActorState: ExecuteState, TraceId: 01jr0wy144ezvnmth5xdpsxrs8, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD]
Test command err:
2025-04-04T17:17:46.908357Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:46.971842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:46.971910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:46.977010Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:46.977388Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:46.977629Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:46.987826Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:47.019315Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:47.019427Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:47.020784Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:47.020839Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:47.020876Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:47.021210Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:47.021503Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:47.021601Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:47.092038Z node 1
:TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:47.129479Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:47.129625Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:47.129717Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:47.129752Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:47.129777Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:47.129804Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.129923Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.129963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.130210Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:47.130325Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:47.130430Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:47.130481Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:47.130519Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:47.130549Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:47.130593Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:47.130631Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:47.130667Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.130734Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.130761Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.130808Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:47.132686Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:47.132723Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:47.132782Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:47.132901Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:47.132934Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:47.132971Z node 1 :TX_DATASHARD DEBUG: Prepared scheme 
transaction txId 1 at tablet 9437184 2025-04-04T17:17:47.133006Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:47.133039Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:47.133085Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:47.133121Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:47.133365Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:47.133397Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:47.133435Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:47.133460Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:47.133498Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:47.133525Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:47.133558Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:47.133586Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:47.133622Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:47.147194Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:47.147278Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:47.147320Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:47.147361Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:47.147424Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:47.147960Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.148012Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.148054Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:47.148203Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:47.148233Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:47.148329Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:47.148369Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:47.148404Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:47.148438Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:47.152371Z node 1 :TX_DATASHARD DEBUG: Planned 
transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:47.152436Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.152629Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.152671Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.152714Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:47.152752Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:47.152785Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:47.152826Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:47.152863Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:47.152904Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:47.152938Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:47.152978Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:47.153021Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:47.153194Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:47.153232Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:47.153273Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:47.153303Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:47.153329Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:47.153376Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:47.153399Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:47.153434Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:47.153470Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:47.153522Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:47.153558Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:47.153591Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:47.153634Z node 1 :TX_DATASH ... 
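
The TX_DATASHARD traces above walk a single operation through a chain of named execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails, BuildAndWaitDependencies, ...), each returning a status such as Executed, DelayComplete, or "not ready to execute". A minimal C++ sketch of that pattern follows; all type and unit names here are hypothetical illustrations, not YDB's actual classes:

    // Schematic execution-unit pipeline, mirroring the trace above.
    // NOT the real datashard code; names are invented for illustration.
    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, NotReady, DelayComplete };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;  // one step of the per-operation plan
    };

    int main() {
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},  // blocks until a plan step arrives
        };
        std::vector<std::string> delayed;  // units finished later, in a completion pass
        for (auto& unit : plan) {
            EStatus st = unit.Execute();
            if (st == EStatus::NotReady) {
                std::cout << "operation parked on unit " << unit.Name << "\n";
                break;  // re-enters the pipeline when the awaited event arrives
            }
            if (st == EStatus::DelayComplete)
                delayed.push_back(unit.Name);
        }
        for (auto& name : delayed)
            std::cout << "complete execution on unit " << name << "\n";
    }

Under this reading, an operation that hits a not-ready unit (here WaitForPlan) parks until the awaited event arrives, and DelayComplete units are finished in a later pass, which would match the "Complete execution for [...] on unit StoreSchemeTx / FinishPropose" lines in the trace.
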
WaitInRS 2025-04-04T17:17:47.943666Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-04T17:17:47.943692Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2025-04-04T17:17:47.943716Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2025-04-04T17:17:47.943736Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2025-04-04T17:17:47.944203Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2025-04-04T17:17:47.944289Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:47.944357Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-04T17:17:47.944384Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2025-04-04T17:17:47.944410Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2025-04-04T17:17:47.944433Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2025-04-04T17:17:47.944609Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2025-04-04T17:17:47.944671Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2025-04-04T17:17:47.944712Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2025-04-04T17:17:47.944743Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2025-04-04T17:17:47.944786Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-04-04T17:17:47.944813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2025-04-04T17:17:47.944842Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437185 has finished 2025-04-04T17:17:47.944881Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:47.944911Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:17:47.944945Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-04T17:17:47.944981Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-04T17:17:47.945133Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:232:2225], Recipient [1:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.945167Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.945210Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:47.945242Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:47.945284Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:47.945319Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in 
PlanQueue unit at 9437184 2025-04-04T17:17:47.945350Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2025-04-04T17:17:47.945383Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.945413Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:47.945450Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:47.945473Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:47.946195Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2025-04-04T17:17:47.946235Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.946281Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:47.946303Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-04T17:17:47.946326Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2025-04-04T17:17:47.946368Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.946409Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-04T17:17:47.946431Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:47.946462Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:47.946522Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically complete end at 9437184 2025-04-04T17:17:47.946611Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically incomplete end at 9437184 2025-04-04T17:17:47.946642Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437184 2025-04-04T17:17:47.946676Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.946696Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:47.946718Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2025-04-04T17:17:47.946741Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2025-04-04T17:17:47.946802Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.946826Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2025-04-04T17:17:47.946848Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2025-04-04T17:17:47.946885Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2025-04-04T17:17:47.946912Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.946931Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2025-04-04T17:17:47.946951Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2025-04-04T17:17:47.946972Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2025-04-04T17:17:47.946998Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.947029Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2025-04-04T17:17:47.947066Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2025-04-04T17:17:47.947091Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2025-04-04T17:17:47.947113Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.947133Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:17:47.947154Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:17:47.947172Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2025-04-04T17:17:47.947598Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2025-04-04T17:17:47.947651Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:47.947696Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.947721Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:17:47.947746Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2025-04-04T17:17:47.947767Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.947906Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2025-04-04T17:17:47.947930Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2025-04-04T17:17:47.947969Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2025-04-04T17:17:47.948007Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2025-04-04T17:17:47.948041Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-04-04T17:17:47.948061Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2025-04-04T17:17:47.948085Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2025-04-04T17:17:47.948115Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:47.948157Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:47.948184Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:47.948207Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:47.960654Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2025-04-04T17:17:47.960721Z node 1 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-04T17:17:47.960783Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.960828Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.960903Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:47.960958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.961229Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2025-04-04T17:17:47.961264Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2025-04-04T17:17:47.961303Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:17:47.961342Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2025-04-04T17:17:47.961387Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:47.961418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185
>> DataShardTxOrder::ZigZag_oo8_dirty [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD]
Test command err:
2025-04-04T17:17:42.946132Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:43.013619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:43.013664Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:43.019848Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:43.020334Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:43.020627Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:43.029980Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:43.065503Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:43.065601Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:43.067301Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:43.067375Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:43.067430Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:43.067842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:43.068145Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:43.068218Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:43.124597Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:43.163636Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:43.163806Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration
request in state WaitScheme: missing processing params 2025-04-04T17:17:43.163933Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:43.163974Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:43.164008Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:43.164044Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:43.164192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.164242Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.164527Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:43.164627Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:43.164751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:43.164802Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:43.164840Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:43.164879Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:43.164911Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:43.164953Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:43.164995Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:43.165089Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.165122Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.165170Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:43.167695Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:43.167750Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:43.167827Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:43.167990Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:43.168038Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:43.168083Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:43.168122Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.168158Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on 
unit CheckSchemeTx 2025-04-04T17:17:43.168193Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:43.168223Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:43.168509Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:43.168547Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:43.168594Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:43.168632Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:43.168680Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:43.168710Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:43.168741Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:43.168785Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:43.168810Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:43.180842Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:43.180921Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:43.180963Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:43.181005Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:43.181067Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:43.181574Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.181628Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.181690Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:43.181842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:43.181876Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:43.181985Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:43.182048Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.182089Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:43.182126Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:43.186066Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:43.186139Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
9437184 2025-04-04T17:17:43.186351Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.186395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.186443Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:43.186483Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:43.186521Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:43.186565Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:43.186618Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:43.186660Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.186695Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:43.186737Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:43.186788Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:43.187029Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:43.187076Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.187101Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:43.187125Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:43.187150Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:43.187219Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.187244Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:43.187282Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:43.187328Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:43.187399Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:43.187436Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:43.187473Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:43.187518Z node 1 :TX_DATA ... 
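
The PlanQueue lines above ("Planned transaction txId 1 at step 1000001", "Found ready operation [1000005:12] in PlanQueue unit") suggest that planned transactions are dispatched in (step, txId) order once their plan step has been reached. A schematic sketch of such a queue, under that assumption and with hypothetical names:

    // Toy plan queue ordered by (step, txId); illustrative only.
    #include <cstdint>
    #include <iostream>
    #include <optional>
    #include <set>
    #include <utility>

    struct TPlanQueue {
        std::set<std::pair<uint64_t, uint64_t>> Planned;  // ordered by (step, txId)

        void Plan(uint64_t step, uint64_t txId) { Planned.insert({step, txId}); }

        // Mirrors "Found ready operation [step:txId] in PlanQueue unit".
        std::optional<std::pair<uint64_t, uint64_t>> Next(uint64_t completeUpToStep) {
            if (Planned.empty() || Planned.begin()->first > completeUpToStep)
                return std::nullopt;  // "Unit PlanQueue has no ready operations"
            auto op = *Planned.begin();
            Planned.erase(Planned.begin());
            return op;
        }
    };

    int main() {
        TPlanQueue q;
        q.Plan(1000005, 12);
        q.Plan(1000001, 1);
        // Executes [1000001:1] before [1000005:12], regardless of arrival order.
        while (auto op = q.Next(/*completeUpToStep=*/1000005))
            std::cout << "execute [" << op->first << ":" << op->second << "]\n";
    }
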
BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:47.551950Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:47.552266Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-04T17:17:47.552308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.552354Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-04T17:17:47.552462Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-04T17:17:47.552487Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.552506Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-04T17:17:47.552601Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-04T17:17:47.552629Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.552651Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-04T17:17:47.552710Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-04T17:17:47.552739Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.552765Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-04T17:17:47.552839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-04T17:17:47.552861Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.552881Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-04T17:17:47.552924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-04T17:17:47.552945Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.552962Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-04T17:17:47.553050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:47.553078Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.553110Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-04T17:17:47.553231Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-04T17:17:47.553257Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.553293Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-04T17:17:47.553366Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-04T17:17:47.553392Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.553412Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-04T17:17:47.553466Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-04T17:17:47.553488Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.553517Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-04T17:17:47.553618Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-04T17:17:47.553645Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.553665Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-04T17:17:47.553726Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-04T17:17:47.553748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.553771Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-04T17:17:47.553833Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-04T17:17:47.553857Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.553877Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-04T17:17:47.553936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.553962Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.554014Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.554059Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-04T17:17:47.554098Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.554216Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.554255Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.554286Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.554322Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-04T17:17:47.554364Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.554461Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.554498Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.554530Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.554595Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:47.554624Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.554712Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.554734Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.554763Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.554800Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:47.554839Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.555014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-04T17:17:47.555044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.555069Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-04T17:17:47.555169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-04T17:17:47.555194Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.555215Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-04T17:17:47.555285Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:47.555309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.555331Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-04T17:17:47.555403Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:47.555431Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.555451Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD]
Test command err:
2025-04-04T17:17:43.108070Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:43.174844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:43.174880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:43.179850Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:43.180328Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:43.180566Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:43.189333Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:43.226056Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:43.226140Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:43.227618Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:43.227706Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:43.227748Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:43.228064Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:43.228330Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:43.228402Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:43.289570Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:43.321659Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:43.321822Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing
params 2025-04-04T17:17:43.321928Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:43.321970Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:43.322020Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:43.322052Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:43.322181Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.322222Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.322542Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:43.322642Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:43.322751Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:43.322792Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:43.322827Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:43.322856Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:43.322884Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:43.322926Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:43.322961Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:43.323043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.323073Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.323118Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:43.329887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:43.329962Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:43.330032Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:43.330181Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:43.330220Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:43.330279Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:43.330323Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.330356Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:43.330388Z 
node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:43.330418Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:43.330698Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:43.330734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:43.330761Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:43.330788Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:43.330831Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:43.330869Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:43.330899Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:43.330938Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:43.330960Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:43.343078Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:43.343187Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:43.343225Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:43.343264Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:43.343317Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:43.343760Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.343802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.343838Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:43.343975Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:43.344007Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:43.344120Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:43.344163Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.344198Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:43.344231Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:43.348048Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:43.348120Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:43.348313Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.348350Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.348388Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:43.348422Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:43.348453Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:43.348489Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:43.348523Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:43.348558Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.348590Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:43.348628Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:43.348665Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:43.348841Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:43.348873Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.348919Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:43.348941Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:43.348962Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:43.349018Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.349041Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:43.349072Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:43.349109Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:43.349166Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:43.349198Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:43.349229Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:43.349264Z node 1 :TX_DATA ... 
BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:47.713194Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:47.713506Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-04-04T17:17:47.713547Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.713579Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-04-04T17:17:47.713689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-04-04T17:17:47.713723Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.713747Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-04-04T17:17:47.713853Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-04T17:17:47.713878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.713908Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-04T17:17:47.713966Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-04T17:17:47.713994Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714021Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-04T17:17:47.714083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-04-04T17:17:47.714099Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714112Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-04-04T17:17:47.714141Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-04T17:17:47.714162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714182Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-04T17:17:47.714290Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:47.714320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714347Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-04-04T17:17:47.714404Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-04-04T17:17:47.714469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714494Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-04-04T17:17:47.714548Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-04T17:17:47.714563Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714591Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-04T17:17:47.714630Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-04T17:17:47.714644Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714661Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-04T17:17:47.714771Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-04T17:17:47.714802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714825Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-04T17:17:47.714879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-04T17:17:47.714903Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714920Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-04T17:17:47.714967Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-04T17:17:47.714981Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.714998Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-04T17:17:47.715043Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.715064Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.715106Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.715141Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-04T17:17:47.715170Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.715265Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.715283Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.715315Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.715343Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-04T17:17:47.715362Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.715435Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.715454Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.715474Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.715512Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:47.715529Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.715595Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:47.715610Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-04-04T17:17:47.715629Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 2 ms, propose latency: 3 ms 2025-04-04T17:17:47.715649Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:47.715663Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:47.715827Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-04-04T17:17:47.715851Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.715868Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-04-04T17:17:47.715944Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-04T17:17:47.715960Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.715973Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-04T17:17:47.716025Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:47.716044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.716058Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-04T17:17:47.716103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:47.716122Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.716135Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2025-04-04T17:17:42.242165Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:42.332752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:42.332805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:42.341009Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:42.341499Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:42.341823Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:42.353219Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:42.394523Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:42.394653Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:42.396298Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:42.396366Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:42.396413Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:42.396839Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:42.397139Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:42.397211Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:42.466167Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:42.496066Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:42.496224Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast 
registration request in state WaitScheme: missing processing params 2025-04-04T17:17:42.496331Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:42.496377Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:42.496408Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:42.496444Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:42.496599Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.496643Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.496939Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:42.497046Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:42.497162Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:42.497215Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:42.497254Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:42.497289Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:42.497324Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:42.497375Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:42.497422Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:42.497698Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.497730Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.497776Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:42.500122Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:42.500177Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:42.500263Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:42.500416Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:42.500457Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:42.500508Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:42.500548Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:42.500581Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 
executing on unit CheckSchemeTx 2025-04-04T17:17:42.500616Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:42.500646Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:42.500909Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:42.500957Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:42.500995Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:42.501042Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:42.501092Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:42.501120Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:42.501154Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:42.501203Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:42.501228Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:42.519061Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:42.519148Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:42.519193Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:42.519235Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:42.519296Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:42.519796Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.519845Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:42.519904Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:42.520049Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:42.520082Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:42.520197Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:42.520243Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:42.520287Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:42.520320Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:42.524512Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:42.524580Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 9437184 2025-04-04T17:17:42.524779Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.524821Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:42.524869Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:42.524909Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:42.524940Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:42.524988Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:42.525022Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:42.525061Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:42.525096Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:42.525151Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:42.525202Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:42.525416Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:42.525455Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:42.525478Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:42.525503Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:42.525528Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:42.525588Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:42.525616Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:42.525648Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:42.525687Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:42.525749Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:42.525784Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:42.525822Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:42.525871Z node 1 :TX_DATA ... 
1000005:149] at 9437186 on unit CompleteOperation 2025-04-04T17:17:47.829182Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:47.829198Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:47.829215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:47.829409Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-04-04T17:17:47.829430Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.829451Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-04-04T17:17:47.829493Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:232:2225], Recipient [1:454:2396]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-04T17:17:47.829534Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:17:47.829564Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-04-04T17:17:47.829603Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-04-04T17:17:47.829632Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-04-04T17:17:47.829670Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-04T17:17:47.829746Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-04-04T17:17:47.829762Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.829777Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-04-04T17:17:47.829825Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-04-04T17:17:47.829839Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.829855Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-04-04T17:17:47.829898Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-04-04T17:17:47.829925Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.829947Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-04-04T17:17:47.830006Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 2146435072, Sender [1:454:2396], Recipient [1:454:2396]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.830024Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.830046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-04-04T17:17:47.830069Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:17:47.830091Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-04-04T17:17:47.830109Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-04-04T17:17:47.830130Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-04T17:17:47.830150Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-04-04T17:17:47.830174Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-04-04T17:17:47.830201Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-04-04T17:17:47.830546Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-04-04T17:17:47.830607Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:47.830640Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-04-04T17:17:47.830675Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-04-04T17:17:47.830698Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-04-04T17:17:47.830717Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-04-04T17:17:47.830853Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-04-04T17:17:47.830873Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-04-04T17:17:47.830899Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-04-04T17:17:47.830923Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-04-04T17:17:47.830950Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-04-04T17:17:47.830970Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-04-04T17:17:47.830991Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-04-04T17:17:47.831020Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:47.831044Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-04T17:17:47.831070Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-04T17:17:47.831114Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no 
ready operations at 9437186 2025-04-04T17:17:47.831375Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-04-04T17:17:47.831411Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.831444Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-04-04T17:17:47.831511Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:232:2225], Recipient [1:343:2311]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-04-04T17:17:47.831535Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.831556Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-04-04T17:17:47.831658Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-04-04T17:17:47.831689Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.831710Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-04-04T17:17:47.831773Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-04-04T17:17:47.831795Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.831817Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-04-04T17:17:47.831918Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-04-04T17:17:47.831945Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.831974Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-04-04T17:17:47.832077Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-04-04T17:17:47.832107Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.832131Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-04-04T17:17:47.832203Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-04-04T17:17:47.832231Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.832252Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-04-04T17:17:47.846616Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:17:47.846669Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-04-04T17:17:47.846721Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:17:47.846803Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:47.846843Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:17:47.847074Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:454:2396], Recipient [1:232:2225]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-04-04T17:17:47.847110Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:47.847141Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-04-04T17:17:42.807362Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:42.870535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:42.870589Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:42.876052Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:42.876498Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:42.876822Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:42.887443Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:42.920677Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:42.920764Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:42.922299Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:42.922421Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:42.922478Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:42.922845Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:42.923151Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:42.923226Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:42.984663Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:43.023245Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:43.023399Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 
2025-04-04T17:17:43.023492Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:43.023528Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:43.023560Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:43.023590Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:43.023719Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.023758Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.024030Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:43.024115Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:43.024221Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:43.024269Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:43.024309Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:43.024340Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:43.024368Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:43.024407Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:43.024440Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:43.024512Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.024545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.024590Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:43.026997Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:43.027050Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:43.027118Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:43.027276Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:43.027321Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:43.027368Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:43.027405Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.027437Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:43.027470Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:43.027500Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:43.027768Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:43.027801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:43.027847Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:43.027880Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:43.027938Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:43.027967Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:43.028001Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:43.028044Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:43.028086Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:43.039744Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:43.039802Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:43.039831Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:43.039857Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:43.039910Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:43.040359Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.040416Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.040454Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:43.040610Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:43.040638Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:43.040756Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:43.040791Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.040834Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:43.040868Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:43.048878Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:43.048952Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:43.049164Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.049201Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.049242Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:43.049280Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:43.049315Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:43.049354Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:43.049388Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:43.049430Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.049461Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:43.049503Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:43.049547Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:43.049718Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:43.049753Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:43.049789Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:43.049814Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:43.049835Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:43.049897Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.049921Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:43.049950Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:43.049988Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:43.050045Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:43.050077Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:43.050113Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:43.050149Z node 1 :TX_DATA ... 
aitInRS 2025-04-04T17:17:48.247300Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:48.247321Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:17:48.247342Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:17:48.247362Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-04T17:17:48.247733Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-04T17:17:48.247787Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:48.247833Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:48.247880Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:17:48.247907Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-04T17:17:48.247931Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-04T17:17:48.248107Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-04T17:17:48.248136Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-04T17:17:48.248169Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-04T17:17:48.248208Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-04T17:17:48.248238Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:17:48.248259Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-04T17:17:48.248287Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-04T17:17:48.248326Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:48.248378Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:48.248441Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:48.248475Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:48.248643Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:341:2309], Recipient [2:341:2309]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:48.248682Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:48.248718Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-04T17:17:48.248748Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:48.248774Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:17:48.248802Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437185 2025-04-04T17:17:48.248828Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-04-04T17:17:48.248856Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.248878Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-04-04T17:17:48.248921Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-04-04T17:17:48.248950Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-04-04T17:17:48.249428Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-04-04T17:17:48.249469Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.249494Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-04-04T17:17:48.249518Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-04T17:17:48.249543Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-04-04T17:17:48.249574Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.249595Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-04T17:17:48.249616Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:48.249637Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-04-04T17:17:48.249692Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-04-04T17:17:48.249724Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-04-04T17:17:48.249749Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-04-04T17:17:48.249773Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.249791Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:48.249807Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-04-04T17:17:48.249835Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-04-04T17:17:48.249867Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.249881Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-04-04T17:17:48.249896Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-04-04T17:17:48.249910Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-04-04T17:17:48.249926Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.249941Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-04-04T17:17:48.249955Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-04-04T17:17:48.249979Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-04-04T17:17:48.250003Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.250025Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-04-04T17:17:48.250052Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-04-04T17:17:48.250075Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-04-04T17:17:48.250097Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.250119Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-04T17:17:48.250139Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-04T17:17:48.250162Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-04T17:17:48.250457Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-04T17:17:48.250495Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:48.250522Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.250543Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-04T17:17:48.250588Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-04T17:17:48.250612Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-04T17:17:48.250769Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-04T17:17:48.250795Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-04T17:17:48.250820Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-04-04T17:17:48.250843Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-04T17:17:48.250861Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:17:48.250877Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-04T17:17:48.250909Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-04T17:17:48.250929Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:48.250948Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:17:48.250972Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-04T17:17:48.250995Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-04T17:17:48.263300Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-04T17:17:48.263352Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-04T17:17:48.263395Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:48.263426Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-04T17:17:48.263473Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:48.263506Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:48.263752Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-04T17:17:48.263778Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-04T17:17:48.263807Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:17:48.263859Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-04T17:17:48.263911Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:48.263942Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 2025-04-04T17:17:40.601586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:40.602090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:40.602147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f45/r3tmp/tmpfPRVaA/pdisk_1.dat 2025-04-04T17:17:40.962647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:41.003000Z node 1 :KQP_RESOURCE_MANAGER INFO: Updated table service config: ComputeActorsCount: 10000 ChannelBufferSize: 8388608 MkqlLightProgramMemoryLimit: 1048576 MkqlHeavyProgramMemoryLimit: 31457280 QueryMemoryLimit: 32212254720 PublishStatisticsIntervalSec: 2 MaxTotalChannelBuffersSize: 2147483648 MinChannelBufferSize: 2048 2025-04-04T17:17:41.003105Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:17:41.003159Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 7 2025-04-04T17:17:41.003287Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Updated table service config. 2025-04-04T17:17:41.003422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:41.041164Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:17:41.042031Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:17:41.042351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:41.042463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:41.053600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:41.131589Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:17:41.131646Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:17:41.131805Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-04-04T17:17:41.226680Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:17:41.226764Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:17:41.227150Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:17:41.227252Z node 1 :TX_PROXY 
DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:17:41.227437Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:17:41.227572Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:17:41.227641Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:17:41.227826Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:17:41.228812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:41.229438Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:17:41.229516Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:17:41.253923Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:41.254742Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:41.255072Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-04-04T17:17:41.255824Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:41.286923Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:41.287595Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:41.287690Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:41.288997Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:41.289055Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:41.289091Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:41.289403Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:41.289501Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:41.289571Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-04-04T17:17:41.300147Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:41.330334Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:41.330533Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:41.330660Z node 1 
:TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-04-04T17:17:41.330698Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:41.330728Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:41.330765Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:41.330929Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:41.330964Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:41.331180Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:41.331240Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:41.331296Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:41.331329Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:41.331369Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:41.331394Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:41.331432Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:41.331459Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:41.331512Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:41.331832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:41.331860Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:41.331890Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-04-04T17:17:41.331944Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:673:2574] 2025-04-04T17:17:41.331994Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:41.332074Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:41.332213Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:41.332258Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:41.332311Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:41.332353Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:41.332379Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:41.332404Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:17:41.332441Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:41.332646Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:41.332674Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:41.332710Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:41.332736Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:41.332787Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:41.332808Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:41.332846Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:41.332869Z node 1 :TX_DATASH ... rocessing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:17:48.340438Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037888 state Ready 2025-04-04T17:17:48.340493Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 --- resending captured proposals --- waiting for result 2025-04-04T17:17:48.341532Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553206, Sender [2:889:2719], Recipient [2:666:2570]: NKikimrTxDataShard.TEvKqpScan TxId: 281474976715662 ScanId: 2 LocalPathId: 2 TablePath: "/Root/table-1" SchemaVersion: 1 ColumnTags: 3 ColumnTypes: 2 Ranges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } Snapshot { Step: 2000 TxId: 281474976715661 } Generation: 1 ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC StatsMode: DQ_STATS_MODE_NONE ColumnTypeInfos { } LockNodeId: 0 2025-04-04T17:17:48.341652Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715662. Table '/Root/table-1' schema version changed at 72075186224037888 2025-04-04T17:17:48.341772Z node 2 :KQP_COMPUTE WARN: SelfId: [2:889:2719]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/table-1' scheme changed., code: 2028 , tablet id: 72075186224037888, actor_id: [2:666:2570] 2025-04-04T17:17:48.341818Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719]. Enqueue for resolve 72075186224037888 2025-04-04T17:17:48.341868Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:345;event=scanner_finished;tablet_id=72075186224037888;stop_shard=1; 2025-04-04T17:17:48.341920Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:92;event=stop_scanner;actor_id=NO_VALUE_OPTIONAL;message=;final_flag=1; 2025-04-04T17:17:48.342004Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719]. Sending TEvResolveKeySet update for table '/Root/table-1', range: [(Uint32 : NULL) ; ()), attempt #1 2025-04-04T17:17:48.342189Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719]. Received TEvResolveKeySetResult update for table '/Root/table-1' 2025-04-04T17:17:48.342229Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:889:2719]. Resolve request failed for table '/Root/table-1', ErrorCount# 1 2025-04-04T17:17:48.342346Z node 2 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:163 :TEvTerminateFromFetcher: [2:889:2719]/[2:887:2717] 2025-04-04T17:17:48.342428Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:887:2717], TxId: 281474976715662, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=. TraceId : 01jr0wy1x676xz5cf9vj6qfed4. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-04-04T17:17:48.342600Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2025-04-04T17:17:48.342691Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-04T17:17:48.344907Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T17:17:48.345069Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:99;event=TEvTerminateFromCompute;sender=[2:887:2717];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-04T17:17:48.345149Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:281;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-04-04T17:17:48.345337Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-04-04T17:17:48.345544Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:882:2691] TxId: 281474976715662. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:887:2717], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 152542 Tasks { TaskId: 1 CpuTimeUs: 150841 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 12 BuildCpuTimeUs: 150829 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mup5cka3jq" NodeId: 2 CreateTimeMs: 1743787067797 } MaxMemoryUsage: 1048576 } 2025-04-04T17:17:48.345768Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:887:2717] 2025-04-04T17:17:48.345864Z node 2 :KQP_EXECUTER INFO: ActorId: [2:882:2691] TxId: 281474976715662. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-04-04T17:17:48.345934Z node 2 :KQP_EXECUTER INFO: ActorId: [2:882:2691] TxId: 281474976715662. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:888:2718], task: 2 2025-04-04T17:17:48.346052Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:882:2691] TxId: 281474976715662. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:48.346144Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:888:2718], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=. TraceId : 01jr0wy1x676xz5cf9vj6qfed4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646735 2025-04-04T17:17:48.346219Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:888:2718], TxId: 281474976715662, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=. TraceId : 01jr0wy1x676xz5cf9vj6qfed4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Handle abort execution event from: [2:882:2691], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-04-04T17:17:48.346341Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-04-04T17:17:48.346431Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-04-04T17:17:48.349547Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T17:17:48.349776Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, ActorId: [2:856:2691], ActorState: ExecuteState, TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Create QueryResponse for error on request, msg: 2025-04-04T17:17:48.350591Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T17:17:48.350640Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-04-04T17:17:48.350733Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-04-04T17:17:48.350808Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-04T17:17:48.350848Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 0 ProcessProposeTransaction 2025-04-04T17:17:48.350933Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 0 reqId# [2:924:2750] SnapshotReq marker# P0 2025-04-04T17:17:48.351331Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2750] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-04-04T17:17:48.351437Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2025-04-04T17:17:48.351586Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:48.351634Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T17:17:48.351694Z node 2 :KQP_EXECUTER INFO: ActorId: [2:923:2691] TxId: 281474976715664. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:48.351789Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:923:2691] TxId: 281474976715664. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:48.351839Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:923:2691] TxId: 281474976715664. Ctx: { TraceId: 01jr0wy1x676xz5cf9vj6qfed4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:17:48.351899Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2750] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-04-04T17:17:48.352114Z node 2 :TX_PROXY DEBUG: Actor# [2:924:2750] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-04-04T17:17:48.352245Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:593:2518], selfId: [2:57:2104], source: [2:856:2691] 2025-04-04T17:17:48.352468Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:924:2750], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-04T17:17:48.353145Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NzllNzI1OWQtZDMyY2UwYmItYjUxZTEwNjAtMmI5MDA1Yjc=, workerId: [2:856:2691], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 352 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |91.0%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> TGroupMapperTest::MonteCarlo [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] Test command err: 2025-04-04T17:17:43.359571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:43.360011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:43.360063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f30/r3tmp/tmpgCCZuD/pdisk_1.dat 2025-04-04T17:17:43.651794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:43.685863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:43.723707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:43.723841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:43.735096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:43.816134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:43.851150Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:43.852119Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:43.852505Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:17:43.852754Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:43.862109Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:43.892994Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:43.893102Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:43.894690Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:43.894772Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:43.894824Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:43.895176Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:43.895295Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:43.895368Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:17:43.905947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:43.936513Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:43.936665Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:43.936754Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:17:43.936789Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:43.936827Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:43.936856Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:43.937022Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.937067Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:43.937307Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:43.937422Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:43.937549Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:43.937589Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:43.937639Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:43.937673Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:43.937700Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:43.937726Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:43.937761Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:43.937836Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.937867Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:43.937901Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:17:43.938313Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:17:43.938355Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:43.938452Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:43.938639Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:43.938681Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:43.938740Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:43.938776Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:43.938815Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:43.938861Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:17:43.938895Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:43.939136Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:43.939169Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:43.939196Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:43.939226Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:43.939272Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:43.939307Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:43.939338Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:43.939362Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:43.939400Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:43.940638Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:43.940679Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:43.951984Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:43.952052Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:43.952083Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:43.952118Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:17:43.952210Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:44.091416Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.091470Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.091507Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:17:44.091824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:17:44.091873Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:44.092001Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:44.092053Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:17:44.092091Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:17:44.092122Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:17:44.096199Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:17:44.096263Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:44.097021Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:44.097062Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:44.097111Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:4 ... se: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjdiY2U4OC1jNTc0ZmFmYy1kNzY5MmVlZC1lZTdhNGUxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:49.459938Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1142:2718] TxId: 281474976715676. Ctx: { TraceId: 01jr0wy3d2a3qnb3fydtf50eze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjdiY2U4OC1jNTc0ZmFmYy1kNzY5MmVlZC1lZTdhNGUxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:49.459961Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1142:2718] TxId: 281474976715676. Ctx: { TraceId: 01jr0wy3d2a3qnb3fydtf50eze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjdiY2U4OC1jNTc0ZmFmYy1kNzY5MmVlZC1lZTdhNGUxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:17:49.459983Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Resolved key sets: 0 2025-04-04T17:17:49.460388Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr0wy3d29afby8hrf71p48f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTQxMWQ0ZjYtZjczM2NmMTgtNmZlOTJmOWUtNjAzNmFhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:49.460416Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Ctx: { TraceId: 01jr0wy3d29afby8hrf71p48f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTQxMWQ0ZjYtZjczM2NmMTgtNmZlOTJmOWUtNjAzNmFhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T17:17:49.460438Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1145:2720] TxId: 281474976715677. Ctx: { TraceId: 01jr0wy3d29afby8hrf71p48f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTQxMWQ0ZjYtZjczM2NmMTgtNmZlOTJmOWUtNjAzNmFhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:49.460468Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1145:2720] TxId: 281474976715677. Ctx: { TraceId: 01jr0wy3d29afby8hrf71p48f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTQxMWQ0ZjYtZjczM2NmMTgtNmZlOTJmOWUtNjAzNmFhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:49.460497Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1145:2720] TxId: 281474976715677. Ctx: { TraceId: 01jr0wy3d29afby8hrf71p48f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTQxMWQ0ZjYtZjczM2NmMTgtNmZlOTJmOWUtNjAzNmFhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:17:49.460567Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDIyYWI5OWYtNzZkMmNmNGQtYjI5YTAxZGQtYTM4NGZkMmQ=, ActorId: [2:900:2735], ActorState: ExecuteState, TraceId: 01jr0wy3d6bd2j21k8a7b6hnm1, Create QueryResponse for error on request, msg: 2025-04-04T17:17:49.461131Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Resolved key sets: 0 2025-04-04T17:17:49.461159Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Resolved key sets: 0 2025-04-04T17:17:49.461500Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0wy3d4dskeyekepyk1ass0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjhmNjc0YjktOTkwZDBiMjQtZTljNDBkODEtMzYwZDRlZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:49.461524Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Ctx: { TraceId: 01jr0wy3d4dskeyekepyk1ass0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjhmNjc0YjktOTkwZDBiMjQtZTljNDBkODEtMzYwZDRlZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T17:17:49.461545Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1153:2723] TxId: 281474976715678. Ctx: { TraceId: 01jr0wy3d4dskeyekepyk1ass0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjhmNjc0YjktOTkwZDBiMjQtZTljNDBkODEtMzYwZDRlZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:49.461576Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1153:2723] TxId: 281474976715678. Ctx: { TraceId: 01jr0wy3d4dskeyekepyk1ass0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjhmNjc0YjktOTkwZDBiMjQtZTljNDBkODEtMzYwZDRlZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:49.461598Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1153:2723] TxId: 281474976715678. Ctx: { TraceId: 01jr0wy3d4dskeyekepyk1ass0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjhmNjc0YjktOTkwZDBiMjQtZTljNDBkODEtMzYwZDRlZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:17:49.461629Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jr0wy3d5f755x34jpwyt4f29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzZWU1M2UtNmZkOWZjZjctZjRjYzUyNDMtZjhmMzcxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:49.461645Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Ctx: { TraceId: 01jr0wy3d5f755x34jpwyt4f29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzZWU1M2UtNmZkOWZjZjctZjRjYzUyNDMtZjhmMzcxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T17:17:49.461668Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1156:2726] TxId: 281474976715679. Ctx: { TraceId: 01jr0wy3d5f755x34jpwyt4f29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzZWU1M2UtNmZkOWZjZjctZjRjYzUyNDMtZjhmMzcxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:49.461702Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1156:2726] TxId: 281474976715679. Ctx: { TraceId: 01jr0wy3d5f755x34jpwyt4f29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzZWU1M2UtNmZkOWZjZjctZjRjYzUyNDMtZjhmMzcxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:49.461724Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1156:2726] TxId: 281474976715679. Ctx: { TraceId: 01jr0wy3d5f755x34jpwyt4f29, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzkzZWU1M2UtNmZkOWZjZjctZjRjYzUyNDMtZjhmMzcxNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:17:49.461743Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Resolved key sets: 0 2025-04-04T17:17:49.462090Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0wy3d6erkfpe6atv59040d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTNkMTRmZi1iYTNjZGQ1OC02MzQ4ZWVhZS00YjE2ZjczNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:49.462118Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Ctx: { TraceId: 01jr0wy3d6erkfpe6atv59040d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTNkMTRmZi1iYTNjZGQ1OC02MzQ4ZWVhZS00YjE2ZjczNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T17:17:49.462138Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1158:2729] TxId: 281474976715680. Ctx: { TraceId: 01jr0wy3d6erkfpe6atv59040d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTNkMTRmZi1iYTNjZGQ1OC02MzQ4ZWVhZS00YjE2ZjczNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:49.462179Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2729] TxId: 281474976715680. 
Ctx: { TraceId: 01jr0wy3d6erkfpe6atv59040d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTNkMTRmZi1iYTNjZGQ1OC02MzQ4ZWVhZS00YjE2ZjczNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:49.462204Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1158:2729] TxId: 281474976715680. Ctx: { TraceId: 01jr0wy3d6erkfpe6atv59040d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTNkMTRmZi1iYTNjZGQ1OC02MzQ4ZWVhZS00YjE2ZjczNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-04-04T17:17:49.462411Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Resolved key sets: 0 2025-04-04T17:17:49.462918Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0wy3d6bd2j21k8a7b6hnm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDIyYWI5OWYtNzZkMmNmNGQtYjI5YTAxZGQtYTM4NGZkMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:49.462962Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Ctx: { TraceId: 01jr0wy3d6bd2j21k8a7b6hnm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDIyYWI5OWYtNzZkMmNmNGQtYjI5YTAxZGQtYTM4NGZkMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-04-04T17:17:49.463012Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1165:2735] TxId: 281474976715681. Ctx: { TraceId: 01jr0wy3d6bd2j21k8a7b6hnm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDIyYWI5OWYtNzZkMmNmNGQtYjI5YTAxZGQtYTM4NGZkMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-04-04T17:17:49.463047Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1165:2735] TxId: 281474976715681. Ctx: { TraceId: 01jr0wy3d6bd2j21k8a7b6hnm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDIyYWI5OWYtNzZkMmNmNGQtYjI5YTAxZGQtYTM4NGZkMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:49.463086Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1165:2735] TxId: 281474976715681. Ctx: { TraceId: 01jr0wy3d6bd2j21k8a7b6hnm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDIyYWI5OWYtNzZkMmNmNGQtYjI5YTAxZGQtYTM4NGZkMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> KqpLimits::DatashardReplySize [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] >> Donor::ConsistentWritesWhenSwitchingToDonorMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-04-04T17:17:47.462645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:47.463127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:47.463221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eea/r3tmp/tmpDPK5I9/pdisk_1.dat 2025-04-04T17:17:47.871709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:47.916698Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:47.956263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:47.956419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:47.967842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:48.047206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:48.080677Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:48.081470Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:48.082034Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:17:48.082322Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:48.092171Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:48.119817Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:48.119912Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:48.121227Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:48.121282Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:48.121338Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:48.121688Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:48.121817Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:48.121951Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:17:48.132616Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:48.155119Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:48.155295Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:48.155450Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:17:48.155484Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:48.155533Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:48.155571Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:48.155756Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:48.155797Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:48.156015Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:48.156107Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:48.156274Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:48.156325Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:48.156369Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:48.156395Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:48.156437Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:48.156466Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:48.156500Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:48.156577Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:48.156628Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:48.156695Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:17:48.157025Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:17:48.157059Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:48.157132Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:48.157307Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:48.157381Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:48.157460Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:48.157501Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:48.157539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:48.157573Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:17:48.157604Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:48.157826Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:48.157854Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:48.157877Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:48.157900Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:48.157953Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:48.157989Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:48.158028Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:48.158057Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:48.158076Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:48.159423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:48.159507Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:48.170138Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:48.170204Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:48.170232Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:48.170304Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:17:48.170408Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:48.308625Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:48.308672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:48.308738Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:17:48.308996Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:17:48.309021Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:48.309105Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:48.309148Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:17:48.309215Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:17:48.309267Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:17:48.313218Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:17:48.313285Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:48.313993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:48.314030Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:48.314079Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:4 ... de 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:49.755884Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:664:2569]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-04T17:17:49.755999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:753:2633]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-04T17:17:49.766918Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-04T17:17:49.767074Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:664:2569], Recipient [1:753:2633]: {TEvReadSet step# 3046 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-04T17:17:49.767118Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:49.767172Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-04-04T17:17:49.767352Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-04T17:17:49.767461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:753:2633], Recipient [1:664:2569]: {TEvReadSet step# 3046 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-04T17:17:49.767491Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:17:49.767518Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-04-04T17:17:50.314012Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wy4jm6g64j01x40abrg5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjM5NWMzYzItN2Y2YmE0ODEtZmI5Yjc3NTUtZTI3ZWVjZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:17:50.317547Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1087:2876], Recipient [1:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-04T17:17:50.317640Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:50.317708Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:50.317780Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:17:50.317816Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:50.317872Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:50.317909Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:50.317966Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-04T17:17:50.318028Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:17:50.318058Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:50.318083Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:50.318098Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:50.318234Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-04T17:17:50.318515Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:17:50.318568Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-04T17:17:50.318620Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1087:2876], 0} after executionsCount# 1 2025-04-04T17:17:50.318666Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1087:2876], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:50.318744Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1087:2876], 0} finished in read 2025-04-04T17:17:50.318842Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:17:50.318889Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:50.318917Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:50.318941Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:50.318980Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:17:50.319000Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:50.319037Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-04T17:17:50.319073Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:50.319154Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:50.319391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1089:2877], Recipient [1:753:2633]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-04-04T17:17:50.319450Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-04T17:17:50.319491Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-04-04T17:17:50.319533Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-04T17:17:50.319556Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-04-04T17:17:50.319592Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:50.319610Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:17:50.319635Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2025-04-04T17:17:50.319661Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-04T17:17:50.319689Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:50.319709Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-04-04T17:17:50.319742Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-04-04T17:17:50.319832Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-04-04T17:17:50.319981Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-04T17:17:50.320004Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-04-04T17:17:50.320034Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[1:1089:2877], 0} after executionsCount# 1 2025-04-04T17:17:50.320068Z node 1 :TX_DATASHARD TRACE: 72075186224037889 
read iterator# {[1:1089:2877], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:17:50.320103Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1089:2877], 0} finished in read 2025-04-04T17:17:50.320130Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-04T17:17:50.320144Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T17:17:50.320156Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:17:50.320169Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:17:50.320192Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-04-04T17:17:50.320210Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:17:50.320237Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2025-04-04T17:17:50.320258Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T17:17:50.320311Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T17:17:50.320533Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:664:2569]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-04T17:17:50.320594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:753:2633]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-04-04T17:17:50.321576Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1087:2876], Recipient [1:664:2569]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:50.321648Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-04T17:17:50.323212Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1089:2877], Recipient [1:753:2633]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:17:50.323257Z node 1 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] >> Donor::MultipleEvicts |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> 
DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-04-04T17:17:44.477583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:44.478010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:44.478059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f2b/r3tmp/tmpN45L2T/pdisk_1.dat 2025-04-04T17:17:44.823307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:44.860069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:44.897304Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:17:44.897999Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:17:44.898282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:44.898390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:44.909662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:44.986909Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:17:44.986952Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:17:44.987068Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:17:45.076050Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:17:45.076113Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:17:45.076544Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:17:45.076598Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:17:45.076816Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:17:45.076941Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:17:45.077005Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:17:45.077161Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:17:45.078178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:45.078938Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:17:45.078985Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:17:45.106188Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:670:2572]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:45.107324Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:670:2572]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:45.107758Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2572] 2025-04-04T17:17:45.108040Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:45.145709Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:670:2572]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:45.146037Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:45.146963Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:45.147114Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:45.148275Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:45.148326Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:45.148356Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:45.148580Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:45.148641Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:45.148906Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:673:2574] 2025-04-04T17:17:45.149036Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:45.155844Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:45.155920Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:700:2572] in generation 1 2025-04-04T17:17:45.156091Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:673:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:45.156768Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:45.156847Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:45.158050Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:17:45.158109Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-04-04T17:17:45.158192Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:17:45.158474Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:45.158596Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:45.158647Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:705:2574] in generation 1 2025-04-04T17:17:45.169325Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:45.192773Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:45.192899Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:45.192970Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:708:2593] 2025-04-04T17:17:45.193010Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:45.193035Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:45.193063Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:45.193234Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2572], Recipient [1:670:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.193268Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.193311Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:45.193344Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:17:45.193379Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:45.193407Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:709:2594] 2025-04-04T17:17:45.193419Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:17:45.193430Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:17:45.193441Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:45.193631Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:673:2574], Recipient [1:673:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.193657Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.193749Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:45.193849Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:45.194169Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:45.194205Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:45.194294Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:45.194322Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-04-04T17:17:45.194343Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:45.194374Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:45.194422Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:45.194460Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:17:45.194559Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:17:45.194628Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:691:2585], Recipient [1:670:2572]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.194648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.194674Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:664:2568], serverId# [1:691:2585], sessionId# [0:0:0] 2025-04-04T17:17:45.194714Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 202 ... 25-04-04T17:17:50.332024Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:50.332068Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:17:50.332152Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2025-04-04T17:17:50.332190Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-04T17:17:50.332216Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:50.332241Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:17:50.332270Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:17:50.332316Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:17:50.332375Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191926 2025-04-04T17:17:50.332592Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:17:50.332664Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:50.332702Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:17:50.332749Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:50.332782Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:50.332872Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:50.332906Z node 2 :TX_DATASHARD TRACE: Advance execution plan for 
[0:281474976715660] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:50.332947Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:50.332982Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:50.333025Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-04T17:17:50.333049Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:50.333079Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2025-04-04T17:17:50.343883Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:50.343955Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:50.344020Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:17:50.344140Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:50.346050Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-04-04T17:17:50.346113Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715661 ProcessProposeTransaction 2025-04-04T17:17:50.346193Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:856:2691] DataReq marker# P0 2025-04-04T17:17:50.346382Z node 2 :TX_PROXY DEBUG: Actor# [2:856:2691] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-04-04T17:17:50.346675Z node 2 :TX_PROXY DEBUG: Actor# [2:856:2691] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-04-04T17:17:50.346906Z node 2 :TX_PROXY DEBUG: Actor# [2:856:2691] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-04-04T17:17:50.347008Z node 2 :TX_PROXY DEBUG: Actor# [2:856:2691] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-04-04T17:17:50.347333Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:856:2691], Recipient [2:665:2569]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 856 RawX2: 8589937283 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\tX\003\000\000\000\000\000\000\021\203\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-04-04T17:17:50.347402Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:50.347497Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:50.347678Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-04-04T17:17:50.347763Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:17:50.347823Z node 2 :TX_DATASHARD TRACE: 
Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:17:50.347867Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:17:50.347911Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:50.347948Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:50.347994Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-04-04T17:17:50.348053Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-04-04T17:17:50.348092Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:17:50.348121Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:50.348145Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-04-04T17:17:50.348176Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-04-04T17:17:50.348206Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:17:50.348231Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-04T17:17:50.348254Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-04T17:17:50.348278Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-04T17:17:50.348331Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:856:2691] for [0:281474976715661] at 72075186224037888 2025-04-04T17:17:50.348367Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-04T17:17:50.348422Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:50.348499Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-04-04T17:17:50.348571Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2025-04-04T17:17:50.348620Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-04-04T17:17:50.348750Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:856:2691], Recipient [2:665:2569]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-04-04T17:17:50.348796Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-04T17:17:50.348917Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:856:2691], Recipient [2:665:2569]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-04T17:17:50.348953Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-04T17:17:50.349036Z node 2 :TX_DATASHARD TRACE: StateWork, received 
event# 2146435072, Sender [2:665:2569], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:50.349068Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:50.349127Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:50.349173Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:17:50.349246Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:17:50.349287Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-04-04T17:17:50.349335Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-04-04T17:17:50.349375Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:17:50.349419Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-04T17:17:50.349455Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-04-04T17:17:50.349493Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-04-04T17:17:50.349708Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-04-04T17:17:50.349766Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:17:50.349807Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:17:50.349845Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:50.349886Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:50.349970Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:50.350504Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:862:2696], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:17:50.350551Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2025-04-04T17:17:51.408541Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Connect to grpc://localhost:4419 2025-04-04T17:17:51.418499Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-04T17:17:51.427968Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2025-04-04T17:17:51.428505Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2025-04-04T17:17:51.430533Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-04-04T17:17:51.430888Z node 
3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-04-04T17:17:51.432208Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-04-04T17:17:51.432508Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2025-04-04T17:17:51.433625Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:15:10.810818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:15:10.810939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:10.810994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:15:10.811031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:15:10.812185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:15:10.812257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:15:10.812344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:15:10.812448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:15:10.813609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:15:10.936416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:15:10.936485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:10.955311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:15:10.955539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:15:10.955939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:15:10.979271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:15:10.979704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:15:10.982823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:10.990401Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:15:11.002898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:11.019539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:11.019615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:11.019806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:15:11.019852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:11.019893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:15:11.020005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.026313Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:15:11.166714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:15:11.170614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.178352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:15:11.186366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:15:11.186470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.194951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:11.195109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:15:11.195272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.195332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:15:11.195399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:15:11.195434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:15:11.202715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.202778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:15:11.202817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:15:11.205402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.205465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.205509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:11.205560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.208992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:15:11.211070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:15:11.211229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:15:11.212195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:15:11.212318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:15:11.212358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:11.212621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:15:11.212688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:15:11.212832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:15:11.212898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:15:11.214770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:15:11.214808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:15:11.214937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:15:11.214975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2025-04-04T17:15:11.215251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:15:11.215320Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:15:11.215407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:11.215437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.215470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:15:11.215502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.215535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:15:11.215568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:15:11.215598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:15:11.215647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:15:11.215707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:15:11.215741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:15:11.215769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:15:11.217454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:11.217550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:15:11.217584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:17:49.989396Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:17:49.989494Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:17:49.989528Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:17:50.274479Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:315:2301]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:17:50.274752Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-04-04T17:17:50.275122Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:315:2301], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 339 Memory: 124260 Storage: 14054 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-04-04T17:17:50.275173Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:17:50.275225Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0339 2025-04-04T17:17:50.275350Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:17:50.275414Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T17:17:50.285870Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:17:50.285934Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:17:50.286026Z 
node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:17:50.286055Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:17:50.318181Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.318281Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.318315Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-04T17:17:50.318378Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:17:50.318411Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-04T17:17:50.318518Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-04-04T17:17:50.318597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-04-04T17:17:50.318644Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-04-04T17:17:50.318771Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:17:50.329241Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.329306Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.329337Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:17:50.360466Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:715:2681]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:17:50.360707Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-04-04T17:17:50.361085Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:715:2681], Recipient [3:123:2149]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 61 Memory: 124260 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 213 TableOwnerId: 72057594046678944 FollowerId: 0 
2025-04-04T17:17:50.361134Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T17:17:50.361182Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0061 2025-04-04T17:17:50.361315Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 87 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 87 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T17:17:50.361356Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T17:17:50.392499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-04-04T17:17:50.392587Z node 3 :FLAT_TX_SCHEMESHARD INFO: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-04T17:17:50.392651Z node 3 :FLAT_TX_SCHEMESHARD INFO: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-04-04T17:17:50.392727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 3 seconds 2025-04-04T17:17:50.392760Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-04T17:17:50.392908Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.392955Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.392984Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-04-04T17:17:50.393039Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:17:50.393073Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-04T17:17:50.393171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-04-04T17:17:50.393232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0 2025-04-04T17:17:50.393276Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 100, DataSize 13940, with borrowed parts 2025-04-04T17:17:50.393386Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-04-04T17:17:50.393455Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:17:50.403875Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.403954Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T17:17:50.403986Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-04-04T17:17:50.581079Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:17:50.581148Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:17:50.581231Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:123:2149], Recipient [3:123:2149]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:17:50.581255Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-04-04T17:17:51.351350Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Connect to grpc://localhost:31329 2025-04-04T17:17:51.358106Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-04-04T17:17:51.369545Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } |91.0%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DatashardReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 10277, MsgBus: 19035 2025-04-04T17:17:19.832922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508303926203829:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:19.837037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015de/r3tmp/tmpmDq2AZ/pdisk_1.dat 2025-04-04T17:17:20.176049Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10277, node 1 2025-04-04T17:17:20.264324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:20.264632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:20.271294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:20.281196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:20.281215Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:20.281220Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:20.281340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19035 TClient is connected to server localhost:19035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:20.709169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:20.721572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:20.732142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:20.852522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:21.027322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:21.099265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.750876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508316811107490:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.750994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.974656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.007481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.071044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.099585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.132920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.169686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.221397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508321106075298:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:23.221478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:23.221660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508321106075303:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:23.225041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:23.234789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508321106075305:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:23.305012Z node 1 :TX_PROXY ERROR: Actor# [1:7489508321106075359:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:24.141057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:24.834350Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508303926203829:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:24.834439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:27.088827Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508338285945753:2601] TxId: 281474976710672. Ctx: { TraceId: 01jr0wxe1w4w8gnzb8tp8ca8eq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI5ZDY4NzQtODBjNGIyZmUtYWZjYzdmNGItOWZkYWIzOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2025-04-04T17:17:27.089084Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjI5ZDY4NzQtODBjNGIyZmUtYWZjYzdmNGItOWZkYWIzOGQ=, ActorId: [1:7489508333990978442:2601], ActorState: ExecuteState, TraceId: 01jr0wxe1w4w8gnzb8tp8ca8eq, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 21218, MsgBus: 6929 2025-04-04T17:17:27.899207Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508339725733589:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.899243Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015de/r3tmp/tmpTCjL9O/pdisk_1.dat 2025-04-04T17:17:28.037465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:28.039673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:28.040597Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:28.056450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21218, node 2 2025-04-04T17:17:28.143515Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:28.143535Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:28.143543Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:28.143651Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6929 TClient is connected to server localhost:6929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:28.559800Z ... :0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:34.329305Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:17:34.349343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:37.454528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508382632013021:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:37.454626Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:37.454668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508382632013033:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:37.457700Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:37.471315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508382632013035:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:37.538172Z node 3 :TX_PROXY ERROR: Actor# [3:7489508382632013086:2603] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:38.368342Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489508382632013118:2354] TxId: 281474976715661. Ctx: { TraceId: 01jr0wxr9aang6k2fqv3z4txgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NmZmMWI2MjAtYTk2ZmVjN2UtNGI3ZDQzYmYtZjhkMTJiMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442499 > 50331648 2025-04-04T17:17:38.368629Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmZmMWI2MjAtYTk2ZmVjN2UtNGI3ZDQzYmYtZjhkMTJiMjY=, ActorId: [3:7489508378337045713:2354], ActorState: ExecuteState, TraceId: 01jr0wxr9aang6k2fqv3z4txgf, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442499 > 50331648), code: 200509 2025-04-04T17:17:38.716287Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508365452142779:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:38.716379Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7489, MsgBus: 15085 2025-04-04T17:17:39.199291Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508390279282493:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:39.199347Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015de/r3tmp/tmp9b0tKD/pdisk_1.dat 2025-04-04T17:17:39.277761Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7489, node 4 2025-04-04T17:17:39.330108Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:39.330196Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:39.331747Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:39.343976Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:39.344015Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:39.344023Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:39.344152Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15085 TClient is connected to server localhost:15085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:39.827325Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:39.844464Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:39.921412Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:40.095227Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:40.168301Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:42.291540Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508403164186157:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:42.291628Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:42.333050Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.376651Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.442884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.474126Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.506629Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.541583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:42.618470Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508403164186676:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:42.618569Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:42.618666Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508403164186681:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:42.640161Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:42.650439Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508403164186683:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:42.743211Z node 4 :TX_PROXY ERROR: Actor# [4:7489508403164186739:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:43.538767Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:44.199747Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508390279282493:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:44.199814Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:50.393071Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjE5Mzc4MjItZDQ0NGE5NGMtYWUzNjI5YWUtOTk0ZWJiZTk=, ActorId: [4:7489508433228959668:2727], ActorState: ExecuteState, TraceId: 01jr0wy3k27bw7ys3nen8458f4, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (200003964 > 50331648), code: 2013 |91.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] >> Donor::MultipleEvicts [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 10604918271848800283 0 donors: 2025-04-04T17:17:52.826313Z 4 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:52.827553Z 4 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:52.846102Z 4 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2025-04-04T17:17:52.915034Z 2 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:52.916872Z 2 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:52.931983Z 2 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-04T17:17:52.988269Z 4 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:52.990099Z 4 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:53.001353Z 4 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2025-04-04T17:17:53.056699Z 2 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:53.058390Z 2 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:53.068854Z 2 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-04T17:17:53.119617Z 4 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:53.120812Z 4 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:53.129196Z 4 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2025-04-04T17:17:53.177249Z 2 00h00m20.016144s :BS_SYNCER ERROR: 
VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:53.179313Z 2 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:53.191005Z 2 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-04T17:17:53.237048Z 4 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:53.238307Z 4 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:53.246765Z 4 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 2025-04-04T17:17:53.291851Z 2 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:53.293301Z 2 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:53.302086Z 2 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 4:1000 2025-04-04T17:17:53.350068Z 4 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:53.351637Z 4 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17832101595959958591] 2025-04-04T17:17:53.361519Z 4 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:0:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 2:1000 >> Donor::SlayAfterWiping [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: 2025-04-04T17:17:44.779881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:44.780248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:44.780286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f23/r3tmp/tmpFzqmUr/pdisk_1.dat 2025-04-04T17:17:45.073308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:45.110427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:45.147613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:45.147732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:45.159131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:45.239990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:45.280496Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:45.281532Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:45.281958Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:17:45.282309Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:45.293057Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:45.328356Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:45.328481Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:45.330070Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:45.330162Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:45.330230Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:45.330683Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:45.330803Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:45.330885Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:17:45.341624Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:45.373152Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:45.373332Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:45.373451Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:17:45.373487Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:45.373522Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:45.373555Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:45.373753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.373806Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.374105Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:45.374255Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:45.374442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:45.374503Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:45.374562Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:45.374612Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:45.374647Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:45.374679Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:45.374722Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:45.374828Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.374864Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.374909Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:17:45.375260Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:17:45.375308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:45.375417Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:45.375593Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:45.375647Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:45.375723Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:45.375763Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:45.375818Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:45.375873Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:17:45.375907Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:45.376218Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:45.376269Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:45.376301Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:45.376334Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:45.376391Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:45.376423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:45.376473Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:45.376506Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:45.376543Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:45.378008Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:45.378073Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:45.388637Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:45.388728Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:45.388763Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:45.388797Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:17:45.388860Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:45.526786Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.526837Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.526877Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:17:45.527175Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:17:45.527223Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:45.527325Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:45.527361Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:17:45.527416Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:17:45.527449Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:17:45.531478Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:17:45.531541Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:45.532246Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.532285Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.532330Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:4 ... : /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], CA [2:1089:2881], 2025-04-04T17:17:52.760000Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2858] TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1089:2881], 2025-04-04T17:17:52.760159Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2858] TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1089:2881], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 311 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 144 FinishTimeMs: 1743787072759 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 83 BuildCpuTimeUs: 61 HostName: "ghrun-mup5cka3jq" NodeId: 2 CreateTimeMs: 1743787072750 } MaxMemoryUsage: 1048576 } 2025-04-04T17:17:52.760203Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1089:2881] 2025-04-04T17:17:52.760249Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2858] TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], 2025-04-04T17:17:52.760268Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2858] TxId: 281474976715667. 
Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1090:2882], 2025-04-04T17:17:52.760422Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2858] TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1090:2882], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 281 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 123 FinishTimeMs: 1743787072760 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 90 BuildCpuTimeUs: 33 HostName: "ghrun-mup5cka3jq" NodeId: 2 StartTimeMs: 1743787072759 CreateTimeMs: 1743787072750 } MaxMemoryUsage: 1048576 } 2025-04-04T17:17:52.760455Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1090:2882] 2025-04-04T17:17:52.762691Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1078:2858] TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 8832 DurationUs: 1743787070747965 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } ExecuterCpuTimeUs: 5390 StartTimeMs: 2012 FinishTimeMs: 1743787072760 Stages { StageId: 5 StageGuid: "f45f812a-f9a5d206-c5337fb7-1e25044" Program: "(\n(return (lambda \'($1) (FromFlow (Take (ToFlow $1) (Uint64 \'\"1001\")))))\n)\n" ComputeActors { CpuTimeUs: 311 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 144 FinishTimeMs: 1743787072759 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 83 BuildCpuTimeUs: 61 HostName: "ghrun-mup5cka3jq" NodeId: 2 CreateTimeMs: 1743787072750 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743787072755 } Stages { StageGuid: "c649547f-a394ae49-fb22a5fe-bc42d013" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743787072755 } Stages { StageId: 3 StageGuid: "9c0eb063-a34821d9-cbc44038-5d4f1c3f" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743787072755 } Stages { StageId: 2 StageGuid: "15255a8c-b1d63cf7-4844dbfc-24a4bc7" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743787072755 } Stages { StageId: 4 StageGuid: "1624ec28-8bacda44-93d8557e-2e216aba" Program: "(\n(return (lambda \'($1 $2) (block \'(\n (let $3 (lambda \'($6 $7) (AsStruct \'(\'\"key\" $6) \'(\'\"value\" $7))))\n (let $4 (Sort (Extend (NarrowMap (ToFlow $1) $3) (NarrowMap (ToFlow $2) $3)) (Bool \'true) (lambda \'($8) (Member $8 \'\"key\"))))\n (let $5 (lambda \'($9) (Member $9 \'\"key\") (Member $9 \'\"value\")))\n (return (FromFlow (ExpandMap $4 $5)))\n))))\n)\n" BaseTimeMs: 1743787072755 } Stages { StageId: 6 StageGuid: "cde52546-6f0c266-517d2e8-4cf9f2bb" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" BaseTimeMs: 1743787072755 } Stages { StageId: 1 StageGuid: "4052cb1-ca9b0320-4e696376-ff824da4" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743787072755 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":17,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":16,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":14}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":15,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":14,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":12}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":13,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":12,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Union\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"row.key\"},{\"Inputs\":[{\"ExternalPlanNodeId\":10},{\"ExternalPlanNodeId\":5}],\"Name\":\"Union\"}],\"PlanNodeId\":11,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":10,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node 
Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":9,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":8,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"0\",\"E-Rows\":\"1\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (3)\"],\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":6,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"c649547f-a394ae49-fb22a5fe-bc42d013\",\"Stats\":{\"BaseTimeMs\":1743787072755,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"4052cb1-ca9b0320-4e696376-ff824da4\",\"Stats\":{\"BaseTimeMs\":1743787072755,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"},{\"Node Type\":\"UnionAll\",\"PlanNodeId\":5,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"0\",\"E-Rows\":\"1\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-2\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (4)\"],\"Scan\":\"Sequential\",\"Table\":\"table-2\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-2\"]}],\"StageGuid\":\"15255a8c-b1d63cf7-4844dbfc-24a4bc7\",\"Stats\":{\"BaseTimeMs\":1743787072755,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"9c0eb063-a34821d9-cbc44038-5d4f1c3f\",\"Stats\":{\"BaseTimeMs\":1743787072755,\"PhysicalStageId\":3,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"1624ec28-8bacda44-93d8557e-2e216aba\",\"Stats\":{\"BaseTimeMs\":1743787072755,\"PhysicalStageId\":4,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"f45f812a-f9a5d206-c5337fb7-1e25044\",\"Stats\":{\"BaseTimeMs\":1743787072755,\"ComputeNodes\":[{\"CpuTimeUs\":311,\"Tasks\":[{\"ComputeTimeUs\":83,\"FinishTimeMs\":1743787072759,\"Host\":\"ghrun-mup5cka3jq\",\"InputBytes\":7,\"InputRows\":2,\"NodeId\":2,\"OutputBytes\":7,\"OutputRows\":2,\"TaskId\":6}]}],\"PhysicalStageId\":5,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"cde52546-6f0c266-517d2e8-4cf9f2bb\",\"Stats\":{\"BaseTimeMs\":1743787072755,\"PhysicalStageId\":6,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3702 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\013\010\231\002\020\301\007\030\362\032 \007" } } 2025-04-04T17:17:52.762768Z node 2 :KQP_EXECUTER DEBUG: ActorId: 
[2:1078:2858] TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:17:52.762824Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2858] TxId: 281474976715667. Ctx: { TraceId: 01jr0wy6wyekqgzc2vxayd577n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmVhYmM1ZmQtZTFlZDMyMWQtMjg1NzY2ZDAtZTk4NDMwZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.003442s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2025-04-04T17:16:06.232028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.232442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.232495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a73/r3tmp/tmpNszFCI/pdisk_1.dat 2025-04-04T17:16:06.635351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.680552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.724351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.724491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.735940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.820482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.867066Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.868111Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.868608Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.868844Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.880212Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.915962Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.916085Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.917876Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.917960Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.918012Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.918412Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.918551Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.918664Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:06.929459Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:06.967929Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:06.968136Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:06.968275Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:06.968321Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:06.968358Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:06.968407Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:06.968633Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.968684Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.969010Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:06.969118Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:06.969240Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:06.969290Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:06.969346Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:06.969381Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:06.969422Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:06.969456Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:06.969497Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:06.969586Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.969633Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.969697Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:06.970069Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:06.970117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:06.970208Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:06.970460Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:06.970518Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:06.970622Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:06.970676Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:06.970738Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:06.970779Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
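
The trace above and below walks one scheme transaction through the datashard's execution-unit pipeline: each unit returns a status that either advances the plan (Executed / ExecutedNoMoreRestarts), defers its visible effects to the post-commit Complete phase (DelayComplete / DelayCompleteNoMoreRestarts), or parks the operation (WaitForPlan reports "not ready" until a plan step arrives). The self-contained C++ sketch below shows that control flow; the unit names and status values are taken from the trace, while the types and driver loop are illustrative assumptions, not YDB's actual NKikimr::NDataShard interfaces.

    #include <cstdio>
    #include <string>
    #include <vector>

    // Sketch only: unit names and statuses mirror the trace above; the
    // TUnit type and the driver loop are invented for illustration.
    enum class EStatus {
        Executed,                    // advance to the next unit
        ExecutedNoMoreRestarts,      // advance; unit must not be re-run
        DelayComplete,               // advance now, run completion after commit
        DelayCompleteNoMoreRestarts,
        NotReady                     // park the operation (e.g. until TEvPlanStep)
    };

    struct TUnit {
        const char* Name;
        EStatus (*Execute)();        // capture-less lambdas decay to this
    };

    int main() {
        // Unit order for the scheme tx in the trace (txid 281474976715657).
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::ExecutedNoMoreRestarts; }},
            {"StoreSchemeTx", [] { return EStatus::DelayCompleteNoMoreRestarts; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},
        };

        std::vector<std::string> deferred;  // units completed after the local tx commits
        for (const TUnit& unit : plan) {
            EStatus st = unit.Execute();
            if (st == EStatus::NotReady) {
                std::printf("operation parked on unit %s\n", unit.Name);
                break;                       // resumes when the plan step arrives
            }
            if (st == EStatus::DelayComplete || st == EStatus::DelayCompleteNoMoreRestarts)
                deferred.push_back(unit.Name);
            std::printf("advance execution plan past unit %s\n", unit.Name);
        }
        // Mirrors "Complete execution for [...] on unit X" in the trace.
        for (const std::string& name : deferred)
            std::printf("complete execution on unit %s\n", name.c_str());
        return 0;
    }

This two-phase shape explains the ordering in the log: StoreSchemeTx and FinishPropose run during TTxProposeTransactionBase::Execute, but their "Complete execution" lines, and the PREPARED reply to the client, appear only once TTxProposeTransactionBase::Complete fires.
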
2025-04-04T17:16:06.970812Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.971244Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:06.971282Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:06.971328Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:06.971360Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.971400Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:06.971440Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:06.971483Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:06.971517Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:06.971552Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:06.973000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:06.973053Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:06.983750Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:06.983836Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.983871Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.983970Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:06.984037Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.123828Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.123891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.123980Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.124349Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.124391Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.124509Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.124556Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.124617Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.124654Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.134606Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.134690Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.135554Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.135598Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.135646Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... ssTransaction::Execute at 72075186224037889 2025-04-04T17:17:52.416855Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:52.416891Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:52.416928Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-04-04T17:17:52.416962Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-04-04T17:17:52.416995Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:52.417021Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-04-04T17:17:52.417043Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-04-04T17:17:52.417068Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-04-04T17:17:52.417181Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-04-04T17:17:52.417222Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:52.417241Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-04-04T17:17:52.417261Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:52.417281Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:17:52.417314Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-04-04T17:17:52.417352Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-04-04T17:17:52.417385Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-04-04T17:17:52.417419Z node 15 
:TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:52.417438Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:52.417457Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-04-04T17:17:52.417486Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-04-04T17:17:52.417558Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-04-04T17:17:52.417588Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-04-04T17:17:52.417629Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-04-04T17:17:52.417657Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-04-04T17:17:52.417677Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:52.417694Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-04-04T17:17:52.417713Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-04-04T17:17:52.417731Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:52.417828Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-04-04T17:17:52.417851Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-04-04T17:17:52.417888Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:17:52.417938Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:17:52.417990Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:17:52.418018Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:17:52.418062Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-04-04T17:17:52.418100Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:52.418136Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:17:52.418173Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T17:17:52.418221Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T17:17:52.440259Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-04-04T17:17:52.440431Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:52.440528Z 
node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:17:52.440650Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1039:2836], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:52.440788Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:52.440996Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-04-04T17:17:52.441054Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:17:52.441092Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:17:52.441144Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1039:2836], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:17:52.441185Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:17:52.443458Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:592:2517], Recipient [15:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2025-04-04T17:17:52.443638Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:17:52.443771Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-04T17:17:52.443983Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:52.444067Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:17:52.444141Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:52.444203Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:17:52.444257Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-04T17:17:52.444332Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:52.444367Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:52.444396Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:17:52.444431Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:17:52.444597Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2025-04-04T17:17:52.444679Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-04-04T17:17:52.444975Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is 
Executed 2025-04-04T17:17:52.445013Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:17:52.445047Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:17:52.445082Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:17:52.445140Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:17:52.445170Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:17:52.445213Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-04T17:17:52.445281Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:17:52.445451Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:17:52.446755Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [15:1059:2854], Recipient [15:664:2569]: NKikimr::TEvDataShard::TEvReadScanStarted 2025-04-04T17:17:52.446885Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [15:1059:2854], Recipient [15:664:2569]: NKikimr::TEvDataShard::TEvReadScanFinished 2025-04-04T17:17:52.447323Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:664:2569], Recipient [15:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:52.447394Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:52.447511Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:52.447594Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:52.447660Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:17:52.447729Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:52.447786Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:52.447860Z node 15 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:52.447948Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 4842938379429194221 2025-04-04T17:17:53.097329Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:53.099145Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12904324453417107345] 2025-04-04T17:17:53.117892Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TVPatchTests::PatchPartOk >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> TVPatchTests::PatchPartGetError >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 
[GOOD] >> KqpLimits::QueryExecTimeout [GOOD] >> KqpLimits::QSReplySize-useSink >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TVPatchTests::PatchPartGetError [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::PatchPartOk [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::ReadOnlyMode >> TExternalTableTest::ParallelCreateSameExternalTable >> TExternalTableTest::ParallelCreateExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-04-04T17:17:55.394623Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:55.397132Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-04-04T17:17:55.397176Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-04-04T17:17:55.397360Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-04-04T17:17:55.398232Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T17:17:55.398389Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2025-04-04T17:17:55.394596Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv 
NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:55.397474Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-04T17:17:55.397529Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T17:17:55.397719Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-04T17:17:55.398191Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:55.398399Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-04T17:17:55.399567Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-04T17:17:55.399653Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-04T17:17:55.401085Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2025-04-04T17:17:55.401150Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-04T17:17:55.401257Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-04-04T17:17:55.394653Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:55.397412Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-04T17:17:55.397480Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} 
[0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T17:17:55.397706Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-04T17:17:55.398208Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:55.398438Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-04T17:17:55.398525Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TExternalTableTest::DropExternalTable >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> TExternalTableTest::SchemeErrors >> DataShardTxOrder::DelayData [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists |91.1%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-04-04T17:17:55.394560Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:55.397367Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-04T17:17:55.397453Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-04-04T17:17:55.397563Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-04-04T17:17:55.658428Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:55.658869Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-04T17:17:55.658922Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T17:17:55.659125Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-04-04T17:17:55.659195Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-04T17:17:55.659268Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] Test command err: 2025-04-04T17:17:46.859676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:46.860089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:46.860134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ef4/r3tmp/tmpMzxShV/pdisk_1.dat 2025-04-04T17:17:47.212293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:47.240362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:47.279529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:47.279677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:47.291246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:47.377594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:47.408708Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:47.409407Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:47.409733Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:17:47.409941Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:47.417380Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:47.444453Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:47.444565Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:47.446074Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:17:47.446153Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:17:47.446216Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:17:47.446589Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:47.446725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:47.446821Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:17:47.457452Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:47.483964Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:17:47.484116Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:47.484218Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:17:47.484248Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:17:47.484279Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:17:47.484320Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:47.484486Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.484519Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.484777Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:17:47.484870Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:17:47.485029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:47.485079Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:47.485125Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:17:47.485157Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:17:47.485189Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:17:47.485216Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:17:47.485271Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:17:47.485380Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.485426Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.485465Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:17:47.485780Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:17:47.485823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:47.485904Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:17:47.486087Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:17:47.486149Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:17:47.486219Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:17:47.486282Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:17:47.486308Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:17:47.486334Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
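
As in the first unittest log above, the operation parks on WaitForPlan and resumes just below when the mediator delivers TEvPlanStep step# 1000; PlanQueue then hands out planned transactions in global (Step, TxId) order, the same order visible in the node-15 trace earlier ("Found ready operation [3000:281474976715664] in PlanQueue unit"). Below is a minimal C++ sketch of that ordering; the TOpId alias and the driver loop are assumptions made for illustration — only the (step, txid) pairs come from these logs.

    #include <cstdint>
    #include <cstdio>
    #include <set>
    #include <utility>

    // (Step, TxId) identifies a planned operation; both values are assigned
    // upstream by the coordinator/mediator, e.g. "TEvPlanStep step# 1000" for
    // txId 281474976715657 in the trace above. TOpId is an invented alias.
    using TOpId = std::pair<uint64_t, uint64_t>;

    int main() {
        std::set<TOpId> planQueue;  // kept sorted, so begin() is the next ready op

        // TEvPlanStep delivery moves parked transactions into the queue.
        planQueue.insert({1000, 281474976715657ULL});
        planQueue.insert({3000, 281474976715664ULL});

        // TTxProgressTransaction repeatedly takes the lowest (Step, TxId).
        while (!planQueue.empty()) {
            TOpId next = *planQueue.begin();
            planQueue.erase(planQueue.begin());
            std::printf("Found ready operation [%llu:%llu] in PlanQueue unit\n",
                        static_cast<unsigned long long>(next.first),
                        static_cast<unsigned long long>(next.second));
        }
        std::printf("TPlanQueueUnit has no attached operations\n");
        return 0;
    }

A plain ordered set suffices here because (Step, TxId) pairs compare lexicographically: step order dominates, and TxId breaks ties within a single plan step.
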
2025-04-04T17:17:47.486355Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:47.486552Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:47.486588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:17:47.486613Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:17:47.486636Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:47.486685Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:17:47.486727Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:17:47.486761Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:17:47.486783Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:47.486812Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:47.488108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:17:47.488151Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:17:47.498757Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:17:47.498825Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:17:47.498853Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:17:47.498887Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:17:47.498945Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:47.636886Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.636938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:47.636972Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:17:47.637299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:17:47.637335Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:47.637425Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:17:47.637467Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:17:47.637503Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:17:47.637533Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:17:47.641912Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:17:47.641981Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:17:47.642751Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.642801Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:47.642842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:17:4 ... 86224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:17:54.438632Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-04T17:17:54.438691Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [2:1198:2935] 2025-04-04T17:17:54.438736Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-04-04T17:17:54.438773Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-04-04T17:17:54.438802Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:17:54.438982Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1044:2821], Recipient [2:1044:2821]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:54.439009Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:54.439124Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1044:2821], Recipient [2:755:2633]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-04-04T17:17:54.439160Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-04-04T17:17:54.439493Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1193:2930], Recipient [2:755:2633]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1193:2930] ServerId: [2:1195:2932] } 2025-04-04T17:17:54.439522Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:17:54.439778Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1044:2821]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-04-04T17:17:54.439805Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:17:54.439837Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-04-04T17:17:54.439871Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: 
at tablet# 72075186224037892 2025-04-04T17:17:54.439961Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T17:17:54.439996Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:54.440024Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-04-04T17:17:54.440056Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-04T17:17:54.440085Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-04T17:17:54.440122Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-04-04T17:17:54.440168Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-04T17:17:54.440259Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1195:2932], Recipient [2:1044:2821]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:54.440283Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:54.440323Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1193:2930], serverId# [2:1195:2932], sessionId# [0:0:0] 2025-04-04T17:17:54.440551Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1044:2821]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-04T17:17:54.440578Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-04T17:17:54.440614Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-04-04T17:17:54.440655Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-04T17:17:54.440707Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-04T17:17:54.451510Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-04-04T17:17:54.451573Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-04-04T17:17:54.451644Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:17:54.451711Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-04-04T17:17:54.451759Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1202:2939] 2025-04-04T17:17:54.451781Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-04-04T17:17:54.451818Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-04-04T17:17:54.451841Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-04T17:17:54.452013Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1046:2823], Recipient [2:1046:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:54.452061Z node 2 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:54.452179Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1046:2823], Recipient [2:755:2633]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-04-04T17:17:54.452212Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-04-04T17:17:54.452519Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1194:2931], Recipient [2:755:2633]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1194:2931] ServerId: [2:1196:2933] } 2025-04-04T17:17:54.452546Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:17:54.452816Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1046:2823]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-04-04T17:17:54.452840Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:17:54.452865Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-04-04T17:17:54.452889Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-04-04T17:17:54.452923Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-04-04T17:17:54.452948Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:54.452972Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-04-04T17:17:54.452993Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-04-04T17:17:54.453016Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-04-04T17:17:54.453043Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-04-04T17:17:54.453085Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-04-04T17:17:54.453210Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1196:2933], Recipient [2:1046:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:54.453241Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:17:54.453277Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1194:2931], serverId# [2:1196:2933], sessionId# [0:0:0] 2025-04-04T17:17:54.453539Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1046:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-04-04T17:17:54.453565Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-04T17:17:54.453588Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-04-04T17:17:54.453620Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-04-04T17:17:54.453651Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 
promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-04-04T17:17:54.464361Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-04-04T17:17:54.466979Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:417:2411], Recipient [2:762:2637] 2025-04-04T17:17:54.467077Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-04-04T17:17:54.469647Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-04-04T17:17:54.469723Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:17:54.470378Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:747:2628], Recipient [2:755:2633]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:17:54.986188Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:993:2680], Recipient [2:665:2569]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 281474976715663 TxMode: MODE_VOLATILE_PREPARE Locks { SendingShards: 72075186224037888 SendingShards: 72075186224037889 ReceivingShards: 72075186224037888 ReceivingShards: 72075186224037889 Op: Commit } 2025-04-04T17:17:54.986282Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-04-04T17:17:54.986413Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=281474976715663; 2025-04-04T17:17:54.986483Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-04-04T17:17:54.986980Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-04-04T17:17:54.987452Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] Test command err: 2025-04-04T17:17:48.182132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:48.182615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:48.182673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ee2/r3tmp/tmpxKfg6f/pdisk_1.dat 2025-04-04T17:17:48.474024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:48.509111Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:48.548319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:48.548449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:48.559507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:48.638894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:48.938330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-04T17:17:49.185420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:49.185512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:837:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:49.185571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:49.190090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:49.341641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:841:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:49.410036Z node 1 :TX_PROXY ERROR: Actor# [1:901:2730] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:49.658683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wy3qz2sa3ndw8d1vhczx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFjOWE5OWYtMWRlYjFmMzItYmEyMmVjOTktMzIzMWYzMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:49.723092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wy47g06dq59t0wbqgd8t6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVhZTA2NS0xZWZiZWE4MS1jNjFkNGE4NC0xNDQ1NTgzMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-04T17:17:50.010836Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wy4a1ea5ng3yg9z5gknzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2Njc0ZTctZjBiYThjNmItYmUzMzkxZC1iNTMyOThhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-04T17:17:50.076766Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wy4j7bmytg3yjjtmzv92w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2Njc0ZTctZjBiYThjNmItYmUzMzkxZC1iNTMyOThhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet 2025-04-04T17:17:50.247721Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1030:2771], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:965:2771]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT 2025-04-04T17:17:50.518193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wy4x69r154kmkg6r9q4rc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVmYWMzMGEtMWE5ZjZmNDAtOTM2MTM0YTAtM2ZiYzQ0MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2025-04-04T17:17:53.540677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:53.540770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:53.540933Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ee2/r3tmp/tmpvy9siJ/pdisk_1.dat 2025-04-04T17:17:53.776774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:53.801120Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:53.836524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:53.836657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:53.848181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:53.927714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:54.168750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-04-04T17:17:54.421894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:829:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:54.421959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:839:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:54.422007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:54.425559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:54.574936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:843:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:54.610191Z node 2 :TX_PROXY ERROR: Actor# [2:903:2730] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:54.671258Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wy8vmaf4sjmq79er6r6sj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk5ZmIxYmYtOTg3YjMyMi03NmI0YjFkZS1iY2VlYzkyZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:54.772467Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wy94f30aztxzx3pcvxfcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmJhM2ZlMmUtYzFhYmU4NmQtNTFkNjZlNGEtNTNiYzAzNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-04-04T17:17:55.114322Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wy977bv32gamq1ge2gf6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJhNzZiYzItZTNiZDAyNGItNDdlODczOTItNDE4MzYzZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-04-04T17:17:55.213140Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wy9hs0xqjn15rsf9dz5as, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTJhNzZiYzItZTNiZDAyNGItNDdlODczOTItNDE4MzYzZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2025-04-04T17:17:55.623129Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wy9y0aja9bptpm9sxjnfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQyZWE3NTUtN2RhMjJmZDktZmYxM2FiZS03ZDgyNTE5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2025-04-04T17:17:45.837757Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:45.900099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:45.900137Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:45.904689Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:45.905081Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:45.905286Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:45.912487Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:45.943849Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:45.943905Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:45.945098Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:45.945158Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:45.945204Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:45.945494Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:45.945719Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:45.945778Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:46.008663Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:46.041452Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:46.041623Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:46.041728Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:46.041789Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:46.041829Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:46.041862Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:46.041987Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.042024Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.042284Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:46.042364Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:46.042467Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:46.042504Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:46.042534Z node 1 
:TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:46.042584Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:46.042614Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:46.042648Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:46.042680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:46.042754Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.042785Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.042825Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:46.044619Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:46.044655Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:46.044718Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:46.044865Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:46.044902Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:46.044946Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:46.044975Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:46.045005Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:46.045030Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:46.045054Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:46.045327Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:46.045356Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:46.045383Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:46.045408Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:46.045457Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:46.045490Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:46.045520Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:46.045548Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:46.045584Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 
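The trace above walks a scheme transaction through the datashard execution-unit pipeline: CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan, with per-unit statuses (Executed, DelayComplete, NotReady). The sketch below is illustrative only: every name in it (TUnit, EStatus, the unit list) is hypothetical and not YDB's actual classes, but it reproduces the two behaviors visible in the log: units that return DelayComplete finish after the local commit, and a not-ready unit parks the operation until the plan step arrives.

// Minimal sketch of an execution-unit pipeline; names are hypothetical.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;  // one pipeline stage
};

int main() {
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},  // completion deferred to commit
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},       // parked until the plan step arrives
    };
    std::vector<std::string> delayed;  // units whose completion runs after the local commit
    for (const auto& unit : plan) {
        std::cout << "execute " << unit.Name << "\n";
        EStatus st = unit.Execute();
        if (st == EStatus::DelayComplete) delayed.push_back(unit.Name);
        if (st == EStatus::NotReady) break;  // stop advancing; re-enter on TEvPlanStep
    }
    for (const auto& name : delayed)
        std::cout << "complete " << name << " (after commit)\n";
}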
2025-04-04T17:17:46.057542Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:46.057612Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:46.057641Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:46.057674Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:46.057720Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:46.058126Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:218:2216], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.058175Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.058209Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:217:2215], serverId# [1:218:2216], sessionId# [0:0:0] 2025-04-04T17:17:46.058353Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:46.058382Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:46.058472Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:46.058500Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:46.058530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:46.058559Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:46.061108Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:46.061175Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:46.061389Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.061423Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.061466Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:46.061503Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:46.061535Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:46.061577Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:46.061609Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:46.061647Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:46.061683Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:46.061718Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:46.061764Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:46.061952Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:46.062004Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:46.062033Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:46.062056Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:46.062082Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:46.062136Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:46.062161Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:46.062188Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:46.062219Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:46.062284Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:46.062337Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:46.062374Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:46.062410Z node 1 :TX_D ... e 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2025-04-04T17:17:55.792301Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2025-04-04T17:17:55.792318Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2025-04-04T17:17:55.792332Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-04T17:17:55.792363Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:17:55.792392Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:17:55.792412Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2025-04-04T17:17:55.792714Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2025-04-04T17:17:55.792766Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:55.792821Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-04T17:17:55.792851Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:17:55.792870Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2025-04-04T17:17:55.792884Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2025-04-04T17:17:55.793025Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 
2025-04-04T17:17:55.793046Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2025-04-04T17:17:55.793070Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2025-04-04T17:17:55.793107Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2025-04-04T17:17:55.793129Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-04-04T17:17:55.793142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2025-04-04T17:17:55.793172Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2025-04-04T17:17:55.793214Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:55.793254Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:55.793285Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-04-04T17:17:55.793519Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:231:2224], Recipient [1:231:2224]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:55.793546Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:55.793574Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:55.793595Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:55.793633Z node 1 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2025-04-04T17:17:55.793669Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-04-04T17:17:55.793693Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.793714Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:55.793735Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:55.793752Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:55.794119Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-04-04T17:17:55.794142Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.794157Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:55.794172Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-04-04T17:17:55.794191Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-04-04T17:17:55.794217Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.794230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-04-04T17:17:55.794413Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:55.794477Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 
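A minimal sketch of the plan-queue ordering the DelayData trace relies on: planned operations run in (Step, TxId) order, which is why [1000005:506] executes before [1000005:507] in the records around this point regardless of arrival order. The container and names below are illustrative, not YDB's PlanQueue implementation.

// Sketch: planned operations drained in (Step, TxId) order.
#include <cstdint>
#include <functional>
#include <iostream>
#include <queue>
#include <utility>
#include <vector>

using TOpId = std::pair<uint64_t, uint64_t>;  // (Step, TxId)

int main() {
    std::priority_queue<TOpId, std::vector<TOpId>, std::greater<TOpId>> planQueue;
    planQueue.push({1000005, 507});  // the delayed transaction may be enqueued first
    planQueue.push({1000005, 506});
    while (!planQueue.empty()) {
        auto [step, txId] = planQueue.top();
        planQueue.pop();
        std::cout << "execute [" << step << ":" << txId << "]\n";  // prints 506, then 507
    }
}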
2025-04-04T17:17:55.794546Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2025-04-04T17:17:55.794586Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-04-04T17:17:55.794617Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2025-04-04T17:17:55.794676Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.794711Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:17:55.794750Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-04-04T17:17:55.794794Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-04-04T17:17:55.794846Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.794882Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-04-04T17:17:55.794903Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-04-04T17:17:55.794923Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-04-04T17:17:55.794943Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.794961Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit StoreAndSendOutRS 2025-04-04T17:17:55.794979Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-04-04T17:17:55.794998Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-04-04T17:17:55.795027Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.795053Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-04-04T17:17:55.795086Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-04-04T17:17:55.795106Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-04-04T17:17:55.795128Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.795149Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:17:55.795184Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:17:55.795205Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-04-04T17:17:55.795536Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-04-04T17:17:55.795585Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:17:55.795631Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:55.795655Z node 1 :TX_DATASHARD TRACE: Advance execution plan for 
[1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:17:55.795687Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-04-04T17:17:55.795719Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-04-04T17:17:55.795872Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-04-04T17:17:55.795914Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2025-04-04T17:17:55.795941Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-04-04T17:17:55.795979Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-04-04T17:17:55.796014Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-04-04T17:17:55.796038Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-04-04T17:17:55.796062Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2025-04-04T17:17:55.796096Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:55.796133Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:55.796160Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:55.796194Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:55.810536Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-04-04T17:17:55.810617Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-04-04T17:17:55.810680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:55.810719Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-04-04T17:17:55.810783Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:55.810851Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:55.811020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:55.811050Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-04-04T17:17:55.811094Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:17:55.811141Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TVPatchTests::PatchPartPutError >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> KqpStreamLookup::ReadTableDuringSplit >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> TVPatchTests::PatchPartPutError [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TExternalTableTest::DropExternalTable [GOOD] >> 
TExternalTableTest::Decimal >> KqpParams::CheckCacheByAst [GOOD] >> TExternalTableTest::SchemeErrors [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:17:56.772372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.772557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:17:56.773619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.853992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.854065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:56.863196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.863440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.863589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.872415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.872598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.873206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.874714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.882070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896780Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.903029Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:57.013096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.013296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.013510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:17:57.013727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:57.013806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.015536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.015642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.015792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.015836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.015878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.015909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.017328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.017391Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.017431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.018835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.018875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
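The schemeshard lines above show the operation advancing through numeric states: "Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240" once TEvOperationPlan arrives from the coordinator. The sketch below is a hedged reading of that progression: only the numeric codes come from the log, and the enum names are guesses inferred from the surrounding messages (TCreateParts, TConfigureParts, TPropose, TDone).

// Sketch of the state progression seen in the FLAT_TX_SCHEMESHARD trace.
#include <iostream>

enum class ETxState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;  // "no shards to create"
        case ETxState::ConfigureParts: return ETxState::Propose;         // send to coordinator
        case ETxState::Propose:        return ETxState::Done;            // TEvOperationPlan received
        default:                       return ETxState::Done;
    }
}

int main() {
    for (ETxState s = ETxState::CreateParts; ; s = Next(s)) {
        std::cout << "state " << static_cast<int>(s) << "\n";  // 2, 3, 128, 240
        if (s == ETxState::Done) break;
    }
}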
2025-04-04T17:17:57.018912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.018959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.022504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.023925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.024078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:17:57.024933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.025051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.025097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.025331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.025393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.025538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.025623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.027362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.027403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.027559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.027600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.027871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.027921Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.028015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.028050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-04-04T17:17:57.028103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.028131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.028163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.028200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.028242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.028271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.028325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.028361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.028388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:17:57.030051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.030142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.030173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ode 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.069908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:17:57.069950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:17:57.069968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:17:57.070171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.070226Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:17:57.070352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:17:57.070400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:17:57.070439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:17:57.070471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:17:57.070503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:17:57.070534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:17:57.070563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:17:57.070607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for 
txid 101:0 2025-04-04T17:17:57.070652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:17:57.070677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:17:57.070703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:17:57.070732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T17:17:57.071444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.071542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.071580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:17:57.071629Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:17:57.071675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.072652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.072718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.072742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:17:57.072768Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:17:57.072793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:17:57.072849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:17:57.075103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:17:57.075824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:17:57.075980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:17:57.076015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:17:57.076311Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 
2025-04-04T17:17:57.076364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.076387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:303:2294] TestWaitNotification: OK eventTxId 101 2025-04-04T17:17:57.076743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.076891Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 173us result status StatusSuccess 2025-04-04T17:17:57.077183Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-04-04T17:17:57.079447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.079648Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-04T17:17:57.079698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 
2025-04-04T17:17:57.079736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-04-04T17:17:57.081515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.081623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:17:57.081795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:17:57.081837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:17:57.082273Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:17:57.082356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.082385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:311:2302] TestWaitNotification: OK eventTxId 102 2025-04-04T17:17:57.082773Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.082918Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 144us result status StatusPathDoesNotExist 2025-04-04T17:17:57.083053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058]
recipient: [1:107:2139] 2025-04-04T17:17:56.772372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.772525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:17:56.773610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.844728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.844786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:56.856519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.856712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.856845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.865368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.865596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.873176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.874756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.882173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.897020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-04-04T17:17:56.903184Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:56.994199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:56.995870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.997650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:17:56.999740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:56.999794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.002709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.002804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.002827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.004256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.004300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.004330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.005521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.005559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.005586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.005625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.009603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.011162Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.011310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:17:57.012090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.012191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.012305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.012533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.012577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.012724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.012819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.014559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.014726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.015014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.015052Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.015125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.015154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.015206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.015240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.015270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.015300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.015337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.015368Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.015417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.015448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.015474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:17:57.017032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.017131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.017164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... nt reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:17:57.088172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 125, subscribers: 0 2025-04-04T17:17:57.089069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-04T17:17:57.090404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-04-04T17:17:57.090495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2025-04-04T17:17:57.090848Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.090987Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 137us result status StatusSuccess 2025-04-04T17:17:57.091191Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.091611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.091734Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 87us result status StatusSuccess 2025-04-04T17:17:57.091892Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-04-04T17:17:57.092099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-04-04T17:17:57.092125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-04-04T17:17:57.092241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-04-04T17:17:57.092255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-04-04T17:17:57.092288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-04-04T17:17:57.092303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-04-04T17:17:57.092732Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-04T17:17:57.092791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-04T17:17:57.092814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.092835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:339:2330] 2025-04-04T17:17:57.092932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.092946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:339:2330] 2025-04-04T17:17:57.092978Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-04T17:17:57.093015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.093027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 127 2025-04-04T17:17:57.093407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.093520Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 146us result status StatusSuccess 2025-04-04T17:17:57.093671Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 TestModificationResults wait txId: 128 2025-04-04T17:17:57.096947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.097159Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-04-04T17:17:57.097238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-04-04T17:17:57.097334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T17:17:57.098736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-04-04T17:17:57.098839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128

------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD]
Test command err:
Recv 65537 2025-04-04T17:17:57.187351Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:57.187956Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-04-04T17:17:57.187997Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-04-04T17:17:57.188137Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0]
PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-04-04T17:17:57.188190Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-04-04T17:17:57.188330Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-04-04T17:17:57.188385Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-04-04T17:17:57.188439Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-04-04T17:17:57.188576Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2025-04-04T17:17:57.188624Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-04-04T17:17:57.188680Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:17:56.772384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.772572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:17:56.773648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.853699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.853746Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:56.864236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.864450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.864600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.873870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.874168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.874819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.875456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.882536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.897070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.903554Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:56.994197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:56.995848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.997670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-04-04T17:17:56.999747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:56.999814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.002696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.002778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.002809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.004525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.004566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.004596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.006015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.006054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.006092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.006135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.008914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.010170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.010357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:17:57.011166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.011262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.011316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.012273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.012321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.012487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.012563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.014162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.014340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.014640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014675Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.014755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.014786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.014837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.014860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.014888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.014911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.014958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.014980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.015017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.015040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.015063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:17:57.016256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.016326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.016348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... satisfy waiter [1:371:2362] 2025-04-04T17:17:57.115812Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-04T17:17:57.115890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.115923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:371:2362] 2025-04-04T17:17:57.116005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-04T17:17:57.116047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.116065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:371:2362] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-04-04T17:17:57.116542Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.116725Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 227us result status StatusSuccess 2025-04-04T17:17:57.116985Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.117650Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.117855Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 168us result status StatusSuccess 2025-04-04T17:17:57.118124Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.118894Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.119022Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 129us result status StatusSuccess 2025-04-04T17:17:57.120665Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.121166Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.121332Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 164us result status StatusSuccess 2025-04-04T17:17:57.121559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.122011Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.122175Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 138us result status StatusSuccess 2025-04-04T17:17:57.122463Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: 
EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:17:56.772402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.772565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:17:56.773644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774830Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.840650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.840708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:56.852938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.853212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.853451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.867447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.867656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.873198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.874993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.882102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.897001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.903270Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:57.013074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.013282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.013491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:17:57.013700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:57.013750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.015866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.016010Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.016210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.016298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.016352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.016385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.018121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.018171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.018217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.019719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.019768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.019807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.019861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.023755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.025349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.025512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:17:57.026378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.026495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.026540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.026807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.026882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.027036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.027108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.029190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.029238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.029417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.029460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.029794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.029877Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.029976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.030024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.030073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.030109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.030144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.030182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.030222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.030278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.030360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.030397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.030431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:17:57.031968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.032041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.032067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
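The DoExecute / send-to-populator / TEvUpdateAck / AckPublish records above trace schemeshard's scheme-board publication protocol: every path touched by a transaction is described, handed to the populator, and the transaction stays "Publication in-flight" until an ack for the expected version of each path has come back, at which point the log prints "Publication complete, notify & remove". Below is a minimal sketch of that bookkeeping, assuming simplified types — `TPathId`, `TPublicationTracker`, and its methods are illustrative names, not the actual YDB classes; only the counting behavior mirrors the trace.

```cpp
// Illustrative model of the ack counting visible in the trace above
// ("Publication in-flight, count: 2" -> "count: 1" -> "Publication complete").
// Hypothetical types; the real schemeshard implementation differs.
#include <cstdint>
#include <map>
#include <tuple>

struct TPathId {
    uint64_t OwnerId = 0;
    uint64_t LocalPathId = 0;
    bool operator<(const TPathId& rhs) const {
        return std::tie(OwnerId, LocalPathId) < std::tie(rhs.OwnerId, rhs.LocalPathId);
    }
};

class TPublicationTracker {
public:
    // Analogue of TTxPublishToSchemeBoard sending a path description to the
    // populator: remember which version we expect to see acknowledged.
    void StartPublication(uint64_t txId, TPathId pathId, uint64_t version) {
        Expected[txId][pathId] = version;
    }

    // Analogue of handling TEvUpdateAck: retire the entry once the acked
    // version has caught up; return true when the whole tx is published.
    bool AckPublish(uint64_t txId, TPathId pathId, uint64_t version) {
        auto tx = Expected.find(txId);
        if (tx == Expected.end())
            return false;
        auto path = tx->second.find(pathId);
        if (path != tx->second.end() && version >= path->second)
            tx->second.erase(path);      // this path is now published
        if (!tx->second.empty())
            return false;                // "Publication in-flight, count: N"
        Expected.erase(tx);              // "Publication complete, notify & remove"
        return true;
    }

private:
    std::map<uint64_t, std::map<TPathId, uint64_t>> Expected;
};
```

Comparing versions rather than just counting acks matters because, as in the trace, acks carry an explicit `Version:` field per path; only an ack at or above the published version should retire the entry.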
LocalPathId: 3] was 2 2025-04-04T17:17:57.077938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:17:57.079238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:17:57.079674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.079708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.079819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:17:57.079886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:17:57.079978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.080015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T17:17:57.080047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-04T17:17:57.080073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-04T17:17:57.080252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.080290Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:17:57.080372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:17:57.080398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:17:57.080428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:17:57.080462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:17:57.080491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T17:17:57.080525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:17:57.080550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:17:57.080575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:17:57.080628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:17:57.080662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T17:17:57.080701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-04-04T17:17:57.080732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:17:57.081364Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:17:57.081445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:17:57.081474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:17:57.081510Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-04T17:17:57.081547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:17:57.082536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:17:57.082615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:17:57.082644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:17:57.082671Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:17:57.082697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:17:57.082770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:17:57.084123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:17:57.085143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:17:57.085330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:17:57.085360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:17:57.085697Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:17:57.085790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.085817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:324:2315] TestWaitNotification: OK eventTxId 102 2025-04-04T17:17:57.086229Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.086405Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 198us result status StatusSuccess 2025-04-04T17:17:57.086639Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-04T17:17:57.089115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.089346Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-04-04T17:17:57.089413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists:1 2025-04-04T17:17:57.089510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2025-04-04T17:17:57.091357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 
72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-04-04T17:17:57.091538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:17:57.091863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:17:57.091905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:17:57.092391Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:17:57.092479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.092519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:332:2323] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:17:56.772372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.772536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:17:56.773622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.851523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.851585Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:17:56.861348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.861492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.861650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.870823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.871077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.873218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.874819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.881698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.897084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.903509Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:57.016468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.016671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.016892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:17:57.017082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:57.017122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.018978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.019115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.019293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.019341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.019374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.019405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.021027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.021084Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.021119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.022806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.022850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.022903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.022958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.026443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.028247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.028429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:17:57.029350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.029461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.029507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.029743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.029795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.029971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.030071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.031929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.031976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.032087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.032116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.032376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.032413Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.032503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.032534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.032602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.032630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.032657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.032686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.032724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.032754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.032797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.032825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.032850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:17:57.034652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.034779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.034820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
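The "Change state for txid 1:0 2 -> 3", "3 -> 128", and "128 -> 240" lines show the fixed sub-operation progression that every propose in these traces walks through: the handler printed beside each transition is TCreateParts at 2, NSubDomainState::TConfigureParts at 3, TPropose at 128 (waiting for the coordinator's TEvOperationPlan at a FAKE_COORDINATOR plan step), and TDone at 240. A sketch of that progression follows; the numeric values are taken straight from the log, while the enum and function names are assumptions chosen to match the handler names, not YDB's real definitions.

```cpp
// Sketch of the sub-operation state machine implied by the trace
// ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240").
#include <cstdio>

enum class ETxState : int {
    CreateParts    = 2,    // TCreateParts ProgressState ("no shards to create")
    ConfigureParts = 3,    // NSubDomainState::TConfigureParts ProgressState
    Propose        = 128,  // TPropose waits for TEvOperationPlan (plan step)
    Done           = 240,  // TDone ProgressState ("Part operation is done")
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done;  // after plan step
        case ETxState::Done:           return ETxState::Done;
    }
    return ETxState::Done;
}

int main() {
    // Prints the same transitions the schemeshard log records:
    // state 2 -> 3, state 3 -> 128, state 128 -> 240.
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done; s = Next(s))
        std::printf("state %d -> %d\n",
                    static_cast<int>(s), static_cast<int>(Next(s)));
}
```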
on: 8 2025-04-04T17:17:57.108753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:17:57.109303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:17:57.109369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:17:57.109391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:17:57.109463Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:17:57.109494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:17:57.109541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-04T17:17:57.111404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-04T17:17:57.111522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-04T17:17:57.112092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.112207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.112265Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-04-04T17:17:57.112374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-04T17:17:57.112580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.112655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:17:57.113184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:17:57.114191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:17:57.115897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.115959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.116134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:17:57.116216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:17:57.116292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.116324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-04T17:17:57.116389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-04T17:17:57.116425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 FAKE_COORDINATOR: Erasing txId 103 2025-04-04T17:17:57.116726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.116772Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:17:57.116872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:17:57.116930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:17:57.116988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:17:57.117020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:17:57.117054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-04-04T17:17:57.117090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:17:57.117126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:17:57.117158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:17:57.117221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:17:57.117256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:17:57.117316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-04-04T17:17:57.117355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-04-04T17:17:57.117385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-04-04T17:17:57.118070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:17:57.118163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 
72057594046678944, cookie: 103 2025-04-04T17:17:57.118208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:17:57.118265Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T17:17:57.118304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:17:57.119004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:17:57.119089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:17:57.119120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:17:57.119146Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-04-04T17:17:57.119172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:17:57.119226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T17:17:57.122651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:17:57.122995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:17:57.123225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:17:57.123268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:17:57.123664Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:17:57.123768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.123809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:362:2353] TestWaitNotification: OK eventTxId 103 2025-04-04T17:17:57.124317Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.124514Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 203us result status StatusSuccess 2025-04-04T17:17:57.124874Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable 
CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:17:56.772377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.772510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:17:56.773599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.841805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.841856Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:56.853696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.853901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.854055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.864541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.864882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.873184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.874896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.882671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.897089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.903687Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:56.994273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:56.995971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.997685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:17:56.999784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:56.999882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.003047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.003262Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.003335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.003391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.003483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.005629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.005684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.005720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.007719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.007783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.007823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.007886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.011524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.013489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.013674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:17:57.014744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.015192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.015248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.015438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.015529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.017712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.017767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.017938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.017980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.018336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.018391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.018494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.018532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.018614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.018653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.018696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.018737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.018779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.018818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.018897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.018947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.018983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:17:57.020820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.020986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.021024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2025-04-04T17:17:57.098313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.098702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2025-04-04T17:17:57.098824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-04-04T17:17:57.099130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-04-04T17:17:57.101537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.101799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-04-04T17:17:57.105454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.105767Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-04-04T17:17:57.105876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-04-04T17:17:57.106004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot 
have an empty name, at schemeshard: 72057594046678944 2025-04-04T17:17:57.107955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.108131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-04-04T17:17:57.111022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.111371Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-04-04T17:17:57.111466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-04-04T17:17:57.111594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-04-04T17:17:57.113542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.113684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-04-04T17:17:57.116013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.116338Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-04-04T17:17:57.116449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-04-04T17:17:57.116549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-04-04T17:17:57.118403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.118553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-04-04T17:17:57.120826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.121102Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-04-04T17:17:57.121162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-04-04T17:17:57.121293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-04-04T17:17:57.122996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.123164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-04-04T17:17:57.125202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.125498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" 
} } 2025-04-04T17:17:57.125568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-04-04T17:17:57.125678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-04-04T17:17:57.127538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.127671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:17:56.772382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.772518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:17:56.773609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.843691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.843747Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:17:56.856107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.856326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.856457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.865300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.865536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.873205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.874910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.882077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.902798Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:57.007197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:57.007452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.007634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:17:57.007821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:57.007862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.009652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.009798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.009992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.010049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.010084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.010126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.011803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.011849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.011883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.013399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.013444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.013480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.013526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.016977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.018600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.018767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:17:57.019530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.019622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.019656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.019849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.019892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.020027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.020092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.021757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.021804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.021993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.022033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.022343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.022387Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.022471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.022504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.022566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.022613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.022648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.022686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.022718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.022748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.022810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.022850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.022883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:17:57.024560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.024665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.024697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:57.417503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:17:57.417561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-04T17:17:57.419492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2025-04-04T17:17:57.419592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-04-04T17:17:57.419738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.419771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.419904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-04T17:17:57.419988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.420047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:484:2442], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-04T17:17:57.420080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:484:2442], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-04-04T17:17:57.420318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.420353Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.420400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-04-04T17:17:57.420501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.421173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.421239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.421264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-04-04T17:17:57.421303Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-04-04T17:17:57.421348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-04T17:17:57.421963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.422025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.422043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-04-04T17:17:57.422060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-04-04T17:17:57.422086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-04T17:17:57.422139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-04-04T17:17:57.423933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-04-04T17:17:57.424146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:17:57.424626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-04-04T17:17:57.426064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.426172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.426213Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-04-04T17:17:57.426336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2025-04-04T17:17:57.426490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-04-04T17:17:57.426547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-04T17:17:57.426836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-04T17:17:57.427949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.427988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.428089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-04-04T17:17:57.428145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.428170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:484:2442], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-04-04T17:17:57.428193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:484:2442], at schemeshard: 72057594046678944, txId: 129, path id: 5 FAKE_COORDINATOR: Erasing txId 129 2025-04-04T17:17:57.428345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.428378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2025-04-04T17:17:57.428465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-04T17:17:57.428504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-04T17:17:57.428534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-04-04T17:17:57.428555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-04T17:17:57.428582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-04-04T17:17:57.428622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-04-04T17:17:57.428671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-04-04T17:17:57.428703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-04-04T17:17:57.428752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-04T17:17:57.428783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-04-04T17:17:57.428805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-04-04T17:17:57.428826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-04-04T17:17:57.429329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.429391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.429434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-04-04T17:17:57.429468Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-04-04T17:17:57.429506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-04-04T17:17:57.430078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 
4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.430138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-04-04T17:17:57.430165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-04-04T17:17:57.430200Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-04T17:17:57.430222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-04T17:17:57.430292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-04-04T17:17:57.432791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-04-04T17:17:57.432971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] |91.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith >> TestProgram::Like >> TestProgram::YqlKernelContains ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:17:56.772388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:17:56.772468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-04-04T17:17:56.772530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:17:56.772567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:17:56.773585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:17:56.773659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:17:56.773742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:17:56.773823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:17:56.774786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:56.844848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:56.844903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:56.859187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:56.859430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:17:56.859612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:17:56.868522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:17:56.868756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:17:56.873197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:56.874814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:17:56.882062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:56.896834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:56.897044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:17:56.897104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:56.897153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:17:56.897284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.903539Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:17:56.995728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:17:56.995908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:56.997653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:17:56.999752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:17:56.999839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:17:57.002709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.002758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:17:57.002789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:17:57.002832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:17:57.004447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.004487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:17:57.004512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:17:57.005806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.005841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.005868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.005919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.008895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:17:57.010270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:17:57.010388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-04-04T17:17:57.011066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.011153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.011189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.012296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:17:57.012352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:17:57.012496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:17:57.012550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:17:57.013956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.014118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:17:57.014407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.014443Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:17:57.014526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.014567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.014620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:17:57.014643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.014674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:17:57.014713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:17:57.014740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:17:57.014763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:17:57.014810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.014838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:17:57.014861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-04-04T17:17:57.016044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.016161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:17:57.016190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-04-04T17:17:57.755610Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:17:57.755728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:17:57.755782Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2025-04-04T17:17:57.755919Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:17:57.756085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:17:57.756148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:17:57.756189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:17:57.756995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:17:57.758114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:17:57.759245Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:17:57.759296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:17:57.759430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:17:57.759522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:17:57.759575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:17:57.759665Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:17:57.759697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:17:57.759729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-04T17:17:57.759754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-04-04T17:17:57.759775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:17:57.759933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:17:57.759989Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:17:57.760085Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:17:57.760122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:17:57.760163Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:17:57.760192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:17:57.760233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:17:57.760273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:17:57.760305Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:17:57.760335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:17:57.760395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:17:57.760429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:17:57.760466Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-04T17:17:57.760507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:17:57.760553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T17:17:57.760576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:17:57.761598Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.761673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.761708Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:17:57.761749Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:17:57.761807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:17:57.762900Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.762968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.762999Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:17:57.763028Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:17:57.763084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:17:57.763703Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.763768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:17:57.763801Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:17:57.763824Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:17:57.763848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:17:57.763924Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:17:57.765534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:17:57.766396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:17:57.767504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:17:57.767708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:17:57.767760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:17:57.768131Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:17:57.768213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:17:57.768252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:335:2326] TestWaitNotification: OK eventTxId 101 2025-04-04T17:17:57.768708Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:17:57.768899Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 232us result status StatusSuccess 2025-04-04T17:17:57.769244Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheByAst [GOOD] Test command err: Trying to start YDB, gRPC: 21493, MsgBus: 4435 2025-04-04T17:17:19.845127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:19.845670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:19.845726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e8/r3tmp/tmpPE0azC/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21493, node 1 2025-04-04T17:17:20.357146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:20.361415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:20.361505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:20.361552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:20.362015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:17:20.408763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:20.408891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:20.420824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4435 TClient is connected to server localhost:4435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:20.685040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:20.777713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:21.138029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
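For reference, the ExternalTable describe result printed a little earlier in this log (StatusSuccess for /MyRoot/ExternalTable) corresponds to an ObjectStorage-backed external table with a single Decimal(35,9) column. A minimal YQL sketch of matching DDL follows; the data source location and auth method are assumptions, since the test's actual statements are not included in the log:

    CREATE EXTERNAL DATA SOURCE `/MyRoot/ExternalDataSource` WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "s3://some-bucket/",  -- assumption: the real endpoint is not in the log
        AUTH_METHOD = "NONE"             -- assumption
    );
    CREATE EXTERNAL TABLE `/MyRoot/ExternalTable` (
        key Decimal(35, 9)               -- matches TypeInfo { DecimalPrecision: 35 DecimalScale: 9 }
    ) WITH (
        DATA_SOURCE = "/MyRoot/ExternalDataSource",
        LOCATION = "/"                   -- matches Location: "/" in the describe result
    );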
2025-04-04T17:17:21.551181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:21.865308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:22.569559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1812:3408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.569697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.591518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.774712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.033489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.296541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.553701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:23.890968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.200202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2397:3859], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.200342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.200806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2402:3864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:24.206677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:24.356699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2404:3866], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:24.403847Z node 1 :TX_PROXY ERROR: Actor# [1:2465:3908] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:25.640399Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2702:4098] TxId: 281474976715671. Ctx: { TraceId: 01jr0wxcr27h816gqaxsb9z29m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVjM2YwZWUtYjMzZTkzNjMtOWQ4NjAxYzYtZTg3OGIwNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-04-04T17:17:25.641183Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2711:4123], TxId: 281474976715671, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDVjM2YwZWUtYjMzZTkzNjMtOWQ4NjAxYzYtZTg3OGIwNTI=. TraceId : 01jr0wxcr27h816gqaxsb9z29m. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2702:4098], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:25.642704Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2709:4121], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDVjM2YwZWUtYjMzZTkzNjMtOWQ4NjAxYzYtZTg3OGIwNTI=. CustomerSuppliedId : . TraceId : 01jr0wxcr27h816gqaxsb9z29m. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2702:4098], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:25.643006Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2710:4122], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jr0wxcr27h816gqaxsb9z29m. SessionId : ydb://session/3?node_id=1&id=NDVjM2YwZWUtYjMzZTkzNjMtOWQ4NjAxYzYtZTg3OGIwNTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2702:4098], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:25.643583Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDVjM2YwZWUtYjMzZTkzNjMtOWQ4NjAxYzYtZTg3OGIwNTI=, ActorId: [1:2683:4098], ActorState: ExecuteState, TraceId: 01jr0wxcr27h816gqaxsb9z29m, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 64095, MsgBus: 28926 2025-04-04T17:17:29.382302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:29.382429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:29.382665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e8/r3tmp/tmp7RYhMp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64095, node 2 2025-04-04T17:17:29.773112Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:29.773757Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:29.773804Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:29.773839Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:29.774117Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:17:29.821142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:29.821306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:29.832735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28926 TClient is connected to server localhost:28926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 ... n type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:47.489522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:49.923551Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489508432855868596:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:49.923645Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:49.966136Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:49.994370Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:50.021882Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:50.050000Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:50.081462Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:50.149516Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:50.225332Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489508437150836413:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:50.225392Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489508437150836418:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:50.225437Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:50.228769Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:50.238103Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489508437150836420:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:50.311394Z node 5 :TX_PROXY ERROR: Actor# [5:7489508437150836472:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:51.514511Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489508419970964932:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:51.514673Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64291, MsgBus: 1684 2025-04-04T17:17:52.229090Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489508446513044527:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:52.229169Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e8/r3tmp/tmpBjPl54/pdisk_1.dat 2025-04-04T17:17:52.333709Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64291, node 6 2025-04-04T17:17:52.370060Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:52.370168Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:52.371602Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:52.396054Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:52.396074Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:52.396079Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:52.396186Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1684 TClient is connected to server localhost:1684 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
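The recurring pattern in the lines above — a pool fetch failing with NOT_FOUND, an ESchemeOpCreateResourcePool suboperation, a scheduled retry on "Transaction ... completed, doublechecking", and finally a tolerated "path exist, request accepts it" race — is the workload service lazily materializing the default resource pool under /Root/.metadata/workload_manager/pools/. A minimal YQL sketch of the same lifecycle, with assumed settings (the options the service actually applies are not visible in the log):

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- assumption: real value not printed in the log
        QUEUE_SIZE = -1               -- assumption
    );
    -- teardown, as issued verbatim later in this log by the workload service test:
    DROP RESOURCE POOL default;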
2025-04-04T17:17:52.962611Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:52.978328Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:53.050774Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:53.182375Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:53.264799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:55.280550Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508459397948185:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:55.280634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:55.318997Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:55.347192Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:55.374914Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:55.400094Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:55.424574Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:55.464837Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:55.502941Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508459397948692:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:55.503001Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:55.503055Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508459397948697:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:55.506515Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:55.515058Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489508459397948699:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:55.575292Z node 6 :TX_PROXY ERROR: Actor# [6:7489508459397948753:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TestProgram::YqlKernelEndsWith [GOOD] >> TestProgram::Like [GOOD] >> TestProgram::YqlKernelContains [GOOD] >> KqpStats::SysViewClientLost [FAIL] >> KqpTypes::DyNumberCompare >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] |91.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"7,9","output":"15"},"fetch":"7,9","drop":"7,9"},{"processor":{"internal":{},"type":"Projection","input":"15"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"7,9","output":"15"},"fetch":"7,9","drop":"7,9"},{"processor":{"internal":{},"type":"Projection","input":"15"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Const","output":"16"}},{"processor":{"internal":{},"type":"Calculation","input":"7,16","output":"17"},"fetch":"7","drop":"16"},{"processor":{"internal":{},"type":"Calculation","input":"7,15","output":"18"},"drop":"7,15"},{"processor":{"internal":{},"type":"Calculation","input":"17","output":"19"},"drop":"17"},{"processor":{"internal":{},"type":"Calculation","input":"18","output":"20"},"drop":"18"},{"processor":{"internal":{},"type":"Calculation","input":"19,20","output":"21"},"drop":"19,20"},{"processor":{"internal":{},"type":"Projection","input":"21"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow11BooleanTypeE; |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel >> TestProgram::YqlKernelStartsWithScalar >> TestProgram::YqlKernel [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2025-04-04T17:15:00.711899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507707374749643:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:15:00.711930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0017b0/r3tmp/tmpry4kI2/pdisk_1.dat 2025-04-04T17:15:01.117820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:15:01.121890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:15:01.121990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:15:01.125134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4485, node 1 2025-04-04T17:15:01.188969Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:15:01.188991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:15:01.188997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:15:01.190269Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:15:01.464053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
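Stepping back to the TestProgram::Like plan printed earlier: the parsed program decomposes a LIKE predicate over column Id 7 into a StartsWith kernel against the constant "uid" and an EndsWith kernel against the constant "001", then combines the two intermediate results into the single projected column. In YQL terms, the tested predicate is roughly the following sketch (Col7 is a placeholder, as the column's real name is not printed in the plan):

    SELECT Col7
    FROM input
    WHERE Col7 LIKE 'uid%001';
    -- per the plan above, evaluated as StartsWith(Col7, 'uid') combined with EndsWith(Col7, '001')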
2025-04-04T17:15:03.447035Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:15:03.449975Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTViODMwYzYtNTY2Njk4MTEtYmY2MTg1OTYtNmM0NjUwNjE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTViODMwYzYtNTY2Njk4MTEtYmY2MTg1OTYtNmM0NjUwNjE= 2025-04-04T17:15:03.472591Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:15:03.472918Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:15:03.479409Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507720259652098:2327], Start check tables existence, number paths: 2 2025-04-04T17:15:03.479802Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTViODMwYzYtNTY2Njk4MTEtYmY2MTg1OTYtNmM0NjUwNjE=, ActorId: [1:7489507720259652099:2328], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:03.479929Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-04-04T17:15:03.485872Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507720259652098:2327], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:15:03.485920Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507720259652098:2327], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:15:03.485959Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7489507720259652098:2327], Successfully finished 2025-04-04T17:15:03.486037Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:15:03.486676Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507720259652125:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:03.495056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:15:03.502417Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507720259652125:2299], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-04-04T17:15:03.505298Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507720259652125:2299], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-04-04T17:15:03.513533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507720259652125:2299], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:15:03.566373Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507720259652125:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-04-04T17:15:03.570196Z node 1 :TX_PROXY ERROR: Actor# [1:7489507720259652176:2331] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:15:03.570468Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507720259652125:2299], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-04-04T17:15:03.573017Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM= 2025-04-04T17:15:03.573294Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:15:03.573305Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-04-04T17:15:03.573358Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM=, ActorId: [1:7489507720259652184:2330], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:15:03.573541Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM=, ActorId: [1:7489507720259652184:2330], ActorState: ReadyState, TraceId: 01jr0ws20n0nas42e81wvwezh7, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7489507720259652183:2337] database: Root databaseId: /Root pool id: sample_pool_id 2025-04-04T17:15:03.573576Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7489507720259652184:2330], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM= 2025-04-04T17:15:03.573618Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507720259652186:2331], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:15:03.573704Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507720259652187:2332], Database: /Root, Start database fetching 2025-04-04T17:15:03.574842Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7489507720259652187:2332], Database: /Root, Database info successfully fetched, serverless: 0 2025-04-04T17:15:03.574895Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-04-04T17:15:03.574946Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507720259652196:2333], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM=, Start pool fetching 2025-04-04T17:15:03.574988Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7489507720259652198:2334], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:15:03.575152Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507720259652186:2331], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:15:03.575201Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-04-04T17:15:03.575230Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-04-04T17:15:03.575487Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507720259652200:2335], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-04-04T17:15:03.575552Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507720259652198:2334], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-04-04T17:15:03.575600Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7489507720259652196:2333], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM=, Pool info successfully resolved 2025-04-04T17:15:03.575626Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM= 2025-04-04T17:15:03.575669Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7489507720259652200:2335], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7489507720259652184:2330], session id: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM= 2025-04-04T17:15:03.575705Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODk4ZGJjNDYtZGYzNGM3YmEtM2MyNDEwNDEtMzI0ODVlMDM= 2025-04-04T17:15:03.575733Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:15:03.575746Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] ... 
wadrc8n7b, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:16:30.463804Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWQ2NzRhYmQtYmU2MzU2YS04ODEwYTA2ZC0zZjgyMjkwNQ==, ActorId: [6:7489508093411400049:2441], ActorState: ExecuteState, TraceId: 01jr0wvpswephxc4awadrc8n7b, EndCleanup, isFinal: 1 2025-04-04T17:16:30.463853Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWQ2NzRhYmQtYmU2MzU2YS04ODEwYTA2ZC0zZjgyMjkwNQ==, ActorId: [6:7489508093411400049:2441], ActorState: ExecuteState, TraceId: 01jr0wvpswephxc4awadrc8n7b, Sent query response back to proxy, proxyRequestId: 15, proxyId: [6:7489508063346627999:2274] 2025-04-04T17:16:30.463914Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWQ2NzRhYmQtYmU2MzU2YS04ODEwYTA2ZC0zZjgyMjkwNQ==, ActorId: [6:7489508093411400049:2441], ActorState: unknown state, TraceId: 01jr0wvpswephxc4awadrc8n7b, Cleanup temp tables: 0 2025-04-04T17:16:30.464169Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWQ2NzRhYmQtYmU2MzU2YS04ODEwYTA2ZC0zZjgyMjkwNQ==, ActorId: [6:7489508093411400049:2441], ActorState: unknown state, TraceId: 01jr0wvpswephxc4awadrc8n7b, Session actor destroyed 2025-04-04T17:16:30.471446Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: ReadyState, TraceId: 01jr0wvpw6970gpjp4w9fy2g7b, received request, proxyRequestId: 18 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; DROP RESOURCE POOL default; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-04-04T17:16:30.492107Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489508080526497688:2337], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2025-04-04T17:16:30.492246Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:16:30.492333Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508093411400123:2456], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-04-04T17:16:30.492668Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508093411400123:2456], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-04-04T17:16:30.492752Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-04-04T17:16:30.498631Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489508080526497887:2360], DatabaseId: /Root, PoolId: default, Got delete notification 2025-04-04T17:16:30.498752Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2025-04-04T17:16:30.498868Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508093411400139:2457], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-04-04T17:16:30.499679Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508093411400139:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.499825Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:30.509698Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: ExecuteState, TraceId: 01jr0wvpw6970gpjp4w9fy2g7b, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7489508093411400110:2331] WorkloadServiceCleanup: 0 2025-04-04T17:16:30.512074Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: CleanupState, TraceId: 01jr0wvpw6970gpjp4w9fy2g7b, EndCleanup, isFinal: 0 2025-04-04T17:16:30.512171Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: CleanupState, TraceId: 01jr0wvpw6970gpjp4w9fy2g7b, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7489508063346627999:2274] Wait pool handlers 0.000029s: number handlers = 2 Wait pool handlers 1.000175s: number handlers = 2 Wait pool handlers 2.000316s: number handlers = 2 Wait pool handlers 3.003700s: number handlers = 2 Wait pool handlers 4.003853s: number handlers = 2 Wait pool handlers 5.007502s: number handlers = 2 Wait pool handlers 6.011505s: number handlers = 2 Wait pool handlers 7.011855s: number handlers = 2 2025-04-04T17:16:38.202668Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:16:38.202704Z node 6 :IMPORT WARN: Table profiles were not loaded Wait pool handlers 8.012390s: number handlers = 2 Wait pool handlers 9.015498s: number handlers = 2 Wait pool handlers 10.018882s: number handlers = 2 Wait pool handlers 11.019021s: number handlers = 2 Wait pool handlers 12.023503s: number handlers = 2 Wait pool handlers 13.024101s: number handlers = 2 2025-04-04T17:16:44.024101Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489508080526497688:2337], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 14.025035s: number handlers = 2 Wait pool handlers 15.025170s: number handlers = 2 Wait pool handlers 16.025309s: number handlers = 2 Wait pool handlers 17.027503s: number handlers = 2 Wait pool handlers 18.027632s: number handlers = 2 Wait pool handlers 19.027753s: number handlers = 2 Wait pool handlers 20.030609s: number handlers = 2 Wait pool handlers 21.032415s: number handlers = 2 Wait pool handlers 22.033146s: number handlers = 2 Wait pool handlers 23.035500s: number handlers = 2 Wait pool handlers 24.039503s: number handlers = 2 Wait pool handlers 25.039628s: number handlers = 2 Wait pool handlers 26.040549s: number handlers = 2 Wait pool handlers 27.040668s: number handlers = 2 Wait pool handlers 28.042299s: number handlers = 2 Wait pool handlers 29.043496s: number handlers = 2 Wait pool handlers 30.047496s: number handlers = 2 Wait pool handlers 31.051353s: number handlers = 2 Wait pool handlers 32.051504s: number handlers = 2 Wait pool handlers 33.051759s: number handlers = 2 Wait pool handlers 34.051895s: number handlers = 2 Wait pool handlers 35.053560s: number handlers = 2 Wait pool handlers 36.055493s: number handlers = 2 Wait pool handlers 37.059500s: number handlers = 2 Wait pool handlers 38.060034s: number handlers = 2 Wait pool handlers 39.060891s: number handlers = 2 Wait pool handlers 40.061014s: number handlers = 2 Wait pool 
handlers 41.063490s: number handlers = 2 Wait pool handlers 42.063616s: number handlers = 2 Wait pool handlers 43.063917s: number handlers = 2 Wait pool handlers 44.064169s: number handlers = 2 Wait pool handlers 45.064295s: number handlers = 2 Wait pool handlers 46.067501s: number handlers = 2 Wait pool handlers 47.071497s: number handlers = 2 Wait pool handlers 48.071752s: number handlers = 2 Wait pool handlers 49.071875s: number handlers = 2 Wait pool handlers 50.072212s: number handlers = 2 Wait pool handlers 51.075500s: number handlers = 2 Wait pool handlers 52.079501s: number handlers = 2 Wait pool handlers 53.080261s: number handlers = 2 Wait pool handlers 54.083493s: number handlers = 2 Wait pool handlers 55.087493s: number handlers = 2 Wait pool handlers 56.091495s: number handlers = 2 Wait pool handlers 57.091659s: number handlers = 2 Wait pool handlers 58.095501s: number handlers = 2 Wait pool handlers 59.099492s: number handlers = 2 Wait pool handlers 60.100268s: number handlers = 2 Wait pool handlers 61.100845s: number handlers = 2 Wait pool handlers 62.100988s: number handlers = 2 Wait pool handlers 63.107492s: number handlers = 2 Wait pool handlers 64.107583s: number handlers = 2 Wait pool handlers 65.107692s: number handlers = 2 Wait pool handlers 66.108073s: number handlers = 2 Wait pool handlers 67.108201s: number handlers = 2 Wait pool handlers 68.108326s: number handlers = 2 Wait pool handlers 69.108453s: number handlers = 2 Wait pool handlers 70.108574s: number handlers = 2 Wait pool handlers 71.108708s: number handlers = 2 Wait pool handlers 72.108848s: number handlers = 2 Wait pool handlers 73.108968s: number handlers = 2 Wait pool handlers 74.109098s: number handlers = 2 Wait pool handlers 75.109204s: number handlers = 2 Wait pool handlers 76.109358s: number handlers = 2 Wait pool handlers 77.109561s: number handlers = 2 Wait pool handlers 78.109693s: number handlers = 2 Wait pool handlers 79.109851s: number handlers = 2 Wait pool handlers 80.109954s: number handlers = 2 Wait pool handlers 81.110094s: number handlers = 2 Wait pool handlers 82.110239s: number handlers = 2 Wait pool handlers 83.110380s: number handlers = 2 Wait pool handlers 84.110522s: number handlers = 2 Wait pool handlers 85.110663s: number handlers = 2 Wait pool handlers 86.110802s: number handlers = 2 2025-04-04T17:17:57.229479Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489508080526497887:2360], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2025-04-04T17:17:57.229489Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7489508080526497688:2337], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-04-04T17:17:57.229621Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2025-04-04T17:17:57.229649Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2025-04-04T17:17:57.633498Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:17:57.633567Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: 
[6:7489508080526497612:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:17:57.633598Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:17:57.633632Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:17:57.633713Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=Y2E4Y2Q4OWUtYzY1OGE2MTQtNWU5MjdjOTYtNmVmNGYzNjQ=, ActorId: [6:7489508080526497612:2331], ActorState: unknown state, Session actor destroyed |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TPersQueueTest::DefaultMeteringMode [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"3,4","output":"15"},"fetch":"3,4","drop":"3,4"},{"processor":{"internal":{},"type":"Projection","input":"15"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int32TypeE; |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.3%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"7,15","output":"16"},"fetch":"7","drop":"7,15"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] Test command err: 2025-04-04T17:17:50.596526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:50.596822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:50.596869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed0/r3tmp/tmpR8KkCc/pdisk_1.dat 2025-04-04T17:17:50.909084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:50.945512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:50.983305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:50.983441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:50.994568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:51.073169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:51.352902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:51.599417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:51.599504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:837:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:51.599557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:51.603598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:51.758980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:841:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:51.836866Z node 1 :TX_PROXY ERROR: Actor# [1:901:2730] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:52.136131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wy63da3g1ka392h58xcmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTc4ODg0NGQtM2M3ZDQwNDQtMWM5Mzk4NDEtZDg2YmUxYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:52.218377Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wy6n08myyavtx1y6gxyf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNkMWY3NGQtYjNkMWFmZGQtZjA5MWNlMWUtNmVmYjk4ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-04T17:17:52.869579Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wy796bfvy6n0kqb1nqqaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdmMTY3ZmQtNzYzYjYxN2EtZTA0MzE2MmQtOWEzOGM5ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:52.941959Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0wy7bb7nxd546974f4gc23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2JlOGQ0YjMtMzE3NjI5ZDgtOGYyNDcwMjctZDM2NTZlYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-04-04T17:17:55.996850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:55.996924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:17:55.997059Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed0/r3tmp/tmpkMCLEd/pdisk_1.dat 2025-04-04T17:17:56.232189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:17:56.253929Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:56.289245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:56.289360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:56.300499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:56.382389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:56.650267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:17:56.905501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:829:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:56.905576Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:839:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:56.905631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:56.910317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:57.064094Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:843:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:17:57.099286Z node 2 :TX_PROXY ERROR: Actor# [2:903:2730] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:57.146180Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wyb986rnbaz0vrc0j8sxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmIwZWQzZjAtMmIyM2RiYTctYjg4MjhiMDEtZTk0ZGZjNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:57.199764Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wybhc651q85jrjbcx8439, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjliZjQ0Yi01MDg3NWVhMi02ZTZjNWY0LTQxMGU4ODg4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:57.730322Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wybrq7ytph2k83321ermv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDBhOWZmZjMtZGM0NzdhNWYtZjliMWI3NDUtYWM0MDhjYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-04-04T17:17:58.056469Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0wycbxc4kxrv8ks29aksy2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzkzYzNmODktZmEyOGQ4Mi1jZmM2YzkzOS0xNDEwYTcxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:58.165214Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0wycdsd44ef9s7d2jzctde, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDBhOWZmZjMtZGM0NzdhNWYtZjliMWI3NDUtYWM0MDhjYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:17:58.259165Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0wycgs4w8n607fgk9nh46s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDBhOWZmZjMtZGM0NzdhNWYtZjliMWI3NDUtYWM0MDhjYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:17:58.319412Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDBhOWZmZjMtZGM0NzdhNWYtZjliMWI3NDUtYWM0MDhjYTk=, ActorId: [2:974:2778], ActorState: ExecuteState, TraceId: 01jr0wyckq01zx1d4k0kshp2eg, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> TColumnEngineTestLogs::IndexReadWithPredicates >> TestProgram::NumRowsWithNulls [GOOD] >> TestProgram::JsonExists [GOOD] >> TestProgram::JsonExistsBinary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; >> TestProgram::JsonExistsBinary [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] >> TestProgram::JsonValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"5,15","output":"16"},"fetch":"5","drop":"15,5"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"2","output":"10001"},"fetch":"2","drop":"2"},{"processor":{"internal":{},"type":"Filter","input":"10001"},"drop":"10001"},{"processor":{"internal":{},"type":"Calculation","output":"10002"}},{"processor":{"internal":{},"type":"Projection","input":"10002"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE; |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.4%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232 ... 
t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
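The portion-visibility trace above and below illustrates predicate pruning in TColumnEngineTestLogs::IndexReadWithPredicates: under the read snapshot (plan_step=3, tx_id=1) every portion is visible, but the engine reports portion_selected only for the portions whose data can match the predicate (portions 1-9) and portion_skipped for the rest. A minimal sketch of a predicated read that yields this select/skip pattern follows; the table path, the bound, and the use of the timestamp column in the WHERE clause are assumptions, since the log does not show the test's actual predicate:

    -- Sketch only (assumed path and bound): a range predicate on the leading
    -- sort key lets the column engine read portions whose key range overlaps
    -- the bound and skip the rest.
    SELECT timestamp, resource_type, resource_id, uid, message  -- column names as serialized above
    FROM `/Root/olap_table`                                     -- hypothetical table path
    WHERE timestamp < Timestamp("1970-01-01T00:00:10Z");        -- hypothetical bound
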
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"6,15","output":"16"},"fetch":"6","drop":"15,6"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/engines/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.4%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DefaultMeteringMode [GOOD] Test command err: 2025-04-04T17:13:06.698902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507218014601794:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.698952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.926325Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:13:06.937739Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e39/r3tmp/tmptC1Rl7/pdisk_1.dat 2025-04-04T17:13:06.991202Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:13:07.214489Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:07.249769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.249856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.251042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.251087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.263948Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:13:07.264059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:07.265162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19719, node 1 2025-04-04T17:13:07.489685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e39/r3tmp/yandexRRGBKZ.tmp 2025-04-04T17:13:07.489707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e39/r3tmp/yandexRRGBKZ.tmp 2025-04-04T17:13:07.489891Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e39/r3tmp/yandexRRGBKZ.tmp 2025-04-04T17:13:07.490006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:07.570496Z INFO: TTestServer started on Port 29873 GrpcPort 19719 TClient is connected to server localhost:29873 PQClient connected to localhost:19719 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:07.877699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:07.926463Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:07.944624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:13:07.956986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:13:08.230770Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:10.355550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507238516572958:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.355550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489507238516572932:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.355655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.358651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507235194472037:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.358731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507235194472076:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.358784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:10.361904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:13:10.368004Z node 2 :TX_PROXY ERROR: Actor# [2:7489507238516572970:2172] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:13:10.411177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489507238516572969:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:13:10.412974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507235194472078:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:13:10.495677Z node 2 :TX_PROXY ERROR: Actor# [2:7489507238516572996:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:10.507125Z node 1 :TX_PROXY ERROR: Actor# [1:7489507235194472170:2758] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:10.560109Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489507235194472180:2349], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:10.561221Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2I2ZTdkODctNjgxMTA2NGMtMzgwMDYyY2ItNzdmMzEwOTI=, ActorId: [1:7489507235194472035:2337], ActorState: ExecuteState, TraceId: 01jr0wnkdzfrjycf26ax47arjb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:10.559592Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489507238516573003:2321], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:13:10.561155Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTYwMWI1MTgtNWZlYWVhMzMtZmE4ZjhhZDYtNzEyOTE0MjU=, ActorId: [2:7489507238516572928:2310], ActorState: ExecuteState, TraceId: 01jr0wnkdsbrv2q4ayhvtyky86, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:13:10.563537Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:10.563543Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:13:10.572659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.669323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:10.835235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Ini ... 2025-04-04T17:17:57.927766Z node 30 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'PQ/ttt' partition 0 generation 1 [30:7489508469876062867:2339] 2025-04-04T17:17:57.927830Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic PQ/ttt partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:17:57.927888Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Process pending events. 
Count 1 2025-04-04T17:17:57.928114Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvProposePartitionConfigResult Step 1743787077963, TxId 281474976710672, Partition 0 2025-04-04T17:17:57.928161Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] Handle TEvProposePartitionConfigResult 2025-04-04T17:17:57.928200Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] Partition responses 1/1 2025-04-04T17:17:57.928228Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATING 2025-04-04T17:17:57.928259Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State CALCULATING 2025-04-04T17:17:57.928299Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State CALCULATING FrontTxId 281474976710672 2025-04-04T17:17:57.928326Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-04T17:17:57.928373Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState CALCULATED 2025-04-04T17:17:57.928441Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from CALCULATING to CALCULATED 2025-04-04T17:17:57.928944Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] save tx TxId: 281474976710672 State: CALCULATED MinStep: 1743787077893 MaxStep: 18446744073709551615 Step: 1743787077963 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7489508419937475965 RawX2: 124554053780 } Partitions { Partition { PartitionId: 0 } } 2025-04-04T17:17:57.929134Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:17:57.933394Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:17:57.933447Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-04-04T17:17:57.933480Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State CALCULATED 2025-04-04T17:17:57.933513Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State CALCULATED FrontTxId 281474976710672 2025-04-04T17:17:57.933546Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState WAIT_RS 2025-04-04T17:17:57.933586Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from CALCULATED to WAIT_RS 
2025-04-04T17:17:57.933688Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-04T17:17:57.933748Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-04-04T17:17:57.933820Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState EXECUTING 2025-04-04T17:17:57.933847Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from WAIT_RS to EXECUTING 2025-04-04T17:17:57.933868Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-04-04T17:17:57.933887Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743787077963, TxId 281474976710672 2025-04-04T17:17:57.934449Z node 30 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:17:57.937699Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743787077963, TxId 281474976710672, Partition 0 2025-04-04T17:17:57.937741Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-04-04T17:17:57.937781Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State EXECUTING 2025-04-04T17:17:57.937784Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:17:57.937829Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State EXECUTING FrontTxId 281474976710672 2025-04-04T17:17:57.937863Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-04-04T17:17:57.937916Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976710672 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-04T17:17:57.937979Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976710672 2025-04-04T17:17:57.938498Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-04-04T17:17:57.938615Z node 30 :PERSQUEUE NOTICE: [PQ: 72075186224037892] metering mode METERING_MODE_REQUEST_UNITS 2025-04-04T17:17:57.938770Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976710672 2025-04-04T17:17:57.938815Z node 30 :PERSQUEUE DEBUG: 
[PQ: 72075186224037892] TxId 281474976710672, NewState EXECUTED 2025-04-04T17:17:57.938857Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from EXECUTING to EXECUTED 2025-04-04T17:17:57.939374Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] save tx TxId: 281474976710672 State: EXECUTED MinStep: 1743787077893 MaxStep: 18446744073709551615 Step: 1743787077963 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7489508419937475965 RawX2: 124554053780 } Partitions { Partition { PartitionId: 0 } } 2025-04-04T17:17:57.939712Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:17:57.947405Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:17:57.947457Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-04T17:17:57.947491Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State EXECUTED 2025-04-04T17:17:57.947521Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 State EXECUTED FrontTxId 281474976710672 2025-04-04T17:17:57.947554Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:17:57.947579Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState WAIT_RS_ACKS 2025-04-04T17:17:57.947609Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:17:57.947658Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] PredicateAcks: 0/0 2025-04-04T17:17:57.947671Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:17:57.947695Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976710672] PredicateAcks: 0/0 2025-04-04T17:17:57.947723Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710672 to the list for deletion 2025-04-04T17:17:57.947760Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, NewState DELETING 2025-04-04T17:17:57.947792Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710672 2025-04-04T17:17:57.947861Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 
2025-04-04T17:17:57.951433Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:17:57.951495Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-04T17:17:57.951530Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710672, State DELETING 2025-04-04T17:17:57.951565Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710672 2025-04-04T17:17:57.952766Z node 29 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-04T17:17:57.952865Z node 29 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/PQ/ttt" 2025-04-04T17:17:57.952938Z node 29 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ/ttt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"5,15","output":"16"},"fetch":"5","drop":"15,5"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { 
Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"5,15","output":"16"},"fetch":"5","drop":"15,5"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Bool FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\0 ... 
rd_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Uint64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"5,15","output":"16"},"fetch":"5","drop":"15,5"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"5,15","output":"16"},"fetch":"5","drop":"15,5"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] >> TestProgram::SimpleFunction [GOOD] >> TestProgram::JsonValueBinary [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"2","output":"15"},"fetch":"2","drop":"2"},{"processor":{"internal":{},"type":"Projection","input":"15"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10UInt64TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"7,9","output":"15"},"fetch":"7,9","drop":"7,9"},{"processor":{"internal":{},"type":"Projection","input":"15"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; >> TPersQueueTest::SrcIdCompatibility [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"6,15","output":"16"},"fetch":"6","drop":"15,6"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } 
FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?pJs ... iority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Uint64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"6,15","output":"16"},"fetch":"6","drop":"15,6"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; json_binary: [ 
7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Const","output":"15"}},{"processor":{"internal":{},"type":"Calculation","input":"6,15","output":"16"},"fetch":"6","drop":"15,6"},{"processor":{"internal":{},"type":"Projection","input":"16"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow10DoubleTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; 
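The paired json_binary arrays printed by TestProgram::JsonValueBinary above list the same six input documents twice: the first array is simply hex-encoded JSON text, while the second is YDB's binary JsonDocument form, an indexed representation that is not printable text. A minimal decoder for the first form, with the decoded documents shown in comments:

```cpp
#include <cstdio>
#include <string>

// Decode a hex string (as printed in the first "json_binary" array) back
// into readable text. Helper written for reading this trace.
std::string FromHex(const std::string& hex) {
    std::string out;
    for (size_t i = 0; i + 1 < hex.size(); i += 2)
        out.push_back(static_cast<char>(std::stoi(hex.substr(i, 2), nullptr, 16)));
    return out;
}

int main() {
    const char* samples[] = {
        "7B226B6579223A2276616C7565227D",         // {"key":"value"}
        "7B226B6579223A31307D",                   // {"key":10}
        "7B226B6579223A302E317D",                 // {"key":0.1}
        "7B226B6579223A66616C73657D",             // {"key":false}
        "7B22616E6F74686572223A2276616C7565227D", // {"another":"value"}
        "5B5D",                                   // []
    };
    for (const char* s : samples)
        std::printf("%s\n", FromHex(s).c_str());
}
```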
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
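A pattern worth noting in this trace: the seven per-field serialize sizes logged before each WriteIndexComplete sum exactly to the column_size of the portion created by the following upsert_portion record (the whole-batch serialize and the trailing size=432/columns=4 record are separate events, not part of the sum). Verified for the first three portions of TColumnEngineTestLogs::IndexReadWithPredicatesStrPK; whether the identity holds in general cannot be told from the log alone.

```cpp
// Per-field sizes (_yql_plan_step, timestamp, _yql_tx_id, resource_type,
// resource_id, uid, message) vs. the reported column_size of each portion.
static_assert(232 + 4192 + 232 + 8200 + 6840 + 6840 + 6840 == 33376,
              "portion 1: column_size");
static_assert(232 + 4192 + 232 + 8232 + 8008 + 8008 + 8008 == 36912,
              "portion 2: column_size");
static_assert(232 + 4192 + 232 + 8240 + 8016 + 8016 + 8016 == 36944,
              "portion 3: column_size");
int main() {}
```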
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232 ... t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
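The IsVisible records here, together with the three contrasting checks in the first trace of this section, pin down part of the visibility rule: a portion carrying snapshot (plan_step=1, tx_id=1) is hidden from readers at (1,0) and (1,2) but visible at (2,1) and (3,1). In every logged case visibility tracks a strictly greater plan_step; how ties on tx_id are broken cannot be inferred from this log, so the sketch below is an observation about the trace, not the engine's actual rule.

```cpp
#include <cassert>

struct Snapshot {
    unsigned long long PlanStep;
    unsigned long long TxId;
};

// Matches all visibility outcomes logged in this section; tx_id tie-breaking
// is undetermined by the available data.
bool VisibleInTrace(Snapshot portion, Snapshot reader) {
    return reader.PlanStep > portion.PlanStep;
}

int main() {
    const Snapshot portion{1, 1};
    assert(!VisibleInTrace(portion, {1, 0})); // visible=0 in the first trace
    assert(!VisibleInTrace(portion, {1, 2})); // visible=0 in the first trace
    assert(VisibleInTrace(portion, {2, 1}));  // visible=1 in the first trace
    assert(VisibleInTrace(portion, {3, 1}));  // visible=1 for every read here
}
```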
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:376;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:459;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> DataShardTxOrder::ZigZag_oo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2025-04-04T17:17:44.889945Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:44.966416Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:44.966468Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:44.972838Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:44.973265Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:44.973517Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:44.982671Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:45.025697Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:45.025792Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:45.027236Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:45.027284Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:45.027316Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:45.027606Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:45.027849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:45.027905Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:45.095863Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:45.125939Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:45.126075Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:45.126164Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:45.126196Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:45.126226Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:45.126271Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:45.126395Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.126451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.126737Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:45.126814Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:45.126910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:45.126953Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:45.126987Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:45.127018Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:45.127052Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:45.127096Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:45.127157Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:45.127225Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.127255Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.127296Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:45.129713Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:45.129770Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:45.129837Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:45.129968Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:45.130006Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:45.130056Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:45.130088Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:45.130118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:45.130148Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:45.130175Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:45.130428Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:45.130462Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:45.130491Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:45.130519Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:45.130587Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:45.130614Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:45.130645Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:45.130718Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:45.130766Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:45.142603Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:45.142672Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:45.142708Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:45.142745Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 
9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:45.142792Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:45.143244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.143286Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:45.143321Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:45.143465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:45.143490Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:45.143598Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:45.143642Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:45.143677Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:45.143709Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:45.147220Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:45.147278Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:45.147448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.147482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:45.147521Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:45.147572Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:45.147602Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:45.147640Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:45.147669Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:45.147706Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:45.147735Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:45.147775Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:45.147817Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:45.147962Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:45.148001Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:45.148025Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 
2025-04-04T17:17:45.148043Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:45.148060Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:45.148107Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:45.148134Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:45.148162Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:45.148198Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:45.148247Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:45.148277Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:45.148304Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:45.148338Z node 1 :TX_DATA ... aitInRS 2025-04-04T17:18:02.076139Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:18:02.076159Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:18:02.076178Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:02.076197Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:02.076617Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:02.076690Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:02.076742Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:18:02.076767Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:02.076791Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-04-04T17:18:02.076814Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-04-04T17:18:02.077011Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-04-04T17:18:02.077044Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-04-04T17:18:02.077082Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:02.077125Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-04-04T17:18:02.077159Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-04-04T17:18:02.077180Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:02.077210Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-04-04T17:18:02.077256Z node 6 
:TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:02.077295Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:02.077333Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:02.077368Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:02.077553Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:345:2312], Recipient [6:345:2312]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:02.077587Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:02.077625Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-04T17:18:02.077650Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:02.077671Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:18:02.077699Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2025-04-04T17:18:02.077723Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-04-04T17:18:02.077749Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.077773Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-04-04T17:18:02.077794Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-04-04T17:18:02.077819Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-04-04T17:18:02.078368Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-04-04T17:18:02.078412Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.078435Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-04-04T17:18:02.078457Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-04T17:18:02.078481Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-04-04T17:18:02.078513Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.078535Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-04T17:18:02.078554Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:02.078590Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-04-04T17:18:02.078633Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-04-04T17:18:02.078660Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-04-04T17:18:02.078685Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-04-04T17:18:02.078719Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.078740Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 
2025-04-04T17:18:02.078760Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-04-04T17:18:02.078781Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-04-04T17:18:02.078820Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.078841Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-04-04T17:18:02.078861Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-04-04T17:18:02.078883Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-04-04T17:18:02.078904Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.078926Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-04-04T17:18:02.078949Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-04-04T17:18:02.078970Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-04-04T17:18:02.078996Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.079017Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-04-04T17:18:02.079036Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-04-04T17:18:02.079057Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-04-04T17:18:02.079079Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.079100Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-04-04T17:18:02.079119Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-04-04T17:18:02.079141Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-04-04T17:18:02.079452Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-04-04T17:18:02.079499Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:02.079539Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.079560Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-04-04T17:18:02.079583Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-04-04T17:18:02.079604Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-04-04T17:18:02.079772Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-04-04T17:18:02.079800Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-04-04T17:18:02.079825Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution 
unit CompletedOperations 2025-04-04T17:18:02.079850Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-04-04T17:18:02.079881Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-04-04T17:18:02.079901Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-04-04T17:18:02.079923Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-04-04T17:18:02.079952Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:02.079976Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:18:02.079998Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-04T17:18:02.080022Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-04T17:18:02.093269Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-04-04T17:18:02.093348Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-04-04T17:18:02.093416Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:18:02.093465Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-04-04T17:18:02.093544Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:02.093602Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-04T17:18:02.094555Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-04-04T17:18:02.094609Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-04-04T17:18:02.094642Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:02.094668Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-04-04T17:18:02.094706Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:98:2133], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:18:02.094734Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] Test command err: === Start server === Server->StartServer(false); 2025-04-04T17:13:06.611360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507219702409880:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.611443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.651111Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507220251465654:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.654320Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.862864Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:13:06.873565Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e22/r3tmp/tmpMYnSRC/pdisk_1.dat 2025-04-04T17:13:07.209311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.209445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.210100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.210153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.213108Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:13:07.213235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:07.215899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:07.258859Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30697, node 1 2025-04-04T17:13:07.454947Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e22/r3tmp/yandexn3w6Km.tmp 2025-04-04T17:13:07.454971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e22/r3tmp/yandexn3w6Km.tmp 2025-04-04T17:13:07.455145Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e22/r3tmp/yandexn3w6Km.tmp 2025-04-04T17:13:07.455313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:07.568579Z INFO: TTestServer started on Port 7973 GrpcPort 30697 TClient is connected to server localhost:7973 PQClient connected to localhost:30697 === TenantModeEnabled() = 0 === Init PQ - start server on port 30697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:13:08.271822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:13:08.271980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.272133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:13:08.272289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:13:08.272323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.273690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:08.273771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:13:08.273886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.273924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:13:08.273943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:13:08.273959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-04T17:13:08.275449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.275500Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:13:08.275520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-04T17:13:08.279293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.279337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.279365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:08.279395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.283666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:13:08.284014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:08.284041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:13:08.284071Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:08.286070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:13:08.286168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:13:08.288328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786788331, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:08.288440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786788331 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:13:08.288457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:08.288652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:13:08.288691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:08.288817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:08.288843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:13:08.290284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:13:08.290307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:13:08.290477Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:13:08.290493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489507223997377842:2414], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:13:08.290533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.290571Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:13:08.290672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:13:08.290681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.290700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:13:08.290708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.290720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:13:08.290737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.290751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:13:08.290758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-04T17:13:08.290821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [Own ... .877532Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-04T17:17:59.877672Z node 27 :PERSQUEUE INFO: new Cookie test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0 generated for partition 7 topic 'rt3.dc1--account--topic100' owner test-src-id-compat2 2025-04-04T17:17:59.877813Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 7 2025-04-04T17:17:59.877910Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-04T17:17:59.878106Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-04T17:17:59.878133Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-04T17:17:59.878258Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-04-04T17:17:59.878456Z node 27 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0 2025-04-04T17:17:59.879467Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743787079879 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:17:59.879668Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. 
Init response: session_id: "test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:17:59.879991Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write 1 messages with Id from 1 to 1 2025-04-04T17:17:59.880466Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session: try to update token 2025-04-04T17:17:59.880568Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Send 1 message(s) (0 left), first sequence number is 1 2025-04-04T17:17:59.881203Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:17:59.881559Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T17:17:59.881712Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-04T17:17:59.881758Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-04T17:17:59.881877Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2025-04-04T17:17:59.881949Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:17:59.882035Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-04-04T17:17:59.882051Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-04-04T17:17:59.882116Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2025-04-04T17:17:59.882390Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2025-04-04T17:17:59.883187Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-04-04T17:17:59.884074Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1743787079883 2025-04-04T17:17:59.884289Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:17:59.884407Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 7 offset 0 partNo 0 count 1 size 177 2025-04-04T17:17:59.887278Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. 
Partition 7 offset 0 count 1 size 177 actorID [27:7489508468814258455:2502] 2025-04-04T17:17:59.887406Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:17:59.887420Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037910' partition 7 offset 0 partno 0 count 1 parts 0 size 177 2025-04-04T17:17:59.887472Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. Partition: 7 2025-04-04T17:17:59.887561Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T17:17:59.887645Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2025-04-04T17:17:59.887745Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:17:59.887827Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:17:59.887869Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-04T17:17:59.888032Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-04T17:17:59.888074Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-04T17:17:59.888130Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-04T17:17:59.888157Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:17:59.888240Z node 27 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1743787079882 queuesize 0 startOffset 0 2025-04-04T17:17:59.888721Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 1 } 2025-04-04T17:17:59.888804Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session: acknoledged message 1 2025-04-04T17:17:59.889126Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:17:59.889196Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session will now close 2025-04-04T17:17:59.889260Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session: aborting 2025-04-04T17:17:59.890282Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:17:59.890357Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session is aborting and will not restart 2025-04-04T17:17:59.890437Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0] Write session: destroy 2025-04-04T17:17:59.891417Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0 grpc read done: success: 0 data: 2025-04-04T17:17:59.891466Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0 grpc read failed 2025-04-04T17:17:59.891555Z node 27 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0 2025-04-04T17:17:59.891603Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|ce4e8cbd-8e8ba47-d0417c3e-d4e0b7c1_0 is DEAD 2025-04-04T17:17:59.892427Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:17:59.892692Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [27:7489508477404194595:2748] destroyed 2025-04-04T17:17:59.892730Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:18:00.501914Z node 27 :KQP_EXECUTER ERROR: ActorId: [27:7489508481699161941:2754] TxId: 281474976710699. Ctx: { TraceId: 01jr0wye9c92qj04sbvfr1phgs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=Y2RhYTI5NmYtYTU4N2ViYjAtZjc0YjAxMzktYjE3OWE0NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 28 2025-04-04T17:18:00.502686Z node 27 :KQP_COMPUTE ERROR: SelfId: [27:7489508481699161947:2762], TxId: 281474976710699, task: 2. Ctx: { SessionId : ydb://session/3?node_id=27&id=Y2RhYTI5NmYtYTU4N2ViYjAtZjc0YjAxMzktYjE3OWE0NjQ=. TraceId : 01jr0wye9c92qj04sbvfr1phgs. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [27:7489508481699161941:2754], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:18:00.503379Z node 27 :KQP_COMPUTE ERROR: SelfId: [27:7489508481699161949:2763], TxId: 281474976710699, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=27&id=Y2RhYTI5NmYtYTU4N2ViYjAtZjc0YjAxMzktYjE3OWE0NjQ=. TraceId : 01jr0wye9c92qj04sbvfr1phgs. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [27:7489508481699161941:2754], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:18:00.858583Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:00.858624Z node 27 :IMPORT WARN: Table profiles were not loaded >> TestProgram::YqlKernelEquals >> TestProgram::YqlKernelEquals [GOOD] >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:32;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Calculation","input":"10,11","output":"15"},"fetch":"10,11","drop":"10,11"},{"processor":{"internal":{},"type":"Projection","input":"15"}}]}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:493;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 18364, MsgBus: 61193 2025-04-04T17:17:01.926283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508230339487945:2123];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:01.927216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fe/r3tmp/tmp9oKHgQ/pdisk_1.dat 2025-04-04T17:17:02.419815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:02.419889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:02.421493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:02.461197Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18364, node 1 2025-04-04T17:17:02.564795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:02.564812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:02.564817Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:02.564912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61193 TClient is connected to server localhost:61193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:03.139997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:03.156342Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:03.176819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:04.882351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508243224390808:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.882413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.882953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508243224390820:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:04.886303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:17:04.896373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508243224390822:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:17:04.984174Z node 1 :TX_PROXY ERROR: Actor# [1:7489508243224390875:2605] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:05.260752Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-04-04T17:17:05.261047Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-04-04T17:17:05.261186Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=4;memory=1048576; 2025-04-04T17:17:05.261195Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 4. [Mem] memory 1048576 NOT granted 2025-04-04T17:17:05.272713Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508247519358230:2370], TxId: 281474976710661, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wwq1r8a34qz3tfgcdzhzv. SessionId : ydb://session/3?node_id=1&id=MmQ0MDgyNDItN2E0OTM2OGUtN2E0MmRiYjUtYWZkNWM5NDE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 4: 10, host: ghrun-mup5cka3jq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-04T17:17:05.257521Z }, code: 2029 }. 2025-04-04T17:17:05.273099Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508247519358228:2368], TxId: 281474976710661, task: 2. Ctx: { TraceId : 01jr0wwq1r8a34qz3tfgcdzhzv. SessionId : ydb://session/3?node_id=1&id=MmQ0MDgyNDItN2E0OTM2OGUtN2E0MmRiYjUtYWZkNWM5NDE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-mup5cka3jq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-04T17:17:05.257521Z }, code: 2029 }. 2025-04-04T17:17:05.276176Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=5;memory=1048576; 2025-04-04T17:17:05.276202Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 5. [Mem] memory 1048576 NOT granted 2025-04-04T17:17:05.276535Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508247519358231:2371], TxId: 281474976710661, task: 5. Ctx: { TraceId : 01jr0wwq1r8a34qz3tfgcdzhzv. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MmQ0MDgyNDItN2E0OTM2OGUtN2E0MmRiYjUtYWZkNWM5NDE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 5: 10, host: ghrun-mup5cka3jq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 40B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 4, started at: 2025-04-04T17:17:05.257521Z }, code: 2029 }. 2025-04-04T17:17:05.277866Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508247519358227:2367], TxId: 281474976710661, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmQ0MDgyNDItN2E0OTM2OGUtN2E0MmRiYjUtYWZkNWM5NDE=. CustomerSuppliedId : . TraceId : 01jr0wwq1r8a34qz3tfgcdzhzv. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489508247519358201:2353], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:05.278633Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508247519358229:2369], TxId: 281474976710661, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmQ0MDgyNDItN2E0OTM2OGUtN2E0MmRiYjUtYWZkNWM5NDE=. CustomerSuppliedId : . TraceId : 01jr0wwq1r8a34qz3tfgcdzhzv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508247519358201:2353], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:05.281734Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQ0MDgyNDItN2E0OTM2OGUtN2E0MmRiYjUtYWZkNWM5NDE=, ActorId: [1:7489508243224390806:2353], ActorState: ExecuteState, TraceId: 01jr0wwq1r8a34qz3tfgcdzhzv, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 4: 10, host: ghrun-mup5cka3jq, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-04-04T17:17:05.257521Z } , code: 2029 query_phases { duration_us: 25604 table_access { name: "/Root/LargeTable" partitions_count: 3 } cpu_time_us: 32988 affected_shards: 8 } compilation { duration_us: 267159 cpu_time_us: 262863 } process_cpu_time_us: 483 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Data\",\"DataText\",\"Key\",\"KeyText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"4\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":2097192,\"Max\":1048586,\"Min\":10,\"History\":[1743787025277,1048606,1743787025278,1048606,1743787025279,2097192]},\"Tasks\":4,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/LargeTable\"}],\"CpuTimeUs\":{\"Count\":4,\"Sum\":4436,\"Max\":2477,\"Min\":26,\"History\":[1743787025276,1766,1743787025277,4269,1743787025278,4278,1743787025279,4436]},\"Ingress\":[{\"P ... ce] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.608611Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508278611224374:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.612164Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:13.627710Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508278611224376:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:13.721162Z node 3 :TX_PROXY ERROR: Actor# [3:7489508278611224431:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:25.208575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:25.208605Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:53.790041Z node 3 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wx1z76ykkkje23tqjrpfg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTU2YWZlOS00YzVlNWIwYS0xMmY1OWE4YS04NTY5N2FmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded. 2025-04-04T17:17:53.790860Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTU2YWZlOS00YzVlNWIwYS0xMmY1OWE4YS04NTY5N2FmZg==, ActorId: [3:7489508282906191983:2488], ActorState: ExecuteState, TraceId: 01jr0wx1z76ykkkje23tqjrpfg, Create QueryResponse for error on request, msg: 2025-04-04T17:17:53.791035Z node 3 :KQP_SLOW_LOG WARN: TraceId: "01jr0wx1z76ykkkje23tqjrpfg", SessionId: ydb://session/3?node_id=3&id=ZTU2YWZlOS00YzVlNWIwYS0xMmY1OWE4YS04NTY5N2FmZg==, Slow query, duration: 39.191446s, status: PRECONDITION_FAILED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT ToDict(\n ListMap(\n ListFromRange(0ul, 5000000ul),\n ($x) -> { RETURN AsTuple($x, $x + 1); }\n )\n );\n ", parameters: 0b
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
: Error: Memory limit exceeded, code: 2029 Trying to start YDB, gRPC: 2050, MsgBus: 22745 2025-04-04T17:17:54.351778Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508458295280337:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:54.351839Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fe/r3tmp/tmpdED3zY/pdisk_1.dat 2025-04-04T17:17:54.448048Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2050, node 4 2025-04-04T17:17:54.485061Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:54.485132Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:54.486897Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:54.506003Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:54.506029Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:54.506037Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:54.506139Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22745 TClient is connected to server localhost:22745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:54.929005Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:54.944841Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:55.004335Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:55.180933Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:55.258003Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:57.217163Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508471180183999:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:57.217228Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:57.247870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:57.276549Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:57.307191Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:57.337749Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:57.371609Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:57.406556Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:57.482555Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508471180184513:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:57.482643Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:57.482670Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508471180184518:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:17:57.485662Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:17:57.492785Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508471180184520:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:17:57.587322Z node 4 :TX_PROXY ERROR: Actor# [4:7489508471180184575:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:17:58.535218Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:17:59.352332Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508458295280337:2064];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:17:59.352422Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:17:59.741739Z node 4 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 200 DataSize: 20488600 IndexSize: 5037 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1
2025-04-04T17:18:01.692556Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTBjZWQyNzItM2Q5M2MyMDYtZjQ1NmMzMDgtM2MxYjc4MTc=, ActorId: [4:7489508475475152128:2488], ActorState: ExecuteState, TraceId: 01jr0wyfmx27tpf15f5kdxm6mz, Create QueryResponse for error on request, msg:
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists
>> ExternalIndex::Simple [GOOD]
>> TTxDataShardMiniKQL::CrossShard_1_Cycle
>> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx
>> TTxDataShardMiniKQL::WriteEraseRead
>> TTxDataShardMiniKQL::WriteKeyTooLarge
>> TTxDataShardMiniKQL::CrossShard_5_AllToAll
>> TTxDataShardMiniKQL::ReadConstant
>> TTxDataShardMiniKQL::Write
>> TTxDataShardMiniKQL::ReadSpecialColumns
>> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink
>> KqpUniqueIndex::InsertFkPartialColumnSet
>> KqpUniqueIndex::ReplaceFkPartialColumnSet
>> KqpUniqueIndex::ReplaceFkAlreadyExist
>> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn
>> KqpIndexes::CreateTableWithImplicitSyncIndexSQL
>> KqpIndexes::SelectConcurentTX
>> KqpIndexes::VectorIndexOrderByCosineSimilarityNotNullableLevel1
>> KqpIndexes::UpsertWithNullKeysSimple
>> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink
>> KqpIndexes::UpdateIndexSubsetPk
>> KqpIndexes::MultipleSecondaryIndex+UseSink
>> KqpUniqueIndex::UpdateOnFkSelectResultSameValue
>> KqpIndexes::UpsertMultipleUniqIndexes
>> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink
>> KqpIndexes::UniqIndexComplexPkComplexFkOverlap
>> KqpUniqueIndex::InsertNullInComplexFk
>> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD]
>> TExternalDataSourceTest::PreventDeletionOfDependentDataSources
>> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD]
>> KqpStreamLookup::ReadTableDuringSplit [GOOD]
>> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD]
>> TPersQueueTest::TestReadPartitionByGroupId
>> TTxDataShardMiniKQL::ReadConstant [GOOD]
>> TTxDataShardMiniKQL::ReadAfterWrite
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:18:06.647926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:18:06.648081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:18:06.648127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:18:06.648158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:18:06.649587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:18:06.649642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:18:06.649714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:18:06.649821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:18:06.651375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:18:06.739047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:18:06.739096Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:06.750034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:18:06.750277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:18:06.750413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:18:06.767715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:18:06.768010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:18:06.769158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:18:06.770979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:18:06.785581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:18:06.786807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:18:06.786916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:18:06.787105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:18:06.787150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:18:06.787192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:18:06.787304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.792778Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:18:06.887106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:18:06.889041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.890770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:18:06.892341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:18:06.892401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.894763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:18:06.894983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:18:06.895264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.895335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:18:06.895438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:18:06.895475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:18:06.897132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.897162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:18:06.897185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:18:06.898383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.898421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.898466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:18:06.898525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:18:06.902263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:18:06.903989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:18:06.904141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:18:06.905099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:18:06.905222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:18:06.905264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:18:06.906620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:18:06.906683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:18:06.906878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:18:06.906990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:18:06.909025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:18:06.909065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:18:06.909219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:18:06.909262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:18:06.909539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.909583Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:18:06.909664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:18:06.909696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:18:06.909754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:18:06.909804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:18:06.909839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:18:06.909878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:18:06.909921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:18:06.909950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:18:06.910005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:18:06.910044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:18:06.910072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:18:06.911820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:18:06.911886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:18:06.911921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... SHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:18:06.971478Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6
2025-04-04T17:18:06.971513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:18:06.972084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:18:06.972126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:18:06.972141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:18:06.972160Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-04-04T17:18:06.972183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-04T17:18:06.972240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true
2025-04-04T17:18:06.974041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816
2025-04-04T17:18:06.974139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
2025-04-04T17:18:06.974608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003
2025-04-04T17:18:06.974950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:18:06.975045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:18:06.975092Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003
2025-04-04T17:18:06.975182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240
2025-04-04T17:18:06.975352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:18:06.975425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-04T17:18:06.975946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-04T17:18:06.977226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:18:06.977259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:18:06.977366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:18:06.977413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-04-04T17:18:06.977457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:18:06.977483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-04-04T17:18:06.977505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-04-04T17:18:06.977517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2
FAKE_COORDINATOR: Erasing txId 102
2025-04-04T17:18:06.977689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.977712Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-04-04T17:18:06.977775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-04T17:18:06.977812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:18:06.977838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-04-04T17:18:06.977859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:18:06.977882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false
2025-04-04T17:18:06.977905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-04-04T17:18:06.977930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-04-04T17:18:06.977947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-04-04T17:18:06.977987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-04-04T17:18:06.978015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-04-04T17:18:06.978035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-04-04T17:18:06.978061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-04-04T17:18:06.978508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:18:06.978556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:18:06.978596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:18:06.978637Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-04-04T17:18:06.978660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:18:06.979134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:18:06.979191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102
2025-04-04T17:18:06.979212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-04-04T17:18:06.979227Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-04-04T17:18:06.979262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-04-04T17:18:06.979304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-04-04T17:18:06.981196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-04-04T17:18:06.981945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-04-04T17:18:06.982106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-04-04T17:18:06.982162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-04-04T17:18:06.982582Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-04-04T17:18:06.982666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-04-04T17:18:06.982695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:332:2323]
TestWaitNotification: OK eventTxId 102
2025-04-04T17:18:06.983032Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:18:06.983198Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 181us result status StatusSuccess
2025-04-04T17:18:06.983392Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TTxDataShardMiniKQL::Write [GOOD]
>> TTxDataShardMiniKQL::TableStats
>> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD]
>> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx
>> TTxDataShardMiniKQL::WriteEraseRead [GOOD]
>> TTxDataShardMiniKQL::WriteAndReadMultipleShards
>> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD]
>> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD]
>> TTxDataShardMiniKQL::SelectRange
>> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD]
>> TTxDataShardMiniKQL::WriteValueTooLarge
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD]
Test command err:
2025-04-04T17:13:25.684099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:303:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:13:25.684454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:13:25.684780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError;
2025-04-04T17:13:25.684852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e5d/r3tmp/tmp9BihnN/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 18555, node 1
TClient is connected to server localhost:25522
2025-04-04T17:13:26.686865Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest
2025-04-04T17:13:26.687443Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest
2025-04-04T17:13:26.701329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T17:13:26.758826Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:13:26.767177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:13:26.767226Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:13:26.767283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:13:26.767637Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:13:26.803440Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-04T17:13:26.805740Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
2025-04-04T17:13:26.805992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:13:26.806198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:13:26.817776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:13:26.951720Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction
2025-04-04T17:13:26.951802Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction
2025-04-04T17:13:26.953601Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:677:2570]
2025-04-04T17:13:27.046511Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615
2025-04-04T17:13:27.046644Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0
2025-04-04T17:13:27.048017Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6
2025-04-04T17:13:27.048144Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache
2025-04-04T17:13:27.048529Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0
2025-04-04T17:13:27.048823Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true
2025-04-04T17:13:27.048932Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480}
2025-04-04T17:13:27.054501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T17:13:27.055142Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 HANDLE EvClientConnected
2025-04-04T17:13:27.056800Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657}
2025-04-04T17:13:27.056906Z node 1 :TX_PROXY DEBUG: Actor# [1:677:2570] txid# 281474976715657 SEND to# [1:676:2569] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53}
Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2
2025-04-04T17:13:27.145394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:748:2630];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-04T17:13:27.169330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:748:2630];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-04T17:13:27.169696Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888
2025-04-04T17:13:27.177029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-04T17:13:27.177265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-04T17:13:27.177509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-04T17:13:27.177625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-04T17:13:27.177705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-04T17:13:27.177799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-04T17:13:27.177873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-04T17:13:27.177955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-04T17:13:27.178020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-04T17:13:27.178093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-04T17:13:27.178165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-04T17:13:27.179120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:748:2630];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-04T17:13:27.195645Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888
2025-04-04T17:13:27.195867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-04-04T17:13:27.195922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-04-04T17:13:27.196060Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-04T17:13:27.196199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-04-04T17:13:27.196273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-04-04T17:13:27.196311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-04-04T17:13:27.196399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-04-04T17:13:27.196452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-04-04T17:13:27.196479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-04-04T17:13:27.196520Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-04-04T17:13:27.196661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-04-04T17:13:27.196743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute ... oFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"4a26b38e-7860c514-2bd5d0ce-87e0c0d6")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) )
2025-04-04T17:18:05.081927Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.081 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '338) '('"_id" '"9c1c2ed6-af8f485a-419b7425-89d32968") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"4a26b38e-7860c514-2bd5d0ce-87e0c0d6")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) )
2025-04-04T17:18:05.102388Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.102 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:466: Register async execution for node #268
2025-04-04T17:18:05.102496Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.102 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {3}, callable #277
2025-04-04T17:18:05.102583Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.102 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:577: Node #277 finished execution
2025-04-04T17:18:05.102627Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.102 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:594: Node #277 created 0 trackable nodes:
2025-04-04T17:18:05.102687Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.102 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:87: Finish, output #280, status: Async
2025-04-04T17:18:05.103200Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:133: Completed async execution for node #268
2025-04-04T17:18:05.103251Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #268
2025-04-04T17:18:05.103296Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:59: Begin, root #280
2025-04-04T17:18:05.103337Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:72: Collect unused nodes for root #280, status: Ok
2025-04-04T17:18:05.103377Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {0}, callable #280
2025-04-04T17:18:05.103422Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {1}, callable #279
2025-04-04T17:18:05.103464Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {2}, callable #278
2025-04-04T17:18:05.103528Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {3}, callable #275
2025-04-04T17:18:05.103567Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {4}, callable #268
2025-04-04T17:18:05.103685Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:577: Node #268 finished execution
2025-04-04T17:18:05.103728Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:594: Node #268 created 0 trackable nodes:
2025-04-04T17:18:05.103771Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {3}, callable #275
2025-04-04T17:18:05.103815Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:577: Node #275 finished execution
2025-04-04T17:18:05.103867Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {2}, callable #278
2025-04-04T17:18:05.104018Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.103 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:577: Node #278 finished execution
2025-04-04T17:18:05.104062Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:594: Node #278 created 0 trackable nodes:
2025-04-04T17:18:05.104107Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {1}, callable #279
2025-04-04T17:18:05.104160Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:577: Node #279 finished execution
2025-04-04T17:18:05.104202Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:594: Node #279 created 0 trackable nodes:
2025-04-04T17:18:05.104246Z node 1 :KQP_YQL TRACE: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 TRACE ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:387: {0}, callable #280
2025-04-04T17:18:05.104297Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:577: Node #280 finished execution
2025-04-04T17:18:05.104339Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:594: Node #280 created 0 trackable nodes:
2025-04-04T17:18:05.104379Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:87: Finish, output #280, status: Ok
2025-04-04T17:18:05.104420Z node 1 :KQP_YQL INFO: TraceId: 01jr0wyk30ckg1nah936j3kcsc, SessionId: CompileActor 2025-04-04 17:18:05.104 INFO ydb-services-ext_index-ut(pid=547950, tid=0x00007F74D33B0CC0) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #280
2025-04-04T17:18:05.121607Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction
2025-04-04T17:18:05.121678Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716246 ProcessProposeKqpTransaction
2025-04-04T17:18:05.130397Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction
2025-04-04T17:18:05.130463Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716247 ProcessProposeKqpTransaction
2025-04-04T17:18:05.306365Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:748:2630];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-04T17:18:05.306598Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:753:2632];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-04T17:18:05.306646Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:755:2634];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-04T17:18:05.306689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:759:2636];fline=actor.cpp:33;event=skip_flush_writing;
2025-04-04T17:18:05.318400Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:748:2630];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888;
2025-04-04T17:18:05.318529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:753:2632];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889;
2025-04-04T17:18:05.318607Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:755:2634];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891;
2025-04-04T17:18:05.318661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:759:2636];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890;
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
>> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD]
Test command err:
2025-04-04T17:18:00.099112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:18:00.099591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:18:00.099662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001480/r3tmp/tmp0WIW1h/pdisk_1.dat
2025-04-04T17:18:00.448373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T17:18:00.514120Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:00.558756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:00.559666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:18:00.572708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:18:00.666682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T17:18:01.005464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:01.005585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:01.005664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:01.017230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-04-04T17:18:01.173821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2628], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:18:01.229464Z node 1 :TX_PROXY ERROR: Actor# [1:825:2671] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:06.085725Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wyf9bdxx5fhj12c6wggwt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM2Zjg2ZDQtNTYyMzlhOTgtY2QyZTc0NmUtYTNmMmVlOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:18:06.558100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wyma2b7mwd5szqf4m7gw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVjM2JmYjctNzBjNWIyNDQtZDlhMWQ4YTktZGE4NjQ1MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR 2025-04-04T17:18:06.576109Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wyma2b7mwd5szqf4m7gw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVjM2JmYjctNzBjNWIyNDQtZDlhMWQ4YTktZGE4NjQ1MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:18:06.647938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:18:06.648095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:18:06.648187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:18:06.648229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:18:06.649600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:18:06.649656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:18:06.649735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:18:06.649843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:18:06.651382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:06.737993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:06.738050Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:06.749343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:06.749580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:18:06.749794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:18:06.761650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:18:06.762059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:18:06.769124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:18:06.770995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:18:06.778804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:18:06.785687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:18:06.785766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:18:06.785973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:18:06.786021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:18:06.786148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:18:06.786302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:18:06.792488Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:18:06.887113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:18:06.888996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:18:06.890757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:18:06.892361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:18:06.892420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:18:06.895177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:18:06.895318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:18:06.895493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:18:06.895544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:18:06.895588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:18:06.895636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:18:06.897115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:18:06.897154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:18:06.897178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:18:06.898349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:18:06.898380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:18:06.898417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:18:06.898487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:18:06.907359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:18:06.909017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:18:06.909169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:18:06.910058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:18:06.910174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:18:06.910215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:18:06.910517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:18:06.910581Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:18:06.910751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:18:06.910833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:18:06.912661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:18:06.912699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:18:06.912805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:18:06.912830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:18:06.913014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:18:06.913044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:18:06.913107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:18:06.913141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:18:06.913177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:18:06.913211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:18:06.913260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:18:06.913293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:18:06.913323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:18:06.913351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:18:06.913401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:18:06.913438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:18:06.913468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:18:06.915142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:18:06.915229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:18:06.915260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
ation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:18:07.653897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:18:07.653939Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:18:07.653977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:18:07.654065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:18:07.654107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:18:07.654147Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-04-04T17:18:07.654196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-04-04T17:18:07.654228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T17:18:07.654270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:18:07.655586Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:18:07.655670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:18:07.655706Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:18:07.655749Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:18:07.655787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:18:07.657988Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:18:07.658064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:18:07.658095Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:18:07.658124Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:18:07.658152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:18:07.658987Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:18:07.659064Z node 2
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:18:07.659091Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:18:07.659118Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:18:07.659145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:18:07.659207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:18:07.661192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:18:07.662111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:18:07.664034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:18:07.664243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:18:07.664291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:18:07.664712Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:18:07.664803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:18:07.664844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:335:2326] TestWaitNotification: OK eventTxId 101 2025-04-04T17:18:07.665347Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:18:07.665560Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 237us result status StatusSuccess 2025-04-04T17:18:07.665877Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-04-04T17:18:07.668854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:18:07.669034Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-04-04T17:18:07.669123Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-04-04T17:18:07.671298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:18:07.671466Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:18:07.671766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:18:07.671808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:18:07.672219Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:18:07.672318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:18:07.672359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:343:2334] TestWaitNotification: OK eventTxId 103 2025-04-04T17:18:07.672866Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:18:07.673049Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 213us result status StatusSuccess 2025-04-04T17:18:07.673339Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name:
"ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> KqpLimits::QSReplySize-useSink [GOOD] >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::ReplySizeExceeded >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard >> KqpPg::JoinWithQueryService+StreamLookup |91.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] >> KqpIndexes::ForbidViewModification >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2025-04-04T17:17:59.941197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:17:59.941713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:17:59.941784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00146e/r3tmp/tmpbtc9sF/pdisk_1.dat 2025-04-04T17:18:00.448360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:18:00.510350Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:00.558756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:00.559710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:00.572691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:00.666667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:01.030046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:798:2664], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:01.030200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:788:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:01.030591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:01.035566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:18:01.195317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:802:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:18:01.263782Z node 1 :TX_PROXY ERROR: Actor# [1:880:2714] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:07.394323Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0wyfa3egmerd7v2d1dk99d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMxOTU5NGEtZTA1Njg4OWItNDU4YjJhNTctMTE2YzA2Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:18:07.448798Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0wyfa3egmerd7v2d1dk99d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMxOTU5NGEtZTA1Njg4OWItNDU4YjJhNTctMTE2YzA2Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:18:07.483743Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0wyfa3egmerd7v2d1dk99d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMxOTU5NGEtZTA1Njg4OWItNDU4YjJhNTctMTE2YzA2Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:18:07.767152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0wynq78sb53zftmbvgpyjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjA1NTYzMjYtOGIyNGFjZGYtMTAzNmY3NzAtMjhhZGY1NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR
|91.5%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log}
>> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob
>> KqpIndexes::VectorIndexOrderByCosineDistanceNotNullableLevel1
>> KqpIndexes::InnerJoinWithNonIndexWherePredicate
|91.5%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|91.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ...
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2025-04-04T17:18:06.991659Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.090762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.090809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.098021Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.098498Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.098836Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.112023Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.149047Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.149154Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.152356Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.152473Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.152527Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.154509Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.154823Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.154921Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:18:07.226646Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.258495Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.258636Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.258704Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.258732Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.258765Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:18:07.258794Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.258897Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.258945Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.259177Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.259278Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.259396Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.259433Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.259483Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184
2025-04-04T17:18:07.259520Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.259556Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.259601Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.259649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.259733Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.259767Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.259817Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.266670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.266739Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.266816Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.266996Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.267054Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.267106Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:18:07.267156Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.267193Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.267230Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.267262Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.267560Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.267603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.267639Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:07.267671Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.267708Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:18:07.267743Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.267775Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.267805Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.267842Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.282804Z node 1 :TX_DATASHARD DEBUG:
TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.282912Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.282956Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.282998Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.294634Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.295192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295306Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.295447Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.295486Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.295606Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.295651Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.295687Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.295727Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.299108Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.299180Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.299384Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299421Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299468Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.299530Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.299564Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.299600Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:18:07.299631Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:18:07.299668Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.299721Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.299784Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.299824Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit
LoadTxDetails 2025-04-04T17:18:07.299981Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:18:07.300018Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.300050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.300077Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.300105Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.300163Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.300195Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:18:07.300229Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:07.300266Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:07.300317Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:18:07.300353Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:18:07.300390Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:18:07.300430Z node 1 :TX_DATA ... tateInit, received event# 268828673, Sender [3:231:2225], Recipient [3:233:2226]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:09.243980Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:231:2225], Recipient [3:233:2226]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:09.249824Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:233:2226] 2025-04-04T17:18:09.250034Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:09.262992Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-04-04T17:18:09.312634Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:09.312770Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:09.314657Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:09.314741Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:09.314793Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:09.315152Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:09.315908Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:09.315989Z node 3 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [3:276:2226] in generation 3 2025-04-04T17:18:09.355663Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:09.355755Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-04-04T17:18:09.355838Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-04T17:18:09.355941Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-04-04T17:18:09.356137Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId:
[3:281:2265] 2025-04-04T17:18:09.356178Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:09.356220Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-04T17:18:09.356259Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:09.356528Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-04T17:18:09.356620Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-04T17:18:09.356777Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:233:2226], Recipient [3:233:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:09.356832Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:09.357043Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:09.357131Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:09.357284Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 233 RawX2: 12884904114 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-04-04T17:18:09.357359Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:233:2226]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-04T17:18:09.357389Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:18:09.357429Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-04T17:18:09.357464Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:09.357574Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:233:2226]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-04-04T17:18:09.357606Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-04T17:18:09.357644Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-04-04T17:18:09.357715Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:09.357758Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:09.357794Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:09.357827Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:09.357862Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:09.357894Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:09.357935Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:09.358029Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:279:2263], Recipient [3:233:2226]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:283:2267] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:18:09.358060Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected
2025-04-04T17:18:09.358138Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:122:2148], Recipient [3:233:2226]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-04T17:18:09.358169Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:18:09.358205Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-04T17:18:09.358272Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-04T17:18:09.370547Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:279:2263], Recipient [3:233:2226]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:279:2263] ServerId: [3:283:2267] } 2025-04-04T17:18:09.370618Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:18:09.406833Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:98:2133], Recipient [3:233:2226]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 98 RawX2: 12884904021 } 2025-04-04T17:18:09.406897Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-04T17:18:09.407182Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:289:2271], Recipient [3:233:2226]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:09.407213Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:09.407261Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:287:2270], serverId# [3:289:2271], sessionId# [0:0:0] 2025-04-04T17:18:09.407454Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:98:2133], Recipient [3:233:2226]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 98 RawX2: 12884904021 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003?
p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-04T17:18:09.407487Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:09.407562Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:09.411174Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-04T17:18:09.411321Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:09.411367Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-04T17:18:09.411410Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:09.411447Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:09.411488Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:09.411568Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-04T17:18:09.411608Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:09.411632Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:18:09.411657Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:09.411680Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:09.412104Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:09.412167Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:09.412221Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:09.412246Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:09.412271Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:09.412296Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-04T17:18:09.412334Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:18:09.412404Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete 2025-04-04T17:18:09.412436Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:09.412470Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:09.412506Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-04T17:18:09.412547Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:09.412567Z node 3 :TX_DATASHARD TRACE: Advance execution 
plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:09.412594Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-04T17:18:09.412682Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:09.412722Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-04T17:18:09.412765Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4412, MsgBus: 30132 2025-04-04T17:17:18.941850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508301491152907:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:18.942993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e5/r3tmp/tmprEDFw4/pdisk_1.dat 2025-04-04T17:17:19.231844Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4412, node 1 2025-04-04T17:17:19.314817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:19.314942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:19.318293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:19.335346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:19.335367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:19.335378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:19.335513Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30132 TClient is connected to server localhost:30132 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:19.844092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-04-04T17:17:19.855594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:19.867178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:20.047311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:20.198340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:20.289161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:21.968316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508314376056577:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:21.968459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.260919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.289107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.313988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.384037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.418066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.450675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:22.546526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508318671024390:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.546628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.547783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508318671024395:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:22.551436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:22.561781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508318671024398:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:22.656070Z node 1 :TX_PROXY ERROR: Actor# [1:7489508318671024454:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:23.457151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:23.941666Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508301491152907:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:23.950854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:24.612834Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508327260959893:2531], SessionActorId: [1:7489508327260959852:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 . sessionActorId=[1:7489508327260959852:2531]. isRollback=0 2025-04-04T17:17:24.639758Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWZlYzBhMjUtODdiNTZkY2YtZThhYzE1ZWQtN2E2YWJm, ActorId: [1:7489508327260959852:2531], ActorState: ExecuteState, TraceId: 01jr0wxbg835wm5h0q157abxz3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489508327260959894:2531] from: [1:7489508327260959893:2531] 2025-04-04T17:17:24.639853Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508327260959894:2531] TxId: 281474976710672. Ctx: { TraceId: 01jr0wxbg835wm5h0q157abxz3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZlYzBhMjUtODdiNTZkY2YtZThhYzE1ZWQtN2E2YWJm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 } 2025-04-04T17:17:24.639990Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508327260959902:2544], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YWZlYzBhMjUtODdiNTZkY2YtZThhYzE1ZWQtN2E2YWJm. TraceId : 01jr0wxbg835wm5h0q157abxz3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489508327260959894:2531], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:17:24.640618Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWZlYzBhMjUtODdiNTZkY2YtZThhYzE1ZWQtN2E2YWJm, ActorId: [1:7489508327260959852:2531], ActorState: ExecuteState, TraceId: 01jr0wxbg835wm5h0q157abxz3, Create QueryResponse for error on request, msg:
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 Trying to start YDB, gRPC: 4734, MsgBus: 24318 2025-04-04T17:17:25.406600Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508329586635346:2126];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:25.432002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e5/r3tmp/tmpijhnPe/pdisk_1.dat 2025-04-04T17:17:25.543124Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:25.558801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:25.558887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:25.562926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4734, node 2 2025-04-04T17:17:25.611634Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:25.611670Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:25.611678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:25.611793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24318 TCl ... 76715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:34.622755Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:34.653545Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:34.682034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:34.724966Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508371993055392:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:34.725043Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:34.725055Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508371993055397:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:34.728219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:34.736487Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508371993055399:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:34.809390Z node 3 :TX_PROXY ERROR: Actor# [3:7489508371993055452:3435] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:36.495839Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508359108151219:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:36.495896Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:46.577143Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:46.577176Z node 3 :IMPORT WARN: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=NmQ2YzQ5OWMtZDU1Y2JmYTUtMjMyM2NhZmQtYjc1ZmQxMjc= Trying to start YDB, gRPC: 13325, MsgBus: 26147 2025-04-04T17:17:55.501152Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508460656988844:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:55.501239Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015e5/r3tmp/tmpf6gyXs/pdisk_1.dat 2025-04-04T17:17:55.631543Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:55.667832Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:55.667921Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:55.669986Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13325, node 4 2025-04-04T17:17:55.720083Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:55.720110Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:55.720118Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:55.720268Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26147 TClient is connected to server localhost:26147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:56.201217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:56.220164Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:56.285766Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:17:56.485901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:56.573903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:58.781804Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508473541892514:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:58.781898Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:58.821715Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:58.854590Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:58.889388Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:58.916587Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:58.946593Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:59.020835Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:59.070842Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508477836860325:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:59.070944Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:59.071179Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508477836860330:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:59.075672Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:59.086366Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508477836860332:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:17:59.165063Z node 4 :TX_PROXY ERROR: Actor# [4:7489508477836860385:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:00.133502Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:00.501501Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508460656988844:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:00.501615Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:07.763597Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmRlODVjNjctMmQ3ZTI2ZmItNzRkMzhhZjQtMjFlYmExZGE=, ActorId: [4:7489508507901632694:2610], ActorState: ExecuteState, TraceId: 01jr0wymjg7s91wv18egbe6dwf, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240924 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2025-04-04T17:18:06.991675Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.095058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.095118Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.101191Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.101628Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.101885Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.112816Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.159949Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.160049Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.161725Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.161792Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.161840Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.162279Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.162561Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.162638Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:18:07.227891Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.269396Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.269565Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.269664Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.269708Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.269745Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:18:07.269781Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.269925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.269974Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.270208Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.270355Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.270469Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.270508Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.270602Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:07.270657Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.270691Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.270736Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.270777Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.270866Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.270897Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.270944Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.273504Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.273569Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.273635Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.273762Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.273813Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.273867Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:18:07.273908Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.273942Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.273978Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.274013Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.274342Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.274390Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.274426Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:07.274458Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.274497Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:18:07.274524Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.274558Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 
9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.274602Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.274628Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.288644Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.288733Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.288768Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.288802Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.295347Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.295859Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295906Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295949Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.296072Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.296105Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.296210Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.296250Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.296309Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.296346Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.299584Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.299642Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.299876Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299910Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299957Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.299999Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.300030Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.300065Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:18:07.300098Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:18:07.300148Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.300196Z 
node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.300236Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.300267Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.300408Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:18:07.300457Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.300481Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.300501Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.300522Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.300570Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.300597Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:18:07.300630Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:07.300680Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:07.300724Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:18:07.300756Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:18:07.300810Z node 1 :TX_DATASHARD TR ... 
503016Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:09.503050Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2025-04-04T17:18:09.503087Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:09.506468Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:98:2133], Recipient [3:234:2227]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 98 RawX2: 12884904021 } 2025-04-04T17:18:09.506519Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-04T17:18:09.506787Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:302:2284], Recipient [3:234:2227]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:09.506820Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:09.506861Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:301:2283], serverId# [3:302:2284], sessionId# [0:0:0] 2025-04-04T17:18:09.507050Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:98:2133], Recipient [3:234:2227]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 98 RawX2: 12884904021 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\0 2025-04-04T17:18:09.519367Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:09.519450Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:09.520132Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2025-04-04T17:18:09.520217Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-04T17:18:09.520252Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2025-04-04T17:18:09.520285Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:09.520317Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:09.520357Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:09.520407Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2025-04-04T17:18:09.520441Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-04T17:18:09.520463Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:18:09.520485Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:09.520507Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:09.521057Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:09.521124Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:09.521182Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-04T17:18:09.521207Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:09.521259Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:09.521286Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2025-04-04T17:18:09.521329Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:18:09.521388Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2025-04-04T17:18:09.521419Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:09.521451Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:09.521480Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2025-04-04T17:18:09.521523Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-04-04T17:18:09.521543Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:09.521567Z node 3 
:TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2025-04-04T17:18:09.521616Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:09.521648Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-04-04T17:18:09.521705Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> KqpUniqueIndex::InsertNullInPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-04-04T17:18:06.991692Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.105476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.105537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.113517Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.114014Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.114321Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.125494Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.166870Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.166957Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.168587Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.168653Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.168721Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.169058Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.169332Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.169406Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:18:07.234545Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.266203Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.266417Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.266514Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.266549Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.266607Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:18:07.266676Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.266841Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.266902Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2025-04-04T17:18:07.267173Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.267278Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.267388Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.267428Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.267483Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:07.267523Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.267557Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.267606Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.267650Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.267743Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.267775Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.267824Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.270352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.270413Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.270488Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.270655Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.270707Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.270762Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:18:07.270810Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.270845Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.270877Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.270909Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.271217Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.271255Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.271291Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:07.271324Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.271371Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 
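
The TX_DATASHARD TRACE lines above and below show each transaction being advanced through a fixed chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails, ProtectSchemeEchoes, BuildAndWaitDependencies, ExecuteDataTx, CompleteOperation, CompletedOperations), with per-unit statuses such as Executed, DelayComplete, Restart, and NotReady deciding whether the operation advances, defers its completion, or parks. A minimal sketch of that pattern follows; it is illustrative only, and every name in it is hypothetical rather than taken from ydb/core/tx/datashard:

    // Illustrative sketch (NOT YDB's real code) of the execution-unit
    // pipeline visible in the TX_DATASHARD traces in this log.
    #include <iostream>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        EStatus (*Run)();  // each unit decides how the operation proceeds
    };

    int main() {
        // Order mirrors the scheme-tx trace above: CheckSchemeTx ->
        // StoreSchemeTx -> FinishPropose -> WaitForPlan.
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},
        };
        for (const auto& unit : plan) {
            std::cout << "Trying to execute [0:1] on unit " << unit.Name << "\n";
            switch (unit.Run()) {
                case EStatus::Executed:
                    std::cout << "  Executed, advancing to next unit\n";
                    break;
                case EStatus::DelayComplete:
                    // Completion work runs later, which is why the log prints
                    // "Complete execution for [0:1] ..." only after
                    // TTxProposeTransactionBase::Complete.
                    std::cout << "  DelayComplete, advancing; completion deferred\n";
                    break;
                case EStatus::NotReady:
                    std::cout << "  NotReady, parking until TEvPlanStep arrives\n";
                    return 0;  // operation waits in WaitForPlan
            }
        }
        return 0;
    }

Run as-is, this prints a trace similar in shape to the log lines here; the DelayComplete branch mirrors how FinishPropose completion is deferred until the tablet's Complete phase.
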
2025-04-04T17:18:07.271407Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.271454Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.271489Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.271513Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.288384Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.288489Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.288532Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.288582Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.294414Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.295010Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295065Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295110Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.295265Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.295315Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.295451Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.295496Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.295533Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.295582Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.299107Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.299178Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.299380Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299442Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299494Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.299531Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.299564Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.299602Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-04T17:18:07.299637Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit 
PlanQueue 2025-04-04T17:18:07.299680Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.299717Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.299752Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.299807Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.299921Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2025-04-04T17:18:07.299952Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.299982Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.300012Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.300063Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.300326Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.300368Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.300493Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-04T17:18:07.300527Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.300570Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.300621Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.300673Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.300729Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.300753Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEc ... 
0534} resubmit=1) 2025-04-04T17:18:10.545454Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2313]) to queue queue_transaction 2025-04-04T17:18:10.545492Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2313]) from queue queue_transaction 2025-04-04T17:18:10.545519Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2313]) to queue queue_transaction 2025-04-04T17:18:10.545549Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 1.881864 to 18.818640 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2313])) 2025-04-04T17:18:10.545606Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2227]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2025-04-04T17:18:10.545633Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2227]) to queue queue_transaction 2025-04-04T17:18:10.545661Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2227]) from queue queue_transaction 2025-04-04T17:18:10.545683Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2227]) to queue queue_transaction 2025-04-04T17:18:10.545709Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2227])) 2025-04-04T17:18:10.545757Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-04T17:18:10.545797Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-04-04T17:18:10.545846Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-04T17:18:10.545868Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-04-04T17:18:10.546523Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2025-04-04T17:18:10.810907Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-04-04T17:18:10.810968Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:10.811014Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-04-04T17:18:10.811037Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-04-04T17:18:10.811057Z node 3 :TX_DATASHARD TRACE: 
Add [6:5] at 9437185 to execution unit CompleteOperation 2025-04-04T17:18:10.811082Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2025-04-04T17:18:10.811246Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 2025-04-04T17:18:10.811263Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-04-04T17:18:10.811279Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-04-04T17:18:10.811295Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-04-04T17:18:10.811312Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2025-04-04T17:18:10.811325Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-04-04T17:18:10.811341Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2025-04-04T17:18:10.811358Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:10.811373Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:18:10.811390Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-04T17:18:10.811405Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-04T17:18:10.811475Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-04T17:18:10.811524Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-04T17:18:10.811633Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:10.811652Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:10.812263Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2025-04-04T17:18:11.061312Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:11.061367Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:11.061411Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:11.061434Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:11.061454Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-04-04T17:18:11.061475Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-04-04T17:18:11.061638Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2025-04-04T17:18:11.061655Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-04-04T17:18:11.061674Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:11.061691Z node 3 :TX_DATASHARD TRACE: Trying to 
execute [6:5] at 9437184 on unit CompletedOperations 2025-04-04T17:18:11.061711Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2025-04-04T17:18:11.061725Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:11.061744Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2025-04-04T17:18:11.061762Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:11.061780Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:11.061797Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:11.061819Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:11.061899Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-04-04T17:18:11.061934Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-04-04T17:18:11.062048Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2313]) (release resources {0, 96990534}) 2025-04-04T17:18:11.062084Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:368:2313])) 2025-04-04T17:18:11.062166Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2227]) (release resources {0, 96990534}) 2025-04-04T17:18:11.062188Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:256:2227])) 2025-04-04T17:18:11.075042Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-04-04T17:18:11.075105Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:18:11.075142Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-04-04T17:18:11.075193Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:98:2133], exec latency: 1 ms, propose latency: 3 ms 2025-04-04T17:18:11.075274Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-04T17:18:11.075325Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-04-04T17:18:11.075559Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-04-04T17:18:11.075596Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:11.075625Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-04-04T17:18:11.075663Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:98:2133], exec latency: 1 ms, propose 
latency: 3 ms 2025-04-04T17:18:11.075709Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-04T17:18:11.075736Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:11.075940Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:346:2313], Recipient [3:457:2399]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-04-04T17:18:11.075979Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:11.076033Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-04-04T17:18:11.076117Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:234:2227], Recipient [3:457:2399]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-04-04T17:18:11.076141Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:11.076163Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 >> KqpIndexes::ExplainCollectFullDiagnostics >> KqpUniqueIndex::UpdateFkSameValue >> KqpIndexes::WriteWithParamsFieldOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 3396, MsgBus: 13433 2025-04-04T17:17:10.811642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508268335453001:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:10.813187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f2/r3tmp/tmpNg1a4G/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3396, node 1 2025-04-04T17:17:11.221512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:11.221636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:11.221783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:11.225528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:11.306754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:11.306783Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:11.306788Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:11.306879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13433 TClient is connected to server localhost:13433 WaitRootIsUp 'Root'... 
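
The TX_DATASHARD TRACE lines above walk operation [6:5] unit by unit: each execution unit reports a status, and that status decides whether the plan advances (Executed), the same unit runs again (Restart), or completion is deferred until the tablet transaction commits (DelayComplete, replayed later as "Complete execution for [6:5] ... on unit CompleteOperation"). Below is a minimal C++ sketch of that dispatch loop; the unit names and statuses are taken from the trace, but every type and function name is invented for illustration and is not YDB's actual execution-unit code.

    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <string>

    // Illustrative status values mirroring the ones visible in the trace above.
    enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete, Restart };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;   // one attempt of this unit
    };

    // Runs units in order, re-running a unit after Restart, and remembering
    // DelayComplete units so their completion can be replayed at commit time --
    // the same shape as "Add ... to execution unit X" / "Complete execution ... on unit X".
    void RunPlan(std::deque<TUnit> plan) {
        std::deque<std::string> delayed;
        while (!plan.empty()) {
            TUnit unit = plan.front();
            std::printf("Trying to execute on unit %s\n", unit.Name.c_str());
            EStatus st = unit.Execute();
            if (st == EStatus::Restart) {
                std::printf("status is Restart, retrying %s\n", unit.Name.c_str());
                continue;                       // same unit runs again
            }
            if (st == EStatus::DelayComplete) {
                delayed.push_back(unit.Name);   // completion deferred to commit time
            }
            plan.pop_front();                   // "Advance execution plan"
        }
        for (const auto& name : delayed) {
            std::printf("Complete execution on unit %s\n", name.c_str());
        }
    }

    int main() {
        int attempts = 0;
        RunPlan({
            {"ExecuteDataTx", [&attempts] {
                return ++attempts < 2 ? EStatus::Restart
                                      : EStatus::ExecutedNoMoreRestarts; }},
            {"CompleteOperation", [] { return EStatus::DelayComplete; }},
            {"CompletedOperations", [] { return EStatus::Executed; }},
        });
    }

Note how DelayComplete explains the ordering in the log above: the "Complete execution for [6:5] ... on unit CompleteOperation" lines appear only after "commited cookie 1 for step 9", i.e. after the underlying tablet transaction has committed.
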
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:11.943913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:11.960361Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:11.970480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:12.126986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.270134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:12.362555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.924228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508281220356471:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:13.924575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.138391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.168455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.234551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.268854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.332498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.364254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:14.405561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508285515324281:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.405631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.405643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508285515324286:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:14.408939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:14.417598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508285515324288:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:14.515887Z node 1 :TX_PROXY ERROR: Actor# [1:7489508285515324342:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:15.401286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:15.808099Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508268335453001:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:15.808140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:18.940242Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 70000 DataSize: 11562840 IndexSize: 55894 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T17:17:26.218445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:26.218478Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:30.240916Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 280000 DataSize: 46171080 IndexSize: 223100 ByKeyFilterSize: 0 RowCountHistogram: 2 DataSizeHistogram: 4 2025-04-04T17:17:40.463069Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 445000 DataSize: 74205920 IndexSize: 358696 ByKeyFilterSize: 0 RowCountHistogram: 4 DataSizeHistogram: 7 2025-04-04T17:17:42.678677Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787062668, txId: 281474976710672] shutting down 2025-04-04T17:17:42.733895Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-04T17:17:43.911698Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787063904, txId: 281474976710674] shutting down 2025-04-04T17:17:45.090538Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787065082, txId: 281474976710676] shutting down 2025-04-04T17:17:46.274144Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787066266, txId: 281474976710678] shutting down 2025-04-04T17:17:47.496326Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787067484, txId: 281474976710680] shutting down 2025-04-04T17:17:48.672565Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787068665, txId: 281474976710682] shutting down 2025-04-04T17:17:49.844080Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787069836, txId: 281474976710684] shutting down 2025-04-04T17:17:51.017294Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787071011, txId: 281474976710686] shutting down 2025-04-04T17:17:52.183795Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787072176, txId: 281474976710688] shutting down 2025-04-04T17:17:53.322803Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787073316, txId: 281474976710690] shutting down 2025-04-04T17:17:54.494220Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787074487, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x18B8D15F 1. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x18273E98 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x18285F67 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x18285F67 4. /-S/contrib/libs/cxxsupp/libcxx/include/__ ... Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-04-04T17:18:03.440008Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzZjNzdmOTMtY2YwNGU3YTctMmIzMmU1ZDAtY2U0NWU5NmE=, ActorId: [2:7489508494850699397:2488], ActorState: ExecuteState, TraceId: 01jr0wyhn47cfbpf5epdf9f6wh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-04-04T17:18:03.456797Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489508494850699418:2500], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2025-04-04T17:18:03.456987Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzZjNzdmOTMtY2YwNGU3YTctMmIzMmU1ZDAtY2U0NWU5NmE=, ActorId: [2:7489508494850699397:2488], ActorState: ExecuteState, TraceId: 01jr0wyhnn4tnxz94q2f5vgs3t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 8971, MsgBus: 8710 2025-04-04T17:18:04.219103Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508501108788680:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:04.219172Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015f2/r3tmp/tmpyX7upb/pdisk_1.dat 2025-04-04T17:18:04.299106Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8971, node 3 2025-04-04T17:18:04.351028Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:04.351051Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:04.351058Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:04.351172Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:04.354815Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:04.354877Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:04.356429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8710 TClient is connected to server localhost:8710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:04.718559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:04.728945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:04.767972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
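
The GENERIC_ERROR bodies above are nested issue trees: a root "Type annotation, code: 1030" issue carrying position-tagged children such as ":7:22: Error: At function: ==" and the "Uncompatible types in compare" leaves. Below is a minimal sketch of representing and printing such a tree; the nesting is inferred from the repeated SqlProjectItem prefixes, and TIssue here is an illustrative stand-in, not the real NYql issue API.

    #include <cstdio>
    #include <string>
    #include <vector>

    // A position-tagged diagnostic with sub-issues, similar in shape to the
    // compile errors printed above (row:column plus a chain of "At function" frames).
    struct TIssue {
        int Row = 0, Col = 0;          // 0 means "no position", like the root issue
        std::string Message;
        std::vector<TIssue> Children;
    };

    void Print(const TIssue& issue, int depth = 0) {
        std::printf("%*s", depth * 4, "");
        if (issue.Row > 0)
            std::printf(":%d:%d: ", issue.Row, issue.Col);
        std::printf("Error: %s\n", issue.Message.c_str());
        for (const auto& child : issue.Children)
            Print(child, depth + 1);
    }

    int main() {
        // One branch reconstructed from the log (message text verbatim,
        // nesting inferred):
        TIssue root{0, 0, "Type annotation, code: 1030",
            {TIssue{5, 13, "At function: SqlProjectItem",
                {TIssue{6, 22, "At function: ==",
                    {TIssue{6, 22,
                        "Uncompatible types in compare: Optional '==' Int32", {}}}}}}}};
        Print(root);
    }
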
2025-04-04T17:18:04.882534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:04.955258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:06.957344Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508509698725036:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:06.957616Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:06.976880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:07.004671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:07.032764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:07.056495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:07.082916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:07.114342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:07.191364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508513993692846:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:07.191431Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:07.191483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508513993692851:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:07.194987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:07.202115Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508513993692853:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:07.279123Z node 3 :TX_PROXY ERROR: Actor# [3:7489508513993692909:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> KqpIndexes::VectorIndexOrderByCosineDistanceNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-04-04T17:18:06.991646Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.089377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.089462Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.098624Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.099272Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.099632Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.112017Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.150363Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.150440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.152268Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.152349Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.152390Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.154432Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.154739Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.154815Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:18:07.238970Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.265936Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.266093Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.266180Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.266212Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.266282Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:18:07.266314Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.266465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.266529Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
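
Throughout these traces an operation is identified by a [step:txId] pair: [0:1] and [0:3] while a proposal is still immediate (step 0 stands for unplanned operations in ids like [0:3] in this log), and [1000001:1] once the transaction is planned at mediator step 1000001. Keeping a plan queue ordered by (step, txId) yields the global execution order the PlanQueue lines below follow. The sketch assumes a plain std::set and a toy TOpId type; it says nothing about the real TPlanQueueUnit internals.

    #include <cstdint>
    #include <cstdio>
    #include <set>
    #include <tuple>

    // Operation id in the [step:txId] form used by the traces.
    struct TOpId {
        uint64_t Step = 0;
        uint64_t TxId = 0;
        bool operator<(const TOpId& other) const {
            return std::tie(Step, TxId) < std::tie(other.Step, other.TxId);
        }
    };

    int main() {
        std::set<TOpId> planQueue{{1000001, 1}, {6, 5}, {1000001, 2}};
        // Pops in global order: everything at step 6 before step 1000001,
        // txId breaking ties within a step.
        while (!planQueue.empty()) {
            TOpId next = *planQueue.begin();
            planQueue.erase(planQueue.begin());
            std::printf("Found ready operation [%llu:%llu] in PlanQueue\n",
                        (unsigned long long)next.Step, (unsigned long long)next.TxId);
        }
    }
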
2025-04-04T17:18:07.266769Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.266863Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.266972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.267006Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.267053Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:07.267082Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.267112Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.267157Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.267196Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.267277Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.267309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.267353Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.269108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.269148Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.269208Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.269365Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.269412Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.269457Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:18:07.269499Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.269530Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.269559Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.269593Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.269855Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.269891Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.269915Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:07.269947Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.269980Z node 1 :TX_DATASHARD TRACE: Execution status for 
[0:1] at 9437184 is DelayComplete 2025-04-04T17:18:07.269996Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.270020Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.270040Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.270055Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.281964Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.282047Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.282090Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.282149Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.294319Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.295329Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295397Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295436Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.295548Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.295576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.295685Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.295737Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.295779Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.295814Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.298806Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.298864Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.299057Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299091Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299126Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.299155Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.299195Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.299227Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 
2025-04-04T17:18:07.299259Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:18:07.299295Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.299332Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.299382Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.299413Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.299533Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:18:07.299560Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.299576Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.299591Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.299604Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.299645Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.299666Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:18:07.299691Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:07.299726Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:07.299762Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:18:07.299791Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:18:07.299814Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 
v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:11.087957Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-04T17:18:11.088003Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:11.088029Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:18:11.088051Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:11.088076Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:11.088122Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:11.088176Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2025-04-04T17:18:11.088216Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-04T17:18:11.088350Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:11.088395Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:11.088444Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:11.118059Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:11.118142Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:11.118202Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:11.118261Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:11.118296Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:11.118329Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-04T17:18:11.118394Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:11.118412Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:11.118444Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:11.118472Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-04T17:18:11.118502Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:11.118523Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:11.118556Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-04T17:18:11.151047Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:11.151123Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-04T17:18:11.151169Z node 3 :TX_DATASHARD TRACE: Propose transaction 
complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-04-04T17:18:11.151251Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:12.052069Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:98:2133], Recipient [3:233:2226]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 98 RawX2: 12884904021 } 2025-04-04T17:18:12.052144Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-04T17:18:12.052535Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:298:2279], Recipient [3:233:2226]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:12.052575Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:12.052628Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:297:2278], serverId# [3:298:2279], sessionId# [0:0:0] 2025-04-04T17:18:12.224860Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:98:2133], Recipient [3:233:2226]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 98 RawX2: 12884904021 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-04-04T17:18:12.227059Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:12.227187Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:12.277879Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-04-04T17:18:12.277975Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-04T17:18:12.278029Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-04-04T17:18:12.278072Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:12.278111Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:12.278163Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:12.278220Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2025-04-04T17:18:12.278279Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-04T17:18:12.278305Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit 
BuildAndWaitDependencies 2025-04-04T17:18:12.278328Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:12.278353Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:12.278405Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:12.278453Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2025-04-04T17:18:12.278490Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-04T17:18:12.278640Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:12.278690Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:12.278739Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:12.308771Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2025-04-04T17:18:12.315113Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2025-04-04T17:18:12.315230Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-04-04T17:18:12.315653Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:12.315697Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:12.379991Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2025-04-04T17:18:12.380090Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:12.467727Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:12.467843Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:12.467926Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:12.467974Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:12.468015Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:12.468058Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-04-04T17:18:12.468107Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2025-04-04T17:18:12.468138Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:12.468177Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:12.468213Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CompletedOperations 
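
The passage above shows the memory-grant negotiation: ExecuteDataTx first "requested 46269638 more memory", then "exceeded memory limit 50463942 and requests 403711536 more for the next try", releasing the tx data and restoring it on the retry, with status Restart in between. A generic sketch of that acquire/release/retry-with-a-larger-grant loop follows; TQuota and its constants are assumptions for illustration, not the resource broker's real interface.

    #include <cstdint>
    #include <cstdio>
    #include <stdexcept>

    // Toy quota: grants memory until a hard cap, like a per-task broker limit.
    struct TQuota {
        uint64_t Cap;
        uint64_t Granted = 0;
        bool TryAcquire(uint64_t more) {
            if (Granted + more > Cap) return false;
            Granted += more;
            return true;
        }
    };

    // One execution attempt reports how much *additional* memory it wants,
    // or 0 when it finished -- mirroring "requested N more memory" / "Restart".
    uint64_t ExecuteAttempt(uint64_t granted) {
        const uint64_t needed = 96990534;      // figure taken from the trace above
        return granted >= needed ? 0 : needed - granted;
    }

    int main() {
        TQuota quota{1ull << 30};              // assumed 1 GiB cap
        for (int attempt = 1;; ++attempt) {
            uint64_t more = ExecuteAttempt(quota.Granted);
            if (more == 0) {
                std::printf("attempt %d: executed with %llu bytes\n",
                            attempt, (unsigned long long)quota.Granted);
                break;                          // ExecutedNoMoreRestarts
            }
            std::printf("attempt %d: requests %llu more, restarting\n",
                        attempt, (unsigned long long)more);
            if (!quota.TryAcquire(more))
                throw std::runtime_error("memory limit exceeded");
            // Real code releases the tx data here and restores it on the retry
            // ("tx 3 released its data" / "tx 3 at 9437184 restored its data").
        }
    }
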
2025-04-04T17:18:12.468266Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-04-04T17:18:12.468297Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:12.468336Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2025-04-04T17:18:12.564664Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:12.564742Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-04-04T17:18:12.564800Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 2 ms, status: COMPLETE 2025-04-04T17:18:12.564892Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:12.635543Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:18:12.635615Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-04T17:18:12.641707Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:231:2225], Recipient [3:233:2226]: NKikimr::TEvTablet::TEvFollowerGcApplied >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink [GOOD] >> KqpIndexes::DuplicateUpsertInterleave >> KqpIndexes::SelectConcurentTX [GOOD] >> KqpIndexes::SelectConcurentTX2 >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpUniqueIndex::InsertFkPkOverlap >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> KqpIndexes::MultipleSecondaryIndex+UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndex-UseSink >> KqpIndexes::UpdateIndexSubsetPk [GOOD] >> KqpIndexes::UpdateOnReadColumns >> KqpUniqueIndex::InsertFkAlreadyExist >> KqpUniqueIndex::ReplaceFkPartialColumnSet [GOOD] >> KqpUniqueIndex::UpdateFkAlreadyExist >> KqpUniqueIndex::InsertNullInComplexFk [GOOD] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk >> KqpIndexes::ForbidViewModification [GOOD] >> KqpIndexes::IndexOr >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn >> KqpIndexes::UpsertMultipleUniqIndexes [GOOD] >> KqpIndexes::UpsertNoIndexColumns >> KqpIndexes::ExplainCollectFullDiagnostics [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink >> KqpIndexes::WriteWithParamsFieldOrder [GOOD] >> KqpMultishardIndex::CheckPushTopSort >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn >> KqpUniqueIndex::InsertNullInPk [GOOD] >> 
KqpUniqueIndex::InsertNullInFk >> KqpLimits::ReplySizeExceeded [GOOD] >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> KqpIndexes::DeleteByIndex >> KqpIndexes::DuplicateUpsertInterleave [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 10196, MsgBus: 10429 2025-04-04T17:17:22.856463Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508319989061666:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:22.856560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015d3/r3tmp/tmpT9vlXP/pdisk_1.dat 2025-04-04T17:17:23.334182Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:23.336083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:23.336175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:23.339489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10196, node 1 2025-04-04T17:17:23.452853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:23.452877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:23.452882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:23.452978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10429 TClient is connected to server localhost:10429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:24.042895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
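
The KQP_WORKLOAD_SERVICE pattern repeated in these test logs — "Resource pool default not found", then "Scheduled retry for error: ... Transaction ... completed, doublechecking", then a TX_PROXY "path exist, request accepts it" — amounts to an idempotent get-or-create with a verification retry: create the pool, then re-fetch to confirm it is actually visible before using it. A minimal sketch with stand-in functions follows; every name below is an assumption, not the workload-service API.

    #include <cstdio>
    #include <set>
    #include <string>

    // Stand-ins for "fetch pool info" and "create pool"; the set plays the scheme store.
    std::set<std::string> existingPaths;

    bool FetchPool(const std::string& path) {        // NOT_FOUND -> false
        return existingPaths.count(path) > 0;
    }

    void CreatePool(const std::string& path) {
        // "path exist, request accepts it": creation is idempotent.
        existingPaths.insert(path);
    }

    // Get-or-create with a doublecheck: after the create transaction completes,
    // re-fetch to confirm the pool is visible before declaring it ready.
    bool EnsurePool(const std::string& path, int maxRetries = 3) {
        for (int i = 0; i <= maxRetries; ++i) {
            if (FetchPool(path)) return true;        // found: done
            std::printf("pool %s not found, creating\n", path.c_str());
            CreatePool(path);
            std::printf("create completed, doublechecking (retry %d)\n", i + 1);
            // loop re-fetches; a real service would also back off here
        }
        return false;
    }

    int main() {
        if (EnsurePool("/Root/.metadata/workload_manager/pools/default"))
            std::printf("pool ready\n");
    }
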
2025-04-04T17:17:24.056005Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:24.073251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:24.245188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:24.404168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:17:24.469902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:26.010592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508337168932468:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.010734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.248329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.282117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.312452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.344166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.377689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.421854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:26.505857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508337168932981:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.505945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.506410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508337168932986:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:26.509675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:26.519715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508337168932989:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:26.623202Z node 1 :TX_PROXY ERROR: Actor# [1:7489508337168933044:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:27.488776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:27.851065Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508319989061666:2223];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:27.851128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:30.616598Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDY5ZmE3NmYtZGJiNDMxNTEtYTJiZjBmOWYtZDQ4NTJkNA==, ActorId: [1:7489508354348803411:2601], ActorState: ExecuteState, TraceId: 01jr0wxh1j0hzs63nrw9h9vzdy, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001703 > 50331648), code: 2013 Trying to start YDB, gRPC: 16658, MsgBus: 27808 2025-04-04T17:17:31.511413Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508359505044885:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:31.511506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015d3/r3tmp/tmpkt9Loi/pdisk_1.dat 2025-04-04T17:17:31.636863Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:31.660806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 16658, node 2 2025-04-04T17:17:31.660885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:31.662766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:31.715116Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:31.715141Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:31.715149Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:31.715273Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27808 TClient is connected to server localhost:27808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:17:32.079866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:17:32.099042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:1 ... 
ode 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:52.265897Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:55.193372Z node 3 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 280000 DataSize: 46331580 IndexSize: 223950 ByKeyFilterSize: 0 RowCountHistogram: 2 DataSizeHistogram: 4 2025-04-04T17:18:06.482053Z node 3 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 465000 DataSize: 77555440 IndexSize: 374880 ByKeyFilterSize: 0 RowCountHistogram: 4 DataSizeHistogram: 7 2025-04-04T17:18:07.293245Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmI3MTI1MTEtZTkzMThiODYtMWE5MzM4ODItYWRlZGJlOGU=, ActorId: [3:7489508507935643820:2727], ActorState: ExecuteState, TraceId: 01jr0wynaj5vstfqhsm70z1jgz, Create QueryResponse for error on request, msg:
: Error: Query did not complete within specified timeout 100ms, session id ydb://session/3?node_id=3&id=YmI3MTI1MTEtZTkzMThiODYtMWE5MzM4ODItYWRlZGJlOGU= 2025-04-04T17:18:07.374907Z node 3 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 7 2025-04-04T17:18:07.540272Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489508512230611215:2727] TxId: 281474976715674. Ctx: { TraceId: 01jr0wynh3f8vb5k8qgg31ksfw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmI3MTI1MTEtZTkzMThiODYtMWE5MzM4ODItYWRlZGJlOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 144ms during execution } ] 2025-04-04T17:18:07.540981Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489508512230611233:2760], TxId: 281474976715674, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YmI3MTI1MTEtZTkzMThiODYtMWE5MzM4ODItYWRlZGJlOGU=. TraceId : 01jr0wynh3f8vb5k8qgg31ksfw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7489508512230611215:2727], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:07.734677Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489508512230611230:2758], TxId: 281474976715674, task: 7. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wynh3f8vb5k8qgg31ksfw. SessionId : ydb://session/3?node_id=3&id=YmI3MTI1MTEtZTkzMThiODYtMWE5MzM4ODItYWRlZGJlOGU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7489508512230611215:2727], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:07.740642Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmI3MTI1MTEtZTkzMThiODYtMWE5MzM4ODItYWRlZGJlOGU=, ActorId: [3:7489508507935643820:2727], ActorState: ExecuteState, TraceId: 01jr0wynh3f8vb5k8qgg31ksfw, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 144ms during execution Trying to start YDB, gRPC: 2042, MsgBus: 7835 2025-04-04T17:18:09.105950Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508518660987797:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:09.128118Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015d3/r3tmp/tmp8n5j0R/pdisk_1.dat 2025-04-04T17:18:09.325971Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:09.361534Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:09.361635Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:09.363330Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2042, node 4 2025-04-04T17:18:09.456704Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:09.456729Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:09.456738Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:09.456902Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7835 TClient is connected to server localhost:7835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:10.004537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.015283Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:10.026422Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.123408Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:10.296995Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:10.389093Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.937534Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508531545891379:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:12.937620Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:12.988965Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.033494Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.072042Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.148443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.197283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.244713Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.305240Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508535840859195:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.305337Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.305706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489508535840859200:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.308852Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:13.317680Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489508535840859202:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:13.391496Z node 4 :TX_PROXY ERROR: Actor# [4:7489508535840859256:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:14.106353Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489508518660987797:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.106441Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:14.623929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.958410Z node 4 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 200 DataSize: 20488600 IndexSize: 5037 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T17:18:18.128317Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWU2Y2ZhOWMtNTgzMTg0MzktNTg1OWU3ZWMtMWNiNGMyMTg=, ActorId: [4:7489508540135826815:2489], ActorState: ExecuteState, TraceId: 01jr0wyzn3eskdp75p73v3yeat, Create QueryResponse for error on request, msg:
>> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD]
>> KqpUniqueIndex::InsertFkPkOverlap [GOOD]
>> KqpIndexes::SelectConcurentTX2 [GOOD]
>> KqpIndexes::MultipleSecondaryIndex-UseSink [GOOD]
>> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink
>> KqpIndexes::UpsertWithNullKeysComplex [GOOD]
>> KqpUniqueIndex::InsertFkAlreadyExist [GOOD]
>> KqpUniqueIndex::InsertFkDuplicate
>> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink [GOOD]
>> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD]
Test command err: Trying to start YDB, gRPC: 28371, MsgBus: 32157 2025-04-04T17:18:07.264941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508512413461035:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.265054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e5/r3tmp/tmpaN1iFt/pdisk_1.dat 2025-04-04T17:18:07.711168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.711249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.713575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28371, node 1 2025-04-04T17:18:07.735914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.740217Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:18:07.740237Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-04-04T17:18:07.874744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.874856Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32157 TClient is connected to server localhost:32157 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.737658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.750643Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.773827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.938109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.121308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.210316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.468026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508525298364462:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.468217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.879264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.941484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.975394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.000652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.037799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.126618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529593332273:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.126668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.126811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529593332278:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.130341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.142525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508529593332280:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.244187Z node 1 :TX_PROXY ERROR: Actor# [1:7489508529593332336:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.093550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.262133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508512413461035:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.262362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20375, MsgBus: 12398 2025-04-04T17:18:14.865288Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508542114100773:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.866498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e5/r3tmp/tmpUes3XZ/pdisk_1.dat 2025-04-04T17:18:14.989593Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:15.022043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:15.022129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:15.027813Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20375, node 2 2025-04-04T17:18:15.105193Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:15.105214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:15.105221Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:15.105320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12398 TClient is connected to server localhost:12398 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:15.548781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.555133Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:15.568600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:15.628134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.786109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.869112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.850363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508554999004333:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.850446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.879808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.916065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.951005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.982724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.018120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.087766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.172133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508559293972153:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.172240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.172427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508559293972158:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.175759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:18.186514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508559293972160:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:18.243194Z node 2 :TX_PROXY ERROR: Actor# [2:7489508559293972212:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:19.109417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.848182Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508542114100773:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.848256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectConcurentTX2 [GOOD]
Test command err: Trying to start YDB, gRPC: 23795, MsgBus: 17632 2025-04-04T17:18:07.240108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508510377069670:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.241149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d8/r3tmp/tmptZLIxs/pdisk_1.dat 2025-04-04T17:18:07.702780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.702863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.710507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23795, node 1 2025-04-04T17:18:07.770221Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:18:07.770261Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:18:07.771947Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.874103Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874140Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.874287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17632 TClient is connected to server localhost:17632 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.660692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.687093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.707861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.901047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.060905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.153590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.573276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508523261973336:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.573427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.909209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.937748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.969017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.999045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.029648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.108601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527556941146:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.108688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.108886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527556941151:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.111907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.120575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508527556941153:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.187398Z node 1 :TX_PROXY ERROR: Actor# [1:7489508527556941206:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.098489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.242368Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508510377069670:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.242427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11650, MsgBus: 27941 2025-04-04T17:18:14.132643Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508541801812037:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.134106Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d8/r3tmp/tmpEy7hft/pdisk_1.dat 2025-04-04T17:18:14.260691Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:14.286451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.286539Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:14.291436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11650, node 2 2025-04-04T17:18:14.338871Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.338897Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.338904Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.339020Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27941 TClient is connected to server localhost:27941 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:14.793235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.799215Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:14.808333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.889118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.038393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.113642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.257190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508554686715689:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.257285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.299862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.325990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.351872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.381357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.416673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.458764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.538956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508554686716207:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.539022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.539060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508554686716212:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.542280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:17.556977Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T17:18:17.557458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508554686716214:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:17.640095Z node 2 :TX_PROXY ERROR: Actor# [2:7489508554686716269:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.566425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.156750Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508541801812037:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.156818Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:20.266126Z node 2 :TX_DATASHARD ERROR: Complete [1743787100307 : 281474976715681] from 72075186224037920 at tablet 72075186224037920, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:18:20.273055Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjRmODU4YzEtOWViOTM2My1kMGMwODY2ZC0yYzc4Mjk5YQ==, ActorId: [2:7489508558981683823:2488], ActorState: ExecuteState, TraceId: 01jr0wz1kfe40g9bjjef66x0ks, Create QueryResponse for error on request, msg:
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD]
Test command err: Trying to start YDB, gRPC: 24883, MsgBus: 3647 2025-04-04T17:18:07.247111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508513574999630:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.250330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001720/r3tmp/tmp3oAxeo/pdisk_1.dat 2025-04-04T17:18:07.758691Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.760774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.760873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.765351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24883, node 1 2025-04-04T17:18:07.872349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.872372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.872380Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.872467Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3647 TClient is connected to server localhost:3647 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.672863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.703760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.839160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.994845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.085772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.445162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508526459903273:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.445288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.894131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.963289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.994929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.020728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.052723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.101838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508530754871080:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.101893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.102035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508530754871085:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.110520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.121092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508530754871087:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.194025Z node 1 :TX_PROXY ERROR: Actor# [1:7489508530754871141:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.097010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.267493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508513574999630:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.269213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4777, MsgBus: 4655 2025-04-04T17:18:14.187655Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508543283537922:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.187694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001720/r3tmp/tmpirPjJu/pdisk_1.dat 2025-04-04T17:18:14.348283Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.348375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:14.350096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:14.353050Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4777, node 2 2025-04-04T17:18:14.434863Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.434884Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.434894Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.435003Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4655 TClient is connected to server localhost:4655 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:14.884114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.908237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.986294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:15.156380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.246281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.390383Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508556168441570:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.390489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.440158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.467067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.490899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.517053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.546134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.586456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.625854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508556168442081:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.625944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.625964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508556168442086:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.628812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:17.637486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508556168442088:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:17.710749Z node 2 :TX_PROXY ERROR: Actor# [2:7489508556168442143:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.613269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.190331Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508543283537922:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.190390Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 3379, MsgBus: 26119 2025-04-04T17:18:07.241861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508510270137888:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.241916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016da/r3tmp/tmpFZXvw7/pdisk_1.dat 2025-04-04T17:18:07.664399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.704581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.704659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.707144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3379, node 1 2025-04-04T17:18:07.874825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.874982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26119 TClient is connected to server localhost:26119 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.595850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.617649Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.636366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.828161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.995523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.126021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.439126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508523155041569:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.439241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.927101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.963342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.000659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.029824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.075346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.122502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527450009378:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.122593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.122616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527450009383:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.125823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.134787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508527450009385:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.197001Z node 1 :TX_PROXY ERROR: Actor# [1:7489508527450009439:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.113358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.242408Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508510270137888:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.242525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:12.284364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:12.328755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.727121Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 7089, MsgBus: 28220 2025-04-04T17:18:14.306030Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508542818120318:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.306079Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016da/r3tmp/tmp6Z14hQ/pdisk_1.dat 2025-04-04T17:18:14.439650Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:14.470941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.471014Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7089, node 2 2025-04-04T17:18:14.476470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:14.570739Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.570763Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.570770Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.570875Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28220 TClient is connected to server localhost:28220 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:15.128119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.136892Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:15.145047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.230717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.409734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.493747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.378676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508555703023987:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.378833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.396447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.427756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.460140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.493372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.523694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.559153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.602499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508555703024492:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.602593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.602662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508555703024497:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.605804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:17.614360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508555703024499:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:17.717148Z node 2 :TX_PROXY ERROR: Actor# [2:7489508555703024553:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.593040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.278034Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:19.298170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:19.306640Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508542818120318:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.306692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:19.377722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.426825Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:20.445981Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink >> KqpIndexes::IndexOr [GOOD] >> KqpIndexes::IndexFilterPushDown >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin >> KqpIndexes::SelectFromAsyncIndexedTable >> KqpMultishardIndex::DataColumnWrite+UseSink >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> KqpIndexes::UpsertNoIndexColumns [GOOD] >> KqpMultishardIndex::CheckPushTopSort [GOOD] >> KqpIndexes::VectorIndexOrderByCosineSimilarityNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 5184, MsgBus: 28335 2025-04-04T17:18:07.252311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508511903424579:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.252772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001700/r3tmp/tmpHW99yu/pdisk_1.dat 2025-04-04T17:18:07.697312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.697378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-04T17:18:07.699900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:07.714198Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5184, node 1 2025-04-04T17:18:07.870872Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.870898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.870910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.871028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28335 TClient is connected to server localhost:28335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.605846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.624235Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.635174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.826714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.033781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.144199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.606344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508524788328163:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.606466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.885671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.918708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.941977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.973503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.004608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.045609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.103454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529083295967:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.103555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.103845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529083295972:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.107541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.117791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508529083295974:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.187486Z node 1 :TX_PROXY ERROR: Actor# [1:7489508529083296028:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.101897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.255285Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508511903424579:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.255673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10251, MsgBus: 2189 2025-04-04T17:18:15.846854Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508547722634759:2111];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.851127Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001700/r3tmp/tmp5Py26e/pdisk_1.dat 2025-04-04T17:18:15.936217Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:15.954457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:15.954511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:15.955982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10251, node 2 2025-04-04T17:18:15.994509Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:15.994526Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:15.994531Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:15.994640Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2189 TClient is connected to server localhost:2189 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:16.372079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.379105Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:16.394318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.440346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.584652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.648348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.807125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508560607538324:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.807203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.850882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.878351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.907630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.937757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.966095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.035466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.082549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508564902506135:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.082650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.082894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508564902506140:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.087019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:19.096553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508564902506142:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:19.184695Z node 2 :TX_PROXY ERROR: Actor# [2:7489508564902506195:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:20.045114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.846894Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508547722634759:2111];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:20.846947Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:21.717822Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz2tr1bt92vaw3wbct0rc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGU1ZjgwYmMtMjRhMjBkYmQtODY2YjE2MTEtOWNmMGUzMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:21.726176Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGU1ZjgwYmMtMjRhMjBkYmQtODY2YjE2MTEtOWNmMGUzMjg=, ActorId: [2:7489508569197474556:2546], ActorState: ExecuteState, TraceId: 01jr0wz2tr1bt92vaw3wbct0rc, Create QueryResponse for error on request, msg: >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] >> KqpUniqueIndex::InsertNullInFk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 11346, MsgBus: 8724 2025-04-04T17:18:12.319201Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508535029647251:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.319300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c5/r3tmp/tmpl51esG/pdisk_1.dat 2025-04-04T17:18:12.731236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:12.737103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:12.737185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11346, node 1 2025-04-04T17:18:12.741771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:12.795498Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:12.795525Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:12.795539Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:12.795655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:8724 TClient is connected to server localhost:8724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:13.261531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.279174Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:13.298695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.422480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.609855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.698516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.285877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547914550921:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.286027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.524783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.561470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.598582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.634719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.666610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.708033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.770622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547914551437:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.770754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.771014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547914551442:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.776765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:15.786884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508547914551444:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:15.875170Z node 1 :TX_PROXY ERROR: Actor# [1:7489508547914551499:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:16.787209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.319550Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508535029647251:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:17.319606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:17.372471Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 9066, MsgBus: 1143 2025-04-04T17:18:18.121476Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508558158004993:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:18.121582Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c5/r3tmp/tmpC4zYRO/pdisk_1.dat 2025-04-04T17:18:18.214728Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9066, node 2 2025-04-04T17:18:18.249243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:18.249350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:18.250717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:18.283660Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:18.283681Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:18.283688Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:18.283796Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1143 TClient is connected to server localhost:1143 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:18.759494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.767385Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:18.772076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.845661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.004497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.075660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.038905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571042908655:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.038974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.075222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.114218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.147350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.182372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.215688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.250788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.336422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571042909170:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.336482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.336761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571042909175:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.340474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:21.351026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508571042909177:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:21.417192Z node 2 :TX_PROXY ERROR: Actor# [2:7489508571042909232:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:22.325597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.126421Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508558158004993:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:23.126463Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertNoIndexColumns [GOOD] Test command err: Trying to start YDB, gRPC: 31960, MsgBus: 31247 2025-04-04T17:18:07.247274Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508512229187942:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.247948Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ff/r3tmp/tmpEI2kz9/pdisk_1.dat 2025-04-04T17:18:07.715130Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.728247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.728344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.739260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31960, node 1 2025-04-04T17:18:07.874705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874725Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.874828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31247 TClient is connected to server localhost:31247 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.596072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.635071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.814205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.001610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.075788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.479781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508525114091516:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.479896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.843037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.952366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.026020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.056783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.079503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.110225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.153910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529409059326:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.153983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529409059331:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.153995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.156770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.164521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508529409059333:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.254070Z node 1 :TX_PROXY ERROR: Actor# [1:7489508529409059387:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.098825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.242473Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508512229187942:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.242545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:13.352055Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:14.379427Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyvcq2smypb8af5c4d6x7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmViZDEzYWEtZTI5MTQ1ODMtYTRlMTRiMmYtOGM3YTYzZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:14.395502Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmViZDEzYWEtZTI5MTQ1ODMtYTRlMTRiMmYtOGM3YTYzZmE=, ActorId: [1:7489508533704026938:2488], ActorState: ExecuteState, TraceId: 01jr0wyvcq2smypb8af5c4d6x7, Create QueryResponse for error on request, msg: 2025-04-04T17:18:14.501843Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:15.629760Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 31849, MsgBus: 30216 2025-04-04T17:18:16.308112Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508552649704090:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:16.308147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ff/r3tmp/tmp3Oqpvd/pdisk_1.dat 2025-04-04T17:18:16.407007Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31849, node 2 2025-04-04T17:18:16.452763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:16.452844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:16.461022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:16.477743Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:16.477761Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:16.477768Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:16.477874Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30216 TClient is connected to server localhost:30216 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:16.889781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.915068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.997721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.162936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.242213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.358394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508565534607747:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.358505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.405817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.434007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.465469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.496021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.527716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.563053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.610918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508565534608256:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.611037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.611235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508565534608261:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.615303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:19.627034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508565534608263:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:19.687669Z node 2 :TX_PROXY ERROR: Actor# [2:7489508565534608316:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:20.623171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.670733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.724165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.308622Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508552649704090:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:21.308682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 9097, MsgBus: 4061 2025-04-04T17:18:07.247483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508513439555903:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.247527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f0/r3tmp/tmptFDyw0/pdisk_1.dat 2025-04-04T17:18:07.655226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.691811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.692624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.694232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9097, node 1 2025-04-04T17:18:07.874148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874175Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.874299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4061 TClient is connected to server localhost:4061 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.676671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.700774Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.708672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.863107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.052800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.133577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.527401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508526324459556:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.527589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.847366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.891555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.947230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.027252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.064651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.106267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.181490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508530619427372:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.181570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.181644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508530619427377:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.184179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.191804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508530619427379:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.264021Z node 1 :TX_PROXY ERROR: Actor# [1:7489508530619427432:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.198313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.343807Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508513439555903:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.365671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:13.839755Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyv7t5h5szxwftvsj86jp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQ2NDQzZGEtZGM1ZmUyODEtZDgxYzUzMzktYWM2MzAxY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:13.848199Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQ2NDQzZGEtZGM1ZmUyODEtZDgxYzUzMzktYWM2MzAxY2M=, ActorId: [1:7489508534914395792:2547], ActorState: ExecuteState, TraceId: 01jr0wyv7t5h5szxwftvsj86jp, Create QueryResponse for error on request, msg: 2025-04-04T17:18:14.485498Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyvv13nkvd3bgnar94m2y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQ2NDQzZGEtZGM1ZmUyODEtZDgxYzUzMzktYWM2MzAxY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-04T17:18:14.485712Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQ2NDQzZGEtZGM1ZmUyODEtZDgxYzUzMzktYWM2MzAxY2M=, ActorId: [1:7489508534914395792:2547], ActorState: ExecuteState, TraceId: 01jr0wyvv13nkvd3bgnar94m2y, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11512, MsgBus: 5343 2025-04-04T17:18:15.360868Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508546527558079:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.360900Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f0/r3tmp/tmpFhPrHZ/pdisk_1.dat 2025-04-04T17:18:15.550950Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:15.574635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:15.574728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:15.576311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11512, node 2 2025-04-04T17:18:15.722086Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:15.722109Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:15.722116Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:15.722219Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5343 TClient is connected to server localhost:5343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:16.231448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.237153Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:16.243818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:16.324756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.516790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.582702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.479812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508559412461726:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.479909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.523858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.559357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.594953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.630508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.665728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.747308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.808322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508559412462244:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.808414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.808453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508559412462249:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.811677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:18.824359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508559412462251:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:18.926859Z node 2 :TX_PROXY ERROR: Actor# [2:7489508559412462306:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:19.829289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.361110Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508546527558079:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:20.361156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:21.543063Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz2nbb2e1cptfkt6001xz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2E5N2QwYTEtY2Q4NTgwNzMtOTY0NTJjMzMtNjNkNjg4MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:21.543304Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2E5N2QwYTEtY2Q4NTgwNzMtOTY0NTJjMzMtNjNkNjg4MjE=, ActorId: [2:7489508568002397930:2546], ActorState: ExecuteState, TraceId: 01jr0wz2nbb2e1cptfkt6001xz, Create QueryResponse for error on request, msg: 2025-04-04T17:18:22.281104Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz3c68823aappmb52ah34, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2E5N2QwYTEtY2Q4NTgwNzMtOTY0NTJjMzMtNjNkNjg4MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-04T17:18:22.281344Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2E5N2QwYTEtY2Q4NTgwNzMtOTY0NTJjMzMtNjNkNjg4MjE=, ActorId: [2:7489508568002397930:2546], ActorState: ExecuteState, TraceId: 01jr0wz3c68823aappmb52ah34, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14136, MsgBus: 1304 2025-04-04T17:18:10.589863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508524413556402:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:10.590003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016cb/r3tmp/tmpcRnBxo/pdisk_1.dat 2025-04-04T17:18:10.908467Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14136, node 1 2025-04-04T17:18:10.983126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:10.983229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:10.996774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:11.035031Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:11.035054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:11.035069Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:11.035172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1304 TClient is connected to server localhost:1304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:11.485351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:11.504661Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:11.520503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.705606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.874396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:11.938293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.503614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508537298460080:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.503725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.740103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.783835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.867101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.898150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.966422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.007498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.051556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508541593427894:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.051628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.051813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508541593427899:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.054484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:14.065962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508541593427901:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:14.144046Z node 1 :TX_PROXY ERROR: Actor# [1:7489508541593427955:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:14.878932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.594337Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508524413556402:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.594390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18821, MsgBus: 16814 2025-04-04T17:18:17.731247Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508553043298583:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:17.732057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016cb/r3tmp/tmp9lrMIE/pdisk_1.dat 2025-04-04T17:18:17.831579Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18821, node 2 2025-04-04T17:18:17.877745Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:17.877853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:17.885353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:17.908060Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:17.908079Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:17.908087Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:17.908195Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16814 TClient is connected to server localhost:16814 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:18.274921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.279639Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:18.291863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.345891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.467669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.546303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.633851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508565928202155:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:20.633948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:20.683088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.751331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.790928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.855042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.888094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.927249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.976947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508565928202671:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:20.977041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508565928202676:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:20.977054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:20.980543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:20.992097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508565928202678:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:21.084503Z node 2 :TX_PROXY ERROR: Actor# [2:7489508570223170028:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:21.959470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.734342Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508553043298583:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:22.734468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:23.669642Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:23.955885Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInFk [GOOD] Test command err: Trying to start YDB, gRPC: 65268, MsgBus: 30812 2025-04-04T17:18:11.658024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508529938468993:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:11.658094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ca/r3tmp/tmpla4dMx/pdisk_1.dat 2025-04-04T17:18:11.988521Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65268, node 1 2025-04-04T17:18:12.045412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:12.046578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:12.086451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:12.154782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:12.154804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:12.154814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:12.154920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30812 TClient is connected to server localhost:30812 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:12.743571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.760035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:12.891297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.068683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.144572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.625282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508542823372679:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.625364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.857338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.888026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.962010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.035263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.067126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.111889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.199996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547118340497:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.200127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.201187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547118340503:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.205068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:15.216882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508547118340505:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:15.281130Z node 1 :TX_PROXY ERROR: Actor# [1:7489508547118340560:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:16.187362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.658385Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508529938468993:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:16.658484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:17.839425Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508555708276655:2611], TxId: 281474976710681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmFjMmY0YTQtMjZhMWRlOGMtNWYxM2YwLWRkNmM1NzRk. TraceId : 01jr0wyzabfxh2r26jdcd0gemq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:18:17.839754Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508555708276656:2612], TxId: 281474976710681, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YmFjMmY0YTQtMjZhMWRlOGMtNWYxM2YwLWRkNmM1NzRk. CustomerSuppliedId : . TraceId : 01jr0wyzabfxh2r26jdcd0gemq. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508555708276652:2546], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:17.840099Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmFjMmY0YTQtMjZhMWRlOGMtNWYxM2YwLWRkNmM1NzRk, ActorId: [1:7489508551413308898:2546], ActorState: ExecuteState, TraceId: 01jr0wyzabfxh2r26jdcd0gemq, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11989, MsgBus: 23742 2025-04-04T17:18:18.520986Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508560315744553:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:18.521056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ca/r3tmp/tmpPhSHBU/pdisk_1.dat 2025-04-04T17:18:18.611965Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:18.632911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:18.632973Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:18.634285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11989, node 2 2025-04-04T17:18:18.669931Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:18.669950Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:18.669958Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:18.670069Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23742 TClient is connected to server localhost:23742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:19.119665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.128530Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:19.136486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:19.201990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.380701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:19.459738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.416162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508573200647976:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.416245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.452298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.490794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.532367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.576838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.609666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.641752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.692645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508573200648485:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.692715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.692866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508573200648490:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.695811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:21.706427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508573200648493:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:21.796139Z node 2 :TX_PROXY ERROR: Actor# [2:7489508573200648546:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:22.600495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.506350Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508560315744553:2272];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:23.506416Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> KqpIndexes::DeleteByIndex [GOOD] >> KqpMultishardIndex::SortedRangeReadDesc >> KqpIndexes::CheckUpsertNonEquatableType+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] Test command err: Trying to start YDB, gRPC: 19634, MsgBus: 63733 2025-04-04T17:18:07.240909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508511850535729:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.240964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ef/r3tmp/tmpWa0gNX/pdisk_1.dat 2025-04-04T17:18:07.723223Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.748577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.748668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.755494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19634, node 1 2025-04-04T17:18:07.875700Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.875719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.875726Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.875824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63733 TClient is connected to server localhost:63733 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.671425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.726759Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.746530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.888023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.108589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.192575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.570469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508524735439246:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.570582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.841847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.931027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.966836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.050133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.077929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.114445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.184988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529030407061:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.185065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.185110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529030407066:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.187828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.195166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508529030407068:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.289368Z node 1 :TX_PROXY ERROR: Actor# [1:7489508529030407124:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.092934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.241826Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508511850535729:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.242826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28199, MsgBus: 11639 2025-04-04T17:18:14.664653Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508540508539586:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.664692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ef/r3tmp/tmpj8XFJ7/pdisk_1.dat 2025-04-04T17:18:14.813778Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:14.827962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.828031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:14.831149Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28199, node 2 2025-04-04T17:18:14.908475Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.908501Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.908509Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.908628Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11639 TClient is connected to server localhost:11639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:15.396220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.410262Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:15.429019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.496038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.648507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.713654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.859293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508553393443248:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.859413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.888762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.926711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.964575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.995346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.024271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.093651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.152938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508557688411059:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.152991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508557688411064:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.153040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.157572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:18.181481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508557688411066:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:18.284386Z node 2 :TX_PROXY ERROR: Actor# [2:7489508557688411122:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:19.212699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.664787Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508540508539586:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.664860Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:20.997432Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz23f7b3k7wdeqzfwcfp4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:21.006635Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, ActorId: [2:7489508561983379468:2546], ActorState: ExecuteState, TraceId: 01jr0wz23f7b3k7wdeqzfwcfp4, Create QueryResponse for error on request, msg: 2025-04-04T17:18:21.847643Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz2tw2ngts3snsg84yczd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:21.847866Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, ActorId: [2:7489508561983379468:2546], ActorState: ExecuteState, TraceId: 01jr0wz2tw2ngts3snsg84yczd, Create QueryResponse for error on request, msg: 2025-04-04T17:18:21.873234Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz3n46sghkerhkcmts1vf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:21.873420Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, ActorId: [2:7489508561983379468:2546], ActorState: ExecuteState, TraceId: 01jr0wz3n46sghkerhkcmts1vf, Create QueryResponse for error on request, msg: 2025-04-04T17:18:22.816240Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz3nw61ryrkgwfy4nwta5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-04T17:18:22.816493Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTlhYTc0MGQtOTMzZDA0MzQtZDMxZjdhM2MtNmMxZmI4Nw==, ActorId: [2:7489508561983379468:2546], ActorState: ExecuteState, TraceId: 01jr0wz3nw61ryrkgwfy4nwta5, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4298, MsgBus: 12848 2025-04-04T17:18:07.265361Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508514135809840:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.270786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e1/r3tmp/tmpvTIugn/pdisk_1.dat 2025-04-04T17:18:07.746150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.760409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.760515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.763989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4298, node 1 2025-04-04T17:18:07.874761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874785Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.874895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12848 TClient is connected to server localhost:12848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.645278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:08.659348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.668964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.821385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.986483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.067078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.552918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527020713483:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.553102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.937717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.965440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.994719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.034489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.077577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.124344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508531315681295:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.124417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.124482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508531315681300:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.127379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.134148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508531315681302:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.202101Z node 1 :TX_PROXY ERROR: Actor# [1:7489508531315681356:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.111360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.266349Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508514135809840:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.266406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 779 cpu_time_us: 779 } query_phases { duration_us: 8544 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 7315 affected_shards: 1 } query_phases { duration_us: 1356 cpu_time_us: 1356 } query_phases { duration_us: 5650 cpu_time_us: 4685 } query_phases { duration_us: 6513 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 2921 affected_shards: 2 } compilation { duration_us: 521494 cpu_time_us: 511332 } process_cpu_time_us: 4869 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":27,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":26,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_1_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743787092763,\"TaskId\":1,\"Host\":\"ghrun-mup5cka3jq\",\"ComputeTimeUs\":126}],\"CpuTimeUs\":614}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1743787092763,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":614,\"Max\":614,\"Min\":614}},\"CTE Name\":\"precompute_1_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":25,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":24,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1743787092763,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_3_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":23,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":22,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743787092763,\"TaskId\":2,\"Host\":\"ghrun-mup5cka3jq\",\"ComputeTimeUs\":93}],\"CpuTimeUs\":455}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1743787092763,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":455,\"Max\":455,\"Min\":455}},\"CTE Name\":\"precompute_3_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":20,\"Subplan Name\":\"CTE precompute_3_0\",\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"}],\"Iterator\":\"FlatMap\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1743787092752,\"Host\":\"ghrun-mup5cka3jq\",\"OutputRows\":1,\"ComputeTimeUs\":59,\"NodeId\":1,\"OutputCha ... 057594046644480 2025-04-04T17:18:17.337283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.368353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.403158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.435048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.468762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.548453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508555304879498:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.548560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.548686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508555304879503:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.552789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:17.562209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508555304879505:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:17.624809Z node 2 :TX_PROXY ERROR: Actor# [2:7489508555304879558:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.468900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.051532Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508542419975349:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.051720Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17574, MsgBus: 23361 2025-04-04T17:18:19.958986Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508565316861888:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.970069Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016e1/r3tmp/tmpZGnxEA/pdisk_1.dat 2025-04-04T17:18:20.072705Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17574, node 3 2025-04-04T17:18:20.095266Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:20.095578Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:20.097551Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:20.175125Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:20.175149Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:20.175156Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:20.175296Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23361 TClient is connected to server localhost:23361 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:20.609582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.614628Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.622130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.698787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.852911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.918815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.147418Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508582496732685:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.147542Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.184684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.256912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.287534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.337509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.369799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.440775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.529768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508582496733208:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.529852Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.530086Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508582496733213:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.534706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:23.546536Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508582496733215:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:23.612288Z node 3 :TX_PROXY ERROR: Actor# [3:7489508582496733269:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:24.447453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.954317Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508565316861888:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:24.954366Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:25.639229Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::UpsertExplicitNullInComplexFk >> KqpIndexes::UpdateDeletePlan+UseSink >> KqpMultishardIndex::SecondaryIndexSelectNull >> KqpUniqueIndex::InsertFkDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 15771, MsgBus: 9020 2025-04-04T17:18:07.296335Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508512337958034:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.297361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016fc/r3tmp/tmpCAqTah/pdisk_1.dat 2025-04-04T17:18:07.680438Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.714282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.714363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.716059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15771, node 1 2025-04-04T17:18:07.871342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.871365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.871373Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.871472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9020 TClient is connected to server localhost:9020 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.667862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.697358Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.713502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:08.876296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.050774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.140701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.441253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508525222861636:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.441384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.844399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.904250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.928733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.973846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.006301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.076255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.159720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529517829453:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.159804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.159822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529517829458:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.162869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.170263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508529517829460:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.248574Z node 1 :TX_PROXY ERROR: Actor# [1:7489508529517829515:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.279189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.304556Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508512337958034:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.304656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:12.406936Z node 1 :TX_PROXY ERROR: Actor# [1:7489508533812797351:3809] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:13.202763Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:13.226321Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 2742, MsgBus: 3691 2025-04-04T17:18:14.075060Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508540974314666:2197];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016fc/r3tmp/tmpzUn8OK/pdisk_1.dat 2025-04-04T17:18:14.120527Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:14.178999Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:14.189854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.189928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:14.191696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2742, node 2 2025-04-04T17:18:14.236171Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.236191Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.236198Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.236312Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3691 TClient is connected to server localhost:3691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:14.647707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.656531Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657 ... 664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.031730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.078748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.109511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.145332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508553859218676:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.145409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.145626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508553859218681:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.148502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:17.157222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508553859218683:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:17.231603Z node 2 :TX_PROXY ERROR: Actor# [2:7489508553859218736:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.015744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.136059Z node 2 :TX_PROXY ERROR: Actor# [2:7489508558154186568:3798] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.976046Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:18.996199Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:19.067543Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508540974314666:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.067603Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10823, MsgBus: 11675 2025-04-04T17:18:19.662186Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508565610676784:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.662310Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016fc/r3tmp/tmp4GpcEr/pdisk_1.dat 2025-04-04T17:18:19.740270Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10823, node 3 2025-04-04T17:18:19.785391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:19.785493Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:19.787035Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:19.806792Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:19.806814Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:19.806821Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:19.806952Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11675 TClient is connected to server localhost:11675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:20.290878Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.296678Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:20.304569Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.365727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:20.506642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.589915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:22.762524Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508578495580435:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:22.762590Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:22.804420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.876826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.951295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.987376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.025831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.060546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.113535Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508582790548245:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.113618Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.113967Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508582790548250:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.117843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:23.128333Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508582790548252:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:23.216043Z node 3 :TX_PROXY ERROR: Actor# [3:7489508582790548306:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:24.255267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.662780Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508565610676784:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:24.721534Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex [GOOD] >> KqpIndexes::SecondaryIndexSelectUsingScripting >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> KqpMultishardIndex::WriteIntoRenamingSyncIndex >> KqpIndexes::MultipleModifications >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 20510, MsgBus: 4469 2025-04-04T17:18:14.628083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508543946041822:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.628217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c0/r3tmp/tmpFOFzlS/pdisk_1.dat 2025-04-04T17:18:15.147051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:15.147121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:15.150429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:15.150772Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20510, node 1 2025-04-04T17:18:15.266671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:15.266691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:15.266700Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:15.266792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4469 TClient is connected to server localhost:4469 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:15.782502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.806663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:15.816922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.956015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.125564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.205466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.680814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508556830945353:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.680978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.931600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.966120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.992582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.022021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.051372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.084492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.168193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508561125913163:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.168258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.168385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508561125913168:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.172134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:18.181673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508561125913170:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:18.279463Z node 1 :TX_PROXY ERROR: Actor# [1:7489508561125913225:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:19.034744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.622183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508543946041822:2188];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.622259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:20.328239Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508569715849224:2588], TxId: 281474976710677, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wz1vg3dk7mwd37rmygz3d. SessionId : ydb://session/3?node_id=1&id=NDVjOGM1ZTktZDI0NzkxNGUtNWMyMTcxZGEtZGMwODAzNWY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:18:20.328547Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508569715849226:2589], TxId: 281474976710677, task: 2. Ctx: { TraceId : 01jr0wz1vg3dk7mwd37rmygz3d. SessionId : ydb://session/3?node_id=1&id=NDVjOGM1ZTktZDI0NzkxNGUtNWMyMTcxZGEtZGMwODAzNWY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508569715849221:2546], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:20.328888Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDVjOGM1ZTktZDI0NzkxNGUtNWMyMTcxZGEtZGMwODAzNWY=, ActorId: [1:7489508565420881543:2546], ActorState: ExecuteState, TraceId: 01jr0wz1vg3dk7mwd37rmygz3d, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 3767, MsgBus: 64918 2025-04-04T17:18:21.135886Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508571684124694:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:21.135938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c0/r3tmp/tmpqiwy4l/pdisk_1.dat 2025-04-04T17:18:21.239045Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:21.247778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:21.247852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:21.249208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3767, node 2 2025-04-04T17:18:21.346752Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:21.346772Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:21.346777Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:21.346870Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64918 TClient is connected to server localhost:64918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:21.760676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.767193Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:21.778291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:18:21.839065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:22.002290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.069475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:24.184845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508584569028311:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.184957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.232810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.264044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.336518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.435103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.466681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.502664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.572209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508584569028827:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.572298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.572677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508584569028832:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.576628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:24.593683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508584569028834:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:24.667864Z node 2 :TX_PROXY ERROR: Actor# [2:7489508584569028889:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:25.573391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:26.136199Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508571684124694:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:26.136263Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:26.923523Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508593158964896:2588], TxId: 281474976715677, task: 1. Ctx: { TraceId : 01jr0wz86777y3xwxee5m05knc. SessionId : ydb://session/3?node_id=2&id=NDBiNjVkNTYtODhjOWQ4YTQtMjczY2RiYmMtYmEzNTUxNTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-04T17:18:26.924270Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508593158964897:2589], TxId: 281474976715677, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wz86777y3xwxee5m05knc. SessionId : ydb://session/3?node_id=2&id=NDBiNjVkNTYtODhjOWQ4YTQtMjczY2RiYmMtYmEzNTUxNTQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508593158964893:2546], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:26.924568Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDBiNjVkNTYtODhjOWQ4YTQtMjczY2RiYmMtYmEzNTUxNTQ=, ActorId: [2:7489508588863997215:2546], ActorState: ExecuteState, TraceId: 01jr0wz86777y3xwxee5m05knc, Create QueryResponse for error on request, msg: >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26339, MsgBus: 22768 2025-04-04T17:18:07.242942Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508510118140923:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.244177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ee/r3tmp/tmp1XAUkn/pdisk_1.dat 2025-04-04T17:18:07.712539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.712635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.716280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:07.748350Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26339, node 1 2025-04-04T17:18:07.870770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.870808Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.870818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.870940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22768 TClient is connected to server localhost:22768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.700278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:18:08.723053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:08.959779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:09.182639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:09.288762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.828310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508523003044396:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.828459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.867766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.906979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.935604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.966976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.000613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.043582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.138506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527298012208:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.138607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.140723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527298012213:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.144458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.152453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508527298012215:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.234108Z node 1 :TX_PROXY ERROR: Actor# [1:7489508527298012269:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.124369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.246454Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508510118140923:2261];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.246505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 467 cpu_time_us: 467 } query_phases { duration_us: 1320 cpu_time_us: 1320 } query_phases { duration_us: 4897 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 5378 affected_shards: 1 } query_phases { duration_us: 1160 cpu_time_us: 1160 } query_phases { duration_us: 5176 cpu_time_us: 5176 } query_phases { duration_us: 4586 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 4162 } query_phases { duration_us: 1311 cpu_time_us: 1311 } query_phases { duration_us: 4193 cpu_time_us: 4193 } query_phases { duration_us: 5052 cpu_time_us: 4116 } query_phases { duration_us: 7724 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 2623 affected_shards: 2 } compilation { duration_us: 936845 cpu_time_us: 924654 } process_cpu_time_us: 16817 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743787093156,\"TaskId\":1,\"Host\":\"ghrun-mup5cka3jq\",\"ComputeTimeUs\":62}],\"CpuTimeUs\":499}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1743787093155,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":499,\"Max\":499,\"Min\":499}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1743787093155,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node 
Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743787093155,\"TaskId\":2,\"Host\":\"ghrun-mup5cka3jq\",\"ComputeTimeUs\":88}],\"CpuTimeUs\":451}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1743787093155,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":451,\"Max\":451,\"Min\":451}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":39,\"Subplan Name\":\"CTE precompute_8_0\",\"Plans\":[{\"PlanNodeId\":38,\"Plans\":[{\"PlanNodeId\":37,\"Plans\":[{\"PlanNodeId\":36,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"}],\"Iterator\":\"FlatMap\",\"Name\":\"Iterator\"}] ... \'(\'\"Key\" (Member $147 \'\"Key\")) \'(\'\"Value\" (Member $147 \'\"Value\")) \'(\'\"fk1\" (Member $147 \'\"fk1\")) \'(\'\"fk3\" (Member $147 \'\"fk3\"))))) \'0 $138))\n (let $140 (OrderedFilter $137 (lambda \'($148) (And (Exists (Member $148 \'\"fk1\")) (Exists (Member $148 \'\"fk2\")) (Exists (Member $148 \'\"fk3\"))))))\n (let $141 (lambda \'($150) (Void)))\n (let $142 (ToDict $140 (lambda \'($149) (AsStruct \'(\'\"fk1\" (Member $149 \'\"fk1\")) \'(\'\"fk2\" (Member $149 \'\"fk2\")) \'(\'\"fk3\" (Member $149 \'\"fk3\")))) $141 $35))\n (let $143 (Variant (DictKeys $142) \'1 $138))\n (let $144 (Variant (== (Length $140) (Length $142)) \'2 $138))\n (let $145 (ToDict $137 (lambda \'($151) (AsStruct \'(\'\"Key\" (Member $151 \'\"Key\")))) $141 $35))\n (let $146 (Variant $145 \'\"3\" $138))\n (return (Iterator (AsList $139 $143 $144 $146)))\n))) \'(\'(\'\"_logical_id\" \'4308) \'(\'\"_id\" \'\"92b295fe-d25ed152-5b05ec67-8169841f\"))))\n(let $59 (DqCnValue (TDqOutput $58 \'0)))\n(let $60 (DqCnValue (TDqOutput $58 \'2)))\n(let $61 (DqCnValue (TDqOutput $58 \'\"3\")))\n(let $62 (DqCnValue (TDqOutput $58 \'1)))\n(let $63 \'($59 $60 $61 $62))\n(let $64 (KqpTxResultBinding $54 \'\"3\" \'0))\n(let $65 (KqpPhysicalTx \'($58) $63 \'(\'($53 $64)) $3))\n(let $66 \'\"%kqp%tx_result_binding_4_3\")\n(let $67 (DqPhyStage \'() (lambda \'() (Iterator %kqp%tx_result_binding_4_3)) \'(\'(\'\"_logical_id\" \'4703) \'(\'\"_id\" \'\"bb5325b1-dae30b8-3a534a8e-288d7609\"))))\n(let $68 (KqpTable \'\"/Root/TestTable/Index/indexImplTable\" \'\"72057594046644480:18\" \'\"\" \'1))\n(let $69 (KqpCnStreamLookup (TDqOutput $67 \'0) $68 \'(\'\"Key\") $55 $24))\n(let $70 \'\"%kqp%tx_result_binding_4_2\")\n(let $71 (Bool \'false))\n(let $72 (DqPhyStage \'($69) (lambda \'($152) (Map (Filter (Take $152 (Uint64 \'1)) (lambda \'($153) (Not (Contains %kqp%tx_result_binding_4_2 $153)))) (lambda \'($154) $71))) \'(\'(\'\"_logical_id\" \'4691) \'(\'\"_id\" \'\"82c75450-c94b5380-84fccac0-49e637f8\"))))\n(let $73 (DqCnUnionAll (TDqOutput $72 \'0)))\n(let $74 (Bool \'true))\n(let $75 (DqPhyStage \'($73) (lambda \'($155) (block \'(\n (let $156 (lambda \'($157 $158) $71))\n (return (FromFlow (Condense (ToFlow $155) $74 $156 $156)))\n))) \'(\'(\'\"_logical_id\" \'4671) 
\'(\'\"_id\" \'\"7249f341-b2ec4226-4a624dd9-c8a5235d\"))))\n(let $76 \'($67 $72 $75))\n(let $77 (DqCnValue (TDqOutput $75 \'0)))\n(let $78 (KqpTxResultBinding $57 \'\"4\" \'2))\n(let $79 (KqpTxResultBinding $55 \'\"4\" \'\"3\"))\n(let $80 (KqpPhysicalTx $76 \'($77) \'(\'($70 $78) \'($66 $79)) $41))\n(let $81 \'\"%kqp%tx_result_binding_4_1\")\n(let $82 \'\"%kqp%tx_result_binding_5_0\")\n(let $83 \'\"%kqp%tx_result_binding_4_0\")\n(let $84 (DqPhyStage \'() (lambda \'() (block \'(\n (let $159 (KqpEnsure $74 %kqp%tx_result_binding_4_1 \'\"2012\" (Utf8 \'\"Duplicated keys found.\")))\n (let $160 (KqpEnsure $74 %kqp%tx_result_binding_5_0 \'\"2012\" (Utf8 \'\"Conflict with existing key.\")))\n (let $161 (If (And $159 $160) %kqp%tx_result_binding_4_0 (List $11)))\n (return (ToStream (Just (PartitionByKey $161 (lambda \'($162) (Member $162 \'\"Key\")) (Void) (Void) (lambda \'($163) (FlatMap $163 (lambda \'($164) (Last (ForwardList (Nth $164 \'1))))))))))\n))) \'(\'(\'\"_logical_id\" \'5033) \'(\'\"_id\" \'\"ffadda1e-bd6f9ea9-3a87e8f-3b06733a\"))))\n(let $85 (DqCnValue (TDqOutput $84 \'0)))\n(let $86 (KqpTxResultBinding $11 \'\"4\" \'0))\n(let $87 (KqpTxResultBinding $56 \'\"4\" \'1))\n(let $88 (KqpTxResultBinding $56 \'\"5\" \'0))\n(let $89 \'(\'($83 $86) \'($81 $87) \'($82 $88)))\n(let $90 (KqpPhysicalTx \'($84) \'($85) $89 $3))\n(let $91 \'\"%kqp%tx_result_binding_6_0\")\n(let $92 %kqp%tx_result_binding_6_0)\n(let $93 (DqPhyStage \'() (lambda \'() (Iterator (AsList (ToDict (FlatMap (Map $92 (lambda \'($165) (AsStruct \'(\'\"Key\" (Member $165 \'\"Key\")) \'(\'\"fk1\" (Member $165 \'\"fk1\")) \'(\'\"fk3\" (Member $165 \'\"fk3\"))))) (lambda \'($166) (block \'(\n (let $167 (AsStruct \'(\'\"Key\" (Member $166 \'\"Key\"))))\n (return (IfPresent (Lookup $46 $167) (lambda \'($168) (Just \'($167 $168 (Or (AggrNotEquals (Member $166 \'\"fk1\") (Member $168 \'\"fk1\")) (AggrNotEquals (Member $166 \'\"fk3\") (Member $168 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $19 $44 $56)))))\n)))) (lambda \'($169) (Nth $169 \'0)) (lambda \'($170) \'((Nth $170 \'1) (Nth $170 \'2))) $35)))) \'(\'(\'\"_logical_id\" \'5184) \'(\'\"_id\" \'\"91456b7e-a0138f0b-786b1bc7-2d8f974\"))))\n(let $94 (DqCnValue (TDqOutput $93 \'0)))\n(let $95 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $96 \'($91 $95))\n(let $97 (KqpPhysicalTx \'($93) \'($94) \'($51 $96) $3))\n(let $98 (DataSink \'\"KqpTableSink\" \'\"db\"))\n(let $99 (KqpTableSinkSettings $22 \'false \'\"upsert\" \'0 \'\"oltp\" \'false \'false \'()))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator $92)) \'(\'(\'\"_logical_id\" \'5732) \'(\'\"_id\" \'\"48356d88-314da637-d254324a-8c2adaf5\")) \'((DqSink \'0 $98 $99))))\n(let $101 \'\"%kqp%tx_result_binding_7_0\")\n(let $102 (DictType $19 (TupleType $44 $56)))\n(let $103 %kqp%tx_result_binding_7_0)\n(let $104 \'(\'(\'\"_logical_id\" \'5760) \'(\'\"_id\" \'\"f57b54e5-6b19ab89-5b1bcd6b-973e0651\") $30))\n(let $105 (DqPhyStage \'() (lambda \'() (block \'(\n (let $171 (lambda \'($173) (block \'(\n (let $174 (Nth $173 \'1))\n (let $175 (Nth $174 \'0))\n (return (Member (Nth $173 \'0) \'\"Key\") (Member $175 \'\"fk1\") (Member $175 \'\"fk2\") (Member $175 \'\"fk3\") (Nth $174 \'1))\n ))))\n (let $172 (lambda \'($181 $182 $183 $184 $185) $181 $182 $183 $184))\n (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow (DictItems $103)) $171) (lambda \'($176 $177 $178 $179 $180) $180)) $172)))\n))) $104))\n(let $106 (DqCnUnionAll (TDqOutput $105 \'0)))\n(let $107 (lambda \'($186) (FromFlow (NarrowMap (ToFlow $186) $34))))\n(let 
$108 (KqpTableSinkSettings $68 \'false \'\"delete\" \'1 \'\"oltp\" \'false \'false \'()))\n(let $109 (DqPhyStage \'($106) $107 \'(\'(\'\"_logical_id\" \'5746) \'(\'\"_id\" \'\"e41c382d-2515dd29-4aa27781-50397c1\")) \'((DqSink \'0 $98 $108))))\n(let $110 \'(\'(\'\"_logical_id\" \'5812) \'(\'\"_id\" \'\"248f479b-8ae4817f-7f92d8df-b7bbefff\") $30))\n(let $111 (DqPhyStage \'() (lambda \'() (FromFlow (ExpandMap (FlatMap (Map (ToFlow $92) (lambda \'($187) (AsStruct \'(\'\"Key\" (Member $187 \'\"Key\")) \'(\'\"fk1\" (Member $187 \'\"fk1\")) \'(\'\"fk3\" (Member $187 \'\"fk3\"))))) (lambda \'($188) (block \'(\n (let $189 \'(\'\"Key\" (Member $188 \'\"Key\")))\n (let $190 \'(\'\"fk1\" (Member $188 \'\"fk1\")))\n (let $191 \'(\'\"fk3\" (Member $188 \'\"fk3\")))\n (return (IfPresent (Lookup $103 (AsStruct $189)) (lambda \'($192) (If (Nth $192 \'1) (Just (AsStruct $189 $190 \'(\'\"fk2\" (Member (Nth $192 \'0) \'\"fk2\")) $191)) (Nothing (OptionalType $29)))) (Just (AsStruct $189 $190 $47 $191))))\n)))) $26))) $110))\n(let $112 (DqCnUnionAll (TDqOutput $111 \'0)))\n(let $113 (KqpTableSinkSettings $68 \'false \'\"\" \'2 \'\"oltp\" \'false \'false \'()))\n(let $114 (DqPhyStage \'($112) $107 \'(\'(\'\"_logical_id\" \'5774) \'(\'\"_id\" \'\"ef8dd7b1-2117294f-567e2032-270afccb\")) \'((DqSink \'0 $98 $113))))\n(let $115 \'($100 $105 $109 $111 $114))\n(let $116 (KqpTxResultBinding $102 \'\"7\" \'0))\n(let $117 (KqpPhysicalTx $115 \'() \'($96 \'($101 $116)) \'($40 \'(\'\"with_effects\"))))\n(let $118 \'($4 $17 $42 $52 $65 $80 $90 $97 $117))\n(return (KqpPhysicalQuery $118 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 930400 total_cpu_time_us: 908662 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":16},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":5,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromL
iteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"Indexes\\\":[{\\\"Name\\\":\\\"Index\\\",\\\"Type\\\":2,\\\"State\\\":1,\\\"SchemaVersion\\\":1,\\\"LocalPathId\\\":17,\\\"PathOwnerId\\\":8716544,\\\"KeyColumns\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\"]}],\\\"SecondaryGlobalIndexMetadata\\\":[{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable/Index/indexImplTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":18},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\",\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743787106\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"2a168eac-9bae8afa-18cb4e78-2ce3e0b9\",\"version\":\"1.0\"}" 2025-04-04T17:18:27.805227Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TPersQueueTest::TestReadPartitionByGroupId [GOOD] >> TPersQueueTest::TestReadPartitionStatus >> KqpMultishardIndex::DataColumnUpsertMixedSemantic >> KqpIndexes::IndexFilterPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7644, MsgBus: 26126 2025-04-04T17:18:12.294903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508534461081763:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.294978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c8/r3tmp/tmpdHvHfB/pdisk_1.dat 
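The query_phases blocks in the stats dump above are per-phase execution counters in protobuf text form (duration_us, cpu_time_us, optional table_access and affected_shards), followed by a compilation block and run-wide totals. When eyeballing a dump like this, it helps to total the per-phase timings and set them against total_duration_us and the compilation time. Below is a minimal stdlib-only sketch of that, assuming the flat textual layout shown above — a brace-balanced scan, not a real protobuf-text parser, and not part of any YDB tooling:

import re

def _blocks(text, name):
    # Yield the brace-balanced body of every `name { ... }` block, so that
    # nested blocks such as table_access { ... } stay inside their phase.
    for m in re.finditer(rf"\b{name}\s*\{{", text):
        depth, i = 1, m.end()
        while i < len(text) and depth:
            depth += {"{": 1, "}": -1}.get(text[i], 0)
            i += 1
        yield text[m.end():i - 1]

def phase_totals(stats_text):
    # The first duration_us/cpu_time_us inside a phase body belongs to the
    # phase itself; nested table_access blocks carry neither field.
    totals = {"phases": 0, "duration_us": 0, "cpu_time_us": 0}
    for body in _blocks(stats_text, "query_phases"):
        totals["phases"] += 1
        for key in ("duration_us", "cpu_time_us"):
            m = re.search(rf"\b{key}:\s*(\d+)", body)
            if m:
                totals[key] += int(m.group(1))
    return totals

if __name__ == "__main__":
    import pathlib, sys
    print(phase_totals(pathlib.Path(sys.argv[1]).read_text(errors="replace")))

Applied to the dump above, the ten execution phases together account for only a few tens of milliseconds; nearly all of the reported time sits in the compilation block.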
2025-04-04T17:18:12.678624Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:12.696026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:12.696099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:12.698098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7644, node 1 2025-04-04T17:18:12.782734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:12.782750Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:12.782756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:12.782854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26126 TClient is connected to server localhost:26126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:13.297687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.332567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:13.451016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.591388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.651959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.121711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547345985441:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.121806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.318805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.361869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.396722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.430287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.505295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.542869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.597351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547345985956:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.597446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.599780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508547345985961:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.603116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:15.616674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508547345985963:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:15.686064Z node 1 :TX_PROXY ERROR: Actor# [1:7489508547345986015:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:16.531216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29440, MsgBus: 6580 2025-04-04T17:18:17.720999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508554086763369:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:17.721059Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c8/r3tmp/tmprmcA3k/pdisk_1.dat 2025-04-04T17:18:17.814814Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29440, node 2 2025-04-04T17:18:17.854444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:17.854527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:17.855475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:17.862776Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:17.862799Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:17.862806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:17.862905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6580 TClient is connected to server localhost:6580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
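The WARN/ERROR lines repeated around every node start above read as bootstrap noise rather than test failures: each fresh YDB instance first fails to fetch the default resource pool (NOT_FOUND), TPoolCreatorActor schedules a retry while racing to create /Root/.metadata/workload_manager/pools/default, and the follow-up transaction then reports "path exist, request accepts it". When scanning a run like this for real problems, filtering those recurring signatures out keeps genuine failures visible. A rough stdlib-only filter sketch follows, assuming one log entry per line as in the original stdout; the signature list is inferred from this log alone and is an assumption to adjust per run:

import re
import sys

# Recurring bootstrap signatures observed in the runs above; treat this as
# an assumption, not an authoritative allowlist.
BENIGN = re.compile("|".join([
    r"Resource pool default not found or you don't have access permissions",
    r"Scheduled retry for error",
    r"error: path exist, request accepts it",
    r"Table profiles were not loaded",
    r"ForgetAction occurred, send TEvPoisonPill",
    r"VolatileState: (Unknown|Disconnected|Connecting) -> ",
    r"distributable config is empty|got bad distributable configuration",
]))

def interesting(lines):
    # Keep only WARN/ERROR entries that match none of the benign signatures.
    for line in lines:
        if (" ERROR: " in line or " WARN: " in line) and not BENIGN.search(line):
            yield line

if __name__ == "__main__":
    # Usage sketch: python3 filter_noise.py < test_output.log
    for line in interesting(sys.stdin):
        sys.stdout.write(line)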
2025-04-04T17:18:18.248064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.262737Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:18.278334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.330276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.489737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.556131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.740248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherAct ... ut propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.919121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.953742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.001806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571266634824:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.001871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571266634829:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.001897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.004505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:21.012137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508571266634831:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:21.072078Z node 2 :TX_PROXY ERROR: Actor# [2:7489508571266634884:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:21.964916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.003335Z node 2 :TX_PROXY ERROR: Actor# [2:7489508571266635207:3689] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:22.016599Z node 2 :TX_PROXY ERROR: Actor# [2:7489508575561602517:3695] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:22.031848Z node 2 :TX_PROXY ERROR: Actor# [2:7489508575561602524:3700] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9677, MsgBus: 30808 2025-04-04T17:18:22.870323Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508574867526850:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c8/r3tmp/tmplo4aro/pdisk_1.dat 2025-04-04T17:18:22.915570Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:22.985361Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9677, node 3 2025-04-04T17:18:23.003899Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:23.004007Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:23.031189Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:23.098626Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:23.098647Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:23.098654Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:23.098758Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30808 TClient is connected to server localhost:30808 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:23.539294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.548849Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:23.561523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.622323Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.761589Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:23.854188Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.995647Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508587752430353:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:25.995745Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.026264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.059124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.090303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.158017Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.186792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.256047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.307303Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508592047398165:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.307357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508592047398170:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.307374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.310173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:26.317267Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508592047398172:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:26.398294Z node 3 :TX_PROXY ERROR: Actor# [3:7489508592047398226:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:27.441663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.847481Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508574867526850:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:27.847556Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:28.723384Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] Test command err: Trying to start YDB, gRPC: 16853, MsgBus: 13695 2025-04-04T17:18:07.258596Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508510706892782:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.258635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001716/r3tmp/tmpb6sCJ9/pdisk_1.dat 2025-04-04T17:18:07.695249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.695375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.697567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.699133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16853, node 1 2025-04-04T17:18:07.874743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.875020Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13695 TClient is connected to server localhost:13695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.594002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.636368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.847086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.007019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.070530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.408416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508523591796427:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.408531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.841917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.905752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.934028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.974169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.000957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.034501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.118047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527886764238:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.118113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.118344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527886764243:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.121907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.134431Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:18:11.135281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508527886764245:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.195016Z node 1 :TX_PROXY ERROR: Actor# [1:7489508527886764298:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.115348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.259854Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508510706892782:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.259935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:13.059960Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:13.081110Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:13.838355Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:14.550547Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyvv3dd3eqr4adjn7r3f5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:14.569696Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, ActorId: [1:7489508532181731850:2488], ActorState: ExecuteState, TraceId: 01jr0wyvv3dd3eqr4adjn7r3f5, Create QueryResponse for error on request, msg: 2025-04-04T17:18:15.402357Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:15.450455Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:16.377560Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyxd1cy7skdrek5ttzg84, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:16.378162Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, ActorId: [1:7489508532181731850:2488], ActorState: ExecuteState, TraceId: 01jr0wyxd1cy7skdrek5ttzg84, Create QueryResponse for error on request, msg: 2025-04-04T17:18:16.410109Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:16.430306Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:16.784644Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508549361601856:2637], TxId: 281474976710706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==. TraceId : 01jr0wyybt54ykanvx9ff2jj8j. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:18:16.784960Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508549361601857:2638], TxId: 281474976710706, task: 2. Ctx: { TraceId : 01jr0wyybt54ykanvx9ff2jj8j. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508549361601853:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:16.785343Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, ActorId: [1:7489508532181731850:2488], ActorState: ExecuteState, TraceId: 01jr0wyybt54ykanvx9ff2jj8j, Create QueryResponse for error on request, msg: 2025-04-04T17:18:17.696542Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyypx94chmmqbfxyssj45, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLitera ... 0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyzk97n4hxx8a5rnexvgm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:18.812391Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTE0MGUwZDktNGM0ZDg3NjYtMTMyOGMyYi1kOWRiYjYzZA==, ActorId: [1:7489508532181731850:2488], ActorState: ExecuteState, TraceId: 01jr0wyzk97n4hxx8a5rnexvgm, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14298, MsgBus: 18672 2025-04-04T17:18:19.524359Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508564350098523:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.524406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001716/r3tmp/tmpKobaK9/pdisk_1.dat 2025-04-04T17:18:19.634673Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14298, node 2 2025-04-04T17:18:19.657354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:19.657442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:19.659861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:19.684732Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:19.684749Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:19.684756Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:19.684878Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18672 TClient is connected to server localhost:18672 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:20.156719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.176068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.237374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.423942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.500882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.014360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508581529969473:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.014497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.052194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.093953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.123327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.190859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.225633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.262491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.332625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508581529969988:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.332702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.332769Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508581529969993:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.336162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:23.345712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508581529969995:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:23.444417Z node 2 :TX_PROXY ERROR: Actor# [2:7489508581529970051:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:24.353763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.526582Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508564350098523:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:24.526644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:26.253838Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz736ay48f6mwtexs72tq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQ5YzhkNi05YjU2NDBlMC1jMTMxMjM3Ni1mN2Q4N2EzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:26.254088Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQ5YzhkNi05YjU2NDBlMC1jMTMxMjM3Ni1mN2Q4N2EzZA==, ActorId: [2:7489508585824937602:2488], ActorState: ExecuteState, TraceId: 01jr0wz736ay48f6mwtexs72tq, Create QueryResponse for error on request, msg: 2025-04-04T17:18:26.669440Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508594414872788:2573], TxId: 281474976715679, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wz7yrf16fvqrdypn92cym. SessionId : ydb://session/3?node_id=2&id=ZTQ5YzhkNi05YjU2NDBlMC1jMTMxMjM3Ni1mN2Q4N2EzZA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:18:26.669659Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508594414872789:2574], TxId: 281474976715679, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wz7yrf16fvqrdypn92cym. SessionId : ydb://session/3?node_id=2&id=ZTQ5YzhkNi05YjU2NDBlMC1jMTMxMjM3Ni1mN2Q4N2EzZA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508594414872785:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:26.669885Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQ5YzhkNi05YjU2NDBlMC1jMTMxMjM3Ni1mN2Q4N2EzZA==, ActorId: [2:7489508585824937602:2488], ActorState: ExecuteState, TraceId: 01jr0wz7yrf16fvqrdypn92cym, Create QueryResponse for error on request, msg: 2025-04-04T17:18:27.535028Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz8br79tsqwf0za8xwxje, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTQ5YzhkNi05YjU2NDBlMC1jMTMxMjM3Ni1mN2Q4N2EzZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:27.535203Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTQ5YzhkNi05YjU2NDBlMC1jMTMxMjM3Ni1mN2Q4N2EzZA==, ActorId: [2:7489508585824937602:2488], ActorState: ExecuteState, TraceId: 01jr0wz8br79tsqwf0za8xwxje, Create QueryResponse for error on request, msg: 2025-04-04T17:18:27.572398Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:27.587332Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:27.654549Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:28.634672Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:28.652007Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:28.685116Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1527, MsgBus: 28670 2025-04-04T17:18:07.246624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508510782506337:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.255633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001710/r3tmp/tmpqASrGt/pdisk_1.dat 2025-04-04T17:18:07.694728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.694824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.694983Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1527, node 1 2025-04-04T17:18:07.724425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:07.877555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.877576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.877589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.877693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28670 TClient is connected to server localhost:28670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.651799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.671024Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.676397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.864506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.064722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.158621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.533517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508523667409950:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.533712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.873827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.902089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.932985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.965671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.034420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.121158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527962377762:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.121253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.121735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527962377767:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.125495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.139005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508527962377769:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.227864Z node 1 :TX_PROXY ERROR: Actor# [1:7489508527962377824:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.113674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.248213Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508510782506337:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.248278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:12.805972Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:12.817532Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:13.870732Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:13.883277Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:14.490866Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:14.565960Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:15.216717Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:15.254729Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:15.567871Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:15.594982Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:16.244062Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:16.627565Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:16.648477Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:16.846372Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 3859, MsgBus: 29099 2025-04-04T17:18:17.856737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508554299687248:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:17.856803Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001710/r3tmp/tmpOSSFkR/pdisk_1.dat 2025-04-04T17:18:18.015082Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:18.027948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:18.028033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:18.029015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3859, node 2 2025-04-04T17:18:18.078097Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:18.078119Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:18.078135Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:18.078313Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29099 TClient is connected to server localhost:29099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:18.577609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.583699Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:18.592928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.679463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.845511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.925025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.289973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571479558210:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.290095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.323998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.355923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.390543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.419277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.446719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.479044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.548268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571479558717:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.548348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.548415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508571479558722:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.552718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:21.568068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508571479558724:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:21.647676Z node 2 :TX_PROXY ERROR: Actor# [2:7489508571479558780:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:22.664717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.858367Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508554299687248:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:22.858428Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:23.558960Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:23.591085Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:24.890546Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:24.963443Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:25.729642Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:25.758928Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:26.518967Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:26.937252Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:27.826990Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::NullInIndexTableNoDataRead >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexFilterPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 18718, MsgBus: 7291 2025-04-04T17:18:09.878972Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508520652469102:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:09.879396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d4/r3tmp/tmp9vX8N6/pdisk_1.dat 2025-04-04T17:18:10.186123Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18718, node 1 2025-04-04T17:18:10.258095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:10.264219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:10.281159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:10.307006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:10.307028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:10.307036Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:10.307186Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7291 TClient is connected to server localhost:7291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:10.807834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.818962Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:10.833178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.993288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.147379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.196007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.783568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508533537372620:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:12.783695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:12.980942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.018295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.070061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.102269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.128423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.202798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.269876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508537832340434:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.269948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.270168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508537832340439:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.274583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:13.289609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508537832340441:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:13.364426Z node 1 :TX_PROXY ERROR: Actor# [1:7489508537832340494:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:14.209512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.851445Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508542127308335:2520], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-04T17:18:14.852607Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjIwNGQ2M2YtNDhkNDIyZWMtMjU0NDFhZDMtNDM4NTFmMTM=, ActorId: [1:7489508542127308044:2488], ActorState: ExecuteState, TraceId: 01jr0wywsy6g45y5dmcrkwzraw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:18:14.865257Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508542127308339:2522], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-04T17:18:14.866816Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjIwNGQ2M2YtNDhkNDIyZWMtMjU0NDFhZDMtNDM4NTFmMTM=, ActorId: [1:7489508542127308044:2488], ActorState: ExecuteState, TraceId: 01jr0wywtcb40a182qhnx2nms3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:18:14.870638Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508520652469102:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.870700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:14.879441Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508542127308343:2524], status: GENERIC_ERROR, issues:
:3:41: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-04T17:18:14.880516Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjIwNGQ2M2YtNDhkNDIyZWMtMjU0NDFhZDMtNDM4NTFmMTM=, ActorId: [1:7489508542127308044:2488], ActorState: ExecuteState, TraceId: 01jr0wywttayf21ky9x4xcchmx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:18:14.894549Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508542127308349:2526], status: GENERIC_ERROR, issues:
:3:46: Error: Unexpected token 'VIEW' : cannot match to any predicted input... 2025-04-04T17:18:14.895750Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjIwNGQ2M2YtNDhkNDIyZWMtMjU0NDFhZDMtNDM4NTFmMTM=, ActorId: [1:7489508542127308044:2488], ActorState: ExecuteState, TraceId: 01jr0wywv8dva0t2me3dr6d6pr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 9352, MsgBus: 64491 2025-04-04T17:18:15.750986Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508546542079162:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d4/r3tmp/tmp3bS9iL/pdisk_1.dat 2025-04-04T17:18:15.819692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:15.881593Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:15.906587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:15.906668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:15.908116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9352, node 2 2025-04-04T17:18:15.966376Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:15.966398Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:15.966405Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 20 ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.857444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.904477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508559426983146:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.904557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.904884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508559426983151:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.907740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:18.916034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508559426983153:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:19.004427Z node 2 :TX_PROXY ERROR: Actor# [2:7489508563721950503:3435] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:19.899505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.944517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.040062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.723866Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508546542079162:2214];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:20.723935Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10099, MsgBus: 27771 2025-04-04T17:18:22.849075Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508577282518840:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:22.860763Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016d4/r3tmp/tmpGC83J2/pdisk_1.dat 2025-04-04T17:18:23.044688Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:23.059243Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:23.059545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:23.061422Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10099, node 3 2025-04-04T17:18:23.114820Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:23.114846Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:23.114855Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:23.114986Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27771 TClient is connected to server localhost:27771 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:23.581470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.587410Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:23.620376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.713078Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.879557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.952339Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:26.448304Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508594462389802:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.448422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.479011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.510553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.581629Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.612412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.645278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.717821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.801550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508594462390325:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.801645Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.801895Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508594462390330:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.805572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:26.821391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508594462390332:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:26.907166Z node 3 :TX_PROXY ERROR: Actor# [3:7489508594462390389:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:27.816659Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.840703Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508577282518840:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:27.840749Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:27.895755Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.954492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 12953, MsgBus: 30558 2025-04-04T17:18:15.560953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508547825657078:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.561028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016bc/r3tmp/tmpMMksuQ/pdisk_1.dat 2025-04-04T17:18:16.047527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:16.049311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:16.049414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:16.051975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12953, node 1 2025-04-04T17:18:16.136883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:16.136901Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:16.136912Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:16.137037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30558 TClient is connected to server localhost:30558 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:16.640382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.652374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:16.668030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.813239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.956878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.042102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.634060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508560710560755:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.634166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.878414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.949544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.978045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.010057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.085126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.135155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.185940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508565005528570:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.186024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.186098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508565005528575:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:19.189236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:19.201663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508565005528577:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:19.262937Z node 1 :TX_PROXY ERROR: Actor# [1:7489508565005528629:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:20.051117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.104371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:20.561002Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508547825657078:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:20.561055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12199, MsgBus: 5295 2025-04-04T17:18:22.636550Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508578249685446:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:22.636581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016bc/r3tmp/tmpdm6pkC/pdisk_1.dat 2025-04-04T17:18:22.817147Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:22.821518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:22.821605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:22.823060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12199, node 2 2025-04-04T17:18:22.942896Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:22.942923Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:22.942931Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:22.943064Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5295 TClient is connected to server localhost:5295 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:23.377181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.382895Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:23.392690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.474805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.644323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:23.712018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.889962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508591134589121:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:25.890078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:25.953817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.980522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.006509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.036604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.070528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.141377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.199809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508595429556932:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.199855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508595429556938:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.199870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.203378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:26.213292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508595429556940:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:26.299348Z node 2 :TX_PROXY ERROR: Actor# [2:7489508595429556992:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:27.183766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.250310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.637335Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508578249685446:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:27.637394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 29680, MsgBus: 13117 2025-04-04T17:18:12.370785Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508531908579384:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.377162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c6/r3tmp/tmpL1RAWv/pdisk_1.dat 2025-04-04T17:18:12.758098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:12.762094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:12.762190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:12.765284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29680, node 1 2025-04-04T17:18:12.834575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:12.834594Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:12.834602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:12.834729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13117 TClient is connected to server localhost:13117 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:13.392337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.406124Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:13.412555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.548502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.702066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:13.766890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.511525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508544793482901:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.511639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:15.754593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.787103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.819585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.894615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.929378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:16.007165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:16.104407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508549088450721:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:16.104491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:16.104903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508549088450726:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:16.108679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:16.119150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508549088450728:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:16.215128Z node 1 :TX_PROXY ERROR: Actor# [1:7489508549088450783:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:17.085005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:17.372347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508531908579384:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:17.372405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21315, MsgBus: 22847 2025-04-04T17:18:20.531774Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508567820513111:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:20.532053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c6/r3tmp/tmpItWvYa/pdisk_1.dat 2025-04-04T17:18:20.659063Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21315, node 2 2025-04-04T17:18:20.687906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:20.687994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:20.703712Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:20.725389Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:20.725412Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:20.725422Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:20.725530Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22847 TClient is connected to server localhost:22847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:21.144408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.148309Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:21.161057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.209693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.346727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.423724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.631245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508580705416761:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.631331Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.672550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.703187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.730622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.764657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.802919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.852485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.937646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508580705417277:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.937771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.937977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508580705417282:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:23.942041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:23.950823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508580705417284:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:24.004603Z node 2 :TX_PROXY ERROR: Actor# [2:7489508585000384632:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:24.875247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.538349Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508567820513111:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:25.538420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::UniqAndNoUniqSecondaryIndex >> KqpIndexes::SecondaryIndexOrderBy >> KqpIndexes::SecondaryIndexReplace+UseSink >> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType-NotNull >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn [GOOD] >> KqpIndexes::IndexTopSortPushDown >> TTxAllocatorClientTest::ZeroRange [GOOD] >> KqpMultishardIndex::SortedRangeReadDesc [GOOD] >> KqpMultishardIndex::SortByPk >> KqpMultishardIndex::DataColumnWriteNull >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink >> KqpIndexMetadata::HandleNotReadyIndex >> KqpIndexes::UpdateDeletePlan+UseSink [GOOD] >> KqpIndexes::UpdateDeletePlan-UseSink >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2025-04-04T17:17:27.986530Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-04-04T17:17:27.986984Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-04-04T17:17:27.987694Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-04-04T17:17:27.989242Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:27.989672Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-04-04T17:17:27.999421Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:27.999566Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:27.999617Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:27.999676Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-04-04T17:17:27.999858Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:27.999963Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-04-04T17:17:28.000080Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-04-04T17:17:28.000804Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:69:2104] requested range size#5000 2025-04-04T17:17:28.001258Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:28.001323Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:17:28.001399Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-04-04T17:17:28.001434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:69:2104] TEvAllocateResult from# 0 to# 5000 >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] |91.7%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpIndexes::VectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::VectorIndexOrderByCosineSimilarityNotNullableLevel2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6903, MsgBus: 2849 2025-04-04T17:18:07.241426Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508513693593065:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.241464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001703/r3tmp/tmpy3cF57/pdisk_1.dat 2025-04-04T17:18:07.658666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.691860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.696226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.707201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6903, node 1 2025-04-04T17:18:07.870545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.870582Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.870595Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.870719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2849 TClient is connected to server localhost:2849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.650193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.664264Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.681852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:08.853746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:09.011965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.086634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.447619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508526578496710:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.447742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.841885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.876640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.931049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.966513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.043207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.073136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.126307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508530873464518:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.126361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.126526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508530873464523:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.129329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.137169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508530873464525:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.211886Z node 1 :TX_PROXY ERROR: Actor# [1:7489508530873464579:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.120938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.242031Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508513693593065:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.242118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:13.573401Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 5042, MsgBus: 24495 2025-04-04T17:18:14.306492Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508543688081190:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.306552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001703/r3tmp/tmpQlW6zD/pdisk_1.dat 2025-04-04T17:18:14.460543Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:14.479966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.480047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:14.481662Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5042, node 2 2025-04-04T17:18:14.530763Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.530788Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.530796Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.530899Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24495 TClient is connected to server localhost:24495 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:14.905025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.911291Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:14.914844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.979152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... oadService] [TPoolFetcherActor] ActorId: [2:7489508556572985365:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.653042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:17.662852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508556572985367:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:17.757277Z node 2 :TX_PROXY ERROR: Actor# [2:7489508556572985421:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.659428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:19.306713Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508543688081190:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.306772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:20.057667Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:20.144311Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 9626, MsgBus: 26558 2025-04-04T17:18:21.015623Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508570185580986:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:21.015671Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001703/r3tmp/tmp4mYCUO/pdisk_1.dat 2025-04-04T17:18:21.132769Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:21.146497Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:21.146585Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:21.147919Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9626, node 3 2025-04-04T17:18:21.182960Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:21.182986Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:21.182994Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:21.183114Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26558 TClient is connected to server localhost:26558 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:21.635027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.644582Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:21.665892Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.746596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.882775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:21.958597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:24.220254Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508583070484635:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.220355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.253622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.323150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.372524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.411582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.450959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.494101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:24.554861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508583070485153:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.554929Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.555102Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508583070485158:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:24.559202Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:24.572060Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508583070485160:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:24.628365Z node 3 :TX_PROXY ERROR: Actor# [3:7489508583070485213:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:25.481175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.018434Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508570185580986:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:26.018507Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:27.667850Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:27.687508Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:29.144909Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:29.172290Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:30.014840Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:30.754074Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:30.785678Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:31.293900Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:31.313678Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:31.353607Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:31.816720Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:31.834804Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:32.115609Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:32.138159Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:32.627022Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:32.655598Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:32.672987Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> KqpIndexes::MultipleModifications [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] Test command err: Trying to start YDB, gRPC: 6310, MsgBus: 17165 2025-04-04T17:18:22.502670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508574519017275:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:22.503247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016bb/r3tmp/tmpxqgqQQ/pdisk_1.dat 2025-04-04T17:18:22.937263Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:22.941593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:22.941714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:22.944357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6310, node 1 2025-04-04T17:18:23.102817Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:23.102837Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:23.102850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:23.102973Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17165 TClient is connected to server localhost:17165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:23.639545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:23.677656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.837001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:23.971953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:24.051116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:25.562839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508587403920968:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:25.562958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:25.789049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.816830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.843491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.877087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.908565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.942367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:26.033283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508591698888777:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.033371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.033607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508591698888782:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:26.037616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:26.046890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508591698888784:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:26.145440Z node 1 :TX_PROXY ERROR: Actor# [1:7489508591698888839:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:27.085460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.501989Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508574519017275:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:27.502032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:27.671273Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:28.268344Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 22696, MsgBus: 9950 2025-04-04T17:18:29.085528Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508606567364898:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:29.085564Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016bb/r3tmp/tmpB48AF5/pdisk_1.dat 2025-04-04T17:18:29.179052Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22696, node 2 2025-04-04T17:18:29.228644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:29.228732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:29.237479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:29.258497Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:29.258517Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:29.258525Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:29.258631Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9950 TClient is connected to server localhost:9950 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:29.714578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.723241Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:29.733004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.814546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.951371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.019016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.094367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508619452268553:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.094447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.129854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.166369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.194704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.227509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.260292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.306815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.395778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508619452269070:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.395916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.395969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508619452269075:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.399528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:32.408603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508619452269077:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:32.472985Z node 2 :TX_PROXY ERROR: Actor# [2:7489508619452269131:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:33.461352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD] >> KqpMultishardIndex::SecondaryIndexSelect >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-04-04T17:18:06.991706Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.105333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.105383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.113407Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.113693Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.113940Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.120995Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.149956Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.150036Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.152164Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.152271Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.152322Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.154440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.154761Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.154829Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:18:07.226047Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.253825Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.254817Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.254927Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.254959Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.254987Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: 
WaitScheme 2025-04-04T17:18:07.255018Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.255168Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.255207Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.256528Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.256781Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.256909Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.256944Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.257060Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:07.257102Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.257129Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.257171Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.257212Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.257322Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.257356Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.257396Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.264776Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.264843Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.264916Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.265069Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.265130Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.265183Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:18:07.265228Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.265263Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.265296Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.265324Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.265615Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.265650Z node 
1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.265679Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:07.265710Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.265748Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:18:07.265780Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.265807Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.265845Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.265875Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.282986Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.283060Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.283098Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.283129Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.294346Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.294991Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295046Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295083Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.295244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.295281Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.295397Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.295433Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.295462Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.295490Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.302943Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.303007Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.303194Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.303232Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.303270Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.303302Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.303332Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.303369Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:18:07.303400Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:18:07.303433Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.303486Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.303531Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.303568Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.303711Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:18:07.303760Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.303782Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.303804Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.303825Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.303876Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.303901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:18:07.303931Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:07.303968Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:07.304025Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:18:07.304056Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:18:07.304084Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:18:07.304120Z node 1 :TX_DATA ... 
4MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2025-04-04T17:18:35.045218Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:35.045287Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:35.045844Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2025-04-04T17:18:35.045897Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-04T17:18:35.045924Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2025-04-04T17:18:35.045952Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:35.045978Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:35.046013Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:35.046063Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2025-04-04T17:18:35.046090Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-04T17:18:35.046110Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:18:35.046129Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:35.046148Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:35.046182Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:35.046494Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:35.046541Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:35.046601Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:35.046626Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:35.046649Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:35.046677Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-04-04T17:18:35.046706Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 
9437184 is DelayComplete 2025-04-04T17:18:35.046727Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:35.046751Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:35.046774Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-04-04T17:18:35.046811Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-04-04T17:18:35.046832Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:35.046855Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2025-04-04T17:18:35.063589Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:35.063661Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-04-04T17:18:35.063706Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-04T17:18:35.063790Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-04-04T17:18:35.070989Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:98:2133], Recipient [3:234:2227]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 98 RawX2: 12884904021 } 2025-04-04T17:18:35.071046Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-04T17:18:35.071895Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4547:6466], Recipient [3:234:2227]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:35.071935Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:35.071969Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4546:6465], serverId# [3:4547:6466], sessionId# [0:0:0] 2025-04-04T17:18:35.072348Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:98:2133], Recipient [3:234:2227]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 98 RawX2: 12884904021 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-04-04T17:18:35.072386Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:35.072458Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:35.072957Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-04-04T17:18:35.073008Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-04T17:18:35.073036Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit 
CheckDataTx 2025-04-04T17:18:35.073063Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:35.073086Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:35.073120Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:35.073171Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2025-04-04T17:18:35.073199Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-04T17:18:35.073219Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:18:35.073240Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:35.073262Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:35.074679Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:18:35.075075Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:35.075129Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:35.075178Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:35.075209Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:35.075237Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:35.075267Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-04-04T17:18:35.075301Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2025-04-04T17:18:35.075324Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:35.075348Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:35.075377Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-04-04T17:18:35.075413Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-04-04T17:18:35.075434Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:35.075456Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2025-04-04T17:18:35.117835Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:18:35.117898Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-04-04T17:18:35.120069Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:35.120129Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-04-04T17:18:35.120172Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-04-04T17:18:35.120252Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:35.124361Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:232:2226], Recipient [3:234:2227]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-04-04T17:18:35.129379Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4561:6479], Recipient [3:234:2227]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:35.129458Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:35.129517Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4560:6478], serverId# [3:4561:6479], sessionId# [0:0:0] 2025-04-04T17:18:35.129927Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4559:6477], Recipient [3:234:2227]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1720 LastUpdateTime: 1720 } >> KqpMultishardIndex::DataColumnUpsertMixedSemantic [GOOD] >> KqpMultishardIndex::DataColumnSelect >> KqpIndexes::VectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::VectorIndexOrderByCosineDistanceNotNullableLevel2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 16095, MsgBus: 61462 2025-04-04T17:18:07.255614Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508510780512208:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.255650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00170e/r3tmp/tmpbi1htX/pdisk_1.dat 2025-04-04T17:18:07.758162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.758311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.760618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16095, node 1 2025-04-04T17:18:07.847258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.855307Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:18:07.868069Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:18:07.889190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.889213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.889221Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.889333Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61462 TClient is connected to server localhost:61462 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.594028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.637002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.816570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.033760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.138091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.532037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508523665415856:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.532177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.955793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.983958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.014231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.042891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.077608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.141373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527960383662:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.141443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.141559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508527960383667:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.145055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.154141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508527960383669:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.236677Z node 1 :TX_PROXY ERROR: Actor# [1:7489508527960383723:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.093042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.289469Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508510780512208:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.291081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:13.804744Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wyv3p79xpq241x6s3ktb8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIzNGQxNTYtNTg1NTI5YzktM2QyYWE3MmMtYjY5YjQ2Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:13.815914Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWIzNGQxNTYtNTg1NTI5YzktM2QyYWE3MmMtYjY5YjQ2Yzc=, ActorId: [1:7489508532255352051:2547], ActorState: ExecuteState, TraceId: 01jr0wyv3p79xpq241x6s3ktb8, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 24735, MsgBus: 19489 2025-04-04T17:18:15.584664Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508548218052653:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.584706Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00170e/r3tmp/tmpAat7H0/pdisk_1.dat 2025-04-04T17:18:15.742439Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:15.767290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:15.767393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24735, node 2 2025-04-04T17:18:15.775447Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:15.902767Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:15.902786Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:15.902792Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:15.902911Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19489 TClient is connected to server localhost:19489 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:16.299345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.307082Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:16.320172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.392438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.532218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.594120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:18.554459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508561102956300:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.554552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.604416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.677645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.710945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.778865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.807714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.841398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.887476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508561102956817:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.887583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.887662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508561102956822:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.891466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:18.900900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508561102956824:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:18.990917Z node 2 :TX_PROXY ERROR: Actor# [2:7489508561102956877:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:19.852649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:20.590351Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508548218052653:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:20.596912Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:25.212670Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wz62v5nmtcznyxh70dznt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:25.212938Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, ActorId: [2:7489508569692892498:2546], ActorState: ExecuteState, TraceId: 01jr0wz62v5nmtcznyxh70dznt, Create QueryResponse for error on request, msg: 2025-04-04T17:18:30.392054Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-04T17:18:30.392109Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-04T17:18:30.404260Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-04-04T17:18:30.404296Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-04T17:18:30.404313Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-04T17:18:30.404337Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-04T17:18:30.434895Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-04T17:18:30.434936Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-04-04T17:18:30.434953Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-04T17:18:30.434970Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-04T17:18:30.434989Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-04T17:18:30.443751Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not 
found 2025-04-04T17:18:30.444071Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-04T17:18:30.726947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:30.726990Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:32.352648Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzcv32q1krjemapjgzw3k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:32.352924Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, ActorId: [2:7489508569692892498:2546], ActorState: ExecuteState, TraceId: 01jr0wzcv32q1krjemapjgzw3k, Create QueryResponse for error on request, msg: 2025-04-04T17:18:33.443932Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzdz6beemfk4tyx15chqm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:33.444211Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, ActorId: [2:7489508569692892498:2546], ActorState: ExecuteState, TraceId: 01jr0wzdz6beemfk4tyx15chqm, Create QueryResponse for error on request, msg: 2025-04-04T17:18:34.715427Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzg661t4wxn95s7rn8b04, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-04T17:18:34.715681Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGNhOTA0MDAtZWU0MTU5NzgtYmY4NGY3YTMtNmMyYWMyYzE=, ActorId: [2:7489508569692892498:2546], ActorState: ExecuteState, TraceId: 01jr0wzg661t4wxn95s7rn8b04, Create QueryResponse for error on request, msg: >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] >> KqpIndexes::NullInIndexTableNoDataRead [GOOD] >> KqpIndexes::NullInIndexTable >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TCmsTest::WalleRebootDownNode >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTenatsTest::TestClusterLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27391, MsgBus: 14322 2025-04-04T17:18:23.881544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508580959216107:2181];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:23.883843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ba/r3tmp/tmpA82gOp/pdisk_1.dat 2025-04-04T17:18:24.219132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27391, node 1 2025-04-04T17:18:24.272434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:24.273126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:24.286919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:24.318530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:24.318550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:24.318557Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:24.318694Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14322 TClient is connected to server localhost:14322 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:24.862711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:24.882945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:24.910494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:25.054547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:25.226350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.309833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:27.053823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508598139086931:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.053951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.292742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.320836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.345507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.381075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.410548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.483058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.565397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508598139087449:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.565475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.565727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508598139087454:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.569777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:27.579064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508598139087456:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:27.656489Z node 1 :TX_PROXY ERROR: Actor# [1:7489508598139087511:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:28.586609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:28.661607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:28.877081Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508580959216107:2181];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:28.877152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18185, MsgBus: 4147 2025-04-04T17:18:30.932006Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508611096234173:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:30.932151Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ba/r3tmp/tmpI8kEzR/pdisk_1.dat 2025-04-04T17:18:31.052651Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:31.052732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:31.054087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:31.054302Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18185, node 2 2025-04-04T17:18:31.177202Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:31.177222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:31.177230Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:31.177333Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4147 TClient is connected to server localhost:4147 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:31.639659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.660663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:31.758642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:31.910429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:31.986886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.074370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508628276105113:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.074466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.109484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.143634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.198405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.233105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.268280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.328743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.394612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508628276105625:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.394731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.395034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508628276105630:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.397753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:34.406919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508628276105633:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:34.504302Z node 2 :TX_PROXY ERROR: Actor# [2:7489508628276105688:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:35.383383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.438828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.933055Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508611096234173:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:35.933110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 14847, MsgBus: 23275 2025-04-04T17:18:24.018651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508584709908157:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:24.018788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b8/r3tmp/tmpF2RWmf/pdisk_1.dat 2025-04-04T17:18:24.374470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:24.378484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:24.378621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:24.381820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14847, node 1 2025-04-04T17:18:24.494398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:24.494419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:24.494425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:24.494516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23275 TClient is connected to server localhost:23275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:25.098856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.137282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.282720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.443164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:25.514136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.153225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508597594811640:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.153344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.422208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.449289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.484884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.514849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.544437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.586439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.652398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508597594812149:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.652477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.652700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508597594812154:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.656418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:27.667756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508597594812156:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:27.731031Z node 1 :TX_PROXY ERROR: Actor# [1:7489508597594812209:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:28.588542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:28.858483Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508601889779983:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0wzafqdhfgmfmacn0ed1cq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-04T17:18:28.867906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, ActorId: [1:7489508601889779760:2488], ActorState: ExecuteState, TraceId: 01jr0wzafqdhfgmfmacn0ed1cq, Create QueryResponse for error on request, msg: 2025-04-04T17:18:28.888211Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508601889779997:2488] TxId: 281474976710674. Ctx: { TraceId: 01jr0wzagbe45dx1em3aedjfg1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-04T17:18:28.888361Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, ActorId: [1:7489508601889779760:2488], ActorState: ExecuteState, TraceId: 01jr0wzagbe45dx1em3aedjfg1, Create QueryResponse for error on request, msg: 2025-04-04T17:18:28.898349Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508601889780006:2488] TxId: 281474976710676. Ctx: { TraceId: 01jr0wzah07dy6hfr726frtcbq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-04T17:18:28.898518Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, ActorId: [1:7489508601889779760:2488], ActorState: ExecuteState, TraceId: 01jr0wzah07dy6hfr726frtcbq, Create QueryResponse for error on request, msg: 2025-04-04T17:18:28.907578Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508601889780015:2488] TxId: 281474976710678. Ctx: { TraceId: 01jr0wzaha1dd1ssbg3qpgt0n4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-04-04T17:18:28.907744Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEzOTI4NTAtN2VlMjQ4MDYtNDg3ZDg5MzgtNjkwZmQ2OTk=, ActorId: [1:7489508601889779760:2488], ActorState: ExecuteState, TraceId: 01jr0wzaha1dd1ssbg3qpgt0n4, Create QueryResponse for error on request, msg: 2025-04-04T17:18:29.017223Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508584709908157:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:29.017294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21342, MsgBus: 62609 2025-04-04T17:18:29.748041Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508606908901800:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:29.812548Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b8/r3tmp/tmpzWw8ae/pdisk_1.dat 2025-04-04T17:18:29.892049Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21342, node 2 2025-04-04T17:18:29.934577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:29.934659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:30.017323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:30.032195Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:30.032214Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:30.032222Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:30.032332Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62609 TClient is connected to server localhost:62609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:18:30.467423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.474641Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:30.480417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.542794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.690530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:30.758482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.885674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508619793805288:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.885760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.922086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.955277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.009859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.039534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.078332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.114020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.201007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508624088773101:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.201105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.201249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508624088773107:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.204039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:33.213076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508624088773109:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:33.306818Z node 2 :TX_PROXY ERROR: Actor# [2:7489508624088773164:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:34.309059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.403473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.487503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.728182Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508606908901800:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:34.728228Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] >> KqpIndexes::SecondaryIndexReplace-UseSink >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] >> KqpMultishardIndex::DataColumnWrite+UseSink [GOOD] >> KqpMultishardIndex::DataColumnWrite-UseSink >> KqpMultishardIndex::SortByPk [GOOD] >> KqpMultishardIndex::DataColumnWriteNull [GOOD] >> KqpMultishardIndex::DuplicateUpsert >> TCmsTenatsTest::TestTenantRatioLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24663, MsgBus: 21061 2025-04-04T17:18:27.960847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508599005060691:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:27.960914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016aa/r3tmp/tmpKRJjMb/pdisk_1.dat 2025-04-04T17:18:28.326087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:28.372457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:28.372591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24663, node 1 2025-04-04T17:18:28.373577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:28.452609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:28.452649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-04-04T17:18:28.452657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:28.452806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21061 TClient is connected to server localhost:21061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:29.007753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.035755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.159571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:29.304628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:29.381689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:31.059153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508616184931681:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.059258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.367344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.401821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.435521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.464744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.500791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.528378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.577595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508616184932188:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.577683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.577870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508616184932193:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.582322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:31.597672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508616184932195:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:31.685262Z node 1 :TX_PROXY ERROR: Actor# [1:7489508616184932249:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:32.537757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.959379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508599005060691:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:32.959449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27118, MsgBus: 26906 2025-04-04T17:18:33.982998Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508624266244117:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:33.983707Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016aa/r3tmp/tmpRGSayI/pdisk_1.dat 2025-04-04T17:18:34.143517Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27118, node 2 2025-04-04T17:18:34.215036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:34.220656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:34.226494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:34.306176Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:34.306198Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:34.306204Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:34.306324Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26906 TClient is connected to server localhost:26906 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:34.793794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.806489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:34.900996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.029170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.116940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.121396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508641446115035:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.121452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.163653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.191053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.217952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.285194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.311451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.339724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.380181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508641446115548:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.380257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.380447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508641446115553:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.382941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:37.392231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508641446115555:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:37.489139Z node 2 :TX_PROXY ERROR: Actor# [2:7489508641446115609:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:38.348439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.985952Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508624266244117:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:38.986006Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::Uint8Index >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk [GOOD] Test command err: Trying to start YDB, gRPC: 62565, MsgBus: 26424 2025-04-04T17:18:27.242492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508595804363296:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:27.242618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016af/r3tmp/tmpp0me6Q/pdisk_1.dat 2025-04-04T17:18:27.557971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:27.560930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:27.560990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:27.564511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62565, node 1 2025-04-04T17:18:27.648705Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:27.648737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:27.648748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:27.648890Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26424 TClient is connected to server localhost:26424 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:28.193484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:28.206278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:28.215428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:28.338483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:28.498588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:28.563644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.225074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508608689266968:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.225219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.494528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.528879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.561986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.598412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.634021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.676570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.731439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508608689267479:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.731499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.731775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508608689267484:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.735236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:30.749598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508608689267486:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:30.817363Z node 1 :TX_PROXY ERROR: Actor# [1:7489508608689267539:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:31.649797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.246360Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508595804363296:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:32.246502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30891, MsgBus: 13526 2025-04-04T17:18:33.541577Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508623644338626:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:33.544393Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016af/r3tmp/tmpQGUNuV/pdisk_1.dat 2025-04-04T17:18:33.628244Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30891, node 2 2025-04-04T17:18:33.670954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:33.671046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:33.686169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:33.718448Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:33.718465Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:33.718470Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:33.718583Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13526 TClient is connected to server localhost:13526 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:34.150650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.164278Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:34.174967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.249585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.379915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.455059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.601842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508636529242218:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.601913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.639988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.669431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.701183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.733468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.774187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.831134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.890535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508636529242733:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.890623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.890785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508636529242738:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.893527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:36.902286Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508636529242740:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:36.982852Z node 2 :TX_PROXY ERROR: Actor# [2:7489508636529242793:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:37.956903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:38.543664Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508623644338626:2130];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:38.543720Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> KqpIndexes::IndexTopSortPushDown [GOOD] >> KqpIndexMetadata::HandleNotReadyIndex [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin >> TCmsTenatsTest::TestClusterRatioLimit >> KqpIndexes::VectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::VectorIndexOrderByCosineDistanceNullableLevel2 >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexOrderBy2 >> TCmsTest::CollectInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 7064, MsgBus: 28562 2025-04-04T17:18:10.634132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508525421495307:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:10.634608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016cd/r3tmp/tmp0KCAEu/pdisk_1.dat 2025-04-04T17:18:10.976097Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:10.996703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:10.996805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:10.999155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7064, node 1 2025-04-04T17:18:11.118074Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:11.118098Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-04T17:18:11.118105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:11.118298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28562 TClient is connected to server localhost:28562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:11.602664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.624963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:11.638006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.782403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:11.940740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.012688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.873221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508538306398761:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.873326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.894920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.925467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.952449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.981626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.008221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.035695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.071234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508542601366564:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.071309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.071527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508542601366569:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:14.074732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:14.083750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508542601366571:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:14.162775Z node 1 :TX_PROXY ERROR: Actor# [1:7489508542601366625:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:15.113899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.151027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.186498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.636830Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508525421495307:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.636925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15101, MsgBus: 25620 2025-04-04T17:18:18.462408Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508559334796203:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:18.462460Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016cd/r3tmp/tmp96k9sG/pdisk_1.dat 2025-04-04T17:18:18.589710Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:18.604747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:18.604834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:18.606365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15101, node 2 2025-04-04T17:18:18.697813Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:18.697834Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:18.697841Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:18.697952Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25620 TClient is connected to server localhost:25620 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:19.101351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:19.110488Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:19.120795Z nod ... 4976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.654125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.724761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.756985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.797185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:21.848784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508572219700398:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.848879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.848961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508572219700403:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:21.852380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:21.860993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508572219700405:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:21.924522Z node 2 :TX_PROXY ERROR: Actor# [2:7489508572219700458:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:22.877034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:23.462804Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508559334796203:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:23.462861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27420, MsgBus: 14233 2025-04-04T17:18:33.013462Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508621787798038:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:33.013537Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016cd/r3tmp/tmp6N5eTr/pdisk_1.dat 2025-04-04T17:18:33.157501Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:33.183798Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:33.183883Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:33.185564Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27420, node 3 2025-04-04T17:18:33.236460Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:33.236476Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:33.236483Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:33.236623Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14233 TClient is connected to server localhost:14233 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:33.773921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:33.789786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:33.868061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.061151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.149045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.658386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508634672701716:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.658501Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.695843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.771363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.816153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.862504Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.940407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.983346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.059895Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508638967669531:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.060000Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.060399Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508638967669536:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.064399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:37.077350Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508638967669538:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:37.161004Z node 3 :TX_PROXY ERROR: Actor# [3:7489508638967669594:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:38.010537Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508621787798038:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:38.010638Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:38.202282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.290005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.334828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 12242, MsgBus: 17607 2025-04-04T17:18:26.597627Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508593571380624:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:26.597719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b0/r3tmp/tmp9KhIXd/pdisk_1.dat 2025-04-04T17:18:26.998849Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12242, node 1 2025-04-04T17:18:27.034451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:27.034591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:27.035862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:27.095009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:27.095040Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:27.095049Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:27.095170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:17607 TClient is connected to server localhost:17607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:27.634391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:27.659498Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:27.671991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:27.853408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:28.005448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:28.080665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.642127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508606456284285:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:29.642739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:29.891352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:29.965245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:29.996493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.028923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.058103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.128747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:30.198346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508610751252099:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.198421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.198651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508610751252104:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:30.201932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:30.213989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508610751252106:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:30.290304Z node 1 :TX_PROXY ERROR: Actor# [1:7489508610751252158:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:31.181102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:31.598228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508593571380624:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:31.598305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:32.917099Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzdta9xywpnxxhh0t35qt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc5ZjU5YmUtZDZkZThlYjQtNGE3YWJhYTItZTFjZTY5OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:32.926331Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzc5ZjU5YmUtZDZkZThlYjQtNGE3YWJhYTItZTFjZTY5OTQ=, ActorId: [1:7489508615046220489:2546], ActorState: ExecuteState, TraceId: 01jr0wzdta9xywpnxxhh0t35qt, Create QueryResponse for error on request, msg: 2025-04-04T17:18:33.597068Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzefa7gddjw46tn1m9dbv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc5ZjU5YmUtZDZkZThlYjQtNGE3YWJhYTItZTFjZTY5OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-04T17:18:33.598189Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzc5ZjU5YmUtZDZkZThlYjQtNGE3YWJhYTItZTFjZTY5OTQ=, ActorId: [1:7489508615046220489:2546], ActorState: ExecuteState, TraceId: 01jr0wzefa7gddjw46tn1m9dbv, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20171, MsgBus: 26461 2025-04-04T17:18:34.570621Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508626923951267:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:34.570657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b0/r3tmp/tmpMCHZWw/pdisk_1.dat 2025-04-04T17:18:34.755687Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:34.771536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:34.771620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:34.773433Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20171, node 2 2025-04-04T17:18:34.827837Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:34.827860Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:34.827866Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:34.827965Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26461 TClient is connected to server localhost:26461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:35.241359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.274612Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:35.293136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:35.349828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.488322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.541738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.673553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508639808854914:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.673627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.715963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.747982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.774186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.803950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.830884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.863776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.944434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508639808855429:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.944513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.944873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508639808855434:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.947833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:37.958601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508639808855436:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:38.036798Z node 2 :TX_PROXY ERROR: Actor# [2:7489508644103822787:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:38.984798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:39.571650Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508626923951267:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:39.571714Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:41.129869Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wznme1z3d2qmjrv5v17kn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTgyOGM2NTEtYWZiMzNmMTgtYTFhODUzOWYtM2MwZTY4NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:41.130116Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTgyOGM2NTEtYWZiMzNmMTgtYTFhODUzOWYtM2MwZTY4NmE=, ActorId: [2:7489508648398791105:2546], ActorState: ExecuteState, TraceId: 01jr0wznme1z3d2qmjrv5v17kn, Create QueryResponse for error on request, msg: 2025-04-04T17:18:42.322851Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzpnp6n91q7qdmpr9bvfr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTgyOGM2NTEtYWZiMzNmMTgtYTFhODUzOWYtM2MwZTY4NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-04-04T17:18:42.323131Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTgyOGM2NTEtYWZiMzNmMTgtYTFhODUzOWYtM2MwZTY4NmE=, ActorId: [2:7489508648398791105:2546], ActorState: ExecuteState, TraceId: 01jr0wzpnp6n91q7qdmpr9bvfr, Create QueryResponse for error on request, msg: >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest2 >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] >> TCmsTest::ManageRequestsWrong ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:108:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:108:2139] Leader for TabletID 9437184 is [1:130:2153] sender: [1:133:2057] recipient: [1:108:2139] 2025-04-04T17:18:06.991632Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.093715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.093755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.095213Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.095640Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.095955Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.145190Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.150814Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.150915Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.152292Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.152359Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.152446Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.154430Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.154499Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.154563Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:191:2153] in generation 2 Leader for TabletID 9437184 is [1:130:2153] sender: [1:208:2057] recipient: [1:14:2061] 2025-04-04T17:18:07.225725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.254472Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.255392Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.255535Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.255581Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.255619Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:18:07.255654Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.255832Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.255875Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.256817Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.256987Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.257077Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.257120Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.257169Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:07.257212Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.257267Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.257316Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.257358Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.257485Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.257522Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.257575Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.261096Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.261184Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.261266Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.261426Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.261482Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.261544Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:18:07.261597Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.261637Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.261687Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.261737Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.265777Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.265841Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.265883Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 
9437184 to execution unit FinishPropose 2025-04-04T17:18:07.265927Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.266000Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:18:07.266060Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.266097Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.266131Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.266160Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.278938Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.279091Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.279139Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.279178Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.294379Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.294954Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295003Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295045Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.295166Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.295222Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.295365Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.295403Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.295432Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.295472Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.298429Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.298496Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.298741Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.298773Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.298819Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.298851Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.298877Z node 1 :TX_DATASHARD 
TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.298927Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-04T17:18:07.298956Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-04T17:18:07.299002Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.299033Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.299058Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.299088Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.299256Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-04T17:18:07.299286Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.299301Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.299316Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.299332Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.299374Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.299391Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:18:07.299421Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:07.299459Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:07.299518Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-04T17:18:07.299548Z node 1 :TX_DATASHARD TRAC ... 
cribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-04-04T17:18:43.509071Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [23:281:2265], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [23:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:18:43.509111Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:18:43.509233Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [23:120:2146], Recipient [23:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-04T17:18:43.509276Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:18:43.509328Z node 23 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-04T17:18:43.509393Z node 23 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-04T17:18:43.528045Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [23:281:2265], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [23:281:2265] ServerId: [23:285:2269] } 2025-04-04T17:18:43.528136Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:18:43.561396Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:292:2274], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:43.561494Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:43.561559Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:290:2273], serverId# [23:292:2274], sessionId# [0:0:0] 2025-04-04T17:18:43.561776Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:289:2272], Recipient [23:235:2228]: NKikimrTabletBase.TEvGetCounters 2025-04-04T17:18:43.578307Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [23:98:2133], Recipient [23:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 98 RawX2: 98784249941 } 2025-04-04T17:18:43.578402Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-04T17:18:43.578937Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:294:2276], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:43.578996Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:43.579063Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:293:2275], serverId# [23:294:2276], sessionId# [0:0:0] 2025-04-04T17:18:43.579295Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [23:98:2133], Recipient [23:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 98 RawX2: 98784249941 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-04-04T17:18:43.579352Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:43.579467Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:43.580602Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-04-04T17:18:43.580699Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:43.580753Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-04-04T17:18:43.580807Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:43.580854Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:43.580906Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:43.580988Z node 23 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-04-04T17:18:43.581043Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:43.581076Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:18:43.581109Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:43.581141Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:43.581203Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:43.581271Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2025-04-04T17:18:43.581322Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-04T17:18:43.581667Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:43.581726Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:43.581789Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:43.583379Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2025-04-04T17:18:43.583574Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-04T17:18:43.583637Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-04T17:18:43.583899Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:43.583938Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit 
ExecuteDataTx 2025-04-04T17:18:43.584765Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-04T17:18:43.584814Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:43.585183Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2025-04-04T17:18:43.585262Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-04T17:18:43.585290Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-04T17:18:43.585471Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:43.585497Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:43.585938Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-04T17:18:43.585968Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:43.586466Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2025-04-04T17:18:43.586590Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-04-04T17:18:43.586631Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-04-04T17:18:43.586801Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:43.586835Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:43.587469Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-04-04T17:18:43.587520Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:18:43.874104Z node 23 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:43.874260Z node 23 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:18:43.874359Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:43.874404Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:43.874459Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:43.874511Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-04-04T17:18:43.874649Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:43.874689Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:43.874740Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:43.874790Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-04-04T17:18:43.874852Z node 23 :TX_DATASHARD TRACE: 
Execution status for [0:2] at 9437184 is Executed 2025-04-04T17:18:43.874886Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:43.874929Z node 23 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-04-04T17:18:43.893192Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:43.893288Z node 23 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-04-04T17:18:43.893376Z node 23 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-04-04T17:18:43.893490Z node 23 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:43.894851Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:299:2281], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:43.894937Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:43.895001Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:298:2280], serverId# [23:299:2281], sessionId# [0:0:0] 2025-04-04T17:18:43.895132Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:297:2279], Recipient [23:235:2228]: NKikimrTabletBase.TEvGetCounters >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] >> TMaintenanceApiTest::ActionReason [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig >> KqpIndexes::NullInIndexTable [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD] >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] Test command err: Trying to start YDB, gRPC: 24055, MsgBus: 15364 2025-04-04T17:18:28.288298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508602272146367:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:28.288529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a9/r3tmp/tmp9JH0yo/pdisk_1.dat 2025-04-04T17:18:28.747784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:28.749675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:28.749749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:28.753635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 24055, node 1 2025-04-04T17:18:28.824271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:28.824297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:28.824308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:28.826348Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15364 TClient is connected to server localhost:15364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:29.426595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.437114Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.446730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:29.574305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.724035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.804891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:31.468354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508615157050014:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.469265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.794315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.832380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.867652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.899609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.930505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.971381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.061466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508619452017825:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.061522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.061809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508619452017830:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.065533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:32.075125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508619452017832:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:32.157575Z node 1 :TX_PROXY ERROR: Actor# [1:7489508619452017887:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:32.996578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:33.290621Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508602272146367:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:33.290685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26988, MsgBus: 32122 2025-04-04T17:18:36.425998Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508636563750688:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:36.426049Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a9/r3tmp/tmpQNzvke/pdisk_1.dat 2025-04-04T17:18:36.533539Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:36.560113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:36.560198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:36.561861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26988, node 2 2025-04-04T17:18:36.626715Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:36.626737Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:36.626746Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:36.626846Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32122 TClient is connected to server localhost:32122 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:37.103753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.113921Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:37.124600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.189750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.359694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.436009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:39.577219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508649448654349:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.577290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.609735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.640095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.676921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.708876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.736438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.768638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.819909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508649448654859:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.819999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.820262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508649448654864:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.823791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:39.833706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508649448654866:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:39.909202Z node 2 :TX_PROXY ERROR: Actor# [2:7489508649448654920:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:40.812963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.426236Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508636563750688:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:41.426314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 18008, MsgBus: 15688 2025-04-04T17:18:31.698464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508616499365291:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:31.698522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00169e/r3tmp/tmpuJYyqG/pdisk_1.dat 2025-04-04T17:18:32.134592Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:32.137891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:32.137979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:32.140907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18008, node 1 2025-04-04T17:18:32.246861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:32.246882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:32.246891Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:32.247024Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15688 TClient is connected to server localhost:15688 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:32.866710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.889932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.907343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.045247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:33.230083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:33.294614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.814107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508629384268932:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.814214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:35.036725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.071942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.142941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.176673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.209856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.248291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.294187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508633679236741:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:35.294310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:35.294539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508633679236746:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:35.298685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:35.311000Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:18:35.311267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508633679236748:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:35.376839Z node 1 :TX_PROXY ERROR: Actor# [1:7489508633679236802:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:36.236354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.700677Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508616499365291:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:36.700741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:37.039106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:1, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 14711, MsgBus: 10878 2025-04-04T17:18:38.096886Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508645989976203:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00169e/r3tmp/tmpVqlaTe/pdisk_1.dat 2025-04-04T17:18:38.138282Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:38.203584Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14711, node 2 2025-04-04T17:18:38.238722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:38.238814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:38.241843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:38.258791Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:38.258816Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:38.258824Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:38.258943Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10878 TClient is connected to server localhost:10878 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:38.702676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:38.713292Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:38.728041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:38.812976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:38.959729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.016250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.148252Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508658874879670:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.148309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.186475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.214110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.243937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.309868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.336714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.424543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.504866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508658874880195:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.504933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.505052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508658874880200:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.508779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:41.520552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508658874880202:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:18:41.579889Z node 2 :TX_PROXY ERROR: Actor# [2:7489508658874880255:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:42.404631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:43.094341Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508645989976203:2215];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:43.094414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpMultishardIndex::DataColumnSelect [GOOD]
>> TCmsTest::SamePriorityRequest2 [GOOD]
>> TCmsTest::TestOutdatedState
|91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD]
Test command err:
Trying to start YDB, gRPC: 28183, MsgBus: 21694
2025-04-04T17:18:27.362446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508599320067933:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:27.362581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ae/r3tmp/tmpptFJ7t/pdisk_1.dat
2025-04-04T17:18:27.692495Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:27.734421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:27.734533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 28183, node 1
2025-04-04T17:18:27.736660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:18:27.802437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:27.802469Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:27.802477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:27.802639Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21694
TClient is connected to server localhost:21694
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:18:28.305854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:28.335009Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:18:28.352875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:28.468668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:28.636718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:28.711298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:30.251908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508612204971590:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:30.251982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:30.466864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:18:30.497823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:18:30.548769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:18:30.578593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:18:30.608601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:18:30.659029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:18:30.747589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508612204972112:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:30.747660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:30.747930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508612204972117:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:30.751847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:18:30.762628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508612204972119:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:18:30.832224Z node 1 :TX_PROXY ERROR: Actor# [1:7489508612204972173:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:31.798171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:32.360816Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508599320067933:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:32.360901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 10863, MsgBus: 21582
2025-04-04T17:18:32.948210Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508617915728760:2081];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:32.948823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ae/r3tmp/tmp0WrKFs/pdisk_1.dat
2025-04-04T17:18:33.052214Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:33.083036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:33.083099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 10863, node 2
2025-04-04T17:18:33.085674Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:18:33.137864Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:33.137883Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:33.137889Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:33.137990Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21582
TClient is connected to server localhost:21582
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:18:33.470503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:33.478816Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:33.484641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:33.566692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
2025-04 ... CreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.227088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.301770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.386719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508635095600204:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:36.386790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:36.386999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508635095600209:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:36.390389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:18:36.405199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508635095600211:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:18:36.482822Z node 2 :TX_PROXY ERROR: Actor# [2:7489508635095600267:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:37.355137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:18:37.762284Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:37.782355Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:37.950507Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508617915728760:2081];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:37.950578Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 23057, MsgBus: 7577
2025-04-04T17:18:38.678137Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508645390227430:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:38.678171Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ae/r3tmp/tmp1mZso6/pdisk_1.dat
2025-04-04T17:18:38.806770Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:38.816899Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:38.816988Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:18:38.818085Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23057, node 3
2025-04-04T17:18:38.890788Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:38.890812Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:38.890818Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:38.890929Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7577
TClient is connected to server localhost:7577
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:18:39.332826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:39.339831Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:18:39.359215Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:39.439877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:39.596387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:39.685663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:41.855478Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508658275131083:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:41.855574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:41.892186Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:18:41.920752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:18:41.989833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.018449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.087289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.129285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.209072Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508662570098901:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.209187Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.209521Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508662570098906:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.213290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:18:42.224095Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508662570098908:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:18:42.297252Z node 3 :TX_PROXY ERROR: Actor# [3:7489508662570098963:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:43.345492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:18:43.460420Z node 3 :TX_PROXY ERROR: Actor# [3:7489508666865066807:3818] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:43.681724Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508645390227430:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:43.681910Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:18:44.658284Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:44.671665Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
>> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD]
>> KqpMultishardIndex::DuplicateUpsert [GOOD]
>> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD]
|91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 64278, MsgBus: 29991
2025-04-04T17:18:32.776298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508618585924648:2198];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:32.778347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00169b/r3tmp/tmpSiuTon/pdisk_1.dat
2025-04-04T17:18:33.167116Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:33.191177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:33.191253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:18:33.192587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 64278, node 1
2025-04-04T17:18:33.285236Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:33.285267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:33.285276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:33.285405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29991
TClient is connected to server localhost:29991
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:18:33.948173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:33.967655Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:18:33.979342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:34.148454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:34.329854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:18:34.394970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T17:18:35.889251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508631470828180:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:35.889388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:36.145925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.179022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.213531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.282792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.317527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.390357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:18:36.479676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508635765796002:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:36.479777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:36.480212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508635765796007:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:36.484255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:18:36.497791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508635765796009:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:18:36.582717Z node 1 :TX_PROXY ERROR: Actor# [1:7489508635765796064:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:37.374322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:18:37.754593Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508618585924648:2198];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:37.754646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:18:38.116781Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:38.759423Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:38.828450Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
Trying to start YDB, gRPC: 18742, MsgBus: 17477
2025-04-04T17:18:39.551516Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508651197014655:2214];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00169b/r3tmp/tmpPvh4xf/pdisk_1.dat
2025-04-04T17:18:39.568330Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T17:18:39.651095Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:39.672992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:39.673071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:18:39.674541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 18742, node 2
2025-04-04T17:18:39.722843Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:39.722868Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:39.722876Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:39.722977Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17477
TClient is connected to server localhost:17477
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:18:40.200892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:40.207383Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:18:40.221711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:40.295109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:40.443824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:18:40.508450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.594867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508664081918137:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.594958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.618184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.644643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.680178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.711892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.741520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.781532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.828766Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508664081918653:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.828834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.829109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508664081918658:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:42.832419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:18:42.841837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508664081918660:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:18:42.905799Z node 2 :TX_PROXY ERROR: Actor# [2:7489508664081918713:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:43.835034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:18:44.504009Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:44.517067Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:44.540965Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508651197014655:2214];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:44.541033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:18:45.048278Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-04-04T17:18:45.086192Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
>> TCmsTest::ManageRequestsWrong [GOOD]
>> TCmsTest::ManageRequestsDry
|91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD]
>> TCmsTest::RequestReplaceBrokenDevices
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnSelect [GOOD]
Test command err:
Trying to start YDB, gRPC: 28534, MsgBus: 8361
2025-04-04T17:18:30.259540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508612489837064:2068];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:30.259630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a1/r3tmp/tmpYzjid5/pdisk_1.dat
2025-04-04T17:18:30.657931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:30.658029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:18:30.660479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:18:30.675057Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28534, node 1
2025-04-04T17:18:30.757047Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:30.757068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:30.757104Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:30.757210Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:8361
TClient is connected to server localhost:8361
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:18:31.479548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:31.502662Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:18:31.517393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:31.648253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:31.824467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:31.904986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:33.593989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508625374740723:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:33.594112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:33.876296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:18:33.912043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:18:33.982452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:18:34.014961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:18:34.084367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:18:34.131799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:18:34.172981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508629669708536:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:34.173073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:34.173113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508629669708541:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:34.176127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:18:34.185680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508629669708543:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:18:34.269785Z node 1 :TX_PROXY ERROR: Actor# [1:7489508629669708597:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:35.205548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:35.333264Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508612489837064:2068];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:35.333481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 10064, MsgBus: 13522
2025-04-04T17:18:37.463774Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508641617314481:2064];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:37.463845Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a1/r3tmp/tmpG0tp8a/pdisk_1.dat
2025-04-04T17:18:37.611286Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:37.631914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:37.632011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:18:37.633182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10064, node 2
2025-04-04T17:18:37.689953Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:37.689972Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:37.689977Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:37.690061Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13522
TClient is connected to server localhost:13522
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-04-04T17:18:38.102400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T17:18:38.110140Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:18:38.117659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:38.202526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:38.341768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:38.403017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:40.563660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508654502218116:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:40.563814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:40.579641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:18:40.615481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:18:40.648232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:18:40.682088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:18:40.711409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:18:40.780257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:18:40.828751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508654502218633:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:40.828840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:40.828886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508654502218638:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:18:40.831883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:18:40.840245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508654502218640:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:18:40.925604Z node 2 :TX_PROXY ERROR: Actor# [2:7489508654502218693:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:18:41.853455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:18:41.932691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-04T17:18:41.977553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-04T17:18:42.465382Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508641617314481:2064];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:42.465444Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:18:43.818048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480
waiting...
>> KqpIndexes::Uint8Index [GOOD]
>> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD]
>> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled
>> TCmsTest::RequestRestartServicesOk
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD]
Test command err:
Trying to start YDB, gRPC: 24642, MsgBus: 1608
2025-04-04T17:18:27.842640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508599409653609:2268];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:18:27.842945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ac/r3tmp/tmp936Rii/pdisk_1.dat
2025-04-04T17:18:28.252669Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:18:28.258138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:18:28.258221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:18:28.261359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24642, node 1
2025-04-04T17:18:28.349596Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:18:28.349614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:18:28.349644Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:18:28.349755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1608
TClient is connected to server localhost:1608
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:18:28.880245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:28.899222Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:18:28.912922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:29.023357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:29.188763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:29.276040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:18:31.016373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508616589524372:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.016495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.284960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.316694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.350529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.382007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.410849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.449519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:31.510092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508616589524882:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.510170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.510364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508616589524887:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:31.514125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:31.526977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508616589524890:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:31.602133Z node 1 :TX_PROXY ERROR: Actor# [1:7489508616589524943:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:32.511621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.841504Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508599409653609:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:32.841568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22003, MsgBus: 20263 2025-04-04T17:18:36.155258Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508636050829577:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:36.155303Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ac/r3tmp/tmpwA42YC/pdisk_1.dat 2025-04-04T17:18:36.263292Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22003, node 2 2025-04-04T17:18:36.342772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:36.342873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:36.345535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:36.374851Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:36.374875Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:36.374882Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:36.375004Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20263 TClient is connected to server localhost:20263 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:36.815972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.829584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.901839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.057887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:37.122482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.180877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508648935733214:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.180981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.204187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.239176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.319410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.352151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.424768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.511317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:39.578334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508648935733738:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.578430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.578793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508648935733743:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:39.581884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:39.594805Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508648935733745:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:39.682752Z node 2 :TX_PROXY ERROR: Actor# [2:7489508648935733800:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:40.608906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.186990Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508636050829577:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:41.187307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:46.033498Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzta7ad1neehp0n2gkaq4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWYyMTI3OWEtNmIxZDQ3N2UtNmY3NWI4YjItMzQxNDcxZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:46.043855Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWYyMTI3OWEtNmIxZDQ3N2UtNmY3NWI4YjItMzQxNDcxZWM=, ActorId: [2:7489508653230702131:2546], ActorState: ExecuteState, TraceId: 01jr0wzta7ad1neehp0n2gkaq4, Create QueryResponse for error on request, msg: >> TPersQueueTest::TestReadPartitionStatus [GOOD] >> TPersQueueTest::TxCounters >> TCmsTest::TestForceRestartModeDisconnects >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2025-04-04T17:17:32.156197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:32.156248Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:32.160017Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:32.165501Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:32.165906Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:32.166201Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:32.176141Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:32.213942Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:32.214153Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:32.215708Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:32.215792Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:32.215867Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:32.216226Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInit::Complete 2025-04-04T17:17:32.216317Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:32.216365Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:196:2153] in generation 2 2025-04-04T17:17:32.296478Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:32.333574Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:32.333723Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:32.333819Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:32.333852Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:32.333882Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:32.333912Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.334078Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.334127Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.334365Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:32.334459Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:32.334612Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:32.334663Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:32.334716Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:32.334748Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:32.334776Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:32.334809Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:32.334841Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:32.334942Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.334979Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.335026Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:32.337432Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:32.337494Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:32.337566Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 
2025-04-04T17:17:32.337729Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:32.337770Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:32.337812Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:32.337851Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:32.337886Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:32.337931Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:32.337969Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:32.338266Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:32.338322Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:32.338353Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:32.338383Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:32.338430Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:32.338459Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:32.338502Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:32.338536Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:32.338558Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:32.350959Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:32.351024Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:32.351051Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:32.351090Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:32.351155Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:32.351623Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.351668Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:32.351703Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:32.351793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:32.351812Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:32.351917Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:32.351949Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 
2025-04-04T17:17:32.351985Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:32.352022Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:32.355164Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:32.355220Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:32.355442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.355476Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:32.355519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:32.355554Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:32.355582Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:32.355635Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:32.355670Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:32.355706Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:32.355737Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:32.355785Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:32.355815Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:32.355991Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:32.356032Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:32.356062Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:32.356081Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:32.356106Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:32.356166Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:32.356190Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:32.356218Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:32.356246Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:32.356293Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:32.356320Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:32.356367Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:32.356415Z node 1 :TX_DATA ... 
2025-04-04T17:18:46.225113Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.225244Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.225277Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.225316Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.225350Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.225506Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.225536Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.225576Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.225609Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.225745Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.225776Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.225814Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.225844Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.226036Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.226068Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.226104Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.226136Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.226493Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.226534Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.226592Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.226630Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.226813Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.226846Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.226886Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.226917Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.227042Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.227073Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 
2025-04-04T17:18:46.227112Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.227143Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.227312Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.227344Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.227385Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.227414Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.227551Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.227580Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.227620Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.227653Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.227780Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.227811Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.227851Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.227884Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.228068Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.228099Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.228136Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.228168Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.228349Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.228378Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.228418Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.228449Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.228605Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.228633Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.228671Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.228700Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.228816Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:46.228842Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-04T17:18:46.228881Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:46.228911Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:46.229160Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-04T17:18:46.229203Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.229240Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-04T17:18:46.229356Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-04T17:18:46.229388Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.229415Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-04T17:18:46.229493Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-04T17:18:46.229524Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.229554Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-04T17:18:46.229634Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-04T17:18:46.229664Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.229690Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-04T17:18:46.229765Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-04T17:18:46.229800Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.229827Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-04T17:18:46.229905Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-04T17:18:46.229934Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.229964Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 
dest 9437184 consumer 9437184 txId 10 2025-04-04T17:18:46.230039Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-04T17:18:46.230068Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.230095Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-04T17:18:46.230172Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-04T17:18:46.230205Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:46.230233Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 29 29 20 26 29 30 30 30 30 23 19 30 28 29 29 19 19 28 29 29 21 28 21 28 28 21 21 - - - - - actual 29 29 20 26 29 30 30 30 30 23 19 30 28 29 29 19 19 28 29 29 21 28 21 28 28 21 21 - - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 19786, MsgBus: 27292 2025-04-04T17:18:33.808883Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508622499927215:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:33.808937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001693/r3tmp/tmp3auCmk/pdisk_1.dat 2025-04-04T17:18:34.295432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:34.312593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:34.312681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:34.314847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19786, node 1 2025-04-04T17:18:34.455807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:34.455827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:34.455834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:34.455950Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27292 TClient is connected to server localhost:27292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:35.102666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.141464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.309186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.464250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.539557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.974373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508635384830895:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.974474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.226662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.282332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.310951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.338114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.373751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.442087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.500574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508639679798703:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.500663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.500722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508639679798709:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.504037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:37.513240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508639679798711:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:37.586514Z node 1 :TX_PROXY ERROR: Actor# [1:7489508639679798763:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:38.312358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:38.809179Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508622499927215:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:38.809230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20272, MsgBus: 5153 2025-04-04T17:18:41.015967Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508658392229186:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:41.016007Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001693/r3tmp/tmpkp8JiX/pdisk_1.dat 2025-04-04T17:18:41.156420Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20272, node 2 2025-04-04T17:18:41.168765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:41.168849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:41.172350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:41.214776Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:41.214924Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:41.214934Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:41.215049Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5153 TClient is connected to server localhost:5153 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:41.615788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.634718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.690842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.826216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.885717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.954213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508666982165563:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.954320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.994369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:44.024324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:44.074850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:44.105249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:44.136592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:44.167529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:44.206138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508671277133369:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:44.206214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:44.206284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508671277133374:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:44.209119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:44.217128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508671277133376:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:44.278018Z node 2 :TX_PROXY ERROR: Actor# [2:7489508671277133429:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:45.106118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:46.018047Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508658392229186:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:46.018113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TCmsTest::ManagePermissions >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin >> TCmsTest::ActionIssuePartialPermissions >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TCmsTenatsTest::TestTenantLimit >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TCmsTest::TestLogOperationsRollback [GOOD] >> TCmsTest::RequestRestartServicesMultipleNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::Uint8Index [GOOD] Test command err: Trying to start YDB, gRPC: 20061, MsgBus: 19914 2025-04-04T17:18:32.754604Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508617888502511:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:32.755795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001695/r3tmp/tmpkrWerk/pdisk_1.dat 2025-04-04T17:18:33.136676Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20061, node 1 2025-04-04T17:18:33.187012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:33.187130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:33.189934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:33.245978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:33.246006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:33.246020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:33.246116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19914 TClient is connected to server localhost:19914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:33.842688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:33.861450Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:33.873586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.023532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.196074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.281908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.841240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508630773406019:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:35.842720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.055936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.093193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.161389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.199280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.226215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.257141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.308566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508635068373828:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.308609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508635068373833:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.308639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.312365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:36.330172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508635068373835:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:36.403689Z node 1 :TX_PROXY ERROR: Actor# [1:7489508635068373889:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:37.340556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.753352Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508617888502511:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:37.753413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:39.245547Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzkqq6kq9ga6cbpdk31rr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODczMWYzNmYtYTI1MmUxZDMtNjZiM2VhOTYtODIxMGE4OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:39.277285Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODczMWYzNmYtYTI1MmUxZDMtNjZiM2VhOTYtODIxMGE4OTg=, ActorId: [1:7489508639363341443:2488], ActorState: ExecuteState, TraceId: 01jr0wzkqq6kq9ga6cbpdk31rr, Create QueryResponse for error on request, msg: 2025-04-04T17:18:39.646600Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508647953276641:2573], TxId: 281474976710679, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0wzmnr9akg90b9r0203jqk. SessionId : ydb://session/3?node_id=1&id=ODczMWYzNmYtYTI1MmUxZDMtNjZiM2VhOTYtODIxMGE4OTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:18:39.647277Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508647953276642:2574], TxId: 281474976710679, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODczMWYzNmYtYTI1MmUxZDMtNjZiM2VhOTYtODIxMGE4OTg=. CustomerSuppliedId : . TraceId : 01jr0wzmnr9akg90b9r0203jqk. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508647953276638:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:39.647611Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODczMWYzNmYtYTI1MmUxZDMtNjZiM2VhOTYtODIxMGE4OTg=, ActorId: [1:7489508639363341443:2488], ActorState: ExecuteState, TraceId: 01jr0wzmnr9akg90b9r0203jqk, Create QueryResponse for error on request, msg: 2025-04-04T17:18:40.499886Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0wzn1c8chaxzmrpp69944m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODczMWYzNmYtYTI1MmUxZDMtNjZiM2VhOTYtODIxMGE4OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:18:40.500111Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODczMWYzNmYtYTI1MmUxZDMtNjZiM2VhOTYtODIxMGE4OTg=, ActorId: [1:7489508639363341443:2488], ActorState: ExecuteState, TraceId: 01jr0wzn1c8chaxzmrpp69944m, Create QueryResponse for error on request, msg: 2025-04-04T17:18:40.531446Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:40.561010Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:40.608921Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:41.218038Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:41.255123Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 5470, MsgBus: 22005 2025-04-04T17:18:41.954791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508660073107846:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:41.954834Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001695/r3tmp/tmpbQJtr0/pdisk_1.dat 2025-04-04T17:18:42.059175Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5470, node 2 2025-04-04T17:18:42.102155Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:42.102256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:42.107513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:42.126682Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:42.126704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:42.126714Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:42.126817Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22005 TClient is connected to server localhost:22005 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:42.572186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:42.597977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:42.673766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:42.822170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:42.899832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:45.119282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508677252978801:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:45.119354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:45.170316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.207488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.240159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.272746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.304734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.377026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.425223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508677252979316:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:45.425317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:45.425536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508677252979321:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:45.428756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:45.440558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508677252979323:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:45.520297Z node 2 :TX_PROXY ERROR: Actor# [2:7489508677252979376:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:46.400579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.558935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:46.604884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.648422Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::Notifications >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUpsert2Update [GOOD] Test command err: Trying to start YDB, gRPC: 2717, MsgBus: 23173 2025-04-04T17:18:35.476788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508630185614410:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:35.477284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00168e/r3tmp/tmpXEh4eS/pdisk_1.dat 2025-04-04T17:18:35.863429Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2717, node 1 2025-04-04T17:18:35.914364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:35.914490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:35.921059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:35.952335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:35.952354Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-04-04T17:18:35.952361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:35.952453Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23173 TClient is connected to server localhost:23173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:36.533090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.570030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.706707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.869203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.930722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:38.476194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508643070517925:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.476295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.693492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.723888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.764624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.808297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.849287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.902324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.963211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508643070518440:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.963268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.963470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508643070518445:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.968206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:38.979226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508643070518447:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:39.071366Z node 1 :TX_PROXY ERROR: Actor# [1:7489508647365485799:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:39.965697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.473875Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508630185614410:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:40.473920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:41.101513Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508655955421056:2542], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestTable/Index/indexImplTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:18:41.102411Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmU0NDQxYzctZWNmYjYzNGItN2UyOWFiNGItZWQ2Yzk4M2Y=, ActorId: [1:7489508647365486050:2487], ActorState: ExecuteState, TraceId: 01jr0wzpdw1y8tzs0za3t20qwf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:18:41.125409Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508655955421062:2545], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Required global index not found, index name: WrongView, code: 2003 2025-04-04T17:18:41.125566Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmU0NDQxYzctZWNmYjYzNGItN2UyOWFiNGItZWQ2Yzk4M2Y=, ActorId: [1:7489508647365486050:2487], ActorState: ExecuteState, TraceId: 01jr0wzpeqbe65frhsafjzx1d9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:18:41.501312Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:41.678433Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:42.361098Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:42.375393Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 15753, MsgBus: 26200 2025-04-04T17:18:43.067527Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508665877794295:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:43.067588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00168e/r3tmp/tmpfoKRtn/pdisk_1.dat 2025-04-04T17:18:43.162355Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15753, node 2 2025-04-04T17:18:43.208368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:43.208449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:43.214121Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:43.237823Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:43.237842Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:43.237849Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:43.237952Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26200 TClient is connected to server localhost:26200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:18:43.664586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.681066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.737771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.868197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.949544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:45.815059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508674467730646:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:45.815149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:45.831857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.864097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.891781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.921651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.954426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.991514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.033106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508678762698449:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.033177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.033394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508678762698454:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.037265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:46.048434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508678762698456:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:46.127319Z node 2 :TX_PROXY ERROR: Actor# [2:7489508678762698512:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:46.949840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:47.550275Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:47.995993Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:48.010784Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:48.067987Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508665877794295:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:48.068042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:48.309397Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2025-04-04T17:17:38.163905Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:38.239867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:38.239924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:38.247252Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:38.247728Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:38.248053Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:38.258213Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:38.300958Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:38.301043Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:38.302657Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:38.302725Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:38.302784Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:38.303147Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 
2025-04-04T17:17:38.303449Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:38.303546Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:38.363923Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:38.389459Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:38.389642Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:38.389753Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:38.389791Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:38.389827Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:38.389876Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:38.390037Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.390099Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.390425Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:38.390541Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:38.390677Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:38.390732Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:38.390775Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:38.390815Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:38.390851Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:38.390915Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:38.390971Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:38.391065Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.391103Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.391153Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:38.393651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:38.393703Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:38.393779Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:38.393954Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:38.394003Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:38.394055Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:38.394102Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:38.394141Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:38.394174Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:38.394220Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:38.394530Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:38.394591Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:38.394624Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:38.394657Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:38.394702Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:38.394727Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:38.394754Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:38.394785Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:38.394806Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:38.408443Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:38.408526Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:38.408586Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:38.408631Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:38.408696Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:38.409255Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.409316Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:38.409359Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:38.409512Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:38.409547Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:38.409663Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:38.409708Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:38.409749Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:38.409788Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:38.414003Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:38.414073Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:38.414287Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.414328Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:38.414383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:38.414424Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:38.414461Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:38.414504Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:38.414538Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:38.414597Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:38.414634Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:38.414681Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:38.414730Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:38.414928Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:38.414968Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:38.414997Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:38.415021Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:38.415045Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:38.415105Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:38.415132Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:38.415164Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:38.415212Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:38.415273Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:38.415309Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:38.415348Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:38.415389Z node 1 :TX_DATA ... 
312]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-04-04T17:18:48.532818Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.532847Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-04-04T17:18:48.532925Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-04-04T17:18:48.532954Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.532981Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-04-04T17:18:48.533065Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-04-04T17:18:48.533095Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.533122Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-04-04T17:18:48.533200Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-04T17:18:48.533229Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.533257Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-04T17:18:48.533336Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-04-04T17:18:48.533370Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.533398Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-04-04T17:18:48.533477Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-04-04T17:18:48.533507Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.533537Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-04-04T17:18:48.533611Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-04-04T17:18:48.533640Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.533667Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-04-04T17:18:48.533747Z node 32 :TX_DATASHARD TRACE: 
StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-04-04T17:18:48.533775Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.533803Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 resending delayed RS 2025-04-04T17:18:48.535665Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [32:345:2312], Recipient [32:233:2226]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 2 Flags# 0} 2025-04-04T17:18:48.535718Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:18:48.535755Z node 32 :TX_DATASHARD DEBUG: Receive RS at 9437184 source 9437185 dest 9437184 producer 9437185 txId 5 2025-04-04T17:18:48.535819Z node 32 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletProducer# 9437185 ReadSet.Size()# 7 Seqno# 2 Flags# 0} 2025-04-04T17:18:48.535867Z node 32 :TX_DATASHARD TRACE: Filled readset for [1000004:5] from=9437185 to=9437184origin=9437185 2025-04-04T17:18:48.536008Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [32:233:2226], Recipient [32:233:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:48.536042Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:48.536082Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:48.536118Z node 32 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:18:48.536153Z node 32 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:5] at 9437184 for LoadAndWaitInRS 2025-04-04T17:18:48.536184Z node 32 :TX_DATASHARD TRACE: Trying to execute [1000004:5] at 9437184 on unit LoadAndWaitInRS 2025-04-04T17:18:48.536218Z node 32 :TX_DATASHARD TRACE: Execution status for [1000004:5] at 9437184 is Executed 2025-04-04T17:18:48.536250Z node 32 :TX_DATASHARD TRACE: Advance execution plan for [1000004:5] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:18:48.536281Z node 32 :TX_DATASHARD TRACE: Add [1000004:5] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:18:48.536314Z node 32 :TX_DATASHARD TRACE: Trying to execute [1000004:5] at 9437184 on unit ExecuteDataTx 2025-04-04T17:18:48.537301Z node 32 :TX_DATASHARD TRACE: Executed operation [1000004:5] at tablet 9437184 with status COMPLETE 2025-04-04T17:18:48.537365Z node 32 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:5] at 9437184: {NSelectRow: 0, NSelectRange: 2, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 1, SelectRangeBytes: 8, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 2, InvisibleRowSkips: 26} 2025-04-04T17:18:48.537419Z node 32 :TX_DATASHARD TRACE: Execution status for [1000004:5] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:48.537449Z node 32 :TX_DATASHARD TRACE: Advance execution plan for [1000004:5] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:18:48.537482Z node 32 :TX_DATASHARD TRACE: Add [1000004:5] at 9437184 to execution unit CompleteOperation 
2025-04-04T17:18:48.537514Z node 32 :TX_DATASHARD TRACE: Trying to execute [1000004:5] at 9437184 on unit CompleteOperation 2025-04-04T17:18:48.537746Z node 32 :TX_DATASHARD TRACE: Execution status for [1000004:5] at 9437184 is DelayComplete 2025-04-04T17:18:48.537782Z node 32 :TX_DATASHARD TRACE: Advance execution plan for [1000004:5] at 9437184 executing on unit CompleteOperation 2025-04-04T17:18:48.537816Z node 32 :TX_DATASHARD TRACE: Add [1000004:5] at 9437184 to execution unit CompletedOperations 2025-04-04T17:18:48.537849Z node 32 :TX_DATASHARD TRACE: Trying to execute [1000004:5] at 9437184 on unit CompletedOperations 2025-04-04T17:18:48.537884Z node 32 :TX_DATASHARD TRACE: Execution status for [1000004:5] at 9437184 is Executed 2025-04-04T17:18:48.537911Z node 32 :TX_DATASHARD TRACE: Advance execution plan for [1000004:5] at 9437184 executing on unit CompletedOperations 2025-04-04T17:18:48.537939Z node 32 :TX_DATASHARD TRACE: Execution plan for [1000004:5] at 9437184 has finished 2025-04-04T17:18:48.537972Z node 32 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:48.538002Z node 32 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:48.538033Z node 32 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:48.538060Z node 32 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:48.550452Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:48.550510Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-04T17:18:48.550591Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 1 ms, propose latency: 2 ms 2025-04-04T17:18:48.550664Z node 32 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-04T17:18:48.550709Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:48.550947Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:48.550979Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-04T17:18:48.551025Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:48.551060Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:48.551092Z node 32 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-04-04T17:18:48.551220Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:48.551252Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-04-04T17:18:48.551298Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 1 ms, propose latency: 2 ms 2025-04-04T17:18:48.551350Z node 32 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-04T17:18:48.551381Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2025-04-04T17:18:48.551632Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-04T17:18:48.551675Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.551715Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-04T17:18:48.551800Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-04T17:18:48.551831Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:48.551861Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 31 26 31 31 31 31 28 31 30 29 26 26 31 29 29 28 18 15 14 13 29 13 29 - - 15 15 12 14 - - - actual 31 26 31 31 31 31 28 31 30 29 26 26 31 29 29 28 18 15 14 13 29 13 29 - - 15 15 12 14 - - - interm 28 26 25 24 30 27 28 26 30 29 26 26 25 29 29 28 18 15 14 13 29 13 29 - - 15 15 12 14 - - - >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> KqpIndexes::SecondaryIndexOrderBy2 [GOOD] >> KqpIndexes::SecondaryIndexInsert1 >> TSubscriberTest::InvalidNotification >> TSubscriberCombinationsTest::MigratedPathRecreation >> TSubscriberTest::ReconnectOnFailure >> TSubscriberTest::StrongNotificationAfterCommit >> TSubscriberTest::Sync >> TSubscriberCombinationsTest::CombinationsRootDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2025-04-04T17:18:46.707222Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-04-04T17:18:46.707671Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-04T17:18:46.737889Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-04T17:18:46.738040Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-04-04T17:18:46.739813Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# 
NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: 
"5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120029000 } } 2025-04-04T17:18:46.741461Z node 10 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 12 
InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120029000 } 2025-04-04T17:18:46.741734Z node 10 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-04-04T17:18:46.741788Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T17:18:46.741931Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-04T17:18:46.741982Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-04T17:18:46.742010Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-04T17:18:46.742044Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-04T17:18:46.742080Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-04T17:18:46.742112Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-04T17:18:46.742140Z node 10 :CMS 
DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-04T17:18:46.742169Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:83883506 ... 08Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T17:18:47.033213Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-04-04T17:18:47.033455Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-04T17:18:47.033513Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-04T17:18:47.033541Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-04T17:18:47.033568Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-04T17:18:47.033607Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-04T17:18:47.033643Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-04T17:18:47.033676Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-04T17:18:47.033707Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-04T17:18:47.034102Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.034766Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.034862Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.034931Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.034990Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.035048Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.035109Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.035169Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240029 2025-04-04T17:18:47.035245Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-04T17:18:47.035511Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2025-04-04T17:18:47.035591Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-04T17:18:47.035818Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-04T17:18:47.036056Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-04-04T17:18:47.036114Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-04-04T17:18:47.048927Z node 10 :CMS DEBUG: TTxLogAndSend Complete 2025-04-04T17:18:47.064920Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-04T17:18:47.065019Z node 10 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-04T17:18:47.065084Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:04:00Z 2025-04-04T17:18:47.066135Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T17:18:47.066272Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-04-04T17:18:47.066328Z node 10 :CMS DEBUG: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2025-04-04T17:18:47.066456Z node 10 :CMS DEBUG: TTxStorePermissions Execute 2025-04-04T17:18:47.066623Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T17:18:47.078938Z node 10 :CMS DEBUG: TTxStorePermissions complete 2025-04-04T17:18:47.079163Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2025-04-04T17:18:47.079702Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-04-04T17:18:47.094270Z node 10 :CMS DEBUG: TTxUpdateConfig Complete 2025-04-04T17:18:47.094540Z node 10 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { 
DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-04-04T17:18:47.160875Z node 10 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-04T17:18:47.160946Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T17:18:47.161048Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-04-04T17:18:47.161314Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-04-04T17:18:47.161375Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-04-04T17:18:47.161403Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-04-04T17:18:47.161445Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-04-04T17:18:47.161473Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-04-04T17:18:47.161501Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-04-04T17:18:47.161528Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-04-04T17:18:47.161557Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-04-04T17:18:47.161956Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.162597Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.162701Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.162762Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.162830Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.162912Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.162987Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.163043Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300029 2025-04-04T17:18:47.163104Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-04T17:18:47.163339Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-04T17:18:47.163404Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-04-04T17:18:47.163603Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-04-04T17:18:47.163830Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-04-04T17:18:47.163877Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 >> TSubscriberTest::NotifyUpdate >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> TSubscriberTest::SyncWithOutdatedReplica >> TSubscriberTest::InvalidNotification [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot >> TSubscriberTest::ReconnectOnFailure [GOOD] >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::Sync [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TSubscriberTest::NotifyUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2025-04-04T17:18:51.221506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.222928Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-04T17:18:51.222995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-04-04T17:18:51.223022Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-04-04T17:18:51.223055Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-04-04T17:18:51.223111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 
1] Version: 1 }: sender# [1:37:2066] 2025-04-04T17:18:51.223143Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.224285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-04-04T17:18:51.224347Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.224487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-04T17:18:51.224583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-04T17:18:51.224634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-04T17:18:51.224679Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-04T17:18:51.224725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-04T17:18:51.224755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-04T17:18:51.224783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-04T17:18:51.224820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-04T17:18:51.224862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-04T17:18:51.224899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-04T17:18:51.224930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-04T17:18:51.224974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-04T17:18:51.224996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> TCmsTest::RequestRestartServicesReject [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >>
TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-04-04T17:18:51.216518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.218851Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:51.218953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.219005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:51.219067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-04T17:18:51.220665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-04T17:18:51.220728Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.220775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-04T17:18:51.220810Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.221196Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] 2025-04-04T17:18:51.221239Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:34:2065][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-04-04T17:18:51.215590Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.222322Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:51.222454Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.222539Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:51.222677Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-04T17:18:51.222740Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-04T17:18:51.222781Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Set up state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 
Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.222819Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-04T17:18:51.222853Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.228028Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-04T17:18:51.228132Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.228190Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-04T17:18:51.228220Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.228259Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-04T17:18:51.228287Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.239637Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:51.239818Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-04-04T17:18:51.239975Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.240127Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:46:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.240239Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-04-04T17:18:51.240285Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.240385Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:47:2065][path] Handle NKikimrSchemeBoard.TEvNotify { 
Path: path Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:51.240441Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-04-04T17:18:51.240473Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.241049Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-04T17:18:51.241164Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:35:2065] 2025-04-04T17:18:51.241220Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Update to strong state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TCmsTest::RequestRestartServicesPartial >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-04-04T17:18:51.215640Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.219359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:51.219489Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.219546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:51.219610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-04T17:18:51.220665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-04T17:18:51.220738Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.220788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-04T17:18:51.220824Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.221250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 
2025-04-04T17:18:51.221337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-04T17:18:51.221398Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.221528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.221577Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-04T17:18:51.221620Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::Boot [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2025-04-04T17:18:51.484055Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.485699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:51.485782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.485827Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:51.485884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-04T17:18:51.485941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-04T17:18:51.485989Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.486030Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-04T17:18:51.486059Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.486545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# 
[1:3:2050] 2025-04-04T17:18:51.486658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2025-04-04T17:18:51.486761Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-04-04T17:18:51.608472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.610230Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2025-04-04T17:18:51.610353Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2025-04-04T17:18:51.610380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2025-04-04T17:18:51.610445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:36:2066] 2025-04-04T17:18:51.610476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:37:2066] 2025-04-04T17:18:51.610507Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.610629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:38:2066] 2025-04-04T17:18:51.610671Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.610776Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-04T17:18:51.610839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-04-04T17:18:51.610878Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-04-04T17:18:51.610908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-04-04T17:18:51.610940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-04-04T17:18:51.610967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-04T17:18:51.610983Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-04T17:18:51.611009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-04-04T17:18:51.611040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-04-04T17:18:51.611068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-04-04T17:18:51.611092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-04-04T17:18:51.611127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-04-04T17:18:51.611142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> KqpIndexes::VectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] >> KqpIndexes::VectorIndexOrderByCosineSimilarityNullableLevel2 >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2025-04-04T17:18:51.217229Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-04T17:18:51.217289Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-04T17:18:51.217446Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-04T17:18:51.217490Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-04-04T17:18:51.217541Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-04T17:18:51.217568Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-04T17:18:51.217739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-04-04T17:18:51.217763Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900,
generation# 1 2025-04-04T17:18:51.217846Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.218281Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2068] 2025-04-04T17:18:51.218317Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2025-04-04T17:18:51.220713Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.220901Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:42:2068] 2025-04-04T17:18:51.220920Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2025-04-04T17:18:51.220951Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.221039Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:43:2068] 2025-04-04T17:18:51.221060Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2025-04-04T17:18:51.221140Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.221180Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:51.221217Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-04-04T17:18:51.221260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.221318Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-04-04T17:18:51.221361Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:51.221388Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-04-04T17:18:51.221448Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2068] 2025-04-04T17:18:51.221501Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:39:2068] 2025-04-04T17:18:51.221529Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.221565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: 
sender# [1:40:2068] 2025-04-04T17:18:51.221586Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/db/dir_inside] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-04-04T17:18:51.221729Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 118 2025-04-04T17:18:51.221753Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-04-04T17:18:51.225993Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-04T17:18:51.226189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2025-04-04T17:18:51.227242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-04-04T17:18:51.227304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:38:2068] 2025-04-04T17:18:51.227345Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2025-04-04T17:18:51.227505Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:35:2066], cookie# 0, event size# 117 2025-04-04T17:18:51.227529Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-04-04T17:18:51.227565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-04T17:18:51.227658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2025-04-04T17:18:51.227694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:42:2068] 
2025-04-04T17:18:51.227732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:39:2068] 2025-04-04T17:18:51.227771Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Path was updated to new version: owner# [1:36:2067], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.683173Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.683872Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-04-04T17:18:51.683957Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-04-04T17:18:51.684003Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-04-04T17:18:51.684064Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2065] 2025-04-04T17:18:51.684118Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:36:2065] 2025-04-04T17:18:51.684162Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:34:2065][path] Set up state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.684204Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2065] 2025-04-04T17:18:51.684260Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:34:2065][path] Ignore empty state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::SyncPartial >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplaceDevices |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TSubscriberTest::SyncPartial [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 
2025-04-04T17:18:47.033588Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-04T17:18:47.176129Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-04T17:18:47.192000Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-04T17:18:47.238837Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-04-04T17:18:51.286185Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-04T17:18:51.286279Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-04T17:18:51.286305Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-04T17:18:51.286326Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-04T17:18:51.286348Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-04T17:18:51.286368Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-04T17:18:51.286390Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-04T17:18:51.286410Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-04-04T17:17:37.647424Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:37.727197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:37.727251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:37.735859Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:37.736309Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:37.736549Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:37.745591Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:37.782079Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:37.782160Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:37.783604Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:37.783668Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:37.783717Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:37.784054Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:37.784295Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:37.784356Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:37.851711Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:37.878210Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:37.878357Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:37.878427Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:37.878452Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:37.878473Z node 
1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:37.878496Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:37.878632Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.878677Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.878886Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:37.878943Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:37.879015Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:37.879049Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:37.879078Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:37.879106Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:37.879146Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:37.879177Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:37.879207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:37.879269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:37.879291Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:37.879342Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:37.881693Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:37.881742Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:37.881823Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:37.882008Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:37.882050Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:37.882090Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:37.882131Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:37.882160Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:37.882191Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:37.882252Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:37.882551Z node 1 :TX_DATASHARD TRACE: Execution status for 
[0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:37.882605Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:37.882636Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:37.882665Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:37.882711Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:37.882742Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:37.882800Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:37.882838Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:37.882866Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:37.895015Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:37.895102Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:37.895139Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:37.895173Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:37.895229Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:37.895716Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:37.895767Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:37.895808Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:37.895945Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:37.895975Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:37.896100Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:37.896139Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:37.896173Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:37.896204Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:37.899592Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:37.899648Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:37.899831Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.899863Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:37.899902Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:37.899939Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:37.899970Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:37.900005Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:37.900036Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:37.900071Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:37.900103Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:37.900146Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:37.900194Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:37.900389Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:37.900422Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:37.900442Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:37.900462Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:37.900480Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:37.900532Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:37.900557Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:37.900585Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:37.900617Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:37.900672Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:37.900703Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:37.900730Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:37.900765Z node 1 :TX_DATA ... 
1 ms 2025-04-04T17:18:51.890157Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.890320Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.890345Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.890378Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.890402Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.890526Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.890555Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.890609Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.890644Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.890881Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.890912Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.890946Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.890972Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.891128Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.891159Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.891193Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.891217Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.891340Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.891367Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.891397Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.891419Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.891576Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.891609Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.891641Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.891665Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.891802Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.891823Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit 
CompleteOperation 2025-04-04T17:18:51.891853Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.891877Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.891979Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.892000Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.892039Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.892070Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.892200Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.892223Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.892256Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.892281Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.892403Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.892427Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.892456Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.892479Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.892584Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.892609Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.892647Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.892673Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.892775Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.892798Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.892830Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.892859Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.892999Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.893024Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.893060Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.893089Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
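[Editor's sketch] The block above is the tablet draining its completed-operation queue: for each planned tx [1000004:N] it runs the CompleteOperation unit, replies to the client with the measured exec/propose latencies, and emits CDC heartbeats. A rough, self-contained sketch of that drain loop — illustrative structure only, not YDB's TDataShard code:

    #include <cstdint>
    #include <deque>
    #include <iostream>

    struct TOp {
        uint64_t Step;  // plan step, e.g. 1000004
        uint64_t TxId;  // tx id within the step
    };

    struct TShard {
        std::deque<TOp> Completed; // ops that reached the CompleteOperation unit

        void DrainCompleted() {
            while (!Completed.empty()) {
                TOp op = Completed.front();
                Completed.pop_front();
                // In the real trace this is "send result to client ...
                // exec latency ... propose latency"; here we only log ids.
                std::cout << "Complete [" << op.Step << " : " << op.TxId
                          << "] send result to client\n";
                EmitCdcHeartbeats();
            }
        }

        void EmitCdcHeartbeats() {
            // Placeholder for "[CdcStreamHeartbeat] Emit heartbeats".
        }
    };

    int main() {
        TShard shard;
        shard.Completed = {{1000004, 24}, {1000004, 25}};
        shard.DrainCompleted();
    }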
2025-04-04T17:18:51.893236Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:51.893261Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-04T17:18:51.893290Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:51.893315Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:51.893566Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-04T17:18:51.893615Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.893654Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-04T17:18:51.893757Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-04T17:18:51.893782Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.893808Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-04T17:18:51.893889Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-04T17:18:51.893913Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.893932Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-04T17:18:51.893972Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-04T17:18:51.893998Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.894026Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-04T17:18:51.894109Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-04T17:18:51.894134Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.894155Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-04T17:18:51.894217Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-04T17:18:51.894263Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.894295Z node 32 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-04T17:18:51.894389Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-04T17:18:51.894418Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.894439Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-04T17:18:51.894498Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-04T17:18:51.894521Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:51.894544Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 29 29 31 31 31 31 31 31 19 27 20 24 21 31 17 9 18 13 24 17 20 4 5 20 - 20 17 17 - - - - actual 29 29 31 31 31 31 31 31 19 27 20 24 21 31 17 9 18 13 24 17 20 4 5 20 - 20 17 17 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2025-04-04T17:18:52.759773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:52.761816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:52.761927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:52.762026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:52.762087Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-04T17:18:52.762165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-04T17:18:52.762215Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:52.762282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-04T17:18:52.762320Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:52.762570Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-04-04T17:18:52.762723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] 
Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2025-04-04T17:18:52.762802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2025-04-04T17:18:52.762849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 1 2025-04-04T17:18:52.762959Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-04-04T17:18:52.762995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-04-04T17:18:52.763049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 1 2025-04-04T17:18:52.763085Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2025-04-04T17:18:52.763127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-04-04T17:18:52.763176Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:52.763209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:36:2065], cookie# 1 2025-04-04T17:18:52.763233Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2025-04-04T17:18:52.763258Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 1 2025-04-04T17:18:52.763304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2025-04-04T17:18:52.763407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 2 2025-04-04T17:18:52.763493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 2 2025-04-04T17:18:52.763519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2025-04-04T17:18:52.763561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 2 2025-04-04T17:18:52.763615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 2 2025-04-04T17:18:52.763667Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2025-04-04T17:18:52.763724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 2 2025-04-04T17:18:52.763772Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-04T17:18:52.763833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-04-04T17:18:52.763870Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:52.763909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 2 2025-04-04T17:18:52.763933Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 2 2025-04-04T17:18:52.764017Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 3 2025-04-04T17:18:52.764109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 3 2025-04-04T17:18:52.764135Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2025-04-04T17:18:52.764164Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 3 2025-04-04T17:18:52.764187Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-04-04T17:18:52.764227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 3 2025-04-04T17:18:52.764307Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:37:2065], cookie# 3 2025-04-04T17:18:52.764329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 3 2025-04-04T17:18:52.764374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-04-04T17:18:52.764406Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> TSubscriberTest::NotifyDelete >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TCmsTenatsTest::RequestRestartServices [GOOD] >> 
TSubscriberTest::NotifyDelete [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2025-04-04T17:18:54.398290Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:54.400436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-04-04T17:18:54.400539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-04-04T17:18:54.400576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-04-04T17:18:54.400635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-04-04T17:18:54.400723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-04-04T17:18:54.400772Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:54.400901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-04-04T17:18:54.400970Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:54.401338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2025-04-04T17:18:54.401407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 
18446744073709551615 }: sender# [1:6:2053] 2025-04-04T17:18:54.401466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2025-04-04T17:18:54.401528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2066] 2025-04-04T17:18:54.401595Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Path was updated to new version: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:54.401655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:37:2066] 2025-04-04T17:18:54.401701Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:54.401749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2066] 2025-04-04T17:18:54.401804Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD] >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> TCmsTest::PriorityRange [GOOD] >> TCmsTest::RequestRestartServicesNoUser [GOOD] |91.9%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8254, MsgBus: 20604 2025-04-04T17:18:24.105337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508586992998066:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:24.108483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b9/r3tmp/tmpcMl3k8/pdisk_1.dat 2025-04-04T17:18:24.501417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:24.553957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:24.554040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8254, node 1 2025-04-04T17:18:24.557023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:24.618695Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:24.618744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:24.618768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:24.618865Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20604 TClient is connected to server localhost:20604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:25.138407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.158420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.304412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
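[Editor's sketch] The KQP_WORKLOAD_SERVICE warnings below ("Resource pool default not found", "Scheduled retry for error", "path exist, request accepts it") show the usual lazy bootstrap of the default resource pool: the first fetch legitimately fails with NOT_FOUND, a creator actor retries, and a concurrent creation by another session is accepted as success. A self-contained sketch of that pattern — every name and return code here is invented for illustration, none of it is YDB's real API:

    #include <iostream>
    #include <string>

    enum class EFetch { Ok, NotFound };
    enum class ECreate { Ok, AlreadyExists, Retry };

    // Stubs standing in for the real catalog calls (assumed, not YDB's API).
    static EFetch FetchPool(const std::string&) { return EFetch::NotFound; }
    static ECreate CreatePool(const std::string&) { return ECreate::AlreadyExists; }

    static void EnsureDefaultPool() {
        if (FetchPool("default") == EFetch::Ok)
            return; // already provisioned, nothing to do
        for (int attempt = 1; ; ++attempt) {
            switch (CreatePool("default")) {
            case ECreate::Ok:
                std::cout << "created default pool\n";
                return;
            case ECreate::AlreadyExists:
                // Another session won the race ("path exist, request accepts it").
                std::cout << "pool created concurrently, accepting\n";
                return;
            case ECreate::Retry:
                // Transient failure: schedule another attempt with backoff.
                std::cout << "scheduling retry #" << attempt << "\n";
                break;
            }
        }
    }

    int main() { EnsureDefaultPool(); }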
2025-04-04T17:18:25.456864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.532005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:27.078463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508599877901639:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.078613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.315314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.349831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.380906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.420445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.449021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.487960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.541574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508599877902154:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.541639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.541758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508599877902159:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.544883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:27.560965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508599877902161:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:27.617223Z node 1 :TX_PROXY ERROR: Actor# [1:7489508599877902215:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:28.380635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:29.106409Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508586992998066:2136];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:29.106459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:35.590985Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508634237644665:2833], TxId: 281474976710719, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Y2UyZWY4YmQtNDIzN2ZlOGEtZjMwZTEyM2MtNzgzMzJhYjk=. TraceId : 01jr0wzgs99deb9gj32bzx1tcx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:18:35.591459Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508634237644666:2834], TxId: 281474976710719, task: 2. Ctx: { TraceId : 01jr0wzgs99deb9gj32bzx1tcx. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2UyZWY4YmQtNDIzN2ZlOGEtZjMwZTEyM2MtNzgzMzJhYjk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489508634237644662:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:35.591837Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2UyZWY4YmQtNDIzN2ZlOGEtZjMwZTEyM2MtNzgzMzJhYjk=, ActorId: [1:7489508604172869802:2496], ActorState: ExecuteState, TraceId: 01jr0wzgs99deb9gj32bzx1tcx, Create QueryResponse for error on request, msg: 2025-04-04T17:18:38.693226Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-04T17:18:38.693259Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-04T17:18:38.693271Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-04T17:18:38.693285Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-04T17:18:38.697532Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-04T17:18:38.737105Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-04T17:18:38.737141Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-04T17:18:38.737158Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found Trying to start YDB, gRPC: 31061, MsgBus: 18653 2025-04-04T17:18:40.016728Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508655311672195:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:40.016769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b9/r3tmp/tmpwKr7Lr/pdisk_1.dat 2025-04-04T17:18:40.162619Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:40.192906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:40.193013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:40.194206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31061, node 2 2025-04-04T17:18:40.286877Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:40.286903Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:40.286911Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:40.287065Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18653 TClient is connected to server localhost:18653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:40.777647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:40.788364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:40.875602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.051614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.129361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.373348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508668196575860:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.373418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.416405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.441006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.470920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.505363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.550631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.595155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.682346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508668196576378:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.682404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.682458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508668196576383:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.686348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:43.697398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508668196576385:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:43.782085Z node 2 :TX_PROXY ERROR: Actor# [2:7489508668196576439:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:44.681752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:45.017000Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508655311672195:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:45.017057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:51.589378Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508702556318819:2825], TxId: 281474976715731, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0x00c7cmty29ytvhx1qe4h. SessionId : ydb://session/3?node_id=2&id=ZWIyODU1N2YtZTY5ZWIzYmUtZjc0NjFiZS0yMjUzMDhm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:18:51.589603Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489508702556318820:2826], TxId: 281474976715731, task: 2. Ctx: { TraceId : 01jr0x00c7cmty29ytvhx1qe4h. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZWIyODU1N2YtZTY5ZWIzYmUtZjc0NjFiZS0yMjUzMDhm. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489508702556318816:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:51.589900Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWIyODU1N2YtZTY5ZWIzYmUtZjc0NjFiZS0yMjUzMDhm, ActorId: [2:7489508672491543991:2488], ActorState: ExecuteState, TraceId: 01jr0x00c7cmty29ytvhx1qe4h, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2025-04-04T17:18:52.395930Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-04T17:18:52.396000Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-04T17:18:52.396024Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-04T17:18:52.396045Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-04T17:18:52.396066Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-04T17:18:52.396086Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-04T17:18:52.396108Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-04T17:18:52.396130Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-04-04T17:18:52.402291Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-04T17:18:52.402353Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-04T17:18:52.402375Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-04T17:18:52.402395Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-04T17:18:52.402414Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-04T17:18:52.402435Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-04T17:18:52.402463Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-04T17:18:52.402492Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-04-04T17:18:52.445186Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-04-04T17:18:52.445264Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-04-04T17:18:52.445298Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-04-04T17:18:52.445320Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-04-04T17:18:52.445339Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-04-04T17:18:52.445359Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-04-04T17:18:52.445378Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-04-04T17:18:52.445399Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> TCmsTest::Mirror3dcPermissions [GOOD] >> KqpIndexes::SecondaryIndexInsert1 [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2025-04-04T17:17:46.547535Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:46.617766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:46.617810Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:46.624052Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 
2025-04-04T17:17:46.624497Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:46.624809Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:46.633989Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:46.661908Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:46.662016Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:46.663722Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:46.663786Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:46.663836Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:46.664360Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:46.664637Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:46.664743Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:46.727546Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:46.760971Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:46.761113Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:46.761198Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:46.761230Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:46.761257Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:46.761288Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:46.761417Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.761463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.761720Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:46.761796Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:46.761890Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:46.761929Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:46.761963Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:46.761994Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:46.762022Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:46.762062Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:46.762097Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:46.762211Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.762257Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.762304Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:46.764643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:46.764708Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:46.764769Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:46.764918Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:46.764968Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:46.765010Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:46.765050Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:46.765081Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:46.765109Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:46.765137Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:46.765399Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:46.765437Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:46.765465Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:46.765494Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:46.765533Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:46.765561Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:46.765592Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:46.765617Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:46.765649Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:46.777321Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:46.777397Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:46.777431Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:46.777467Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:46.777515Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:46.777971Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender 
[1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.778020Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:46.778058Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:46.778194Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:46.778221Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:46.778337Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:46.778374Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:46.778408Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:46.778439Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:46.797146Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:46.797203Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:46.797352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.797377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:46.797408Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:46.797434Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:46.797460Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:46.797488Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:46.797511Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:46.797539Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:46.797560Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:46.797613Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:46.797665Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:46.797789Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:46.797813Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:46.797837Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:46.797853Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:46.797867Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:46.797902Z node 1 :TX_DATASHARD TRACE: Execution 
status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:46.797918Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:46.797938Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:46.797962Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:46.797999Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:46.798023Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:46.798047Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:46.798072Z node 1 :TX_DATA ... propose latency: 1 ms 2025-04-04T17:18:55.510851Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.511020Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.511050Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.511089Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.511120Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.511275Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.511304Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.511342Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.511373Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.511500Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.511530Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.511569Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.511597Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.511712Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.511741Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.511782Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.511814Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.511997Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.512026Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.512064Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 
2025-04-04T17:18:55.512099Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.512230Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.512259Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.512298Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.512329Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.512521Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.512551Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.512590Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.512620Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.512775Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.512805Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.512843Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.512872Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.513018Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.513048Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.513086Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.513115Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.513274Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.513305Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.513346Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.513375Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.513525Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.513553Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.513590Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.513621Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.513789Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.513818Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 
2025-04-04T17:18:55.513854Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.513884Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.514084Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:55.514117Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-04-04T17:18:55.514155Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:18:55.514185Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:55.514431Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-04T17:18:55.514475Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.514513Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-04-04T17:18:55.514694Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-04-04T17:18:55.514725Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.514756Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-04-04T17:18:55.514816Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-04-04T17:18:55.514846Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.514875Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-04-04T17:18:55.514954Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-04-04T17:18:55.514984Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.515012Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-04-04T17:18:55.515085Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-04T17:18:55.515116Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.515144Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-04-04T17:18:55.515222Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient 
[32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-04T17:18:55.515250Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.515277Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-04-04T17:18:55.515354Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-04T17:18:55.515384Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.515412Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-04-04T17:18:55.515489Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-04T17:18:55.515519Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.515546Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-04-04T17:18:55.515618Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:233:2226], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-04T17:18:55.515649Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:18:55.515674Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 31 29 31 31 18 15 31 29 29 31 26 27 30 29 26 28 25 31 30 31 25 2 31 2 31 - - - 31 - - - actual 31 29 31 31 18 15 31 29 29 31 26 27 30 29 26 28 25 31 30 31 25 2 31 2 31 - - - 31 - - - interm 2 6 - 4 6 4 5 - 3 4 - 3 1 0 - 2 2 3 - - 1 2 2 2 - - - - - - - - |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> StreamCreator::WithResolvedTimestamps >> TCmsTest::AllVDisksEvictionInRack [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] >> KqpIndexes::UpdateOnReadColumns [GOOD] >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2025-04-04T17:18:55.202319Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-04T17:18:55.202404Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-04T17:18:55.202505Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 
2025-04-04T17:18:55.203784Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices 
{ Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120030000 } } 2025-04-04T17:18:55.204298Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" 
State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120030000 } 2025-04-04T17:18:55.204445Z node 25 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-04-04T17:18:55.204482Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T17:18:55.204618Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-04-04T17:18:55.204673Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-04T17:18:55.204718Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed) 2025-04-04T17:18:55.204829Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-04T17:18:55.204984Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T17:18:55.205025Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY 2025-04-04T17:18:55.205208Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-04-04T17:18:55.205244Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# 
[26:8388350642965737326:1634689637] 2025-04-04T17:18:55.205267Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2025-04-04T17:18:55.205289Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-04-04T17:18:55.205312Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-04T17:18:55.205331Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-04T17:18:55.205353Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-04T17:18:55.205376Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-04T17:18:55.209716Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... pdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-04-04T17:18:55.412370Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-04-04T17:18:55.412411Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-04-04T17:18:55.412440Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-04-04T17:18:55.412468Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-04-04T17:18:55.412711Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413331Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413523Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413640Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413703Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413772Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 
ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413838Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413897Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2025-04-04T17:18:55.413949Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-04T17:18:55.414156Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-04T17:18:55.414218Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-04-04T17:18:55.414294Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-04-04T17:18:55.414429Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-04T17:18:55.414657Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-04-04T17:18:55.414785Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-04-04T17:18:55.414847Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2025-04-04T17:18:55.414881Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2025-04-04T17:18:55.427781Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-04T17:18:55.427871Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-04-04T17:18:55.443930Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-04T17:18:55.444045Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-04T17:18:55.444114Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-04T17:18:55.444968Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T17:18:55.445074Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2025-04-04T17:18:55.445139Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-04-04T17:18:55.445195Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2025-04-04T17:18:55.445224Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2025-04-04T17:18:55.445245Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2025-04-04T17:18:55.445276Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-04T17:18:55.445427Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-04-04T17:18:55.445507Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 
1970-01-01T00:13:00Z) 2025-04-04T17:18:55.445588Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-04T17:18:55.445758Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.130000Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-04-04T17:18:55.445873Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T17:18:55.458122Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-04-04T17:18:55.458465Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-04-04T17:18:55.458530Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.130000Z 2025-04-04T17:18:55.474890Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-04-04T17:18:55.475242Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-04-04T17:18:55.475318Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-04-04T17:18:55.475379Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-04-04T17:18:55.476239Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T17:18:55.476337Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2025-04-04T17:18:55.476402Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-04-04T17:18:55.476450Z node 25 :CMS DEBUG: Result: ALLOW 2025-04-04T17:18:55.476598Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-04-04T17:18:55.476668Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-04-04T17:18:55.476749Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-04-04T17:18:55.476901Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.231512Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-04-04T17:18:55.477037Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-04-04T17:18:55.489431Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-04-04T17:18:55.489712Z node 25 :CMS NOTICE: [AuditLog] [CMS 
tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780231512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-04-04T17:18:55.490301Z node 25 :CMS INFO: User user is done with permissions user-p-1 2025-04-04T17:18:55.490355Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-04T17:18:55.490420Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-04T17:18:55.490505Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2025-04-04T17:18:55.490624Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2025-04-04T17:18:55.490681Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-04-04T17:18:55.503082Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-04T17:18:55.503301Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-04-04T17:18:55.503876Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-04-04T17:18:55.503936Z node 25 :CMS DEBUG: Resulting status: OK 2025-04-04T17:18:55.503995Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-04-04T17:18:55.504072Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-04-04T17:18:55.504180Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-04-04T17:18:55.504239Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-04-04T17:18:55.516701Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-04-04T17:18:55.516898Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 10753, MsgBus: 7949 2025-04-04T17:18:32.688116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508621516689861:2226];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:32.688622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001698/r3tmp/tmpI6PDNo/pdisk_1.dat 2025-04-04T17:18:33.132348Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10753, node 1 2025-04-04T17:18:33.163979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:33.164187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-04-04T17:18:33.165761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:33.247587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:33.247604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:33.247612Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:33.247732Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7949 TClient is connected to server localhost:7949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:33.839523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:33.866777Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:33.877852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.031498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.202669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.270763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.023657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508638696560637:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.023746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.291305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.328546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.360845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.430171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.467777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.515025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.566348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508638696561149:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.566435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.566788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508638696561154:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:36.570296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:36.581760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508638696561156:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:36.653991Z node 1 :TX_PROXY ERROR: Actor# [1:7489508638696561209:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:37.547271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.672772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508621516689861:2226];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:37.672842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7993, MsgBus: 13531 2025-04-04T17:18:43.401525Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508666398412293:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:43.401619Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001698/r3tmp/tmpMeGpPe/pdisk_1.dat 2025-04-04T17:18:43.607405Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:43.614540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:43.614643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:43.616013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7993, node 2 2025-04-04T17:18:43.662460Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:43.662485Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:43.662494Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:43.662633Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13531 TClient is connected to server localhost:13531 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:44.132221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:44.145329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:44.202714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:44.351344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... sues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.596409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.637538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.666224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.695038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.726411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.791420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.820997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.900072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508679283316490:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.900155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.900159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508679283316495:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.904307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:46.915820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508679283316497:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:46.988067Z node 2 :TX_PROXY ERROR: Actor# [2:7489508679283316551:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:47.795731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:48.401655Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508666398412293:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:48.401721Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23174, MsgBus: 20144 2025-04-04T17:18:51.051291Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508700955831579:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:51.051430Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001698/r3tmp/tmp7GWcJ2/pdisk_1.dat 2025-04-04T17:18:51.149409Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23174, node 3 2025-04-04T17:18:51.178175Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:51.178307Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:51.180511Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:51.199210Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:51.199234Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:51.199241Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:51.199359Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20144 TClient is connected to server localhost:20144 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:51.592750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:51.608125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:51.654309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:51.802410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:51.885313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:54.309738Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508713840735250:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:54.309828Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:54.329452Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.358691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.387198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.414705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.441664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.470174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.505655Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508713840735756:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:54.505715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508713840735761:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:54.505727Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:54.508472Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:54.516360Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508713840735763:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:54.584949Z node 3 :TX_PROXY ERROR: Actor# [3:7489508713840735817:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:55.513700Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:55.914050Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] Test command err: Trying to start YDB, gRPC: 5145, MsgBus: 10143 2025-04-04T17:18:33.990958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508624732622532:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:33.993014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001690/r3tmp/tmpiaHBfZ/pdisk_1.dat 2025-04-04T17:18:34.501089Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:34.505290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:34.506551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:34.509436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5145, node 1 2025-04-04T17:18:34.677554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:34.677579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:34.677588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:34.677705Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10143 TClient is connected to server localhost:10143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:18:35.249498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.272256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.282359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.442677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:35.606428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.677586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:37.345283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508641912493416:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.345396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.550160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.601618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.627361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.656422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.682128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.714699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:37.761324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508641912493924:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.761414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.761478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508641912493929:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:37.764626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:37.771750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508641912493931:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:37.860832Z node 1 :TX_PROXY ERROR: Actor# [1:7489508641912493986:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:38.712357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.991111Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508624732622532:2135];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:38.991199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26716, MsgBus: 16367 2025-04-04T17:18:42.442794Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508664152048260:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:42.442858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001690/r3tmp/tmpdh8O4L/pdisk_1.dat 2025-04-04T17:18:42.569245Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:42.591994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:42.592073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:42.595156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26716, node 2 2025-04-04T17:18:42.678609Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:42.678635Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:42.678642Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:42.678762Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16367 TClient is connected to server localhost:16367 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:43.096781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.102291Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:43.117950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.192630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting.. ... 
puts":[{"ExternalPlanNodeId":11}],"Predicate":"Exist(item.id)","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"E-Size":"0","LookupKeyColumns":["b","id"],"Node Type":"TableLookup","PlanNodeId":11,"Path":"\/Root\/tg","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"E-Rows":"0","Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Input":"precompute_1_0","Name":"PartitionByKey"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_1_0"}],"Table":"tg","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"TopSort-Filter"}],"Node Type":"Merge","SortColumns":["system_date (Desc)","id (Desc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Limit":"11","Name":"Top","TopBy":"[row.b,row.pa_id,row.system_date,row.id]"}],"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1}],"Limit":"11","Name":"Limit"},{"E-Rows":"0","Inputs":[{"ExternalPlanNodeId":4}],"Predicate":"NOT If AND item.status != $status_1 AND item.am != $am_1","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"Tables":["tg\/tg_index\/indexImplTable"],"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":"%kqp%tx_result_binding_0_0","Reverse":true,"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tg\/tg_index\/indexImplTable","E-Rows":"0","ReadRangesPointPrefixLen":"2","ReadRangesKeys":["b","pa_id","system_date","id"],"Table":"tg\/tg_index\/indexImplTable","ReadColumns":["am","b","id","pa_id","status","system_date","type"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Limit-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Top"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"Stage"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tg","reads":[{"lookup_by":["b","id"],"columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"type":"Lookup"}]},{"name":"\/Root\/tg\/tg_index\/indexImplTable","reads":[{"columns":["am","b","id","pa_id","status","system_date","type"],"reverse":true,"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"Limit":"1001","Name":"TopSort","TopSortBy":"[row.system_date,row.id]"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"0","Predicate":"Exist(item.id)","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"0","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"Name":"TableLookup","E-Cost":"0","E-Size":"0","LookupKeyColumns":["b","id"],"Table":"tg"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Filter"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2}}} Trying to start YDB, gRPC: 6581, MsgBus: 26334 2025-04-04T17:18:48.497229Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508690030436664:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:48.497663Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001690/r3tmp/tmpqXmTg8/pdisk_1.dat 2025-04-04T17:18:48.605188Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6581, node 3 2025-04-04T17:18:48.644828Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:48.644996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:48.651826Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:48.675551Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:48.675582Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:48.675589Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:48.675728Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26334 TClient is connected to server localhost:26334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:49.057367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.067666Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:49.091557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.163316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:49.303019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.367817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:51.787178Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508702915340337:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:51.787287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:51.829311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.860028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.888485Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.919018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.954832Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.989387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.027483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508707210308141:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.027540Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.027590Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508707210308146:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.030434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:52.037309Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508707210308148:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:52.112194Z node 3 :TX_PROXY ERROR: Actor# [3:7489508707210308201:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:52.988354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:53.497021Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508690030436664:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:53.497100Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TxUsage::WriteToTopic_Demo_4 >> TxUsage::WriteToTopic_Demo_3 >> Describe::Statistics >> TxUsage::TwoSessionOneConsumer >> TxUsage::WriteToTopic_Invalid_Session >> LocalPartition::WithoutPartitionWithRestart >> TxUsage::Sinks_Oltp_WriteToTopic_1 |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 24269, MsgBus: 4601 2025-04-04T17:18:07.244989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508513860914184:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.247076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f2/r3tmp/tmpDTY486/pdisk_1.dat 2025-04-04T17:18:07.696787Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.706971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.707081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.709888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24269, node 1 2025-04-04T17:18:07.874737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.874760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.874771Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.874862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4601 TClient is connected to server localhost:4601 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.604891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.620406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.637896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.809103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.980439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.048767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.471167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508526745817702:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.471276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.841921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.877149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.940076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.008156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.035341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.073733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.154530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508531040785516:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.154593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.154749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508531040785521:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.158066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.165996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508531040785523:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.245693Z node 1 :TX_PROXY ERROR: Actor# [1:7489508531040785579:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.172914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.244846Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508513860914184:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.244970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24439, MsgBus: 29403 2025-04-04T17:18:14.447352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508540407254124:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:14.447392Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f2/r3tmp/tmpdkvL6y/pdisk_1.dat 2025-04-04T17:18:14.570920Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:14.600829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.600921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24439, node 2 2025-04-04T17:18:14.604943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:14.662814Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.662836Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.662845Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.662956Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29403 TClient is connected to server localhost:29403 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:15.189709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.202804Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:15.216982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.290777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04 ... 4976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.772548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.815827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.871754Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489508666133271974:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.871854Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.871898Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489508666133271979:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:43.875857Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:43.889459Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489508666133271981:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:43.954863Z node 5 :TX_PROXY ERROR: Actor# [5:7489508666133272035:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:44.715593Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489508648953400504:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:44.715665Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:45.105678Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.159177Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:45.205215Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22799, MsgBus: 61931 2025-04-04T17:18:48.858511Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489508686988263292:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:48.858607Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016f2/r3tmp/tmplUDlCb/pdisk_1.dat 2025-04-04T17:18:48.986022Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:49.010553Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:49.010651Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:49.012354Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22799, node 6 2025-04-04T17:18:49.066852Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:49.066882Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:49.066915Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:49.067098Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61931 TClient is connected to server localhost:61931 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:49.593439Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.611267Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.667486Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.847803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.926415Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:52.666298Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508704168134250:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.666396Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.707069Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.742223Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.781785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.821901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.862464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.908392Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.957449Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508704168134761:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.957566Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.957829Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508704168134766:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:52.962457Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:52.975259Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489508704168134768:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:53.030718Z node 6 :TX_PROXY ERROR: Actor# [6:7489508708463102117:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:53.858908Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489508686988263292:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:53.858975Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:54.141753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.190850Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:54.277332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] >> KqpQueryService::TableSink_ReplaceDuplicatesOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29049, MsgBus: 8491 2025-04-04T17:18:31.412175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508614044773879:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:31.412207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a0/r3tmp/tmpa3DUeY/pdisk_1.dat 2025-04-04T17:18:31.805307Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:31.814009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:31.814130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:31.816314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29049, node 1 2025-04-04T17:18:31.870858Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:31.870910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:31.870922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:31.871049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8491 TClient is connected to server localhost:8491 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:32.389411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.432384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.587277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.770517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.840396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.394274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508626929677530:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.394381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.606157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.629681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.652298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.678577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.748474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.782107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.817334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508626929678047:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.817420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.817783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508626929678052:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:34.821355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:34.835265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508626929678054:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:34.921736Z node 1 :TX_PROXY ERROR: Actor# [1:7489508626929678107:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:35.921709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.955639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:35.986797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:36.414548Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508614044773879:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:36.414634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28673, MsgBus: 13387 2025-04-04T17:18:38.421329Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508644170127467:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:38.421368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a0/r3tmp/tmpmkCEQg/pdisk_1.dat 2025-04-04T17:18:38.516601Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:38.522496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:38.522591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:38.523801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28673, node 2 2025-04-04T17:18:38.574818Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:38.574842Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:38.574849Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:38.574950Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13387 TClient is connected to server localhost:13387 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:39.001789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:39.018195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:39.077696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is und ... eshard: 72057594046644480 2025-04-04T17:18:42.890843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:43.421688Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508644170127467:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:43.421739Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25418, MsgBus: 5179 2025-04-04T17:18:45.699999Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508673419257553:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:45.700051Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a0/r3tmp/tmpAci6Si/pdisk_1.dat 2025-04-04T17:18:45.825464Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25418, node 3 2025-04-04T17:18:45.864510Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:45.864596Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:45.867004Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:45.899604Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:45.899625Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:45.899634Z node 3 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-04-04T17:18:45.899762Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5179 TClient is connected to server localhost:5179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:46.355161Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:46.362225Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:46.372799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:46.430842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:46.571942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:46.648697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.000786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508686304161204:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:49.000886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:49.040482Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:49.072020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:49.102871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:49.131287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:49.160003Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:49.232677Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:49.272871Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508690599129018:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:49.272993Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:49.273202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508690599129023:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:49.276618Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:49.285279Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508690599129025:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:49.342778Z node 3 :TX_PROXY ERROR: Actor# [3:7489508690599129078:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:50.241795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:50.700297Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508673419257553:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:50.700401Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:51.214860Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:51.226860Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:51.271788Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:52.439079Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:52.451159Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:52.463364Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:53.339198Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:53.354382Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:53.393085Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:53.866779Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:53.878844Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:53.892130Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:54.734182Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:54.744987Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:54.758409Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:55.214194Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:55.226624Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:55.241820Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:55.562926Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:55.576846Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:55.591148Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.004383Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.018202Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.031777Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.236628Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 
2025-04-04T17:18:56.250175Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.263336Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.606099Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.620293Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:18:56.634605Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TPQCDTest::TestDiscoverClusters >> TPQCDTest::TestUnavailableWithoutNetClassifier |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink >> StreamCreator::WithResolvedTimestamps [GOOD] |92.0%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation >> Secret::SimpleQueryService |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-04-04T17:18:57.362869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508727931886308:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:57.362968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/0018f1/r3tmp/tmpYdQTKC/pdisk_1.dat 2025-04-04T17:18:57.766427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:57.796702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:57.797258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:57.800235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6523 TServer::EnableGrpc on GrpcPort 63100, node 1 2025-04-04T17:18:58.109115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:58.109133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:58.109139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:58.109228Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:58.653519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:58.682318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787138800 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787138716 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787138800 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-04-04T17:18:58.858155Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:18:58.858340Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:18:58.858364Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-04-04T17:18:58.858959Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-04-04T17:18:59.875099Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787138800, tx_id: 281474976710658 } } } 2025-04-04T17:18:59.875860Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-04-04T17:18:59.878043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:59.878660Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-04-04T17:18:59.878682Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-04-04T17:18:59.902103Z node 1 
:REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-04-04T17:18:59.902135Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-04-04T17:18:59.902706Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-04-04T17:18:59.995727Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7489508736521821821:2345] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T17:19:00.021883Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-04-04T17:19:00.021911Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-04-04T17:19:00.041064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:00.055423Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-04T17:19:00.055450Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787138800 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 17019, MsgBus: 13089 2025-04-04T17:18:29.475072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508607436887395:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:29.475653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a6/r3tmp/tmpatnTZ8/pdisk_1.dat 2025-04-04T17:18:29.900211Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:29.929473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 17019, node 1 2025-04-04T17:18:29.929554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:29.931464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:29.976737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:29.976760Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:29.976765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:29.976862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13089 TClient is connected to server localhost:13089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:30.538978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.550859Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:30.572189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:30.716423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.899652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.981152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.677230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508620321791049:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.677349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.949888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.986659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.019958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.053861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.096446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.143344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.192268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508624616758856:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.192333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.192624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508624616758861:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.196407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:33.209177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508624616758863:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:33.293999Z node 1 :TX_PROXY ERROR: Actor# [1:7489508624616758916:3436] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:34.273036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:34.525430Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508607436887395:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:34.525554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25452, MsgBus: 29118 2025-04-04T17:18:35.849189Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508631059319354:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:35.849247Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a6/r3tmp/tmpwPDXoD/pdisk_1.dat 2025-04-04T17:18:35.995183Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:36.009015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:36.009099Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:36.010189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25452, node 2 2025-04-04T17:18:36.060550Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:36.060573Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:36.060579Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:36.060716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29118 TClient is connected to server localhost:29118 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:36.461341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.467029Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:36.478597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:36.558466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04 ... ccess permissions } 2025-04-04T17:18:39.252704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:39.262376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508648239190831:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:39.344970Z node 2 :TX_PROXY ERROR: Actor# [2:7489508648239190883:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:40.296788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-04T17:18:40.341283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.400274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.475107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.594814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.673780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.849252Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508631059319354:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:40.849423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63654, MsgBus: 64037 2025-04-04T17:18:48.242989Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508686223149344:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:48.243049Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a6/r3tmp/tmpWkiPhS/pdisk_1.dat 2025-04-04T17:18:48.372863Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:48.386370Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:48.386473Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:48.390054Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63654, node 3 2025-04-04T17:18:48.427128Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:48.427152Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:48.427162Z node 3 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:48.427318Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64037 TClient is connected to server localhost:64037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:48.874170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:48.892201Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:48.972767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.134011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:49.210331Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:51.401617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508699108053007:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:51.401716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:51.422423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.453025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.478776Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.505247Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.532856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.561460Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:51.599030Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508699108053513:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:51.599105Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508699108053518:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:51.599113Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:51.602118Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:51.610412Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508699108053520:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:51.692019Z node 3 :TX_PROXY ERROR: Actor# [3:7489508699108053574:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:52.683356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-04T17:18:52.721192Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.802087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.872754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:18:52.960196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:18:53.036879Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:18:53.243294Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508686223149344:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:53.243366Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> TPersQueueTest::TxCounters [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:13:06.752654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507221226121997:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.752779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:06.776318Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489507218713405551:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:06.776361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:13:07.014645Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:13:07.049311Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e49/r3tmp/tmpJCG4EG/pdisk_1.dat 2025-04-04T17:13:07.486807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.486908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.488279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:07.488311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:07.494255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:13:07.504483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:07.506301Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:13:07.511479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1603, node 1 2025-04-04T17:13:07.862989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001e49/r3tmp/yandexpQPvzS.tmp 2025-04-04T17:13:07.863015Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001e49/r3tmp/yandexpQPvzS.tmp 2025-04-04T17:13:07.863201Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001e49/r3tmp/yandexpQPvzS.tmp 2025-04-04T17:13:07.863327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:13:08.017017Z INFO: TTestServer started on Port 24957 GrpcPort 1603 TClient is connected to server localhost:24957 PQClient connected to localhost:1603 === TenantModeEnabled() = 0 === Init PQ - start server on port 1603 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:08.420395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:13:08.420633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.420810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:13:08.421342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:13:08.421374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.423924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:08.424038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:13:08.424237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.424280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:13:08.424297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:13:08.424313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-04T17:13:08.427987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.428026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:13:08.428043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2025-04-04T17:13:08.429941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.429971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.429987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:08.430057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.434706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:13:08.434994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:08.435013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:13:08.435029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:13:08.436663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:13:08.436795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:13:08.439461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743786788485, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:13:08.439613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743786788485 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:13:08.439644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:08.439842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:13:08.439876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:13:08.440025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:13:08.440088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:13:08.441920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:13:08.441939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:13:08.442080Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:13:08.442094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489507225521089932:2396], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:13:08.442130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:13:08.442146Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:13:08.442222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:13:08.442231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.442270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:13:08.442282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.442304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:13:08.442330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:13:08.442343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:13:08.442358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-04T17:13:08.442431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 7205759404664 ... per UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:19:01.397124Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7489508743655087451:2495] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:19:01.397163Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:19:01.397974Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-04T17:19:01.398153Z node 32 :PERSQUEUE INFO: new Cookie 123|c7a190d-58a6ee06-c502e60f-72cbe772_0 generated for partition 0 topic 'topic' owner 123 2025-04-04T17:19:01.398809Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|c7a190d-58a6ee06-c502e60f-72cbe772_0 2025-04-04T17:19:01.408237Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-04-04T17:19:01.408471Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2025-04-04T17:19:01.408549Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7489508743655087457:2497]: Bootstrap 2025-04-04T17:19:01.411020Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7489508743655087457:2497]: Request location 2025-04-04T17:19:01.411323Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7489508743655087466:2498] connected; active server actors: 1 2025-04-04T17:19:01.411456Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2025-04-04T17:19:01.411540Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7489508743655087457:2497]: Got location 2025-04-04T17:19:01.411698Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7489508743655087466:2498] disconnected; active server actors: 1 2025-04-04T17:19:01.411749Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7489508743655087466:2498] disconnected no session 2025-04-04T17:19:01.414409Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|c7a190d-58a6ee06-c502e60f-72cbe772_0 grpc read done: success: 0 data: 2025-04-04T17:19:01.414444Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|c7a190d-58a6ee06-c502e60f-72cbe772_0 grpc read failed 2025-04-04T17:19:01.414512Z node 32 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 2 sessionId: 123|c7a190d-58a6ee06-c502e60f-72cbe772_0 2025-04-04T17:19:01.414538Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|c7a190d-58a6ee06-c502e60f-72cbe772_0 is DEAD 2025-04-04T17:19:01.415213Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:19:01.419417Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:19:01.419462Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-04-04T17:19:01.420678Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2025-04-04T17:19:01.420878Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:39132 2025-04-04T17:19:01.420916Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:39132 proto=topic topic=topic durationSec=0 2025-04-04T17:19:01.420933Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:19:01.420982Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2025-04-04T17:19:01.422352Z node 32 :PQ_WRITE_PROXY INFO: 
session v1 cookie: 3 sessionId: describe result for acl check 2025-04-04T17:19:01.422632Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:19:01.422666Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:19:01.422692Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:19:01.422716Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7489508743655087471:2500] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:19:01.422737Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-04-04T17:19:01.423395Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-04T17:19:01.423502Z node 32 :PERSQUEUE INFO: new Cookie 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 generated for partition 0 topic 'topic' owner 123 2025-04-04T17:19:01.424040Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 2025-04-04T17:19:01.426195Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:19:01.426571Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:19:01.426961Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:19:01.427161Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:19:01.427366Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-04-04T17:19:01.428218Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=YmMwZjllNzgtMzk5MzNmZDEtOTlhMjNlYWQtNmZhNzFhMjg= TxId: 01jr0x0a4r7sq9wbefcfcs9g75 WriteId: {32, 281474976710673} 2025-04-04T17:19:01.431995Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710673}, 100000}, State: StateInit] bootstrapping {0, {32, 
281474976710673}, 100000} [32:7489508743655087483:2502] 2025-04-04T17:19:01.434970Z node 32 :PERSQUEUE INFO: [topic:{0, {32, 281474976710673}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:19:01.435061Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710673}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976710673}, 100000} generation 1 [32:7489508743655087483:2502] 2025-04-04T17:19:01.435868Z node 32 :PERSQUEUE INFO: new Cookie 123|6bbc4dc7-f328e5dc-f0e59718-df7efad7_0 generated for partition {0, {32, 281474976710673}, 100000} topic 'topic' owner 123 2025-04-04T17:19:01.438285Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.438361Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.438393Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.438427Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.443301Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.448825Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.448893Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.448953Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:19:01.528501Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 grpc read done: success: 0 data: 2025-04-04T17:19:01.528547Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 grpc read failed 2025-04-04T17:19:01.529075Z node 32 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 2025-04-04T17:19:01.529101Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|a9c1b239-a3ebce2a-843cbd73-cba1f3eb_0 is DEAD 2025-04-04T17:19:01.529739Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:19:01.529806Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=api.grpc.topic.stream_write.uncommitted_bytes: 20796
name=api.grpc.topic.stream_write.uncommitted_messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncommitted_bytes: 20796
name=topic.write.uncommitted_messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=200: 0
    bin=500: 0
    bin=1000: 0
    bin=2000: 3
    bin=5000: 1
    bin=10000: 0
    bin=30000: 0
    bin=60000: 0
    bin=180000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=5120: 0
    bin=10240: 2
    bin=20480: 1
    bin=51200: 0
    bin=102400: 0
    bin=204800: 0
    bin=524288: 0
    bin=1048576: 0
    bin=2097152: 0
    bin=5242880: 0
    bin=10485760: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=5: 0
    bin=10: 0
    bin=20: 0
    bin=50: 0
    bin=100: 0
    bin=500: 0
    bin=1000: 0
    bin=2500: 0
    bin=5000: 0
    bin=10000: 0
    bin=999999: 0
2025-04-04T17:19:01.584973Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 281474976710674 |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> Secret::Simple |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-04-04T17:19:00.097703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508741237832855:2151];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:00.098438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001450/r3tmp/tmpR4NmrB/pdisk_1.dat 2025-04-04T17:19:00.452494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:00.489637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:00.489745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:00.491719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25501, node 1 2025-04-04T17:19:00.682899Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:00.682932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:00.682945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:00.683071Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:02.776687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508749827768066:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.776778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508749827768040:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.777194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.783738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-04T17:19:02.799670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508749827768069:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-04T17:19:02.912602Z node 1 :TX_PROXY ERROR: Actor# [1:7489508749827768130:2354] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:03.172777Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508749827768151:2372], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:03.173479Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGJjM2IyN2MtODcwM2M5MTMtYjEwODNiYmUtOGYyYzMxYmY=, ActorId: [1:7489508749827768038:2359], ActorState: ExecuteState, TraceId: 01jr0x0bkp9vpn52hkbd2c0e3w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:03.196934Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:04.215700Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508758417702820:2402], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:04.216657Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGMwZTNmOGUtMWEwNGVmYmQtZGE1YzQ3MjEtNDMzZTAyNTI=, ActorId: [1:7489508758417702813:2398], ActorState: ExecuteState, TraceId: 01jr0x0d0601q8vfd2j2sjam4q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:04.216982Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |92.3%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Secret::ValidationQueryService |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2025-04-04T17:19:00.097299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508738551141871:2152];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:00.097461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001473/r3tmp/tmp90H8bw/pdisk_1.dat 2025-04-04T17:19:00.455910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:00.504154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:00.504248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:00.505490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15039, node 1 2025-04-04T17:19:00.680510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001473/r3tmp/yandex5hkYE9.tmp 2025-04-04T17:19:00.680534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001473/r3tmp/yandex5hkYE9.tmp 2025-04-04T17:19:00.680798Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001473/r3tmp/yandex5hkYE9.tmp 2025-04-04T17:19:00.680948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17938 PQClient connected to localhost:15039 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:01.226323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:01.273182Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:02.696812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508747141077098:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.696912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508747141077110:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.697006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.708426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:19:02.720283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508747141077113:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:19:02.820008Z node 1 :TX_PROXY ERROR: Actor# [1:7489508747141077178:2390] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:03.150132Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508747141077186:2343], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:03.152011Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmExNzY0MDctMzE3YWQ0YzctYTYxMjQ2NDgtZjAxMjIzNDk=, ActorId: [1:7489508747141077096:2331], ActorState: ExecuteState, TraceId: 01jr0x0bh78m5zbv5bf7rjkf2h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:03.154182Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:03.189172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:03.332164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:03.437089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:19:03.839841Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0x0cc800n9gz6yzjdh666n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQ4YzVjOTItMjRhOGE3ZDktMmQzNGFmYzQtNjhjNWVhMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |92.3%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 11483, MsgBus: 8288 2025-04-04T17:18:09.593312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508519751349983:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:09.593382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bbc/r3tmp/tmpM245MX/pdisk_1.dat 2025-04-04T17:18:10.005764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:10.036785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:10.037403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:10.041487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11483, node 1 2025-04-04T17:18:10.271310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:10.271344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:10.271352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:10.271494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8288 TClient is connected to server localhost:8288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:11.097253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:12.587319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508532636252510:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:12.587453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:12.906143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.108175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508536931219916:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.108225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.120623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.177762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508536931219995:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.177825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.178381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508536931220000:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.183936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-04-04T17:18:13.194992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508536931220002:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-04-04T17:18:13.292952Z node 1 :TX_PROXY ERROR: Actor# [1:7489508536931220053:2446] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9683, MsgBus: 17774 2025-04-04T17:18:15.233035Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508547773824414:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.329697Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bbc/r3tmp/tmpesSp4A/pdisk_1.dat 2025-04-04T17:18:15.416292Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:15.451113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:15.451186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9683, node 2 2025-04-04T17:18:15.453995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:15.526730Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:15.526745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:15.526750Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:15.526825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17774 TClient is connected to server localhost:17774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:16.015228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:16.027794Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:18.233195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508560658726807:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.233197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508560658726812:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.233263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:18.236553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:18:18.245847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508560658726821:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:18:18.301917Z node 2 :TX_PROXY ERROR: Actor# [2:7489508560658726873:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.333730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16889, MsgBus: 24056 2025-04-04T17:18:19.452196Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508563159784067:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:19.452254Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bbc/r3tmp/tmpjwi7S0/pdisk_1.dat 2025-04-0 ... .cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489508718029524669:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:55.345126Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bbc/r3tmp/tmpt6ig8a/pdisk_1.dat 2025-04-04T17:18:55.465320Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:55.480912Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:55.481033Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:55.483320Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6306, node 10 2025-04-04T17:18:55.533496Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:55.533523Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:55.533533Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:55.533656Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8814 TClient is connected to server localhost:8814 WaitRootIsUp 'Root'... 
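The recurring triple in these sections — NOT_FOUND warnings for the pool, a "Scheduled retry ... doublechecking" message, then a TX_PROXY "path exist, request accepts it" error — is the workload manager lazily materializing the default resource pool: several sessions race to create `.metadata/workload_manager/pools/default`, one creation wins, and the losers merely observe the already-existing path, so the ERROR lines are benign. For orientation only, a hedged sketch of the explicit user-level equivalent (the server creates the default pool internally; the pool name and limits below are hypothetical, as the actual settings are not shown in this log):

    -- Hypothetical pool name and limits, for illustration only.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- assumed: no concurrency cap
        QUEUE_SIZE = -1               -- assumed: unbounded wait queue
    );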
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:56.213495Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:59.174168Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489508735209394508:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:59.174275Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7489508735209394516:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:59.174346Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:59.178870Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:18:59.191618Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7489508735209394522:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:18:59.289851Z node 10 :TX_PROXY ERROR: Actor# [10:7489508735209394573:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:59.330581Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7489508735209394590:2338], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-04-04T17:18:59.330853Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZWUxNTIzNTQtNjIzODUxNDEtYTRkODE3M2MtZTFkNjhlOTA=, ActorId: [10:7489508735209394506:2329], ActorState: ExecuteState, TraceId: 01jr0x05784f6d3qkxftc7hv66, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 5592, MsgBus: 19171 2025-04-04T17:19:00.301373Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7489508741640320297:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:00.301459Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bbc/r3tmp/tmpcSU3zV/pdisk_1.dat 2025-04-04T17:19:00.420560Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:00.460399Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:00.460525Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:00.461960Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5592, node 11 2025-04-04T17:19:00.523515Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:00.523552Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:00.523565Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:00.523751Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19171 TClient is connected to server localhost:19171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:01.297982Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:05.131454Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489508763115157441:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:05.131535Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7489508763115157423:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:05.131718Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:05.137845Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:19:05.153330Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7489508763115157445:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:05.257081Z node 11 :TX_PROXY ERROR: Actor# [11:7489508763115157496:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:05.279761Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7489508763115157513:2339], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-04-04T17:19:05.280816Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=MmMwMGY4ZTMtM2Q1ZDAxNzctMjAyZjlkMzUtYjEyZDRiMDY=, ActorId: [11:7489508763115157421:2330], ActorState: ExecuteState, TraceId: 01jr0x0a6a7hscz3ywb03jsz2h, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
<main>: Error: Type annotation, code: 1030
<main>:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-04-04T17:19:05.301928Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7489508741640320297:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:05.302087Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] >> EvWrite::AbortInTransaction >> TColumnShardTestReadWrite::CompactionGCFailingBs |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [GOOD] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex >> TxUsage::WriteToTopic_Invalid_Session [GOOD] >> TxUsage::TwoSessionOneConsumer [GOOD] |92.3%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Two_WriteSession >> TxUsage::WriteToTopic_Demo_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2025-04-04T17:19:00.097114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508738347724201:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:00.097399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001466/r3tmp/tmpmP4mfE/pdisk_1.dat 2025-04-04T17:19:00.445958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:00.508061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:00.508161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:00.509852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8408, node 1 2025-04-04T17:19:00.681254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:00.681282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:00.681288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:00.681387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32144 PQClient connected to localhost:8408 WaitRootIsUp 'Root'... 
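For the KqpPg::InsertValuesFromTableWithDefaultNegativeCase errors repeated above: the negative case declares a pg int4 column whose DEFAULT is a string literal, and KQP rejects the table at compile time while converting the default expression from text to binary, which is exactly the mkql_terminator message in the log. An assumed minimal repro in pg-compatible syntax (the test generates its own DDL; this sketch is an equivalent, not the test's statement):

    -- Fails compilation: the DEFAULT cannot be parsed as an int4, producing
    -- ERROR: invalid input syntax for type integer: "text".
    CREATE TABLE t (
        key int4 PRIMARY KEY,
        value int4 DEFAULT 'text'
    );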
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:01.225085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:01.266100Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:02.706873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508746937659450:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.707067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.707544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508746937659463:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.711018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:19:02.720763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508746937659465:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:19:02.796897Z node 1 :TX_PROXY ERROR: Actor# [1:7489508746937659530:2390] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:03.139147Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508746937659538:2343], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:03.148386Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGFhNDc3M2QtYjg2NDExMzUtOWIxZWUyZWItMzBlYmM2, ActorId: [1:7489508746937659448:2331], ActorState: ExecuteState, TraceId: 01jr0x0bh6a5pgvcxxv799hz8h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:03.151126Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:03.188715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:03.288716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:03.401051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:19:03.837513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0x0c9e5tapr2e002dqz8k0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDczNTRkNjYtOTJlODI4ZjEtMzc4YjgzNjYtZjY2OGQyNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:19:05.100139Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508738347724201:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:05.100263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> EvWrite::AbortInTransaction [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> TPQCDTest::TestDiscoverClusters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-04-04T17:19:07.673217Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:07.759862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:07.779393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:07.779689Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:07.787914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:07.788146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:07.788402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:07.788522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:07.788639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:07.788760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:07.788853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:07.788952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:07.789095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:07.789237Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:07.789347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:07.789437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:07.813843Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:07.814392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:07.814460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:07.814621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:07.814746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:07.814810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:07.814846Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:07.814909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:07.814956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:07.814987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:07.815005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:07.815152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:07.815196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:07.815220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:07.815239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:07.815318Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:07.815371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:07.815436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:07.815472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:07.815530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:07.815556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:07.815579Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:07.815625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:07.815658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:07.815676Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:07.816042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-04-04T17:19:07.816158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=71; 2025-04-04T17:19:07.816232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-04T17:19:07.816325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-04T17:19:07.816494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:07.816567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:07.816609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:07.816815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-04T17:19:07.816873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:07.816924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:07.817079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:07.817124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:07.817153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:07.817336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:07.817376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:07.817405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:07.817531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:07.817566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:07.817604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
51615;plan=0;src=[1:166:2191];cookie=00:0;;this=88923003848000;op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=00:0;;int_this=89197881016128;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-04-04T17:19:08.333413Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-04-04T17:19:08.333487Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-04T17:19:08.333862Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-04T17:19:08.340216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T17:19:08.340363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-04T17:19:08.340408Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:143;event=RegisterTable;path_id=1; 2025-04-04T17:19:08.347463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:487;event=OnTieringModified;path_id=1; 2025-04-04T17:19:08.347667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:211;event=finished_tx;tx_id=10; 2025-04-04T17:19:08.371499Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-04T17:19:08.371723Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=229592;columns=2; 2025-04-04T17:19:08.392340Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];fline=actor.cpp:22;event=flush_writing;size=229592;count=1; 2025-04-04T17:19:08.396624Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-04T17:19:08.398763Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-04T17:19:08.412095Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-04T17:19:08.412285Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:19:08.412794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-04-04T17:19:08.412887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-04-04T17:19:08.413182Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-04-04T17:19:08.413259Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 
2025-04-04T17:19:08.413321Z node 1 :TX_COLUMNSHARD ERROR: TxPlanStep[5] Ignore old txIds [112] for step 10 last planned step 10 at tablet 9437184 2025-04-04T17:19:08.413379Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-04-04T17:19:08.413831Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {10:max} readable: {10:max} at tablet 9437184 2025-04-04T17:19:08.413980Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:19:08.416693Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-04-04T17:19:08.416830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-04-04T17:19:08.417110Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1,2"},"fetch":"1,2"}]}; 2025-04-04T17:19:08.417241Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:19:08.418550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:256:2274];trace_detailed=; 2025-04-04T17:19:08.419907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:85;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-04-04T17:19:08.420151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-04-04T17:19:08.420558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:08.420713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:19:08.420832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:19:08.420890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:256:2274] finished for tablet 9437184 2025-04-04T17:19:08.421356Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:250:2268];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743787148418479,"name":"_full_task","f":1743787148418479,"d_finished":0,"c":0,"l":1743787148420958,"d":2479},"events":[{"name":"bootstrap","f":1743787148418747,"d_finished":1597,"c":1,"l":1743787148420344,"d":1597},{"a":1743787148420528,"name":"ack","f":1743787148420528,"d_finished":0,"c":0,"l":1743787148420958,"d":430},{"a":1743787148420501,"name":"processing","f":1743787148420501,"d_finished":0,"c":0,"l":1743787148420958,"d":457},{"name":"ProduceResults","f":1743787148420312,"d_finished":280,"c":2,"l":1743787148420862,"d":280},{"a":1743787148420865,"name":"Finish","f":1743787148420865,"d_finished":0,"c":0,"l":1743787148420958,"d":93}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:19:08.421469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:250:2268];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:19:08.421901Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:250:2268];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1743787148418479,"name":"_full_task","f":1743787148418479,"d_finished":0,"c":0,"l":1743787148421520,"d":3041},"events":[{"name":"bootstrap","f":1743787148418747,"d_finished":1597,"c":1,"l":1743787148420344,"d":1597},{"a":1743787148420528,"name":"ack","f":1743787148420528,"d_finished":0,"c":0,"l":1743787148421520,"d":992},{"a":1743787148420501,"name":"processing","f":1743787148420501,"d_finished":0,"c":0,"l":1743787148421520,"d":1019},{"name":"ProduceResults","f":1743787148420312,"d_finished":280,"c":2,"l":1743787148420862,"d":280},{"a":1743787148420865,"name":"Finish","f":1743787148420865,"d_finished":0,"c":0,"l":1743787148421520,"d":655}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-04-04T17:19:08.422063Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:08.417200Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:19:08.422120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:08.422258Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:256:2274];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> UpsertLoad::ShouldWriteDataBulkUpsert >> ReadLoad::ShouldReadIterate >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> UpsertLoad::ShouldWriteKqpUpsert >> ReadLoad::ShouldReadKqp >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2025-04-04T17:19:00.103362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508739367877918:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:00.103518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001478/r3tmp/tmpbLvba7/pdisk_1.dat 2025-04-04T17:19:00.444829Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:00.475005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:00.476105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:00.491040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27772, node 1 2025-04-04T17:19:00.680692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001478/r3tmp/yandexFvEMoK.tmp 2025-04-04T17:19:00.680721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001478/r3tmp/yandexFvEMoK.tmp 2025-04-04T17:19:00.680924Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001478/r3tmp/yandexFvEMoK.tmp 2025-04-04T17:19:00.681074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3337 PQClient connected to localhost:27772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:01.225206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:01.278500Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:01.286878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-04-04T17:19:02.833587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508747957813257:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.833587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508747957813232:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.833728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.839620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:19:02.841936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508747957813293:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.842002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:02.849997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508747957813261:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:19:03.081377Z node 1 :TX_PROXY ERROR: Actor# [1:7489508747957813317:2387] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:03.138961Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508752252780630:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:03.148377Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWU0Mzc1Yy1kYzJlY2QwMy1mOWU4ZDk2NC1mOTc3OWU0NQ==, ActorId: [1:7489508747957813229:2331], ActorState: ExecuteState, TraceId: 01jr0x0bn06k5bfcwkv8bcn3c6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:03.151261Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:03.189392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:03.317899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:03.391037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:19:03.837526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0x0c924czyknnk3nk7aa8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRjMWFlNmMtMWQyOWNkODctNTk4MjQ4YTMtNjExNTUyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:04.841774Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jr0x0dde78fb4r9x8x912hz0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdiYzE5NGEtYTU1M2ZhZWEtYTRlODgwLTc4NzAzYmU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:04.850273Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0x0dde78fb4r9x8x912hz0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdiYzE5NGEtYTU1M2ZhZWEtYTRlODgwLTc4NzAzYmU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:05.103885Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508739367877918:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:05.104025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:06.152437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. 
Ctx: { TraceId: 01jr0x0eq15t8c2ss69mb7pj83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzUxMWYxMmItNDAyZWY2ZmUtYWJjYjNmMjctZjdlZDUxZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:06.156152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0x0eq15t8c2ss69mb7pj83, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzUxMWYxMmItNDAyZWY2ZmUtYWJjYjNmMjctZjdlZDUxZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:07.285197Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0x0fve8tx505m1knw5pjxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFkZGU5ZGMtODZjOTFhZTQtZTdiOWYyZTUtOTU0OGI3ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:07.289687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0x0fve8tx505m1knw5pjxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODFkZGU5ZGMtODZjOTFhZTQtZTdiOWYyZTUtOTU0OGI3ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:08.544309Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr0x0h2gbqvg8cwevajfcpqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdhYjE5NWUtZTUxOWNlODItNGQxNzk2ZDUtYmNlOGFlYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:08.548844Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr0x0h2gbqvg8cwevajfcpqq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTdhYjE5NWUtZTUxOWNlODItNGQxNzk2ZDUtYmNlOGFlYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |92.3%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:108:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:113:2057] recipient: [1:108:2139] Leader for TabletID 9437184 is [1:130:2153] sender: [1:133:2057] recipient: [1:108:2139] 2025-04-04T17:18:06.991684Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.107504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.107573Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.109697Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.110188Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.110512Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.155113Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.161104Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.161218Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.162613Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.162699Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.162756Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.163115Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.163177Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.163239Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:191:2153] in generation 2 Leader for TabletID 9437184 is [1:130:2153] sender: [1:208:2057] recipient: [1:14:2061] 2025-04-04T17:18:07.243106Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.275788Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.275965Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.276063Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.276111Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.276154Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:18:07.276191Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.276391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.276432Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.276688Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.276792Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.276842Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.276884Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.276944Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:07.276988Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.277021Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.277066Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.277104Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.277212Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.277255Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.277308Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.279895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.279957Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.280034Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.280186Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.280243Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.280323Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:18:07.280391Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.280442Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.280478Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.280511Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.280799Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.280834Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.280871Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:07.280906Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.280952Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:18:07.280982Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.281017Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.281048Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.281073Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.293339Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.293442Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.293476Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.293515Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.293593Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.294964Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295042Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295080Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.295226Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.295265Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.295388Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.295434Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.295468Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.295513Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.304468Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.304541Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.304759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.304798Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.304838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.304887Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.304921Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.304957Z node 1 :TX_DATASHARD 
DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-04-04T17:18:07.304985Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-04-04T17:18:07.305022Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.305051Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.305084Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.305118Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.305273Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-04-04T17:18:07.305307Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-04-04T17:18:07.305326Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.305349Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.305369Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.305426Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.305469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:18:07.305508Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:07.305539Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:07.305601Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-04-04T17:18:07.305639Z node 1 :TX_DATASHARD TRAC ... 
9437186 to execution unit ExecuteDataTx 2025-04-04T17:19:09.453721Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2025-04-04T17:19:09.454002Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2025-04-04T17:19:09.454052Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:19:09.454092Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-04T17:19:09.454118Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2025-04-04T17:19:09.454142Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompleteOperation 2025-04-04T17:19:09.454168Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2025-04-04T17:19:09.454348Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is DelayComplete 2025-04-04T17:19:09.454379Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2025-04-04T17:19:09.454407Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437186 to execution unit CompletedOperations 2025-04-04T17:19:09.454435Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2025-04-04T17:19:09.454467Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437186 is Executed 2025-04-04T17:19:09.454493Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2025-04-04T17:19:09.454519Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437186 has finished 2025-04-04T17:19:09.454550Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:19:09.454590Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-04T17:19:09.454618Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-04T17:19:09.454646Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-04T17:19:09.454802Z node 41 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [41:341:2309], Recipient [41:341:2309]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:19:09.454838Z node 41 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:19:09.454879Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-04-04T17:19:09.454911Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:19:09.454936Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:19:09.454968Z node 41 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437185 2025-04-04T17:19:09.454999Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit PlanQueue 2025-04-04T17:19:09.455028Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.455053Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit PlanQueue 2025-04-04T17:19:09.455079Z node 41 :TX_DATASHARD 
TRACE: Add [7:6] at 9437185 to execution unit LoadTxDetails 2025-04-04T17:19:09.455107Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit LoadTxDetails 2025-04-04T17:19:09.455633Z node 41 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 7:6 keys extracted: 1 2025-04-04T17:19:09.455678Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.455704Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit LoadTxDetails 2025-04-04T17:19:09.455729Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit FinalizeDataTxPlan 2025-04-04T17:19:09.455774Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit FinalizeDataTxPlan 2025-04-04T17:19:09.455810Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.455836Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit FinalizeDataTxPlan 2025-04-04T17:19:09.455860Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit BuildAndWaitDependencies 2025-04-04T17:19:09.455886Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit BuildAndWaitDependencies 2025-04-04T17:19:09.455929Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437185 2025-04-04T17:19:09.455960Z node 41 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437185 2025-04-04T17:19:09.455987Z node 41 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437185 2025-04-04T17:19:09.456022Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.456047Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit BuildAndWaitDependencies 2025-04-04T17:19:09.456073Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit BuildDataTxOutRS 2025-04-04T17:19:09.456100Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit BuildDataTxOutRS 2025-04-04T17:19:09.456142Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.456166Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit BuildDataTxOutRS 2025-04-04T17:19:09.456190Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit StoreAndSendOutRS 2025-04-04T17:19:09.456216Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit StoreAndSendOutRS 2025-04-04T17:19:09.456241Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.456267Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit StoreAndSendOutRS 2025-04-04T17:19:09.456292Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit PrepareDataTxInRS 2025-04-04T17:19:09.456317Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit PrepareDataTxInRS 2025-04-04T17:19:09.456347Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.456372Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit PrepareDataTxInRS 2025-04-04T17:19:09.456395Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit LoadAndWaitInRS 2025-04-04T17:19:09.456422Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit LoadAndWaitInRS 2025-04-04T17:19:09.456447Z node 41 :TX_DATASHARD TRACE: 
Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.456471Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit LoadAndWaitInRS 2025-04-04T17:19:09.456494Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit ExecuteDataTx 2025-04-04T17:19:09.456520Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit ExecuteDataTx 2025-04-04T17:19:09.456798Z node 41 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437185 with status COMPLETE 2025-04-04T17:19:09.456847Z node 41 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:19:09.456891Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.456917Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit ExecuteDataTx 2025-04-04T17:19:09.456951Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompleteOperation 2025-04-04T17:19:09.456977Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompleteOperation 2025-04-04T17:19:09.457141Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is DelayComplete 2025-04-04T17:19:09.457173Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompleteOperation 2025-04-04T17:19:09.457202Z node 41 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompletedOperations 2025-04-04T17:19:09.457230Z node 41 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompletedOperations 2025-04-04T17:19:09.457263Z node 41 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-04-04T17:19:09.457290Z node 41 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompletedOperations 2025-04-04T17:19:09.457317Z node 41 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437185 has finished 2025-04-04T17:19:09.457346Z node 41 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:19:09.457374Z node 41 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-04-04T17:19:09.457402Z node 41 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-04-04T17:19:09.457429Z node 41 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-04-04T17:19:09.470531Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2025-04-04T17:19:09.470609Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2025-04-04T17:19:09.470662Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:19:09.470700Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2025-04-04T17:19:09.470758Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [41:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:19:09.470800Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:19:09.471086Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2025-04-04T17:19:09.471121Z node 41 :TX_DATASHARD 
DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2025-04-04T17:19:09.471160Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:19:09.471188Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2025-04-04T17:19:09.471229Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [41:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:19:09.471260Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:19:09.471445Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2025-04-04T17:19:09.471478Z node 41 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2025-04-04T17:19:09.471513Z node 41 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-04-04T17:19:09.471541Z node 41 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2025-04-04T17:19:09.471578Z node 41 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [41:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:19:09.471607Z node 41 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |92.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::VectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] >> TColumnShardTestReadWrite::ReadAggregate >> TxUsage::Sinks_Oltp_WriteToTopic_1 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::WriteReadZSTD >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> Describe::Statistics [GOOD] >> Describe::Location ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::VectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 64467, MsgBus: 28791 2025-04-04T17:18:07.271003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508511953701327:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:07.275354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016dd/r3tmp/tmpgb3ddO/pdisk_1.dat 2025-04-04T17:18:07.781418Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.789138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:07.789242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:07.791290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64467, node 1 2025-04-04T17:18:07.872146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:07.872164Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:07.872171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:07.872292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28791 TClient is connected to server localhost:28791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:08.625634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.666524Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:08.684049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:08.862197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.022064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:09.118075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:10.580610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508524838604807:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.580776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:10.842500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.897685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.938037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.967523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:10.995828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.030376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:11.111936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529133572619:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.112028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.112253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508529133572624:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:11.116061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:11.138479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508529133572626:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:11.197353Z node 1 :TX_PROXY ERROR: Actor# [1:7489508529133572681:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:12.115356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:12.271517Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508511953701327:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:12.271576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:12.352105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:12.493510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:22.776936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:22.776973Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 12998, MsgBus: 18585 2025-04-04T17:18:35.119108Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508630872291480:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:35.119626Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016dd/r3tmp/tmpYdS5cX/pdisk_1.dat 2025-04-04T17:18:35.281674Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:35.305172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:35.305259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:35.307592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12998, node 2 2025-04-04T17:18:35.396701Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:35.396722Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:35.396729Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:35.396869Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18585 TClient is connected to server localhost:18585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:35.893712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.899156Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:35.916007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:35.991989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.204302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:36.284959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:38.563337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508643757195135:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.563421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.616980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.659448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.720723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.757479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.803019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.876180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.938055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508643757195650:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.938142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.938181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508643757195655:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:38.941158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:38.950792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508643757195657:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:39.029220Z node 2 :TX_PROXY ERROR: Actor# [2:7489508648052163006:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:39.991053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.118508Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508630872291480:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:40.118575Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:40.333955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:40.382166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.436852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.527293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-04T17:18:40.580910Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-04T17:18:40.581480Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-04T17:18:40.581501Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-04T17:18:50.265242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:50.265263Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpIndexes::VectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_2 >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> TxUsage::WriteToTopic_Demo_3 [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::VectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 14287, MsgBus: 15115 2025-04-04T17:18:10.445343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508524079971518:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:10.445551Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ce/r3tmp/tmptMqeCf/pdisk_1.dat 2025-04-04T17:18:10.800998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:10.820034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:10.820123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:10.821676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14287, node 1 2025-04-04T17:18:10.888744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:10.888767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:10.888785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:10.888920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15115 TClient is connected to server localhost:15115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:11.446818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.466063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.599557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:11.764944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:11.841452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:13.358197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508536964875174:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.358301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.593943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.626660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.659045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.691873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.718762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.751686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:13.810586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508536964875686:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.810703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.812877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508536964875691:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:13.816656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:13.832880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508536964875693:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:13.916168Z node 1 :TX_PROXY ERROR: Actor# [1:7489508536964875747:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:14.782123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.048304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:15.114581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:15.447399Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508524079971518:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:15.447453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:25.800512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:25.800551Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 2259, MsgBus: 26485 2025-04-04T17:18:37.631097Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508641443679827:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:37.634789Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016ce/r3tmp/tmpGnhTN4/pdisk_1.dat 2025-04-04T17:18:37.755735Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2259, node 2 2025-04-04T17:18:37.788954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:37.789048Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:37.790615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:37.829749Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:37.829777Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:37.829791Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:37.829926Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26485 TClient is connected to server localhost:26485 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:18:38.240086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.261890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:38.320143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:18:38.518597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:38.605383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:41.086991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508658623550647:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.087084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.108585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.144805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.176865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.203976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.270276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.308420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:41.352164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508658623551158:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.352290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.352305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508658623551163:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:41.355198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:41.362691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508658623551165:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:41.424558Z node 2 :TX_PROXY ERROR: Actor# [2:7489508658623551217:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:42.393367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:42.629908Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508641443679827:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:42.629992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:42.637457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:42.695166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:42.742864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-04-04T17:18:42.782000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-04-04T17:18:42.826645Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-04T17:18:42.826680Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-04T17:18:42.828098Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-04T17:18:52.749698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:52.749723Z node 2 :IMPORT WARN: Table profiles were not loaded >> TxUsage::WriteToTopic_Demo_24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-04-04T17:19:12.587225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:12.587541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:12.587589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ca8/r3tmp/tmpg1GNBX/pdisk_1.dat 2025-04-04T17:19:12.893007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:12.944249Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:12.988930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:12.989082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:13.000572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:13.080921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.366160Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-04T17:19:13.366394Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-04T17:19:13.471728Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor finished in 0.104926s, errors=0 2025-04-04T17:19:13.471848Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:737:2620] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-04-04T17:19:08.897397Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:08.974893Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:08.999228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:08.999482Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:09.006792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:09.007002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-04-04T17:19:09.007215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:09.007329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:09.007439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:09.007532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:09.007635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:09.007747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:09.007855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:09.007999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:09.008093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:09.008182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:09.034895Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:09.035116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:09.035175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:09.035327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:09.035466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:09.035532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:09.035569Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:09.035657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:09.035722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:09.035759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:09.035784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:09.035938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:09.035990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:09.036024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:09.036047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:09.036135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:09.036203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:09.036258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:09.036303Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:09.036371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:09.036427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:09.036459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:09.036505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:09.036550Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:09.036581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:09.036944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-04T17:19:09.037027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-04T17:19:09.037098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-04T17:19:09.037165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-04T17:19:09.037313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:09.037376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:09.037412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:09.037592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:09.037640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:09.037676Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:09.037817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:09.037857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:09.037895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:09.038060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:09.038098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:09.038127Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:09.038271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:09.038317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:09.038355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-04-04T17:19:13.572482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-04-04T17:19:09.221047Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:09.301281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:09.318948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:09.319155Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:09.325122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:09.325308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:09.325466Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:09.325525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:09.325599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:09.325666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:09.325727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:09.325796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:09.325864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:09.325935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:09.325986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:09.326044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:09.350618Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:09.350790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:09.350828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:09.350976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:09.351144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:09.351211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:09.351247Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:09.351323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:09.351371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:09.351406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:09.351430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:09.351604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:09.351670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:09.351703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:09.351732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:09.351835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:09.351907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:09.351966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:09.351996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:09.352067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:09.352102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:09.352148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:09.352183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:09.352208Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:09.352225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:09.352473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=27; 2025-04-04T17:19:09.352533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-04-04T17:19:09.352589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-04-04T17:19:09.352632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=19; 2025-04-04T17:19:09.352732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:09.352767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:09.352784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:09.352891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:09.352930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:09.352950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:09.353049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:09.353086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:09.353114Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:09.353232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:09.353256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:09.353271Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:09.353356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:09.353393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:09.353430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... t_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.907908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:19:13.908005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T17:19:13.908083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T17:19:13.908194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-04-04T17:19:13.908319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.908405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.908506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.908659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:13.908769Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.908891Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.908925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:431:2446] finished for tablet 9437184 2025-04-04T17:19:13.909249Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:430:2445];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1743787153895040,"name":"_full_task","f":1743787153895040,"d_finished":0,"c":0,"l":1743787153908963,"d":13923},"events":[{"name":"bootstrap","f":1743787153895204,"d_finished":2884,"c":1,"l":1743787153898088,"d":2884},{"a":1743787153908643,"name":"ack","f":1743787153907695,"d_finished":836,"c":1,"l":1743787153908531,"d":1156},{"a":1743787153908634,"name":"processing","f":1743787153899296,"d_finished":5388,"c":10,"l":1743787153908532,"d":5717},{"name":"ProduceResults","f":1743787153896929,"d_finished":2628,"c":13,"l":1743787153908913,"d":2628},{"a":1743787153908915,"name":"Finish","f":1743787153908915,"d_finished":0,"c":0,"l":1743787153908963,"d":48},{"name":"task_result","f":1743787153899312,"d_finished":4418,"c":9,"l":1743787153907535,"d":4418}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.909302Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:19:13.909630Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:430:2445];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":1743787153895040,"name":"_full_task","f":1743787153895040,"d_finished":0,"c":0,"l":1743787153909332,"d":14292},"events":[{"name":"bootstrap","f":1743787153895204,"d_finished":2884,"c":1,"l":1743787153898088,"d":2884},{"a":1743787153908643,"name":"ack","f":1743787153907695,"d_finished":836,"c":1,"l":1743787153908531,"d":1525},{"a":1743787153908634,"name":"processing","f":1743787153899296,"d_finished":5388,"c":10,"l":1743787153908532,"d":6086},{"name":"ProduceResults","f":1743787153896929,"d_finished":2628,"c":13,"l":1743787153908913,"d":2628},{"a":1743787153908915,"name":"Finish","f":1743787153908915,"d_finished":0,"c":0,"l":1743787153909332,"d":417},{"name":"task_result","f":1743787153899312,"d_finished":4418,"c":9,"l":1743787153907535,"d":4418}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:13.909708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:13.894571Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-04T17:19:13.909743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:13.909988Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::ReadAggregate [GOOD] >> KqpExplain::ComplexJoin [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-04-04T17:19:11.579519Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:11.655976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:11.677279Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:11.677545Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:11.685645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:11.685868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:11.686078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:11.686227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:11.686349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:11.686464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:11.686562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:11.686700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:11.686820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:11.686943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:11.687055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:11.687148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:11.714969Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:11.715176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:11.715230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:11.715369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:11.715500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:11.715572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:11.715616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:11.715730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:11.715802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:11.715836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:11.715859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:11.715987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:11.716039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:11.716064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:11.716084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:11.716152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:11.716201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:11.716240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:11.716276Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:11.716331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:11.716356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:11.716379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:11.716410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:11.716432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:11.716449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:11.716756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-04T17:19:11.716848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2025-04-04T17:19:11.716918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-04-04T17:19:11.716999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T17:19:11.717121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:11.717161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:11.717196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:11.717340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:11.717379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:11.717407Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:11.717515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-04T17:19:11.717544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:11.717565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:11.717700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:11.717728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:11.717747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:11.717840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:11.717867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:11.717958Z node 1 :TX_COLUMNSHARD INFO: tablet_i ... UMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:19:14.781020Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:19:14.781058Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T17:19:14.781094Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-04-04T17:19:14.781138Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-04-04T17:19:14.781174Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T17:19:14.781276Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.781315Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-04-04T17:19:14.781347Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:19:14.781847Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:14.781977Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.782022Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:19:14.782113Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-04-04T17:19:14.782167Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-04-04T17:19:14.782282Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[2:432:2450];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-04-04T17:19:14.782386Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.782480Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.782557Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator 
is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.782821Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:14.782913Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.782976Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.782999Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:433:2451] finished for tablet 9437184 2025-04-04T17:19:14.783263Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[2:432:2450];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743787154770850,"name":"_full_task","f":1743787154770850,"d_finished":0,"c":0,"l":1743787154783033,"d":12183},"events":[{"name":"bootstrap","f":1743787154771029,"d_finished":1829,"c":1,"l":1743787154772858,"d":1829},{"a":1743787154782801,"name":"ack","f":1743787154781822,"d_finished":756,"c":1,"l":1743787154782578,"d":988},{"a":1743787154782788,"name":"processing","f":1743787154773943,"d_finished":5840,"c":10,"l":1743787154782579,"d":6085},{"name":"ProduceResults","f":1743787154771981,"d_finished":2055,"c":13,"l":1743787154782990,"d":2055},{"a":1743787154782992,"name":"Finish","f":1743787154782992,"d_finished":0,"c":0,"l":1743787154783033,"d":41},{"name":"task_result","f":1743787154773954,"d_finished":4969,"c":9,"l":1743787154781395,"d":4969}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.783326Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[2:432:2450];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 
2025-04-04T17:19:14.783710Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[2:432:2450];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743787154770850,"name":"_full_task","f":1743787154770850,"d_finished":0,"c":0,"l":1743787154783368,"d":12518},"events":[{"name":"bootstrap","f":1743787154771029,"d_finished":1829,"c":1,"l":1743787154772858,"d":1829},{"a":1743787154782801,"name":"ack","f":1743787154781822,"d_finished":756,"c":1,"l":1743787154782578,"d":1323},{"a":1743787154782788,"name":"processing","f":1743787154773943,"d_finished":5840,"c":10,"l":1743787154782579,"d":6420},{"name":"ProduceResults","f":1743787154771981,"d_finished":2055,"c":13,"l":1743787154782990,"d":2055},{"a":1743787154782992,"name":"Finish","f":1743787154782992,"d_finished":0,"c":0,"l":1743787154783368,"d":376},{"name":"task_result","f":1743787154773954,"d_finished":4969,"c":9,"l":1743787154781395,"d":4969}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-04-04T17:19:14.783782Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:14.770414Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-04-04T17:19:14.783815Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:14.784034Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:433:2451];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> TxUsage::WriteToTopic_Demo_4 [GOOD] >> TColumnShardTestReadWrite::WriteReadExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-04-04T17:19:12.587557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:12.588077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:12.588137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ccd/r3tmp/tmpUGXkjr/pdisk_1.dat 2025-04-04T17:19:12.960785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.013668Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:13.057408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:13.057557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:13.068938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:13.150129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.431628Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-04-04T17:19:13.431752Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-04-04T17:19:13.441756Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} started# 5 actors each with inflight# 4 2025-04-04T17:19:13.441845Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T17:19:13.441913Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T17:19:13.441949Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T17:19:13.441990Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T17:19:13.442030Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-04-04T17:19:13.454706Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} session: ydb://session/3?node_id=1&id=NGE4NjMxNDQtNjg4NmI1NWItNjBjYTUxNjktNmE3YjA0YzA= 2025-04-04T17:19:13.454775Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} session: ydb://session/3?node_id=1&id=OGUzMjU3NWQtNWUwMTYyMmEtNmE3YjgwNzAtMmJkNjFkY2Y= 2025-04-04T17:19:13.456189Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} session: 
ydb://session/3?node_id=1&id=YzQ5MDNkYmUtNTZmODlhZTMtNGYxY2VkYWYtMjVkYzQyZTE= 2025-04-04T17:19:13.457520Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} session: ydb://session/3?node_id=1&id=ZjhkYmU0ZTAtZmIzZGM3NjgtODJjZjRkMmQtZThmZTI4Mw== 2025-04-04T17:19:13.459905Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} session: ydb://session/3?node_id=1&id=ZTRkNGRjOTItMzNmNTEyOGUtMmMyOTIxOTgtNTQ2ZDMzZWU= 2025-04-04T17:19:13.464564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.464671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.464726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.464773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.464825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.464876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.464933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.474799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:19:13.527853Z node 1 :TX_PROXY ERROR: Actor# [1:797:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:13.528529Z node 1 :TX_PROXY ERROR: Actor# [1:798:2675] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:13.528851Z node 1 :TX_PROXY ERROR: Actor# [1:799:2676] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:13.529208Z node 1 :TX_PROXY ERROR: Actor# [1:803:2677] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:13.690643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:788:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:13.690740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:789:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:13.690790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:13.690833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:13.690894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:13.725444Z node 1 :TX_PROXY ERROR: Actor# [1:898:2740] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:14.416394Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 4} finished in 1743787154.416339s, errors=0 2025-04-04T17:19:14.416617Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743787154416 OperationsOK: 4 OperationsError: 0 } 2025-04-04T17:19:14.429730Z node 1 :TX_PROXY ERROR: Actor# [1:951:2778] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:14.490071Z node 1 :TX_PROXY ERROR: Actor# [1:981:2799] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:14.505780Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 2} finished in 1743787154.505719s, errors=0 2025-04-04T17:19:14.506071Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743787154505 OperationsOK: 4 OperationsError: 0 } 2025-04-04T17:19:14.550370Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 3} finished in 1743787154.550319s, errors=0 2025-04-04T17:19:14.550502Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743787154550 OperationsOK: 4 OperationsError: 0 } 2025-04-04T17:19:14.563682Z node 1 :TX_PROXY ERROR: Actor# [1:1012:2821] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:14.621122Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 1} finished in 1743787154.621075s, errors=0 2025-04-04T17:19:14.621393Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743787154621 OperationsOK: 4 OperationsError: 0 } 2025-04-04T17:19:14.634718Z node 1 :TX_PROXY ERROR: Actor# [1:1043:2843] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:14.693187Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:737:2620], subTag: 5} finished in 1743787154.693132s, errors=0 2025-04-04T17:19:14.693431Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:736:2619], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743787154693 OperationsOK: 4 OperationsError: 0 } 2025-04-04T17:19:14.693484Z node 
1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 2} finished in 1.251976s, oks# 20, errors# 0 2025-04-04T17:19:14.693592Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:737:2620] with tag# 2 >> TxUsage::WriteToTopic_Demo_2 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-04-04T17:17:36.620974Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:36.698857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:36.698917Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:36.707261Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:36.707760Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:36.708080Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:36.718490Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:36.758822Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:36.758907Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:36.760609Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:36.760674Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:36.760713Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:36.761006Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:36.761170Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:36.761230Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:36.824869Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:17:36.854721Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:36.854910Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:36.855016Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:36.855053Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:36.855105Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:36.855153Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:36.855327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.855380Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.855636Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:36.855735Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 
9437184 2025-04-04T17:17:36.855850Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:36.855898Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:36.855942Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:36.855975Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:36.856011Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:36.856059Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:36.856099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:36.856220Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.856253Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.856305Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:36.859066Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:36.859128Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:36.859206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:36.859375Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:36.859429Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:36.859476Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:17:36.859520Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:36.859563Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:36.859620Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:36.859658Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:36.859961Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:36.860003Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:36.860036Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:36.860073Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:36.860142Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:36.860174Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:36.860207Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit 
WaitForPlan 2025-04-04T17:17:36.860240Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:36.860266Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:36.872921Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:36.873015Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:36.873050Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:36.873106Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:36.873165Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:36.873671Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.873744Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:36.873788Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:36.873921Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:36.873953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:36.874095Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:36.874139Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:36.874177Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:36.874213Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:36.877947Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:36.878015Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:36.878309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.878353Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:36.878409Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:36.878450Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:36.878487Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:36.878531Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:36.878569Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:36.878628Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:36.878675Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:36.878727Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:36.878785Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:36.878971Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:36.879007Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:36.879035Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:36.879060Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:36.879081Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:36.879145Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:36.879172Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:36.879206Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:36.879257Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:36.879316Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:36.879372Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:36.879411Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:36.879451Z node 1 :TX_D ... 
TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437186 for LoadAndWaitInRS 2025-04-04T17:19:10.586529Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit LoadAndWaitInRS 2025-04-04T17:19:10.586556Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is Executed 2025-04-04T17:19:10.586595Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit LoadAndWaitInRS 2025-04-04T17:19:10.586619Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit ExecuteDataTx 2025-04-04T17:19:10.586646Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit ExecuteDataTx 2025-04-04T17:19:10.588856Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437186 with status COMPLETE 2025-04-04T17:19:10.588918Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437186: {NSelectRow: 6, NSelectRange: 5, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 137, SelectRangeBytes: 1096, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:19:10.588969Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is ExecutedNoMoreRestarts 2025-04-04T17:19:10.588995Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit ExecuteDataTx 2025-04-04T17:19:10.589026Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit CompleteOperation 2025-04-04T17:19:10.589055Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit CompleteOperation 2025-04-04T17:19:10.589275Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is DelayComplete 2025-04-04T17:19:10.589307Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit CompleteOperation 2025-04-04T17:19:10.589335Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit CompletedOperations 2025-04-04T17:19:10.589362Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit CompletedOperations 2025-04-04T17:19:10.589394Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is Executed 2025-04-04T17:19:10.589416Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit CompletedOperations 2025-04-04T17:19:10.589440Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437186 has finished 2025-04-04T17:19:10.589468Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:19:10.589494Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-04-04T17:19:10.589522Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-04-04T17:19:10.589550Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-04-04T17:19:10.607821Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:19:10.607884Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437186 on unit CompleteOperation 2025-04-04T17:19:10.607952Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437186 at tablet 9437186 send result to client [4:98:2133], exec latency: 4 ms, propose latency: 6 ms 2025-04-04T17:19:10.608018Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 
9437186 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-04T17:19:10.608056Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:19:10.608270Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:19:10.608297Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit StoreAndSendOutRS 2025-04-04T17:19:10.608328Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437186 from 9437186 to 9437184 txId 403 2025-04-04T17:19:10.608376Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:19:10.608410Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-04-04T17:19:10.608445Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-04-04T17:19:10.608469Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2025-04-04T17:19:10.608509Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:19:10.608551Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-04-04T17:19:10.608576Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-04-04T17:19:10.609032Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:453:2395], Recipient [4:344:2311]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-04-04T17:19:10.609081Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:19:10.609118Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 402 2025-04-04T17:19:10.609496Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:453:2395], Recipient [4:344:2311]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-04-04T17:19:10.609536Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:19:10.609565Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 2025-04-04T17:19:10.609954Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:453:2395], Recipient [4:233:2226]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-04T17:19:10.609999Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:19:10.610037Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437184 source 9437186 dest 9437184 producer 9437186 txId 403 2025-04-04T17:19:10.610107Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-04-04T17:19:10.610154Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437186 to=9437184 origin=9437186 2025-04-04T17:19:10.610221Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184
2025-04-04T17:19:10.610512Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:233:2226], Recipient [4:233:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:19:10.610552Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:19:10.610610Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:19:10.610648Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:19:10.610685Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2025-04-04T17:19:10.610713Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2025-04-04T17:19:10.610748Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-04T17:19:10.610776Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2025-04-04T17:19:10.610805Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:19:10.610832Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit ExecuteDataTx 2025-04-04T17:19:10.612935Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2025-04-04T17:19:10.612998Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 2, NSelectRange: 3, NUpdateRow: 4, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 161, SelectRangeBytes: 1288, UpdateRowBytes: 29, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:19:10.613047Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:19:10.613077Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:19:10.613108Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2025-04-04T17:19:10.613141Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2025-04-04T17:19:10.613377Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is DelayComplete 2025-04-04T17:19:10.613408Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2025-04-04T17:19:10.613437Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2025-04-04T17:19:10.613465Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2025-04-04T17:19:10.613497Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-04-04T17:19:10.613519Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2025-04-04T17:19:10.613544Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2025-04-04T17:19:10.613577Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:19:10.613603Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:19:10.613632Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 
2025-04-04T17:19:10.613657Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:19:10.634328Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:19:10.634399Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2025-04-04T17:19:10.634472Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:98:2133], exec latency: 4 ms, propose latency: 6 ms 2025-04-04T17:19:10.634548Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-04-04T17:19:10.634611Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:19:10.635391Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:233:2226], Recipient [4:453:2395]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-04-04T17:19:10.635446Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:19:10.635487Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ComplexJoin [GOOD] Test command err: 2025-04-04T17:16:52.903266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508191789415822:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:52.904459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001618/r3tmp/tmpT2iQF0/pdisk_1.dat 2025-04-04T17:16:53.307703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:53.307820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:53.310383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:53.324427Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26289, node 1 2025-04-04T17:16:53.352142Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:53.352382Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:53.414788Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:53.414814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:53.414839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:53.414962Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:16:53.463620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15315 
2025-04-04T17:16:53.703183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.722337Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508193053296503:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:53.722752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:16:53.832942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:53.833007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:53.845689Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:16:53.847967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:53.872773Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:16:53.932598Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:53.932694Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:53.938149Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:53.986927Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.988960Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.989053Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.989151Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.989221Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.989283Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.989352Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.989397Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:53.989476Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:16:54.135224Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:16:54.135290Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:16:54.143573Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:54.150387Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:16:54.150452Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:16:54.150747Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:16:54.150779Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:16:54.150801Z node 2 :STATISTICS DEBUG: [72075186224037894] 
Loaded ScheduleTraversals: table count# 0 2025-04-04T17:16:54.150829Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:16:54.150851Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:16:54.150882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:16:54.151123Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:16:54.151139Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7489508197348264338:2277] 2025-04-04T17:16:54.152884Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:16:54.161306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:16:54.161379Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:16:54.161386Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:16:54.161458Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:16:54.171510Z node 2 :TX_PROXY ERROR: Actor# [2:7489508197348264398:2327] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-04-04T17:16:54.172855Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:16:54.172908Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7489508197348264437:2315], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:16:54.180438Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-04-04T17:16:54.186504Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7489508197348264496:2370] 2025-04-04T17:16:54.189823Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7489508197348264496:2370], schemeshard id = 72075186224037897 2025-04-04T17:16:54.274413Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:16:54.287678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897 2025-04-04T17:16:54.293548Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:16:54.293618Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720658 2025-04-04T17:16:54.462067Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720658. Doublechecking... 2025-04-04T17:16:54.634558Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:16:56.238956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508208969286174:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.239060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:56.500036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72075186224037897 2025-04-04T17:16:56.850937Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:16:56.851129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:16:56.851388Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:16:56.851512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:16:56.851608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:16:56.851746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:16:56.851856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:16:56.851985Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:16:56.852115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;self_id=[2:7489508205938199466:2349];tablet_id=72075186224037911;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:16:56.852241Z node 2 :TX_COLUMNSHARD WARN: tabl ... ccess permissions } 2025-04-04T17:19:05.604748Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:05.614479Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489508761231842562:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:05.687192Z node 5 :TX_PROXY ERROR: Actor# [5:7489508761231842616:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"0","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"0","E-Cost":"0"},{"Scan":"Parallel","E-Size":"0","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"0","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"0"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/Logs","E-Rows":"0","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 24144, MsgBus: 18341 2025-04-04T17:19:07.773608Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489508770960660526:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:07.773723Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001618/r3tmp/tmpBp09AH/pdisk_1.dat 2025-04-04T17:19:07.874068Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24144, node 6 2025-04-04T17:19:07.906114Z node 6 
:HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:07.906230Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:07.909433Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:07.929012Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:07.929039Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:07.929049Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:07.929160Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18341 TClient is connected to server localhost:18341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:19:08.334262Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:08.348182Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:08.413276Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:08.569168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:08.628829Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.972680Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508783845564179:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.972769Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:11.031096Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:11.104822Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:11.145051Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:11.185897Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:11.220179Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:11.291166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:11.340966Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508788140531992:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:11.341070Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:11.341139Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489508788140531997:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:11.345112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:11.355107Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489508788140531999:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:11.430886Z node 6 :TX_PROXY ERROR: Actor# [6:7489508788140532052:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:12.501372Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:12.765992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:12.773658Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489508770960660526:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:12.773740Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:12.812483Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> TxUsage::WriteToTopic_Demo_35 >> TxUsage::WriteToTopic_Demo_13 >> TColumnShardTestReadWrite::ReadSomePrograms >> TColumnShardTestReadWrite::CompactionGC >> TColumnShardTestReadWrite::ReadWithProgramLike >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> TxUsage::WriteToTopic_Two_WriteSession [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2025-04-04T17:19:12.475481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:12.475963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:12.476019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ccc/r3tmp/tmpPbPGAD/pdisk_1.dat 2025-04-04T17:19:12.862877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:12.904051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:12.943083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:12.943198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:12.954686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:13.034998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.330907Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-04-04T17:19:13.331070Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-04T17:19:13.413019Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor finished in 0.081557s, errors=0 2025-04-04T17:19:13.413119Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:737:2620] with tag# 2 2025-04-04T17:19:16.235464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:16.235569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:19:16.235719Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ccc/r3tmp/tmpyhVADJ/pdisk_1.dat 2025-04-04T17:19:16.462018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:16.485265Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:16.520683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:16.520790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:16.531943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:16.612100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:16.862159Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-04-04T17:19:16.862312Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-04T17:19:16.929286Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor finished in 0.066572s, errors=0 2025-04-04T17:19:16.929386Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:738:2620] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2025-04-04T17:19:10.661024Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:10.747650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:10.769956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:10.770204Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:10.777307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:10.777547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-04-04T17:19:10.777750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:10.777856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:10.777963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:10.778053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:10.778159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:10.778296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:10.778402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:10.778507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:10.778608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:10.778701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:10.804405Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:10.804617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:10.804666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:10.804845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:10.805006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:10.805087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:10.805124Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:10.805202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:10.805252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:10.805288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:10.805315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:10.805490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:10.805558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:10.805614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:10.805642Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:10.805723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:10.805786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:10.805848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:10.805879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:10.805962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:10.805999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:10.806028Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:10.806071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:10.806109Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:10.806138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:10.806513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-04T17:19:10.806622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=59; 2025-04-04T17:19:10.806715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-04T17:19:10.806797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-04T17:19:10.806969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:10.807046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:10.807081Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:10.807264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:10.807316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:10.807349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:10.807502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:10.807540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:10.807570Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:10.807739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:10.807781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:10.807805Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:10.807936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:10.807973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:10.808027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... est_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.542759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:19:17.542876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T17:19:17.542940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T17:19:17.543062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-04T17:19:17.543180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.543270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.543399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.543560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:17.543664Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.543765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.543796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1051:2922] finished for tablet 9437184 2025-04-04T17:19:17.544135Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1050:2921];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1743787157530676,"name":"_full_task","f":1743787157530676,"d_finished":0,"c":0,"l":1743787157543838,"d":13162},"events":[{"name":"bootstrap","f":1743787157530825,"d_finished":2753,"c":1,"l":1743787157533578,"d":2753},{"a":1743787157543541,"name":"ack","f":1743787157542428,"d_finished":996,"c":1,"l":1743787157543424,"d":1293},{"a":1743787157543531,"name":"processing","f":1743787157534938,"d_finished":5120,"c":10,"l":1743787157543426,"d":5427},{"name":"ProduceResults","f":1743787157532459,"d_finished":2665,"c":13,"l":1743787157543786,"d":2665},{"a":1743787157543787,"name":"Finish","f":1743787157543787,"d_finished":0,"c":0,"l":1743787157543838,"d":51},{"name":"task_result","f":1743787157534974,"d_finished":3991,"c":9,"l":1743787157542213,"d":3991}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.544200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:19:17.544515Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1050:2921];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":1743787157530676,"name":"_full_task","f":1743787157530676,"d_finished":0,"c":0,"l":1743787157544235,"d":13559},"events":[{"name":"bootstrap","f":1743787157530825,"d_finished":2753,"c":1,"l":1743787157533578,"d":2753},{"a":1743787157543541,"name":"ack","f":1743787157542428,"d_finished":996,"c":1,"l":1743787157543424,"d":1690},{"a":1743787157543531,"name":"processing","f":1743787157534938,"d_finished":5120,"c":10,"l":1743787157543426,"d":5824},{"name":"ProduceResults","f":1743787157532459,"d_finished":2665,"c":13,"l":1743787157543786,"d":2665},{"a":1743787157543787,"name":"Finish","f":1743787157543787,"d_finished":0,"c":0,"l":1743787157544235,"d":448},{"name":"task_result","f":1743787157534974,"d_finished":3991,"c":9,"l":1743787157542213,"d":3991}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:17.544573Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:17.530190Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T17:19:17.544603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:17.544892Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TxUsage::WriteToTopic_Demo_5 >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 
2025-04-04T17:19:12.444078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:12.444520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:12.444568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cba/r3tmp/tmpk8uutT/pdisk_1.dat 2025-04-04T17:19:12.812792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:12.856054Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:12.894147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:12.894292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:12.905628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:12.985860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.282345Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-04-04T17:19:13.282513Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-04T17:19:13.427624Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor finished in 0.144751s, errors=0 2025-04-04T17:19:13.427706Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:737:2620] with tag# 2 2025-04-04T17:19:16.546791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:16.546906Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:19:16.547098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cba/r3tmp/tmpRnKjEc/pdisk_1.dat 2025-04-04T17:19:16.822840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:16.849482Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:16.885071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:16.885200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:16.896885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:16.977403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:17.230620Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-04-04T17:19:17.230730Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-04-04T17:19:17.323296Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor finished in 0.092263s, errors=0 2025-04-04T17:19:17.323390Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:738:2620] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-04-04T17:19:12.381580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:12.382001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:12.382054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cbe/r3tmp/tmp8O36WF/pdisk_1.dat 2025-04-04T17:19:12.777969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:12.825771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:12.868259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:12.868872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:12.881100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:12.972095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.288782Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-04T17:19:13.288931Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-04-04T17:19:13.358410Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 2} TUpsertActor finished in 0.069036s, errors=0 2025-04-04T17:19:13.358499Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:737:2620] with tag# 2 2025-04-04T17:19:16.405226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:16.405342Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:19:16.405539Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cbe/r3tmp/tmpGFHsyi/pdisk_1.dat 2025-04-04T17:19:16.666264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:16.690161Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:16.725444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:16.725536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:16.737011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:16.815372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:17.056135Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-04-04T17:19:17.056256Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-04-04T17:19:17.126030Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 2} TUpsertActor finished in 0.069395s, errors=0 2025-04-04T17:19:17.126129Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:738:2620] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-04-04T17:19:17.292413Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:17.377920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:17.406891Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:17.407229Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:17.416186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:17.416427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:17.416709Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:17.416851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:17.416989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:17.417121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:17.417237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:17.417368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:17.417532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:17.417671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.417815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:17.417934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:17.449620Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:17.449879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:17.449946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:17.450121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:17.450315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:17.450404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:17.450468Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:17.450581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:17.450671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:17.450720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:17.450755Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:17.450949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:17.451030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:17.451084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:17.451118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:17.451226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:17.451320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:17.451377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:17.451410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:17.451498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:17.451538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:17.451574Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:17.451626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:17.451669Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:17.451701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:17.452120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-04-04T17:19:17.452230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-04T17:19:17.452329Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-04T17:19:17.452416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-04T17:19:17.452602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:17.452676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:17.452714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:17.452917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:17.452977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.453016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.453181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:17.453232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:17.453264Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:17.453632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:17.453683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:17.453716Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:17.453853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:17.453901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:17.453956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... s_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-04T17:19:18.120943Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:143;event=RegisterTable;path_id=1; 2025-04-04T17:19:18.127509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:487;event=OnTieringModified;path_id=1; 2025-04-04T17:19:18.127714Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:211;event=finished_tx;tx_id=10; 2025-04-04T17:19:18.152724Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-04T17:19:18.152865Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2025-04-04T17:19:18.168345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];fline=actor.cpp:22;event=flush_writing;size=3200;count=1; 2025-04-04T17:19:18.170355Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-04T17:19:18.170680Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-04T17:19:18.182905Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-04T17:19:18.183079Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:19:18.196796Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 100 at tablet 9437184, mediator 0 2025-04-04T17:19:18.196863Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-04-04T17:19:18.197132Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-04-04T17:19:18.197313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:211;event=finished_tx;tx_id=100; 2025-04-04T17:19:18.209382Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-04-04T17:19:18.209480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-04-04T17:19:18.209583Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=3384; 2025-04-04T17:19:18.212893Z node 
1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::f0cb6bac-117811f0-8e3df132-363970f; 2025-04-04T17:19:18.212970Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-04T17:19:18.213062Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:752;event=indexation;bytes=3384;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=f0cb6bac-117811f0-8e3df132-363970f; 2025-04-04T17:19:18.213291Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:618;event=start_changes;type=CS::INDEXATION;task_id=f0cb6bac-117811f0-8e3df132-363970f; 2025-04-04T17:19:18.213677Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=3035;external_task_id=f0cb6bac-117811f0-8e3df132-363970f;type=CS::INDEXATION;priority=0;; 2025-04-04T17:19:18.213926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=3035;external_task_id=f0cb6bac-117811f0-8e3df132-363970f;type=CS::INDEXATION;priority=0;; 2025-04-04T17:19:18.213973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=f0cb6bac-117811f0-8e3df132-363970f;mem=3035;cpu=0; 2025-04-04T17:19:18.214151Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=f0cb6bac-117811f0-8e3df132-363970f;task_id=1;mem=3035;cpu=0; 2025-04-04T17:19:18.214330Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=f0cb6bac-117811f0-8e3df132-363970f;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=f0cb6bac-117811f0-8e3df132-363970f; 2025-04-04T17:19:18.218634Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=f0cb6bac-117811f0-8e3df132-363970f;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-04T17:19:18.218773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-04-04T17:19:18.220142Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T17:19:18.220395Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-04-04T17:19:18.221022Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-04T17:19:18.221133Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:19:18.221609Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:19:18.221662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:19:18.221747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:767;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3384;indexing_debug={task_ids=f0cb6bac-117811f0-8e3df132-363970f,;}; 2025-04-04T17:19:18.221820Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T17:19:18.222022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:19:18.222078Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:19:18.222122Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:19:18.222219Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:19:18.222543Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {100:100} readable: {100:max} at tablet 9437184 2025-04-04T17:19:18.234983Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-04T17:19:18.235085Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;fline=with_appended.cpp:65;portions=1,;task_id=f0cb6bac-117811f0-8e3df132-363970f; 2025-04-04T17:19:18.235283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::f0cb6bac-117811f0-8e3df132-363970f; 2025-04-04T17:19:18.235362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:19:18.235431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:19:18.235518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T17:19:18.235581Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:19:18.235635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:19:18.235682Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:19:18.235770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998500s; 2025-04-04T17:19:18.235841Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f0cb6bac-117811f0-8e3df132-363970f;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:19:18.235943Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:3384:0] 2025-04-04T17:19:18.235999Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-04-04T17:19:18.236131Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:19:18.236233Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=f0cb6bac-117811f0-8e3df132-363970f;mem=3035;cpu=0; 2025-04-04T17:19:18.236422Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-04-04T17:19:18.236567Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=100;scan_id=0;gen=0;table=;snapshot={100:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-04-04T17:19:17.560188Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:17.642787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:17.660122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:17.660324Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:17.666794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:17.667030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:17.667288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:17.667432Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:17.667556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:17.667664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:17.667767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:17.667870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:17.668005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:17.668136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.668251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:17.668352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:17.693760Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:17.693960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:17.694012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:17.694144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:17.694305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:17.694367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:17.694404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:17.694514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-04-04T17:19:17.694577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:17.694633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:17.694659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:17.694837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:17.694883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:17.694909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:17.694927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:17.695032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:17.695102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:17.695146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:17.695175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:17.695232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:17.695274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:17.695307Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:17.695351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:17.695386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:17.695411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:17.695798Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-04T17:19:17.695949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=97; 2025-04-04T17:19:17.696035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-04-04T17:19:17.696104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-04T17:19:17.696248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:17.696287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:17.696311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:17.696422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:17.696450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.696476Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.696588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:17.696624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:17.696648Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:17.696782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:17.696810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:17.696828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:17.696915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:17.696951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:17.696990Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... ine=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:19:18.468672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:19:18.468703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-04-04T17:19:18.468742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-04-04T17:19:18.468788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-04-04T17:19:18.468819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-04-04T17:19:18.468929Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.468963Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-04-04T17:19:18.468993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:19:18.469172Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:18.469284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.469319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:19:18.469413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-04-04T17:19:18.469463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=61;num_rows=10;batch_columns=message; 2025-04-04T17:19:18.469559Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:301:2319];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-04-04T17:19:18.469657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.469738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.469813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.469964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:18.470058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.470133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.470182Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:302:2320] finished for tablet 9437184 2025-04-04T17:19:18.470571Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:301:2319];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":1743787158459719,"name":"_full_task","f":1743787158459719,"d_finished":0,"c":0,"l":1743787158470231,"d":10512},"events":[{"name":"bootstrap","f":1743787158459870,"d_finished":2051,"c":1,"l":1743787158461921,"d":2051},{"a":1743787158469946,"name":"ack","f":1743787158469156,"d_finished":675,"c":1,"l":1743787158469831,"d":960},{"a":1743787158469916,"name":"processing","f":1743787158463050,"d_finished":4783,"c":9,"l":1743787158469833,"d":5098},{"name":"ProduceResults","f":1743787158460937,"d_finished":2011,"c":12,"l":1743787158470168,"d":2011},{"a":1743787158470170,"name":"Finish","f":1743787158470170,"d_finished":0,"c":0,"l":1743787158470231,"d":61},{"name":"task_result","f":1743787158463064,"d_finished":3997,"c":8,"l":1743787158469035,"d":3997}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.470700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:301:2319];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:19:18.471040Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:301:2319];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":1743787158459719,"name":"_full_task","f":1743787158459719,"d_finished":0,"c":0,"l":1743787158470732,"d":11013},"events":[{"name":"bootstrap","f":1743787158459870,"d_finished":2051,"c":1,"l":1743787158461921,"d":2051},{"a":1743787158469946,"name":"ack","f":1743787158469156,"d_finished":675,"c":1,"l":1743787158469831,"d":1461},{"a":1743787158469916,"name":"processing","f":1743787158463050,"d_finished":4783,"c":9,"l":1743787158469833,"d":5599},{"name":"ProduceResults","f":1743787158460937,"d_finished":2011,"c":12,"l":1743787158470168,"d":2011},{"a":1743787158470170,"name":"Finish","f":1743787158470170,"d_finished":0,"c":0,"l":1743787158470732,"d":562},{"name":"task_result","f":1743787158463064,"d_finished":3997,"c":8,"l":1743787158469035,"d":3997}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-04-04T17:19:18.471132Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:18.459322Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T17:19:18.471166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:18.471379Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:302:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22863, MsgBus: 26844 2025-04-04T17:14:26.264354Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507561721309870:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:14:26.264393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/001808/r3tmp/tmpKtPkGD/pdisk_1.dat 2025-04-04T17:14:26.615724Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22863, node 1 2025-04-04T17:14:26.687568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:14:26.694656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:14:26.705392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:14:26.744388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:14:26.744413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:14:26.744424Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:14:26.744531Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26844 TClient is connected to server localhost:26844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:14:27.316326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:14:27.351166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 CREATE TABLE `/Root/ColumnShard1` (Col1 Int64 NOT NULL, Col2 Int32 NOT NULL, PRIMARY KEY (Col1)) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000); 2025-04-04T17:14:29.014762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507574606212412:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:29.014822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:14:29.578644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:14:29.888531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:29.888874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:29.889124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:29.889243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:29.889349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:29.889450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:29.889639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:29.889779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:29.889880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:29.889971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:29.890054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:29.890142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489507574606212586:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-04-04T17:14:29.893511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:29.893567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:29.893744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:29.893859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:29.893973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:29.894108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:14:29.894207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:14:29.894379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:14:29.894491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:14:29.894589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:14:29.894692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:14:29.894806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7489507574606212593:2342];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:14:29.926330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7489507574606212607:2349];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:14:29.926385Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037904;self_id=[1:7489507574606212607:2349];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:14:29.926592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7489507574606212607:2349];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:14:29.926695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7489507574606212607:2349];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:14:29.926789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7489507574606212607:2349];tablet_id=72075186224037904;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:14:29.926881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[1:7489507574606212607:2349];tab ... tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:03.093254Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:03.093313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:03.093491Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:03.093528Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:03.134227Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.134962Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.141828Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.142584Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.149600Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.151340Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.158072Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.159547Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.166006Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.167582Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-04-04T17:19:03.190025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508753584995124:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:03.190117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:03.190364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508753584995129:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:03.196110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:19:03.211004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508753584995131:2405], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:19:03.294869Z node 2 :TX_PROXY ERROR: Actor# [2:7489508753584995182:2583] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:03.374869Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:19:03.415250Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:19:03.680789Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508732110157516:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.680874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7267, MsgBus: 21525 2025-04-04T17:19:05.075954Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508761724281467:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:05.076348Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001808/r3tmp/tmpyUI3Lj/pdisk_1.dat 2025-04-04T17:19:05.227064Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:05.251489Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:05.251604Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:05.252589Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7267, node 3 2025-04-04T17:19:05.298759Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:05.298782Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:05.298792Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:05.298988Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21525 TClient is connected to server localhost:21525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:19:05.865130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:09.511013Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508778904151302:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:09.511129Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:09.544801Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:19:09.788153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.076096Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489508761724281467:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:10.076287Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:10.161213Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508783199120033:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.161386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.161632Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489508783199120038:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.168544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:19:10.186135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489508783199120042:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:19:10.271997Z node 3 :TX_PROXY ERROR: Actor# [3:7489508783199120098:3265] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2025-04-04T17:19:12.325415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:12.325946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:12.325995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cac/r3tmp/tmplVdaAp/pdisk_1.dat 2025-04-04T17:19:12.777792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:12.821617Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:12.868288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:12.868871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:12.881200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:12.972088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.287190Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-04-04T17:19:13.288777Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-04T17:19:13.341602Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 1} TUpsertActor finished in 0.052411s, errors=0 2025-04-04T17:19:13.342166Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-04T17:19:13.342333Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:745:2628] with id# {Tag: 0, parent: [1:736:2619], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-04T17:19:13.343522Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:736:2619], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-04T17:19:13.344761Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:748:2631] 2025-04-04T17:19:13.344876Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} Bootstrap called, sample# 0 2025-04-04T17:19:13.344920Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} Connect to# 72075186224037888 called 2025-04-04T17:19:13.345779Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} Handle TEvClientConnected 
called, Status# OK 2025-04-04T17:19:13.353471Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} finished in 0.007627s, read# 1000 2025-04-04T17:19:13.353889Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:748:2631] with chunkSize# 0 finished: 0 { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 } 2025-04-04T17:19:13.354019Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:751:2634] 2025-04-04T17:19:13.354083Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 2} Bootstrap called, sample# 0 2025-04-04T17:19:13.354108Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 2} Connect to# 72075186224037888 called 2025-04-04T17:19:13.354392Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-04T17:19:13.576086Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 2} finished in 0.221629s, read# 1000 2025-04-04T17:19:13.576229Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:751:2634] with chunkSize# 1 finished: 0 { DurationMs: 221 OperationsOK: 1000 OperationsError: 0 } 2025-04-04T17:19:13.576326Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:754:2637] 2025-04-04T17:19:13.576360Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 3} Bootstrap called, sample# 0 2025-04-04T17:19:13.576388Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 3} Connect to# 72075186224037888 called 2025-04-04T17:19:13.576610Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-04T17:19:13.634681Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 3} finished in 0.058027s, read# 1000 2025-04-04T17:19:13.634822Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:754:2637] with chunkSize# 10 finished: 0 { DurationMs: 58 OperationsOK: 1000 OperationsError: 0 } 2025-04-04T17:19:13.634959Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:757:2640] 2025-04-04T17:19:13.635019Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 4} Bootstrap called, sample# 1000 2025-04-04T17:19:13.635047Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 4} Connect to# 72075186224037888 called 2025-04-04T17:19:13.635325Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-04T17:19:13.637966Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 4} finished in 0.002110s, sampled# 1000, iter finished# 1, oks# 1000 2025-04-04T17:19:13.638092Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:736:2619], subTag: 3} received keyCount# 1000 2025-04-04T17:19:13.638228Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:736:2619], subTag: 3} started read actor with id# [1:760:2643] 2025-04-04T17:19:13.638301Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:745:2628], subTag: 5} Bootstrap called, will read keys# 1000 2025-04-04T17:19:13.929883Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:736:2619], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-04T17:19:13.930070Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 
finished: 0 { DurationMs: 291 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 22\n" } 2025-04-04T17:19:13.930213Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:736:2619], subTag: 3} finished in 0.587737s with report: { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 221 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 58 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 291 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 22\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-04T17:19:13.930555Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:745:2628] with tag# 3 2025-04-04T17:19:16.939253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:16.939369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:19:16.939552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cac/r3tmp/tmpJ37auX/pdisk_1.dat 2025-04-04T17:19:17.190844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:17.214650Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:17.250120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:17.250257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:17.261490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:17.341830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:17.598818Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-04-04T17:19:17.599044Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-04T17:19:17.621693Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:737:2619], subTag: 1} TUpsertActor finished in 0.022444s, errors=0 2025-04-04T17:19:17.622308Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-04-04T17:19:17.622456Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:746:2628] with id# {Tag: 0, parent: [2:737:2619], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-04-04T17:19:17.623689Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:737:2619], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-04T17:19:17.623829Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:749:2631] 2025-04-04T17:19:17.623929Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 1} Bootstrap called, sample# 0 2025-04-04T17:19:17.623970Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 1} Connect to# 72075186224037888 called 2025-04-04T17:19:17.624298Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 1} Handle 
TEvClientConnected called, Status# OK 2025-04-04T17:19:17.625099Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 1} finished in 0.000741s, read# 10 2025-04-04T17:19:17.625263Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:749:2631] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-04-04T17:19:17.625374Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:752:2634] 2025-04-04T17:19:17.625440Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 2} Bootstrap called, sample# 0 2025-04-04T17:19:17.625487Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 2} Connect to# 72075186224037888 called 2025-04-04T17:19:17.625740Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-04-04T17:19:17.627668Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 2} finished in 0.001893s, read# 10 2025-04-04T17:19:17.627751Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:752:2634] with chunkSize# 1 finished: 0 { DurationMs: 1 OperationsOK: 10 OperationsError: 0 } 2025-04-04T17:19:17.627810Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:755:2637] 2025-04-04T17:19:17.627837Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 3} Bootstrap called, sample# 0 2025-04-04T17:19:17.627855Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 3} Connect to# 72075186224037888 called 2025-04-04T17:19:17.628064Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-04-04T17:19:17.628480Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 3} finished in 0.000382s, read# 10 2025-04-04T17:19:17.628537Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:755:2637] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-04-04T17:19:17.628692Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:758:2640] 2025-04-04T17:19:17.628730Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 4} Bootstrap called, sample# 10 2025-04-04T17:19:17.628760Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 4} Connect to# 72075186224037888 called 2025-04-04T17:19:17.628913Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-04-04T17:19:17.629199Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:746:2628], subTag: 4} finished in 0.000242s, sampled# 10, iter finished# 1, oks# 10 2025-04-04T17:19:17.629258Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:737:2619], subTag: 3} received keyCount# 10 2025-04-04T17:19:17.629356Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:737:2619], subTag: 3} started read actor with id# [2:761:2643] 2025-04-04T17:19:17.629395Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:746:2628], subTag: 5} Bootstrap called, will read keys# 10 2025-04-04T17:19:17.989047Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:737:2619], subTag: 3} received point times# 1000, Inflight left# 0 2025-04-04T17:19:17.989257Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { 
DurationMs: 359 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 32\n" } 2025-04-04T17:19:17.989443Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:737:2619], subTag: 3} finished in 0.366809s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 1 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 359 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 32\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-04-04T17:19:17.989567Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:746:2628] with tag# 3 >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows >> Secret::Deactivated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-04-04T17:19:11.962606Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:12.040437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:12.060590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:12.060774Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:12.067203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:12.067369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:12.067572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:12.067661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:12.067737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:12.067809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:12.067945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:12.068013Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:12.068099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:12.068172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.068234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:12.068312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:12.095121Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:12.095405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:12.095470Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:12.095641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.095813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:12.095887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:12.095947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:12.096045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:12.096107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:12.096147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:12.096180Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:12.096349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.096420Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:12.096474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:12.096504Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:12.096595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:12.096667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:12.096728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:12.096757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:12.096838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:12.096877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:12.096912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:12.096956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:12.096992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:12.097018Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:12.097395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-04T17:19:12.097500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-04-04T17:19:12.097578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-04T17:19:12.097661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-04-04T17:19:12.097843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:12.097897Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:12.097928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:12.098117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:12.098168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.098215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.098384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:12.098431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:12.098459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:12.098657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:12.098702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:12.098730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:12.098857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:12.098900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:12.098960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
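The TTxInitSchema / TTxUpdateSchema trace above shows the columnshard normalizer chain in full: each normalizer class is announced with event=normalizer_register, then executed in sequence as normalizer_init / normalizer_finished / normalizer_switched records until event=normalization_finished. The same bootstrap repeats at the start of every columnshard test in this run. The sketch below (Python, not part of the test suite; it assumes nothing beyond the semicolon-delimited key=value record shape visible in these lines) recovers the completion order from a raw log dump:

import re

# Hypothetical log-scraping helper, not part of ydb or of this test run.
# It relies only on the record shape seen above:
#   ...;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
FINISHED = re.compile(
    r"event=normalizer_finished;description=CLASS_NAME=(?P<name>\w+);id=(?P<id>\d+);"
)

def normalizer_order(log_text: str) -> list[tuple[str, str]]:
    """Return (class_name, id) pairs in the order normalizers completed."""
    return [(m.group("name"), m.group("id")) for m in FINISHED.finditer(log_text)]

sample = (
    "fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; "
    "fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;"
)
print(normalizer_order(sample))  # [('Granules', '1'), ('Chunks', '2')]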
est_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:18.998267Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:19:18.998396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T17:19:18.998483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T17:19:18.998649Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-04T17:19:18.998811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:18.998936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:18.999098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:18.999343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:18.999493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:18.999630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:18.999672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1051:2922] finished for tablet 9437184 2025-04-04T17:19:19.000157Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1050:2921];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743787158987513,"name":"_full_task","f":1743787158987513,"d_finished":0,"c":0,"l":1743787158999728,"d":12215},"events":[{"name":"bootstrap","f":1743787158987651,"d_finished":2847,"c":1,"l":1743787158990498,"d":2847},{"a":1743787158999318,"name":"ack","f":1743787158997988,"d_finished":1143,"c":1,"l":1743787158999131,"d":1553},{"a":1743787158999305,"name":"processing","f":1743787158991630,"d_finished":4430,"c":10,"l":1743787158999133,"d":4853},{"name":"ProduceResults","f":1743787158989197,"d_finished":2775,"c":13,"l":1743787158999656,"d":2775},{"a":1743787158999659,"name":"Finish","f":1743787158999659,"d_finished":0,"c":0,"l":1743787158999728,"d":69},{"name":"task_result","f":1743787158991647,"d_finished":3173,"c":9,"l":1743787158997877,"d":3173}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:19.000251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:19:19.000689Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1050:2921];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743787158987513,"name":"_full_task","f":1743787158987513,"d_finished":0,"c":0,"l":1743787159000296,"d":12783},"events":[{"name":"bootstrap","f":1743787158987651,"d_finished":2847,"c":1,"l":1743787158990498,"d":2847},{"a":1743787158999318,"name":"ack","f":1743787158997988,"d_finished":1143,"c":1,"l":1743787158999131,"d":2121},{"a":1743787158999305,"name":"processing","f":1743787158991630,"d_finished":4430,"c":10,"l":1743787158999133,"d":5421},{"name":"ProduceResults","f":1743787158989197,"d_finished":2775,"c":13,"l":1743787158999656,"d":2775},{"a":1743787158999659,"name":"Finish","f":1743787158999659,"d_finished":0,"c":0,"l":1743787159000296,"d":637},{"name":"task_result","f":1743787158991647,"d_finished":3173,"c":9,"l":1743787158997877,"d":3173}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:19.000771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:18.987065Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T17:19:19.000813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:19.001154Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> TObjectStorageListingTest::MaxKeysAndSharding >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> 
TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD]
>> KqpQueryPerf::Update-QueryService+UseSink
>> KqpQueryPerf::MultiRead+QueryService
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD]
Test command err:
2025-04-04T17:19:07.108195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00142f/r3tmp/tmpRO7CCP/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 25095, node 1
TClient is connected to server localhost:18365
2025-04-04T17:19:07.621051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T17:19:07.651580Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:19:07.654387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:19:07.654433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:19:07.654463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:19:07.654634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:19:07.689305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:19:07.689444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:19:07.701030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
Initialization finished
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
2025-04-04T17:19:19.562647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:684:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:19:19.562815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled
;EXPECTATION=0
FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
>> TFlatTest::Ls
>> Secret::DeactivatedQueryService [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD]
Test command err:
2025-04-04T17:19:16.051432Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-04T17:19:16.142214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-04T17:19:16.167051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-04T17:19:16.167361Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-04T17:19:16.175414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-04T17:19:16.175659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-04T17:19:16.175894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-04T17:19:16.175994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-04T17:19:16.176133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-04T17:19:16.176232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-04T17:19:16.176338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-04T17:19:16.176449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-04T17:19:16.176566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-04T17:19:16.176686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-04T17:19:16.176783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-04T17:19:16.176889Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:16.206135Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:16.206397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:16.206454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:16.206651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.206839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:16.206914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:16.206960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:16.207052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:16.207109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:16.207150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:16.207176Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:16.207376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.207438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:16.207477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:16.207506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:16.207606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:16.207677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:16.207735Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:16.207776Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:16.207845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:16.207896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:16.207931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:16.207980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:16.208013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:16.208061Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:16.208461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-04T17:19:16.208571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-04T17:19:16.208644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-04T17:19:16.208733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T17:19:16.208910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:16.208974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:16.209014Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:16.209228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:16.209301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.209337Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.209495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:16.209536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:16.209566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:16.209745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:16.209784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:16.209816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:16.209929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:16.209970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:16.210016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
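Each scan in these tests ends with a scan_finish record carrying a stats={...} JSON blob (see the TX_COLUMNSHARD_SCAN INFO lines in this section); its "events" array holds per-stage counters, where "f"/"l" appear to be first/last microsecond timestamps and "d"/"c" an accumulated duration and call count — meanings inferred from the surrounding log, not from YDB documentation. A minimal Python sketch for summarizing those stage timings, assuming the blob has already been cut out of the log line:

import json

def stage_summary(stats_blob: str) -> dict[str, dict[str, int]]:
    # Assumes the {"p": [...], "full": {...}, "events": [...], "id": ...}
    # layout of the scan_finish stats records; "d" is read as accumulated
    # duration in microseconds and "c" as the call count (inferred, unverified).
    stats = json.loads(stats_blob)
    return {
        ev["name"]: {"duration_us": ev.get("d", 0), "calls": ev.get("c", 0)}
        for ev in stats["events"]
    }

# Trimmed-down blob in the same shape as the logs in this section:
blob = (
    '{"p":[],"full":{"a":0,"name":"_full_task","f":0,"d_finished":0,"c":0,'
    '"l":12215,"d":12215},"events":[{"name":"bootstrap","f":138,'
    '"d_finished":2847,"c":1,"l":2985,"d":2847}],"id":"9437184::12"}'
)
print(stage_summary(blob))  # {'bootstrap': {'duration_us': 2847, 'calls': 1}}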
id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.461646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:19:20.461812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T17:19:20.461954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T17:19:20.462145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-04-04T17:19:20.462375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.462540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.462705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.462958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:20.463128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.463312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.463366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:431:2446] finished for tablet 9437184 2025-04-04T17:19:20.463921Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:430:2445];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1743787160445656,"name":"_full_task","f":1743787160445656,"d_finished":0,"c":0,"l":1743787160463432,"d":17776},"events":[{"name":"bootstrap","f":1743787160445874,"d_finished":3419,"c":1,"l":1743787160449293,"d":3419},{"a":1743787160462929,"name":"ack","f":1743787160461318,"d_finished":1424,"c":1,"l":1743787160462742,"d":1927},{"a":1743787160462913,"name":"processing","f":1743787160450785,"d_finished":7012,"c":10,"l":1743787160462745,"d":7531},{"name":"ProduceResults","f":1743787160447791,"d_finished":3752,"c":13,"l":1743787160463350,"d":3752},{"a":1743787160463353,"name":"Finish","f":1743787160463353,"d_finished":0,"c":0,"l":1743787160463432,"d":79},{"name":"task_result","f":1743787160450804,"d_finished":5432,"c":9,"l":1743787160461118,"d":5432}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.464015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:430:2445];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:19:20.464571Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:430:2445];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1743787160445656,"name":"_full_task","f":1743787160445656,"d_finished":0,"c":0,"l":1743787160464063,"d":18407},"events":[{"name":"bootstrap","f":1743787160445874,"d_finished":3419,"c":1,"l":1743787160449293,"d":3419},{"a":1743787160462929,"name":"ack","f":1743787160461318,"d_finished":1424,"c":1,"l":1743787160462742,"d":2558},{"a":1743787160462913,"name":"processing","f":1743787160450785,"d_finished":7012,"c":10,"l":1743787160462745,"d":8162},{"name":"ProduceResults","f":1743787160447791,"d_finished":3752,"c":13,"l":1743787160463350,"d":3752},{"a":1743787160463353,"name":"Finish","f":1743787160463353,"d_finished":0,"c":0,"l":1743787160464063,"d":710},{"name":"task_result","f":1743787160450804,"d_finished":5432,"c":9,"l":1743787160461118,"d":5432}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:20.464674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:20.445037Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-04-04T17:19:20.464732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:20.465117Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:431:2446];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> KqpIndexes::VectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] >> TLocksTest::Range_CorrectNullDot ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2025-04-04T17:19:14.281033Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:14.365859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:14.384771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:14.384972Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:14.392701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:14.392864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:14.393046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:14.393114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:14.393199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:14.393257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:14.393319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:14.393420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:14.393514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:14.393598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:14.393654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:14.393737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:14.416640Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 
9437184 2025-04-04T17:19:14.416794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:14.416848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:14.416964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:14.417191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:14.417246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:14.417273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:14.417330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:14.417370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:14.417404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:14.417429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:14.417582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:14.417644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:14.417678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:14.417712Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:14.417764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:14.417809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:14.417854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:14.417888Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:14.417945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:14.417971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:14.417989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:14.418023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:14.418044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:14.418058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:14.418315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=30; 2025-04-04T17:19:14.418388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-04-04T17:19:14.418476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-04T17:19:14.418558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T17:19:14.418686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:14.418725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:14.418756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:14.418944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:14.419001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:14.419040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:14.419144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-04T17:19:14.419168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:14.419188Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:14.419310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:14.419332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:14.419349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:14.419426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:14.419449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:14.419497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.461455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:19:21.461600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-04-04T17:19:21.461704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-04-04T17:19:21.461859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-04-04T17:19:21.462024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.462147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.462300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.462549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:19:21.462711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.462846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.462890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1051:2922] finished for tablet 9437184 2025-04-04T17:19:21.463503Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1050:2921];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":1743787161447345,"name":"_full_task","f":1743787161447345,"d_finished":0,"c":0,"l":1743787161462955,"d":15610},"events":[{"name":"bootstrap","f":1743787161447557,"d_finished":3333,"c":1,"l":1743787161450890,"d":3333},{"a":1743787161462521,"name":"ack","f":1743787161461103,"d_finished":1238,"c":1,"l":1743787161462341,"d":1672},{"a":1743787161462506,"name":"processing","f":1743787161452250,"d_finished":5973,"c":10,"l":1743787161462344,"d":6422},{"name":"ProduceResults","f":1743787161449437,"d_finished":3190,"c":13,"l":1743787161462874,"d":3190},{"a":1743787161462876,"name":"Finish","f":1743787161462876,"d_finished":0,"c":0,"l":1743787161462955,"d":79},{"name":"task_result","f":1743787161452272,"d_finished":4592,"c":9,"l":1743787161460921,"d":4592}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.463598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1050:2921];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:19:21.464065Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1050:2921];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":1743787161447345,"name":"_full_task","f":1743787161447345,"d_finished":0,"c":0,"l":1743787161463649,"d":16304},"events":[{"name":"bootstrap","f":1743787161447557,"d_finished":3333,"c":1,"l":1743787161450890,"d":3333},{"a":1743787161462521,"name":"ack","f":1743787161461103,"d_finished":1238,"c":1,"l":1743787161462341,"d":2366},{"a":1743787161462506,"name":"processing","f":1743787161452250,"d_finished":5973,"c":10,"l":1743787161462344,"d":7116},{"name":"ProduceResults","f":1743787161449437,"d_finished":3190,"c":13,"l":1743787161462874,"d":3190},{"a":1743787161462876,"name":"Finish","f":1743787161462876,"d_finished":0,"c":0,"l":1743787161463649,"d":773},{"name":"task_result","f":1743787161452272,"d_finished":4592,"c":9,"l":1743787161460921,"d":4592}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-04-04T17:19:21.464165Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:19:21.446743Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-04-04T17:19:21.464223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:19:21.464679Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1051:2922];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: 2025-04-04T17:19:08.540193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:322:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00139c/r3tmp/tmpdYdo9v/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22998, node 1 TClient is connected to server localhost:2456 2025-04-04T17:19:09.073145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:09.100115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:09.106345Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:09.106401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:09.106427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:09.106586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:09.141014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:09.141158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:09.152355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-04T17:19:20.815923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:687:2579], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:20.816056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:697:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:20.816139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:20.824967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:19:20.843540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:701:2587], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:19:20.905861Z node 1 :TX_PROXY ERROR: Actor# [1:752:2619] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:21.095881Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:761:2627], status: GENERIC_ERROR, issues:
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing CREATE OBJECT SECRET
<main>: Error: metadata provider service is disabled 2025-04-04T17:19:21.097465Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDEyYjc4ZDAtMzQ0OTY2NWUtY2Q2ZGZjZjctOGYzNDQ2ZmM=, ActorId: [1:685:2577], ActorState: ExecuteState, TraceId: 01jr0x0x715pb8sn27s6p0svwa, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> KqpQueryPerf::IndexReplace-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::VectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 29217, MsgBus: 26741 2025-04-04T17:18:13.709238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508537704682845:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:13.709565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c3/r3tmp/tmpWZtkyx/pdisk_1.dat 2025-04-04T17:18:14.048875Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:14.051651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:14.051727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:14.054980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29217, node 1 2025-04-04T17:18:14.146963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:14.146997Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:14.147009Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:14.147141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26741 TClient is connected to server localhost:26741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:14.707012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:14.728183Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:14.767320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:18:14.928614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.132699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:15.199672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:16.798752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508550589586411:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:16.798869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.052538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.084732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.107830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.140874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.167356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.196291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:17.244063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508554884554214:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.244131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.244327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508554884554219:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:17.247478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:17.259555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508554884554221:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:17.357035Z node 1 :TX_PROXY ERROR: Actor# [1:7489508554884554276:3435] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:18.220138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.410692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:18.539315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:18.709554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508537704682845:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:18.709603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:29.043520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:29.043565Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 16893, MsgBus: 5004 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016c3/r3tmp/tmpmwzLUk/pdisk_1.dat 2025-04-04T17:18:42.715012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:18:42.716250Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16893, node 2 2025-04-04T17:18:42.747551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:42.747635Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:42.749291Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:42.792512Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:42.792539Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:42.792550Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:42.792665Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5004 TClient is connected to server localhost:5004 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:43.285905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.298654Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:18:43.319826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.441351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.616218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:43.698064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:46.102391Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508679787468962:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.102498Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.146327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.216347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.298410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.381620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.413836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.450295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:46.508962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508679787469480:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.509039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.509221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508679787469485:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:46.512393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:46.521691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508679787469487:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:46.575996Z node 2 :TX_PROXY ERROR: Actor# [2:7489508679787469540:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:47.466391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:47.723914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:47.764821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:47.811817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-04-04T17:18:47.878099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-04T17:18:47.909302Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-04T17:18:47.909345Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-04T17:18:47.910951Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-04T17:18:57.709382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:57.709412Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpQueryPerf::Replace+QueryService-UseSink >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionWithSplit |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> KqpYql::EvaluateExprPgNull >> TxUsage::Sinks_Oltp_WriteToTopic_2 [GOOD] >> KqpYql::TableConcat >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable >> TxUsage::Sinks_Oltp_WriteToTopic_3 >> TxUsage::WriteToTopic_Demo_1 [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> TxUsage::WriteToTopic_Demo_35 [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_13 [GOOD] >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_36 >> TxUsage::WriteToTopic_Demo_10 >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> TLocksTest::GoodDupLock >> TxUsage::WriteToTopic_Demo_14 >> TxUsage::WriteToTopic_Demo_24 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11797, MsgBus: 8541 2025-04-04T17:19:20.452746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508824633752081:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:20.454033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001be3/r3tmp/tmp2yIWmC/pdisk_1.dat 2025-04-04T17:19:20.747986Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11797, node 1 2025-04-04T17:19:20.807414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:20.807708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:20.812456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:20.850403Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:20.850429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:20.850451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:20.850555Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8541 TClient is connected to server localhost:8541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:21.356835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:21.384866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:21.530564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:21.680538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:21.769811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:23.359628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508837518655731:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.359772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.598573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.627475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.656636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.688660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.721514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.761581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.850158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508837518656247:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.850229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.850393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508837518656252:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.854541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:23.863712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508837518656254:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:23.958019Z node 1 :TX_PROXY ERROR: Actor# [1:7489508837518656309:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:25.453175Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508824633752081:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:25.453260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19930, MsgBus: 27240 2025-04-04T17:19:21.178951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508831461158391:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:21.178982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bde/r3tmp/tmpaJ1bNe/pdisk_1.dat 2025-04-04T17:19:21.502939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19930, node 1 2025-04-04T17:19:21.582525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:21.583659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:21.616809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:21.634802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:21.634832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:21.634841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:21.634982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27240 TClient is connected to server localhost:27240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:22.130754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:22.148377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:22.285306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:22.422619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:19:22.506150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:24.107299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508844346062055:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.107419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.406127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.434691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.460048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.488101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.519499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.549401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.592980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508844346062564:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.593051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508844346062569:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.593054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.596038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:24.605295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508844346062571:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:24.669901Z node 1 :TX_PROXY ERROR: Actor# [1:7489508844346062625:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Describe::Location [GOOD] >> LocalPartition::Basic >> TFlatTest::LsPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18552, MsgBus: 1145 2025-04-04T17:19:21.262792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508828285325951:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:21.262853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bdd/r3tmp/tmpikgPwn/pdisk_1.dat 2025-04-04T17:19:21.630640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:21.633850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:21.633935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:21.637593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18552, node 1 2025-04-04T17:19:21.710791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:21.710823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:21.710835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:21.710948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1145 TClient is connected to server localhost:1145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:22.222499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:22.262552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:22.410138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:22.571127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:22.643832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.176728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508841170229637:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.176871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.427011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.461641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.489895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.523145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.549528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.580036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.680218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508841170230154:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.680307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.680622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508841170230159:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:24.683903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:24.694107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508841170230161:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:24.791183Z node 1 :TX_PROXY ERROR: Actor# [1:7489508841170230217:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::KvRead+QueryService >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] >> KqpQueryPerf::Replace-QueryService-UseSink >> TxUsage::WriteToTopic_Demo_25 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-04-04T17:19:21.411311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508828587100624:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:21.411728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7a/r3tmp/tmpZZdCPm/pdisk_1.dat 2025-04-04T17:19:21.765838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:21.770207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:21.770475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:21.773646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11812 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:22.030505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:22.044494Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787162089 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... (TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... 
TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787162089 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787162103 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787162103 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... (TRUNCATED) Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient::Ls request: /dc-1 2025-04-04T17:19:22.072122Z node 1 :TX_PROXY ERROR: Actor# [1:7489508832882068523:2324] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787162089 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787162103 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... 
(TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... 2025-04-04T17:19:22.086328Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787162089 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787162103 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743787162131 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... 
(TRUNCATED) 2025-04-04T17:19:24.578317Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508843862924607:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:24.579053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7a/r3tmp/tmpdNGXSH/pdisk_1.dat 2025-04-04T17:19:24.695505Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:24.728292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:24.728374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:24.729400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27219 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:24.891656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:24.898369Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 
>> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13958, MsgBus: 14443 2025-04-04T17:19:23.521035Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508838806506182:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:23.521176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bd7/r3tmp/tmpFdCzrJ/pdisk_1.dat 2025-04-04T17:19:23.856536Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13958, node 1 2025-04-04T17:19:23.924217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:23.927235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:23.937190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:23.958425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:23.958448Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:23.958459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:23.958635Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14443 TClient is connected to server localhost:14443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:24.403735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:24.417375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:24.428468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:24.564547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:24.711462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:24.773609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.542629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508851691409855:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:26.542749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:26.807022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.877709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.913186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.939480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.974064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:27.023989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:27.071763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508855986377665:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.071844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.071919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508855986377670:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.076089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:27.089138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508855986377672:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:27.163357Z node 1 :TX_PROXY ERROR: Actor# [1:7489508855986377726:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TFlatTest::LargeDatashardReplyDistributed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] Test command err: Trying to start YDB, gRPC: 29964, MsgBus: 6364 2025-04-04T17:18:29.428687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508606272201186:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:29.428820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a3/r3tmp/tmpQsmR1P/pdisk_1.dat 2025-04-04T17:18:29.800287Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:29.804080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:29.804182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:29.807572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29964, node 1 2025-04-04T17:18:29.944383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:29.944417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:29.944427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:29.944534Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6364 TClient is connected to server localhost:6364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:30.489989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:18:30.504860Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:30.520562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.681274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.861282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:30.937659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:32.645328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508619157104864:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.645465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:32.900203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.934194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:32.968611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.041842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.074095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.118978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:33.168811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508623452072675:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.168869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.169027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508623452072680:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:33.171700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:33.179882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508623452072682:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:33.254923Z node 1 :TX_PROXY ERROR: Actor# [1:7489508623452072735:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:34.054986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:34.428943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508606272201186:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:34.428989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:35.366865Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508632042009278:2725], TxId: 281474976710706, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDdhYmNjMTgtZjI4MjRkZGMtNjNlN2I1ZjQtNDgwZjlkMTc=. CustomerSuppliedId : . TraceId : 01jr0wzgsmdj0d6rxt43dmec0e. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InputTransform[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037937 node# 1 state# Ready) } } 2025-04-04T17:18:35.369413Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508632042009278:2725], TxId: 281474976710706, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDdhYmNjMTgtZjI4MjRkZGMtNjNlN2I1ZjQtNDgwZjlkMTc=. CustomerSuppliedId : . TraceId : 01jr0wzgsmdj0d6rxt43dmec0e. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Wrong schemaversion 1 requested, table schemaversion 2 (shard# 72075186224037937 node# 1 state# Ready) } }. 2025-04-04T17:18:35.369794Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489508632042009279:2726], TxId: 281474976710706, task: 3. Ctx: { TraceId : 01jr0wzgsmdj0d6rxt43dmec0e. SessionId : ydb://session/3?node_id=1&id=ZDdhYmNjMTgtZjI4MjRkZGMtNjNlN2I1ZjQtNDgwZjlkMTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489508632042009267:2548], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:18:35.372248Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDdhYmNjMTgtZjI4MjRkZGMtNjNlN2I1ZjQtNDgwZjlkMTc=, ActorId: [1:7489508627747041072:2548], ActorState: ExecuteState, TraceId: 01jr0wzgsmdj0d6rxt43dmec0e, Create QueryResponse for error on request, msg: 2025-04-04T17:18:44.386460Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-04T17:18:44.386501Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-04T17:18:44.387416Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-04-04T17:18:44.387447Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-04T17:18:44.798612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:44.798658Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:49.286437Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489508692171568988:2548] TxId: 281474976711925. Ctx: { TraceId: 01jr0wzycrcd35q4xawfez1d3k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdhYmNjMTgtZjI4MjRkZGMtNjNlN2I1ZjQtNDgwZjlkMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting data TxId 281474976711925 because datashard 72075186224037927: is in process of split opId 281474976715659 state SplitSrcSendingSnapshot (wrong shard state); 2025-04-04T17:18:49.294148Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDdhYmNjMTgtZjI4MjRkZGMtNjNlN2I1ZjQtNDgwZjlkMTc=, ActorId: [1:7489508627747041072:2548], ActorState: ExecuteState, TraceId: 01jr0wzycrcd35q4xawfez1d3k, Create QueryResponse for error on request, msg: 2025-04-04T17:18:49.301814Z ... 
rom node 1, TabletId: 72075186224037942 not found 2025-04-04T17:18:59.503188Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-04-04T17:18:59.504864Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found Trying to start YDB, gRPC: 14592, MsgBus: 14639 2025-04-04T17:19:07.758977Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508771073367642:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:07.759012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016a3/r3tmp/tmp727ebB/pdisk_1.dat 2025-04-04T17:19:07.858227Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14592, node 2 2025-04-04T17:19:07.896339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:07.896468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:07.898354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:07.928215Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:07.928239Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:07.928247Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:07.928392Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14639 TClient is connected to server localhost:14639 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:08.304930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:08.317942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:08.393768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:08.544729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:08.620730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:10.654532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508783958271297:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.654614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.699570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.726135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.756075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.789659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.818517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.851527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:10.901256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508783958271810:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.901342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.901406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508783958271815:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:10.904812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:10.914702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508783958271817:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:10.976850Z node 2 :TX_PROXY ERROR: Actor# [2:7489508783958271870:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:11.899742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:12.472383Z node 2 :TX_DATASHARD CRIT: Cannot apply change record: error# Unknown table with id: 18, at tablet# 72075186224037927 2025-04-04T17:19:12.475259Z node 2 :TX_DATASHARD CRIT: Cannot apply change record: error# Unknown table with id: 18, at tablet# 72075186224037919 2025-04-04T17:19:12.759248Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508771073367642:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:12.759299Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:22.281300Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037919][2:7489508792548208076:2630] Apply status: status# 2, reason# 7 2025-04-04T17:19:22.281348Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7489508792548208075:2630] Apply status: status# 2, reason# 7 2025-04-04T17:19:22.286994Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-04-04T17:19:22.287029Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-04-04T17:19:22.287045Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-04T17:19:22.289523Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-04T17:19:22.290527Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-04-04T17:19:22.290554Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-04-04T17:19:22.304486Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-04-04T17:19:22.304794Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-04-04T17:19:22.304840Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-04T17:19:22.307158Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-04-04T17:19:22.307189Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-04-04T17:19:22.310582Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 
72075186224037922 not found 2025-04-04T17:19:22.310623Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-04-04T17:19:22.313711Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7489508835497889241:2630] Handshake status: status# 2, reason# 7 2025-04-04T17:19:22.858389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:22.858422Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:25.882975Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037936:1][72075186224037919][2:7489508848382793471:2626] Handshake status: status# 2, reason# 7 2025-04-04T17:19:27.962155Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037935:1][72075186224037919][2:7489508856972729496:2619] Handshake status: status# 2, reason# 7 >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExprYsonAndType >> KqpYql::TableConcat [GOOD] >> KqpYql::TableNameConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2025-04-04T17:19:12.762107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:12.762615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:12.762670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cdf/r3tmp/tmpRyn298/pdisk_1.dat 2025-04-04T17:19:13.120830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.163219Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:13.201809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:13.201945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:13.213150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:13.293432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:13.580798Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-04-04T17:19:13.581954Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-04-04T17:19:13.606569Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:736:2619], subTag: 1} TUpsertActor finished in 0.024319s, errors=0 2025-04-04T17:19:13.606924Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-04-04T17:19:13.607076Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-04-04T17:19:13.608290Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-04-04T17:19:13.608451Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 3} started fullscan actor# [1:748:2631] 2025-04-04T17:19:13.608567Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} Bootstrap called, sample# 100 2025-04-04T17:19:13.608605Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} Connect to# 72075186224037888 called 2025-04-04T17:19:13.609636Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} Handle TEvClientConnected called, Status# OK 
2025-04-04T17:19:13.610769Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:745:2628], subTag: 1} finished in 0.000981s, sampled# 100, iter finished# 1, oks# 100 2025-04-04T17:19:13.610957Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 3} received keyCount# 100 2025-04-04T17:19:13.611335Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:736:2619], subTag: 3} started# 10 actors each with inflight# 1 2025-04-04T17:19:13.611412Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 2} Bootstrap called 2025-04-04T17:19:13.611456Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611502Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 3} Bootstrap called 2025-04-04T17:19:13.611528Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611582Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 4} Bootstrap called 2025-04-04T17:19:13.611609Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611638Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 5} Bootstrap called 2025-04-04T17:19:13.611659Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611686Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 6} Bootstrap called 2025-04-04T17:19:13.611708Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611733Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 7} Bootstrap called 2025-04-04T17:19:13.611755Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611805Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 8} Bootstrap called 2025-04-04T17:19:13.611832Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611868Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 9} Bootstrap called 2025-04-04T17:19:13.611895Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611925Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 10} Bootstrap called 2025-04-04T17:19:13.611948Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.611979Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 11} Bootstrap called 2025-04-04T17:19:13.612001Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-04-04T17:19:13.615155Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 2} session: ydb://session/3?node_id=1&id=YzdjYzk2MjAtM2FjNTgyYzYtOWY3MGVlY2EtYzIzNDcwY2I= 2025-04-04T17:19:13.615460Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 3} session: ydb://session/3?node_id=1&id=YjMwMzQ2ZWItYTg3ZThkOWEtMmVmNTdiMWQtYjQzNTA1YWU= 2025-04-04T17:19:13.616873Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 4} session: ydb://session/3?node_id=1&id=ZjAzZTQ0NGEtZDZhZTEzMjgtZGFlNWVmZWEtZDA2ODM1MjE= 2025-04-04T17:19:13.618218Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 5} session: ydb://session/3?node_id=1&id=NjJlZmZjMGEtMTljN2JjNjItNzg4ZGRhZDYtYjE2ZDA3NGE= 2025-04-04T17:19:13.619583Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 6} session: ydb://session/3?node_id=1&id=ZDcwNjAwOTctNTcyMGEwNmItZDk1OTM1MTAtMmZlY2RkMzc= 2025-04-04T17:19:13.620959Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 7} session: ydb://session/3?node_id=1&id=MWQxMmMxOGMtYmU4NDdkZGMtYjU4Y2QxYjUtNTA3YmM5MTE= 2025-04-04T17:19:13.622382Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 8} session: ydb://session/3?node_id=1&id=ODEyYTA1ZTMtYzFlMGIyMjAtZTY2N2NjNGEtYmYyYmRhOTI= 2025-04-04T17:19:13.623857Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 9} session: ydb://session/3?node_id=1&id=MTgyMDM5YzktZjVjNDdlODgtMjY1ZDk4MmYtYjQ0ZGI4NDU= 2025-04-04T17:19:13.625289Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 10} session: ydb://session/3?node_id=1&id=MjIyNDAxMTItNDc4M2UzNTItZmMxZmRiOGUtMzBhNGQxMg== 2025-04-04T17:19:13.626782Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:745:2628], subTag: 11} session: ydb://session/3?node_id=1&id=NDQyNDE4ZjItYWU0MDk4MDYtODdhYjJlMDQtMmI3NjUyM2M= 2025-04-04T17:19:13.631208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.631313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:810:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.631379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:812:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.631502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:772:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.631665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:804:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.631727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:805:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.631769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:806:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.631811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:807:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:13.632124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:814:2691], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Erro ... } 2025-04-04T17:19:23.667925Z node 2 :TX_PROXY ERROR: Actor# [2:846:2722] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:23.668044Z node 2 :TX_PROXY ERROR: Actor# [2:849:2725] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:23.669120Z node 2 :TX_PROXY ERROR: Actor# [2:850:2726] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:23.669694Z node 2 :TX_PROXY ERROR: Actor# [2:857:2727] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:23.670118Z node 2 :TX_PROXY ERROR: Actor# [2:861:2728] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:23.671237Z node 2 :TX_PROXY ERROR: Actor# [2:862:2729] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:23.807976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:829:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:830:2706], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:831:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:832:2708], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:833:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2710], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:835:2711], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:840:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:845:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.808582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:847:2723], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:19:23.844436Z node 2 :TX_PROXY ERROR: Actor# [2:985:2821] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:24.230222Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 9} finished in 0.600191s, errors=0 2025-04-04T17:19:24.230452Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 9 { Tag: 9 DurationMs: 600 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:24.244568Z node 2 :TX_PROXY ERROR: Actor# [2:1914:3143] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:24.580048Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 10} finished in 0.947801s, errors=0 2025-04-04T17:19:24.580368Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 10 { Tag: 10 DurationMs: 947 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:24.594429Z node 2 :TX_PROXY ERROR: Actor# [2:2821:3449] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:25.016032Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 11} finished in 1.382091s, errors=0 2025-04-04T17:19:25.016369Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 11 { Tag: 11 DurationMs: 1382 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:25.031210Z node 2 :TX_PROXY ERROR: Actor# [2:3728:3755] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:25.514480Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 3} finished in 1.897042s, errors=0 2025-04-04T17:19:25.514843Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 3 { Tag: 3 DurationMs: 1897 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:25.530722Z node 2 :TX_PROXY ERROR: Actor# [2:4635:4061] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:26.179136Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 8} finished in 2.554078s, errors=0 2025-04-04T17:19:26.179507Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 8 { Tag: 8 DurationMs: 2554 OperationsOK: 100 
OperationsError: 0 } 2025-04-04T17:19:26.196370Z node 2 :TX_PROXY ERROR: Actor# [2:5542:4367] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:26.799138Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 2} finished in 3.181871s, errors=0 2025-04-04T17:19:26.799505Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 2 { Tag: 2 DurationMs: 3181 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:26.816211Z node 2 :TX_PROXY ERROR: Actor# [2:6449:4673] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:27.414714Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 7} finished in 3.791071s, errors=0 2025-04-04T17:19:27.415171Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 7 { Tag: 7 DurationMs: 3791 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:27.432297Z node 2 :TX_PROXY ERROR: Actor# [2:7356:4979] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:28.044631Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 6} finished in 4.422458s, errors=0 2025-04-04T17:19:28.044877Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 6 { Tag: 6 DurationMs: 4422 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:28.065597Z node 2 :TX_PROXY ERROR: Actor# [2:8263:5285] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:28.789655Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 5} finished in 5.169027s, errors=0 2025-04-04T17:19:28.790104Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 5 { Tag: 5 DurationMs: 5169 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:28.814008Z node 2 :TX_PROXY ERROR: Actor# [2:9170:5591] txid# 281474976716577, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:29.595570Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:746:2628], subTag: 4} finished in 5.976496s, errors=0 2025-04-04T17:19:29.596025Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished: 4 { Tag: 4 DurationMs: 5976 OperationsOK: 100 OperationsError: 0 } 2025-04-04T17:19:29.596099Z 
node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:737:2619], subTag: 3} finished in 5.986798s, oks# 1000, errors# 0 2025-04-04T17:19:29.596452Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:746:2628] with tag# 3 >> KqpIndexes::VectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink |92.5%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::RangeRead+QueryService >> KqpPragma::ResetPerQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::VectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 10131, MsgBus: 1671 2025-04-04T17:18:24.368151Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508583666074691:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:24.368221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b3/r3tmp/tmprHMXYP/pdisk_1.dat 2025-04-04T17:18:24.772765Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10131, node 1 2025-04-04T17:18:24.792062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:24.792158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:24.796995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:24.847509Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:24.847546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:24.847556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:24.847662Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1671 TClient is connected to server localhost:1671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:18:25.437369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.449946Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:18:25.464305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.607343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.758791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:25.831178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:27.428323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508596550978286:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.428432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.683322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.710904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.735317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.762076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.795710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.865145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:27.909727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508596550978802:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.909834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.909906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508596550978807:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:27.913084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:27.923599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508596550978809:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:18:28.015793Z node 1 :TX_PROXY ERROR: Actor# [1:7489508600845946158:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:28.903238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:29.138625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:29.256185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:29.368866Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508583666074691:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:29.368922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:18:39.766037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:18:39.766089Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 13077, MsgBus: 30226 2025-04-04T17:18:52.582469Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508704541998882:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:52.582584Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b3/r3tmp/tmplZpWU3/pdisk_1.dat 2025-04-04T17:18:52.691523Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13077, node 2 2025-04-04T17:18:52.727731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:52.727829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:52.729402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:52.763645Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:18:52.763670Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:18:52.763676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:18:52.763806Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30226 TClient is connected to server localhost:30226 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:53.173019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:53.189588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:53.263242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:53.417906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:53.478117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:18:55.473229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508717426902529:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:55.473311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:55.491185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:18:55.519538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:18:55.543206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:18:55.567652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:18:55.599970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:18:55.629557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:18:55.668579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508717426903040:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:55.668657Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:55.668674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508717426903045:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:18:55.672013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:18:55.680687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508717426903047:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:18:55.762894Z node 2 :TX_PROXY ERROR: Actor# [2:7489508717426903102:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:18:56.437863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:18:56.732074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:18:56.766704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:18:56.805720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-04-04T17:18:56.843494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-04-04T17:18:56.875714Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-04-04T17:18:56.875747Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-04-04T17:18:56.875762Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-04-04T17:18:57.583023Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508704541998882:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:57.583089Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:07.685781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:07.685809Z node 2 :IMPORT WARN: Table profiles were not loaded >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6771, MsgBus: 2648 2025-04-04T17:19:20.496842Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508825581953310:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:20.498991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001be4/r3tmp/tmpCRUxXG/pdisk_1.dat 2025-04-04T17:19:20.823715Z node 1 :IMPORT WARN: Table 
profiles were not loaded TServer::EnableGrpc on GrpcPort 6771, node 1 2025-04-04T17:19:20.902653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:20.903768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:20.907362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:20.939775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:20.939818Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:20.939827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:20.939987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2648 TClient is connected to server localhost:2648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:21.441486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:21.473597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:21.634965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:21.799048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:21.873290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:23.394376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508838466856981:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.394499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.624795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.655445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.706104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.735745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.774674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.812379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:23.855520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508838466857491:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.855591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.855717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508838466857496:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:23.859702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:23.871120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508838466857498:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:23.959987Z node 1 :TX_PROXY ERROR: Actor# [1:7489508838466857552:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13755, MsgBus: 9503 2025-04-04T17:19:25.929825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508847549631045:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:25.930779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001be4/r3tmp/tmpQ441pc/pdisk_1.dat 2025-04-04T17:19:26.093985Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13755, node 2 2025-04-04T17:19:26.124756Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:26.126067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:26.134881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:26.157954Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:26.157973Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:26.157980Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:26.158087Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9503 TClient is connected to server localhost:9503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:19:26.588230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.611503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:26.726582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.865242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:26.948349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.137681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508864729502004:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:29.137775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:29.179573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.212842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.244760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.273851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.307597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.345675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.425551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508864729502517:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:29.425625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:29.425884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508864729502522:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:29.429014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:29.439468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508864729502524:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:29.539847Z node 2 :TX_PROXY ERROR: Actor# [2:7489508864729502579:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:30.930112Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508847549631045:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:30.930200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TxUsage::WriteToTopic_Demo_5 [GOOD] |92.5%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::SelectRangeForbidNullArgs2 >> TxUsage::WriteToTopic_Demo_6 >> KqpQueryPerf::IndexReplace+QueryService-UseSink >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService >> KqpWorkload::KV >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21871, MsgBus: 20425 2025-04-04T17:19:29.487014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508866102724887:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:29.487110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bd1/r3tmp/tmpbP3GVW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21871, node 1 2025-04-04T17:19:29.874066Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:19:29.874097Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:19:29.898745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:29.898899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:29.923575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:29.926055Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:29.990816Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:29.990836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:29.990843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:29.990961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20425 TClient is connected to server localhost:20425 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:30.485481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.521669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:30.528742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.653814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:30.809831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:19:30.881681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:32.436043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508878987628491:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.436136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.671162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.752233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.789295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.820343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.848516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.920138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.965756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508878987629007:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.965839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.966041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508878987629012:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.970136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:32.982149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508878987629014:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:33.065718Z node 1 :TX_PROXY ERROR: Actor# [1:7489508883282596363:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::TableNameConflict [GOOD] >> KqpYql::EvaluateExprYsonAndType [GOOD] >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TxUsage::WriteToTopic_Demo_36 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 27069, MsgBus: 4601 2025-04-04T17:19:24.862781Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508843333325211:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:24.862892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019e4/r3tmp/tmpW21UW6/pdisk_1.dat 2025-04-04T17:19:25.308320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:25.343334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:25.344650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:25.347155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27069, node 1 2025-04-04T17:19:25.542882Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:25.542912Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:25.542923Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:25.543050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4601 TClient is connected to server localhost:4601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:26.269854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.307026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.445406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.603769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.690543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:27.906639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508856218228852:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.906760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.474492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.507173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.533389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.561114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.594578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.671120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.737198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508860513196664:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.737283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.737382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508860513196669:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.743225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:28.756054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508860513196671:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:28.816326Z node 1 :TX_PROXY ERROR: Actor# [1:7489508860513196723:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:29.863404Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508843333325211:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:29.863466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29312, MsgBus: 20892 2025-04-04T17:19:30.890894Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508867290820852:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:30.890995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019e4/r3tmp/tmp8oeAgs/pdisk_1.dat 2025-04-04T17:19:31.020791Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:31.031769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:31.031846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:31.036731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29312, node 2 2025-04-04T17:19:31.098471Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:31.098500Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:31.098508Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:31.098621Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20892 TClient is connected to server localhost:20892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:31.491510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.501029Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:31.505953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.583442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.732408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.799798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.874990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508880175724513:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:33.875067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:33.921943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.952219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.986407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.054696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.089035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.118056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.195583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508884470692327:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.195681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.195795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508884470692332:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.198606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:34.205411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508884470692334:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:34.278427Z node 2 :TX_PROXY ERROR: Actor# [2:7489508884470692387:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 30722, MsgBus: 3413 2025-04-04T17:19:25.150360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508848001942329:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:25.152522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019cd/r3tmp/tmpqt2ol4/pdisk_1.dat 2025-04-04T17:19:25.513471Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30722, node 1 2025-04-04T17:19:25.575283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:25.575669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:25.577840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:25.614778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:25.614798Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:25.614804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:25.614900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3413 TClient is connected to server localhost:3413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:26.237885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
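
The Hive warnings in the node bring-up above ("VolatileState: Unknown -> Disconnected -> Connecting -> Connected") trace a node's connection state machine. A compact model of just that progression, with the transition order read off the log rather than taken from the Hive sources:

    #include <iostream>

    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    const char* ToString(EVolatileState s) {
        switch (s) {
            case EVolatileState::Unknown:      return "Unknown";
            case EVolatileState::Disconnected: return "Disconnected";
            case EVolatileState::Connecting:   return "Connecting";
            case EVolatileState::Connected:    return "Connected";
        }
        return "?";
    }

    // Transition order as observed in the trace; a real node can also move
    // backwards on disconnect, which this sketch deliberately omits.
    EVolatileState Next(EVolatileState s) {
        switch (s) {
            case EVolatileState::Unknown:      return EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return EVolatileState::Connecting;
            case EVolatileState::Connecting:   return EVolatileState::Connected;
            case EVolatileState::Connected:    return EVolatileState::Connected;
        }
        return s;
    }

    int main() {
        EVolatileState s = EVolatileState::Unknown;
        while (s != EVolatileState::Connected) {
            EVolatileState n = Next(s);
            std::cout << "VolatileState: " << ToString(s)
                      << " -> " << ToString(n) << "\n";
            s = n;
        }
    }
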
2025-04-04T17:19:26.264921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:26.293726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.461144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:26.611227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.682512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.096028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508860886845970:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.096127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.478684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.509497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.534039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.602432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.638104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.676158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.765434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508860886846491:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.765528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.765785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508860886846496:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.770138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:28.780760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508860886846498:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:28.866007Z node 1 :TX_PROXY ERROR: Actor# [1:7489508860886846553:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
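
The cluster of KQP_WORKLOAD_SERVICE warnings above shows the lazy bootstrap of the default resource pool: the first fetch fails with NOT_FOUND, several actors race to create /Root/.metadata/workload_manager/pools/default, and the losers get "path exist, request accepts it", which is benign. A sketch of that create-if-absent idiom, with invented status codes and stub calls standing in for the real schemeshard round-trips:

    #include <string>

    enum class EStatus { Ok, NotFound, AlreadyExists };

    // Illustrative stubs; the real service talks to the schemeshard.
    EStatus FetchPool(const std::string&)  { return EStatus::NotFound; }
    EStatus CreatePool(const std::string&) { return EStatus::AlreadyExists; }

    bool EnsureDefaultPool() {
        const std::string path =
            "/Root/.metadata/workload_manager/pools/default";
        if (FetchPool(path) == EStatus::Ok) {
            return true;                  // pool already bootstrapped
        }
        switch (CreatePool(path)) {
            case EStatus::Ok:             // this actor won the race
            case EStatus::AlreadyExists:  // another actor won: equally fine
                return true;
            default:
                return false;             // the real actor schedules a retry
        }
    }
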
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. 2025-04-04T17:19:30.150797Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508848001942329:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:30.150897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18961, MsgBus: 23520 2025-04-04T17:19:30.981024Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508868156326540:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:30.981108Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019cd/r3tmp/tmp5nLUrd/pdisk_1.dat 2025-04-04T17:19:31.073050Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18961, node 2 2025-04-04T17:19:31.119212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:31.119312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:31.121687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:31.147212Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:31.147238Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:31.147245Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:31.147361Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23520 TClient is connected to server localhost:23520 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:19:31.527224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:31.541497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
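
On the ":3:27: Error: CONCAT is not supported on Kikimr clusters" issue reported above: in YQL, CONCAT is a table expression that reads several tables as one, and against a Kikimr (YDB) backend the construct is rejected during table intent determination. A query of roughly the shape that triggers it, with invented table paths (the ":3:27" prefix is the line:column position inside the query text):

    #include <string>

    // Invented paths; only the CONCAT construct matters here.
    const std::string kConcatQuery = R"(
        SELECT *
        FROM CONCAT(`/Root/Test1`, `/Root/Test2`);
    )";
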
2025-04-04T17:19:31.609954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.753408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.824503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:34.060246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508885336197483:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.060363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.097900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.129916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.169144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.196578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.227828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.256649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.296290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508885336197994:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.296360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.296618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508885336197999:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.299741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:34.308452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508885336198001:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:34.411407Z node 2 :TX_PROXY ERROR: Actor# [2:7489508885336198055:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. >> TxUsage::WriteToTopic_Demo_37 >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> TxUsage::WriteToTopic_Demo_14 [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> TFlatTest::CopyTableAndCompareColumnsSchema >> TxUsage::WriteToTopic_Demo_15 >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 17522, MsgBus: 21619 2025-04-04T17:19:31.959228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508871218444302:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:31.959883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bcc/r3tmp/tmprHD07c/pdisk_1.dat 2025-04-04T17:19:32.337434Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:32.378111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:32.378306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:32.380334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17522, node 1 2025-04-04T17:19:32.428478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:32.428500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:32.428507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:32.428619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21619 TClient is connected to server localhost:21619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:32.897851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
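
On the ":12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables" issue reported above: the error fires in KiCreateTable when a single script makes the same path stand for two different table objects. A guess at the shape of such a script, for illustration only (the actual KqpYql::TableNameConflict body may differ):

    #include <string>

    // Hypothetical script: `/Root/Test` is first read as an existing table
    // and then created again in the same script, so the one name would have
    // to reference two different tables at once.
    const std::string kConflictScript = R"(
        SELECT * FROM `/Root/Test`;
        CREATE TABLE `/Root/Test` (
            Key Uint32,
            Value String,
            PRIMARY KEY (Key)
        );
    )";
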
2025-04-04T17:19:32.914755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:32.920442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.096924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.251508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.320253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.040558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508888398315131:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.040646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.256608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.282038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.314665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.347729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.412102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.447235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.526137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508888398315650:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.526200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.526235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508888398315655:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.529975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:35.540551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508888398315657:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:35.633201Z node 1 :TX_PROXY ERROR: Actor# [1:7489508888398315712:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:36.958853Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508871218444302:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:36.958985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TxUsage::WriteToTopic_Demo_10 [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> LocalPartition::Basic [GOOD] >> LocalPartition::Restarts >> KqpQueryPerf::MultiRead-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24459, MsgBus: 24120 2025-04-04T17:19:23.042101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508840418665054:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:23.042219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bd9/r3tmp/tmpCKPveG/pdisk_1.dat 2025-04-04T17:19:23.353733Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24459, node 1 2025-04-04T17:19:23.415925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:23.416058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:23.417963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:23.422829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:23.422843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:23.422850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:23.422945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24120 TClient is connected to server localhost:24120 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:23.935730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:23.963386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:24.099774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:24.272912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:24.356296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.115822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508853303568728:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:26.115924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:26.348157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.382849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.420598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.450167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.483187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.519414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:26.569762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508853303569237:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:26.569845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:26.570349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508853303569242:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:26.574674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:26.587923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508853303569244:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:26.656054Z node 1 :TX_PROXY ERROR: Actor# [1:7489508853303569298:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:27.542754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:19:27.577044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:19:27.618643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.045158Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508840418665054:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:28.045249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15963, MsgBus: 1612 2025-04-04T17:19:30.679022Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508866504109700:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:30.679072Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bd9/r3tmp/tmpC5R5GX/pdisk_1.dat 2025-04-04T17:19:30.793481Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:30.815097Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:30.815185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15963, node 2 2025-04-04T17:19:30.816755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:30.853258Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:30.853279Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:30.853283Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:30.853374Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1612 TClient is connected to server localhost:1612 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:31.292547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.298155Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:31.309112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:31.382158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:19:31.518256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:31.598033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.617300Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508879389013388:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:33.617405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:33.686379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.723026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.791988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.825300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.855286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.932310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.982997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508879389013908:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:33.983096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508879389013913:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:33.983096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:33.986839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:33.998719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508879389013915:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:34.072568Z node 2 :TX_PROXY ERROR: Actor# [2:7489508883683981265:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:34.791465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.869079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.908219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.686660Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508866504109700:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:35.686735Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> TxUsage::WriteToTopic_Demo_11 >> KqpQueryPerf::KvRead-QueryService [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TxUsage::Sinks_Oltp_WriteToTopic_3 [GOOD] >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> TListAllTopicsTests::PlainList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 3487605329836474648 2025-04-04T17:15:32.101967Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-04-04T17:15:32.125764Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-04-04T17:15:32.125880Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-04-04T17:15:32.128521Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-04-04T17:15:32.141816Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:15:32.144553Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved 
TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-04-04T17:16:11.778414Z 2 00h01m03.972869s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:1:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 8197 2025-04-04T17:16:19.477423Z 8 00h01m04.307624s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:7:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 4562 2025-04-04T17:19:37.430678Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:19:37.430799Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:19:37.430857Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-04-04T17:19:37.430897Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-04-04T17:19:37.637986Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-04-04T17:19:37.638085Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> KqpYql::TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3613, MsgBus: 13302 2025-04-04T17:19:29.063942Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508864959528234:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:29.064209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bd3/r3tmp/tmpuajS4m/pdisk_1.dat 2025-04-04T17:19:29.461451Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:29.465626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:29.465729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:29.467982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3613, node 1 2025-04-04T17:19:29.570095Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:29.570121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:29.570132Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:29.570272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13302 TClient is connected to server localhost:13302 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:30.130595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.151209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.293672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.443546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.501875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:32.052351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508877844431899:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.052467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.278031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.309336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.338712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.365426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.394383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.467305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.559479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508877844432422:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.559560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.559719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508877844432427:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:32.563274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:32.573337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508877844432429:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:32.657333Z node 1 :TX_PROXY ERROR: Actor# [1:7489508877844432483:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27075, MsgBus: 5244 2025-04-04T17:19:34.446204Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508886172652849:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:34.446336Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bd3/r3tmp/tmp9ocpFL/pdisk_1.dat 2025-04-04T17:19:34.583200Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:34.603702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:34.603783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:34.605607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27075, node 2 2025-04-04T17:19:34.662501Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:34.662522Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:34.662529Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:34.662646Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5244 TClient is connected to server localhost:5244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:35.058607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
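The cycle above (TPoolFetcherActor reporting NOT_FOUND for the pool "default", TPoolCreatorActor scheduling a "doublechecking" retry, then TX_PROXY logging "path exist, request accepts it") is the lazy bootstrap of the default workload-manager resource pool at /Root/.metadata/workload_manager/pools/default: the first sessions race to create it, and a proposal that lands second finds the path already present, which the request tolerates. For reference, a hedged sketch of declaring such a pool explicitly; the syntax follows recent YDB releases, and the pool name and limits are illustrative assumptions, not values taken from this log:

    -- hedged sketch; pool name and limits are illustrative assumptions
    CREATE RESOURCE POOL illustrative_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- cap on queries admitted to the pool at once
        QUEUE_SIZE = 100             -- how many queries may wait for admission
    );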
2025-04-04T17:19:35.079976Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:35.085713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.149396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.334119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.410612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:37.603572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508899057556505:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.603659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.640530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.679646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.720239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.754169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.784253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.856629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.909957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508899057557020:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.910069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.910131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508899057557025:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.913439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:37.922602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508899057557027:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:38.000527Z node 2 :TX_PROXY ERROR: Actor# [2:7489508899057557080:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.6%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-04-04T17:19:33.522139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508882751058186:2249];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:33.522230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e76/r3tmp/tmpAWnN4C/pdisk_1.dat 2025-04-04T17:19:33.875326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:33.878011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:33.878138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:33.881838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5295 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:34.144508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-04T17:19:34.173987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:19:36.763099Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508892138683426:2181];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:36.763676Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e76/r3tmp/tmpP3TprV/pdisk_1.dat 2025-04-04T17:19:36.922220Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:36.939848Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:36.939994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:36.941573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28178 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:19:37.131648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:37.160621Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:19:37.165115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
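Each "ESchemeOpCreateTable ... propose itself is undo unsafe" warning above corresponds to one table-creation suboperation that a test fixture proposes to the schemeshard while building its schema. As a hedged illustration of what such a suboperation carries, a minimal YQL DDL statement of the kind these fixtures issue; the table path and columns are assumptions, not recovered from the log:

    -- hedged sketch; table path and columns are illustrative assumptions
    CREATE TABLE `/Root/IllustrativeKeyValue` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );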
>> LocalPartition::WithoutPartitionWithSplit [GOOD] >> Trace::SkipSpaces [GOOD] >> Trace::NextToken [GOOD] >> Trace::TTraceEvent [GOOD] >> Trace::TExpectedTraceEvent [GOOD] >> Trace::TExpectedTrace [GOOD] >> TSettingsValidation::TestDifferentDedupParams >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> TxUsage::Sinks_Oltp_WriteToTopic_4 >> KqpWorkload::STOCK >> TLocksTest::CK_GoodLock >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService >> KqpPragma::Warning [GOOD] >> TxUsage::WriteToTopic_Demo_25 [GOOD] >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 27952, MsgBus: 15253 2025-04-04T17:19:32.262904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508878786105968:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:32.262951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c9/r3tmp/tmpvNjeJI/pdisk_1.dat 2025-04-04T17:19:32.634319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:32.636397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:32.636495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:32.640874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27952, node 1 2025-04-04T17:19:32.720819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:32.720842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:32.720858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:32.720998Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15253 TClient is connected to server localhost:15253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:33.266268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.289842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.446867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.614349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.698217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.110777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508891671009653:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.110901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.423703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.446272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.471724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.497444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.530756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.579299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.624764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508891671010164:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.624829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.624884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508891671010169:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.628612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:35.638353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508891671010171:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:35.723555Z node 1 :TX_PROXY ERROR: Actor# [1:7489508891671010225:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:37.262923Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508878786105968:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:37.262986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:37.274825Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508900260945160:2500], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate
/lib/yql/aggregate.yqls:648:18: Error: At function: AggregationTraits
/lib/yql/aggregate.yqls:60:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2025-04-04T17:19:37.276515Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGUyNmI4ZTUtOWRkMTJmYTItZGFlYjk4NC1mNmNhOTBhNQ==, ActorId: [1:7489508895965977783:2488], ActorState: ExecuteState, TraceId: 01jr0x1d8z6e42gek7sa9zf1cw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 3827, MsgBus: 7880 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c9/r3tmp/tmpqOHKa3/pdisk_1.dat 2025-04-04T17:19:38.222437Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:38.234449Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3827, node 2 2025-04-04T17:19:38.253942Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:38.254032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:38.256481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:38.302603Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:38.302625Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:38.302632Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:38.302755Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7880 TClient is connected to server localhost:7880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:38.747696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:38.753815Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:38.764911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
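The compile trace above shows KQP surfacing a YQL type-annotation failure (a query referencing the missing member _yql_partition_id) back to the session via ReplyQueryCompileError. Given the suite name KqpPragma::Warning, the tests here plausibly exercise YQL's warning-control pragma; a minimal hedged sketch of that pragma family, assuming standard YQL syntax (the warning code shown is illustrative, not taken from this log):

    -- hedged sketch of YQL warning control; code 1108 is illustrative
    PRAGMA Warning("disable", "1108"); -- suppress one specific warning code
    PRAGMA Warning("error", "*");      -- escalate remaining warnings to errors
    SELECT 1 AS ok;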
2025-04-04T17:19:38.843859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:38.996182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:39.061234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:41.284789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508916799783508:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:41.284879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:41.322567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:41.361994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:41.396268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:41.463937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:41.499329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:41.559182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:41.648998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508916799784025:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:41.649064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:41.649276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508916799784030:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:41.652776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:41.662425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508916799784032:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:41.717163Z node 2 :TX_PROXY ERROR: Actor# [2:7489508916799784085:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> TxUsage::WriteToTopic_Demo_26 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19922, MsgBus: 17868 2025-04-04T17:19:39.343502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508908589934955:2270];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:39.343847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bc1/r3tmp/tmp0wPxME/pdisk_1.dat 2025-04-04T17:19:39.730982Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:39.761299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:39.761398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19922, node 1 2025-04-04T17:19:39.763369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:39.819709Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:39.819727Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:39.819735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:39.819860Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17868 TClient is connected to server localhost:17868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:40.331012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.347783Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:40.362097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.507555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.680097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.751445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:42.437965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508921474838396:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.438052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.736058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.766527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.835868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.862848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.891745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.927392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:43.009092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508925769806208:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:43.009215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:43.009346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508925769806213:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:43.012790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:43.023811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508925769806215:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:43.123291Z node 1 :TX_PROXY ERROR: Actor# [1:7489508925769806271:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::TableRange [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_37 [GOOD] >> TFlatTest::CopyTableAndRead >> TxUsage::WriteToTopic_Demo_6 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 1225, MsgBus: 3496 2025-04-04T17:19:41.191550Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508916250965677:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:41.192587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c4/r3tmp/tmpjvgaIx/pdisk_1.dat 2025-04-04T17:19:41.566289Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:41.579566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:41.579691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:41.590570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1225, node 1 2025-04-04T17:19:41.654423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:41.654440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:41.654450Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:41.654733Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3496 TClient is connected to server localhost:3496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:42.146175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:42.163016Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:42.172471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.317188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:42.484129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.557649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:44.196078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508929135869346:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:44.196185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:44.424239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:44.452233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:44.476691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:44.506374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:44.535900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:44.570496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:44.621497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508929135869856:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:44.621602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:44.621714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508929135869861:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:44.624988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:44.632595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508929135869863:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:44.687388Z node 1 :TX_PROXY ERROR: Actor# [1:7489508929135869916:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TxUsage::WriteToTopic_Demo_38 >> TxUsage::WriteToTopic_Demo_7 >> KqpQueryPerf::UpdateOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22437, MsgBus: 31612 2025-04-04T17:19:31.437061Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508872206998278:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:31.444353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bcd/r3tmp/tmpfF8sYo/pdisk_1.dat 2025-04-04T17:19:31.756843Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:31.799354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:31.799497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:31.801873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22437, node 1 2025-04-04T17:19:31.852661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:31.852698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:31.852711Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:31.852843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31612 TClient is connected to server localhost:31612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:32.408526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:32.427271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
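The KqpYql::TableRange result above ends in the expected rejection ":3:27: Error: RANGE is not supported on Kikimr clusters." RANGE is the YQL table function that reads a span of tables under a directory, and the test confirms that Kikimr (YDB) clusters refuse it at table-intent-determination time (code 1040). A hedged sketch of the query shape that triggers this; the directory and name bounds are illustrative assumptions:

    -- hedged sketch; directory and bounds are illustrative assumptions;
    -- on a Kikimr (YDB) cluster this fails with the error quoted above
    SELECT * FROM RANGE(`/Root/logs`, `2025-04-01`, `2025-04-04`);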
2025-04-04T17:19:32.583637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:32.725351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:32.801964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:34.448365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508885091901934:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.464025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:34.770121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.804617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.834230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.863985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.908329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.982207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:35.070410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508889386869749:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.070534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.070834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508889386869755:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:35.074393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:35.085492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508889386869757:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:35.167243Z node 1 :TX_PROXY ERROR: Actor# [1:7489508889386869811:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:36.001221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:19:36.075172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:19:36.109975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:36.436685Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508872206998278:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:36.436757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7681, MsgBus: 64476 2025-04-04T17:19:39.099033Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508905925716258:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:39.099098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bcd/r3tmp/tmpTNhENe/pdisk_1.dat 2025-04-04T17:19:39.253058Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:39.280069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:39.280148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:39.283222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7681, node 2 2025-04-04T17:19:39.374713Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:39.374734Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:39.374742Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:39.374858Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64476 TClient is connected to server localhost:64476 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:39.859502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:39.864476Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:39.872443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:39.949516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.095024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:40.171628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.362371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508918810619889:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.362469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.400263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.432530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.466867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.538191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.568786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.635998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.687869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508918810620404:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.687967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.688053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508918810620409:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.691850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:42.716523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508918810620411:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:42.816704Z node 2 :TX_PROXY ERROR: Actor# [2:7489508918810620468:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:43.596520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:19:43.674147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:19:43.717117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:44.099925Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508905925716258:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:44.099992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexUpsert+QueryService-UseSink >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> KqpScripting::ScanQueryInvalid >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8619, MsgBus: 3411 2025-04-04T17:19:34.350782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508885097056135:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:34.351364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bcb/r3tmp/tmpukYEjd/pdisk_1.dat 2025-04-04T17:19:34.649517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:34.654016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:34.654119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:34.655960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8619, node 1 2025-04-04T17:19:34.740145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:34.740165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:34.740174Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-04T17:19:34.740291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3411 TClient is connected to server localhost:3411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:35.265940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.288706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.440635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.598703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.679788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:37.343638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508897981959810:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.343765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.557700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.590740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.624821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.657628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.688258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.723256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.777726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508897981960320:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.777779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508897981960325:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.777806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.781013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:37.792761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508897981960327:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:37.894851Z node 1 :TX_PROXY ERROR: Actor# [1:7489508897981960383:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:38.814984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:19:38.896711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:19:38.940377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:39.357043Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508885097056135:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:39.357162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17276, MsgBus: 23760 2025-04-04T17:19:42.219720Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508918895721676:2108];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bcb/r3tmp/tmptnMYP1/pdisk_1.dat 2025-04-04T17:19:42.266582Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:19:42.320052Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17276, node 2 2025-04-04T17:19:42.360544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:42.360646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:42.388689Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:42.388718Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:42.388726Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:42.388825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:42.405009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23760 TClient is connected to server localhost:23760 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:42.754482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:42.759701Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:42.770526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:42.839094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:42.969096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:43.056649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:45.111916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508931780625250:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:45.112046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:45.148318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:45.176614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:45.200491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:45.231403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:45.261569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:45.292018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:45.330637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508931780625758:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:45.330720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508931780625763:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:45.330758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:45.333666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:45.344858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508931780625765:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:45.423661Z node 2 :TX_PROXY ERROR: Actor# [2:7489508931780625819:3434] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:46.200319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.235356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.273759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:47.219166Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508918895721676:2108];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:47.225249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> TFlatTest::LargeDatashardReplyRW [GOOD] >> TxUsage::WriteToTopic_Demo_15 [GOOD] >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList >> TObjectStorageListingTest::SchemaChecks [GOOD] >> TxUsage::WriteToTopic_Demo_16 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] Test command err: 2025-04-04T17:16:06.233088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:16:06.233526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:16:06.233581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a74/r3tmp/tmp4OAtY8/pdisk_1.dat 2025-04-04T17:16:06.635301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.672895Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:06.716910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:06.717874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:06.730152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:06.820400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:16:06.880261Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:16:06.881272Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:16:06.881749Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:16:06.882000Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:16:06.893555Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:16:06.929192Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:16:06.929423Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:16:06.930987Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:16:06.931042Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:16:06.931085Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:16:06.931402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:16:06.931493Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:16:06.931566Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:16:06.943022Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:16:06.973119Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:16:06.973297Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:16:06.973395Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:16:06.973425Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:16:06.973451Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:16:06.973492Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:06.973689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.973729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:06.973967Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:16:06.974064Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:16:06.974161Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:06.974198Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:16:06.974304Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:16:06.974344Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:16:06.974376Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:16:06.974432Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:16:06.974482Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:16:06.974565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.974619Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:06.974667Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:16:06.974962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:16:06.974996Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:16:06.975063Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:16:06.975275Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:16:06.975325Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:16:06.975419Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:16:06.975459Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:16:06.975498Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:16:06.975538Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:16:06.975564Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.975851Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:16:06.975880Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:16:06.975906Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:16:06.975929Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.975959Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:16:06.976000Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:16:06.976032Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:16:06.976061Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:06.976083Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:16:06.977584Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:16:06.977634Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:16:06.988375Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:16:06.988450Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:16:06.988494Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:16:06.988567Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:16:06.988651Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:16:07.128078Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.128129Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:16:07.128206Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:16:07.128521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:16:07.128555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:16:07.128673Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:16:07.128748Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:16:07.128808Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:16:07.128848Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:16:07.136828Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:16:07.136913Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:16:07.137675Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.137718Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:16:07.137768Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:16:0 ... RD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:31.325406Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:743:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:31.325502Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:753:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:31.325575Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:31.332708Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:19:31.504226Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:757:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:31.541013Z node 17 :TX_PROXY ERROR: Actor# [17:831:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:31.793619Z node 17 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0x17fv3ywt5dp7pdh0va73, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=17&id=YWU1OGFlYzctNTY2MTIzZWQtZjI0NzY1ZDEtZjIzM2RhMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:36.958706Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:36.959318Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:19:36.959459Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a74/r3tmp/tmpiujail/pdisk_1.dat 2025-04-04T17:19:37.381795Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.414365Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:37.455739Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:37.455921Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:37.467826Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:37.558689Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.974750Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:739:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.974867Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:748:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.974960Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.982071Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:19:38.159462Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [18:753:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:38.198825Z node 18 :TX_PROXY ERROR: Actor# [18:827:2672] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:38.657209Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0x1dzm54sfpefpd0e41vwq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OGU2ZTBiZjAtYjA3NWI3MjQtOGI3ZjM1ZTktYjg2ZGFjNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:39.236420Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0x1es66wg2h28dhm02epjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=Mzk1OTA0ZTQtYzdhZDBjYzMtZTlhYzdkZS04ZGY0M2Zm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:39.732848Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0x1f823s9a0w2nc1stg26v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MTQ2MTZkMjAtNzg5YjgxN2MtOTQzM2E1NWEtMmJkMmRkZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:40.240192Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0x1fsh0grdm3fxwveq5fy0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZGU3ZWQxNWYtYjI2MDNiYTctYTAwNzg4ZDAtNTYxMmUxYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:40.668592Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0x1g8z1w025gjdq87yv0vj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MTIyZGM4NGYtNjEzNTFlZWUtZmNkMzA0NTItN2E3N2NjZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:41.088337Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0x1gn79pfrdegettnrsqw7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MmY0MjQ3ZmEtNzY0ZDEzOWMtMzFhMDJiOWMtMTIwYTA3ODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:41.482855Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0x1h23927wdadtx09vpjfz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZTk3NTVhNmUtMWQ3YTBiMWEtNGUyMWM4NTEtNTcxMWU2MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:41.879018Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0x1heqd7xs9wc95ndhxgt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YzdkMTZhYjUtOTJkODgxY2QtNjVhY2JiZGEtMWZiMTIxZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:42.275168Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0x1hv0c8m8p32vjtetavbk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZjIwMDUzMzAtYTk3NjFlOTctYzYxOTQ1OTEtZjBkZDBhNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:42.769377Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jr0x1j7k37nc83egj434j4z7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=M2FmYmM1YmEtYTcwMmNjN2ItYTZjYWJkMTctMmJhNDQ1MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-04-04T17:19:44.992615Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:44.992693Z node 18 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1515 LastUpdateTime: 1515 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 8554 Memory: 17425448 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1515 LastUpdateTime: 1515 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3302 Memory: 124998 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 88 InMemSize: 0 LastAccessTime: 1515 LastUpdateTime: 1515 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 88 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3302 Memory: 124998 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-04T17:19:49.579708Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jr0x1s0j7pfsaackgy9z6m25, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YjVhNmQ0OWMtNGY3ZjNmODMtZjU4YzI4MjYtZjIxM2M4M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 26264, MsgBus: 9371 2025-04-04T17:19:39.146739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508907000391952:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:39.146787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bc4/r3tmp/tmp9rluoJ/pdisk_1.dat 2025-04-04T17:19:39.534940Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:39.560201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:39.560289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:39.563000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26264, node 1 2025-04-04T17:19:39.648503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:39.648527Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:39.648535Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:39.648647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9371 TClient is connected to server localhost:9371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:40.209499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.231232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:40.244582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:40.398007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.560909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.629618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:42.382474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508919885295632:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.382593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.626132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.663062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.690567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.761059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.786368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.826522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:42.905966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508919885296148:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.906036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.906099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508919885296153:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:42.909560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:42.921726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508919885296155:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:42.991809Z node 1 :TX_PROXY ERROR: Actor# [1:7489508919885296208:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:44.146878Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508907000391952:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:44.146964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11790, MsgBus: 16159 2025-04-04T17:19:45.123693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508933859384257:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:45.123738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bc4/r3tmp/tmpJXmPcC/pdisk_1.dat 2025-04-04T17:19:45.244365Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11790, node 2 2025-04-04T17:19:45.282505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:45.282599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:45.315245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:45.333420Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:45.333448Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:45.333462Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:45.333594Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16159 TClient is connected to server localhost:16159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:45.711323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:45.717477Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:45.721758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:45.786043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:45.924768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:45.992846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:48.182649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508946744287920:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:48.182748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:48.232239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.304535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.332391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.361591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.390617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.423619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.465323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508946744288430:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:48.465401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:48.465443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508946744288435:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:48.468642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:48.476183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508946744288437:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:48.574811Z node 2 :TX_PROXY ERROR: Actor# [2:7489508946744288492:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:50.123960Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508933859384257:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:50.124035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-04-04T17:19:30.246513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508869481195590:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:30.246656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e77/r3tmp/tmpl5CAIE/pdisk_1.dat 2025-04-04T17:19:30.576697Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:30.580185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:30.580250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:30.582234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64063 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:30.862875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.876536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:30.890215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.831445Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037889, for tableId 3 don't have enough keys: RowCount: 164 DataSize: 16801932 IndexSize: 4137 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T17:19:35.233784Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508869481195590:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:35.233874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:39.791280Z node 1 :MINIKQL_ENGINE ERROR: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-04-04T17:19:39.804868Z node 1 :TX_DATASHARD ERROR: Datashard execution error for [1743787179309:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-04-04T17:19:39.808770Z node 1 :TX_PROXY ERROR: Actor# [1:7489508908135907509:5944] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-04-04T17:19:39.808955Z node 1 :TX_PROXY ERROR: Actor# [1:7489508908135907509:5944] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-04-04T17:19:40.448472Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508911065492498:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:40.449105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e77/r3tmp/tmpT6VkOH/pdisk_1.dat 2025-04-04T17:19:40.657372Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:40.661389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:40.661481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:40.663811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22878 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:40.853572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.871404Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:40.891807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:42.822018Z node 2 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037888, for tableId 3 don't have enough keys: RowCount: 164 DataSize: 16803260 IndexSize: 4137 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T17:19:45.444461Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508911065492498:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:45.444518Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:50.100077Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976711361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-04T17:19:50.111410Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976711361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-04-04T17:19:50.114984Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976711361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-04-04T17:19:50.116488Z node 2 :TX_PROXY ERROR: Actor# [2:7489508949720204506:5915] txid# 281474976711361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2025-04-04T17:19:20.849986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508824749109270:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:20.850208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7b/r3tmp/tmp760Qt3/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14985, node 1 2025-04-04T17:19:21.202964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:21.227936Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:19:21.228026Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T17:19:21.284179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:21.284397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:21.289432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:21.310072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:21.310092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:21.310101Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:21.310224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31945 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:21.606389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:21.652827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:25.850260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508824749109270:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:25.850329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:36.201793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:36.201814Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:47.443183Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508941735328141:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:47.443294Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e7b/r3tmp/tmpmqX7f4/pdisk_1.dat 2025-04-04T17:19:47.593620Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:47.606232Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:47.606360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:47.607695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18988, node 2 2025-04-04T17:19:47.646676Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:47.646696Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:47.646701Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:47.646802Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1180 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:47.865048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-04T17:19:47.888685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TFlatTest::SelectBigRangePerf >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] |92.6%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyTableAndDropCopy |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::UpdateOn+QueryService+UseSink >> TFlatTest::CopyTableAndDropOriginal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6113, MsgBus: 21282 2025-04-04T17:19:47.849407Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508940255954037:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:47.850377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bba/r3tmp/tmpaWUG0h/pdisk_1.dat 2025-04-04T17:19:48.212379Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6113, node 1 2025-04-04T17:19:48.254143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:48.254260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:48.258335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:48.314834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:48.314860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:48.314872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:48.314975Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21282 TClient is connected to server localhost:21282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:48.794230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:48.814784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:48.959473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.096434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.167732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.722521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508953140857703:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:50.722612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:50.922038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:50.960787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:51.027493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:51.058757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:51.087385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:51.121269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:51.171824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508957435825512:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:51.171925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:51.172291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508957435825517:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:51.178401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:51.189231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508957435825519:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:51.261063Z node 1 :TX_PROXY ERROR: Actor# [1:7489508957435825573:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::RangeLimitRead-QueryService >> TLocksTest::Range_BrokenLockMax ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-04-04T17:19:47.202771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508940938623632:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:47.202848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e6f/r3tmp/tmpCBwQ0f/pdisk_1.dat 2025-04-04T17:19:47.489770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:47.562918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:47.563017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:47.564648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26824 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:47.743206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:47.775145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:47.946219Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:19:47.950268Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:19:47.978111Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1567 647 6413)b }, ecr=1.000 2025-04-04T17:19:47.980616Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2320 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-04-04T17:19:48.102832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:19:48.103060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.103438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-04T17:19:48.103490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-04T17:19:48.103501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:19:48.103537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-04T17:19:48.103551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-04T17:19:48.103652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-04T17:19:48.103770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:19:48.104489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:19:48.104523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-04T17:19:48.105137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-04T17:19:48.105292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 
2025-04-04T17:19:48.105469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:19:48.105481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:19:48.105786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-04T17:19:48.105875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:19:48.105891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489508940938624152:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-04-04T17:19:48.105904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489508940938624152:2242], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-04-04T17:19:48.105939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.105971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-04-04T17:19:48.106281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:19:48.106389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:19:48.109047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:19:48.109160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:19:48.109174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-04T17:19:48.109190Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-04T17:19:48.109232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 waiting... 
2025-04-04T17:19:48.109527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:19:48.109631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:19:48.109642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-04T17:19:48.109652Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-04-04T17:19:48.109661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-04-04T17:19:48.109701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-04-04T17:19:48.109823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-04T17:19:48.109911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-04T17:19:48.109979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-04-04T17:19:48.109994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-04-04T17:19:48.110012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-04-04T17:19:48.110109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710676 2025-04-04T17:19:48.110152Z nod ... 
37890 2025-04-04T17:19:50.943195Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 parts [ [72075186224037888:1:23:1:12288:234:0] [72075186224037888:1:16:1:12288:286:0] ] return ack processed 2025-04-04T17:19:50.943228Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:19:50.943269Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-04T17:19:50.944095Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715680 datashard 72075186224037891 state PreOffline 2025-04-04T17:19:50.944115Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-04-04T17:19:50.944299Z node 2 :TX_DATASHARD INFO: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:19:50.944377Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:19:50.944429Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7489508955283837929:2374], serverId# [2:7489508955283837937:2697], sessionId# [0:0:0] 2025-04-04T17:19:50.944466Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack parts [ [72075186224037889:1:16:1:12288:286:0] [72075186224037889:1:23:1:12288:234:0] ] return to tablet 72075186224037891 2025-04-04T17:19:50.944485Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:19:50.944517Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-04T17:19:50.945406Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:19:50.945507Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:19:50.945546Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-04T17:19:50.945576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489508955283837387 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-04-04T17:19:50.945618Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:19:50.945780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489508955283837696 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-04-04T17:19:50.945795Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:19:50.946151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:19:50.946207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 
72057594046644480 Check that tablet 72075186224037888 was deleted 2025-04-04T17:19:50.947267Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-04-04T17:19:50.947543Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:286:0] [72075186224037889:1:23:1:12288:234:0] ] return ack processed 2025-04-04T17:19:50.947649Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-04T17:19:50.947666Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-04T17:19:50.947698Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7489508955283837934:2376], serverId# [2:7489508955283837940:2700], sessionId# [0:0:0] 2025-04-04T17:19:50.947783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489508955283837390 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-04T17:19:50.947807Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:19:50.948002Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:19:50.948237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-04T17:19:50.948427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:19:50.948560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:19:50.948687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-04T17:19:50.948799Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-04T17:19:50.948805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:19:50.948963Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-04T17:19:50.949009Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop Check that tablet 72075186224037890 was deleted 2025-04-04T17:19:50.949241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489508955283837705 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-04-04T17:19:50.949290Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:19:50.949468Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-04T17:19:50.949561Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-04T17:19:50.949698Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from 
RequestHiveInfo(TabletID=72075186224037889) 2025-04-04T17:19:50.950069Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-04T17:19:50.950086Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-04T17:19:50.950119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-04T17:19:50.950139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-04T17:19:50.950171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:19:50.950179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:19:50.950225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:19:50.951048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:19:50.951086Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T17:19:50.951131Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-04T17:19:50.951186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:19:50.951305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T17:19:50.951400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-04T17:19:50.951473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:19:50.951491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-04T17:19:50.951529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:19:50.951542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:19:50.951557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:19:50.952182Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-04T17:19:50.952202Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-04T17:19:50.952220Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop Check that tablet 72075186224037891 was deleted 2025-04-04T17:19:50.952837Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T17:19:50.952885Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-04T17:19:50.953766Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from 
RequestHiveInfo(TabletID=72075186224037890) 2025-04-04T17:19:50.953787Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-04T17:19:50.953801Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-04T17:19:50.954021Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:19:50.954059Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-04T17:19:50.954629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:19:50.954642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:19:50.954683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T17:19:50.954705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T17:19:50.954735Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:19:50.955411Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TxUsage::Sinks_Oltp_WriteToTopic_4 [GOOD] >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1 >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 26431, MsgBus: 8764 2025-04-04T17:19:43.685145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508925401202744:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:43.685294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bbb/r3tmp/tmpjiJ2SY/pdisk_1.dat 2025-04-04T17:19:44.010023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:44.013937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:44.014058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:44.017268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26431, node 1 2025-04-04T17:19:44.096736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:44.096763Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:44.096772Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:44.096895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8764 TClient 
is connected to server localhost:8764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:44.575437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:44.600508Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:44.607851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:44.740474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:44.892879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:44.973264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:46.610628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508938286106424:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:46.610759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:46.818367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.849994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.882093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.911899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.940149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.974194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:47.017246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508942581074231:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:47.017406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:47.017501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508942581074236:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:47.020557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:47.028848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508942581074238:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:47.097046Z node 1 :TX_PROXY ERROR: Actor# [1:7489508942581074291:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:48.685378Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508925401202744:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:48.685463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29801, MsgBus: 32327 2025-04-04T17:19:49.526323Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508949750080375:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:49.526371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bbb/r3tmp/tmp5bCifv/pdisk_1.dat 2025-04-04T17:19:49.631237Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29801, node 2 2025-04-04T17:19:49.669524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:49.669609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:49.670961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:49.687952Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:49.687973Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:49.687980Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:49.688118Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32327 TClient is connected to server localhost:32327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:50.145001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.151080Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:50.158415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.240292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.432130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.501396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:52.717276Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508962634983832:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:52.717380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:52.751243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.776100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.810629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.841866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.870158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.905642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.003762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508966929951647:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.003842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.004032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508966929951652:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.007892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:53.018093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508966929951654:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:53.102218Z node 2 :TX_PROXY ERROR: Actor# [2:7489508966929951709:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:54.527837Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508949750080375:2260];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:54.527907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TFlatTest::CopyTableAndDropCopy [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionTests::AfterRestart_1 >> TPartitionTests::AfterRestart_1 [GOOD] >> TPartitionTests::AfterRestart_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-04-04T17:19:38.297845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508900973997419:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:38.297890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e75/r3tmp/tmpxmqaRf/pdisk_1.dat 2025-04-04T17:19:38.631614Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:38.672783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:38.672970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:38.675110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7632 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:38.843012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:38.860003Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:38.867514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:19:38.872203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743787178987 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743787179078 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) 2025-04-04T17:19:39.050713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743787179127 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1743787179176 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) waiting... 2025-04-04T17:19:39.142485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1743787179204 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1743787179239 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-04-04T17:19:39.220396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710665 CreateStep: 1743787179302 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710666 CreateStep: 1743787179344 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: fals ... 46644480 2025-04-04T17:19:54.333667Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2025-04-04T17:19:54.333968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-04T17:19:54.334070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-04T17:19:54.334141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-04-04T17:19:54.334153Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-04-04T17:19:54.334174Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-04-04T17:19:54.334205Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-04T17:19:54.334210Z node 3 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-04T17:19:54.334211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-04T17:19:54.334222Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-04-04T17:19:54.334227Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-04T17:19:54.334266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715686, ready parts: 1/1, is published: true 2025-04-04T17:19:54.334300Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:7489508970821780667:2402] message: TxId: 281474976715686 2025-04-04T17:19:54.334320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-04-04T17:19:54.334335Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976715686:0 2025-04-04T17:19:54.334343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715686:0 2025-04-04T17:19:54.334372Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-04-04T17:19:54.334387Z node 3 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-04-04T17:19:54.334441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-04-04T17:19:54.338391Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [3:7489508970821780768:3005], serverId# [3:7489508970821780769:3006], sessionId# [0:0:0] 2025-04-04T17:19:54.338491Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.339680Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.339745Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.342662Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [3:7489508970821780778:3012], serverId# [3:7489508970821780779:3013], sessionId# [0:0:0] 2025-04-04T17:19:54.342790Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.344128Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.344184Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:19:54.347307Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.348838Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.348908Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.353077Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.354902Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.354961Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:19:54.358195Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.359933Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.359994Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.360725Z node 3 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:19:54.366517Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:19:54.366588Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-04-04T17:19:54.374564Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.376123Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.376274Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:19:54.377689Z node 3 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:19:54.378856Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:19:54.378905Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-04-04T17:19:54.381818Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.383300Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.383363Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.386941Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.388272Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.388328Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:19:54.392768Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.437222Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.437320Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.441277Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.443131Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.443188Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:19:54.446984Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.448931Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.448996Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.452037Z node 3 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:19:54.452533Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:19:54.452554Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-04-04T17:19:54.454274Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.455996Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.456056Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:19:54.458406Z node 3 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 2025-04-04T17:19:54.458842Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 
72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:19:54.458855Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-04-04T17:19:54.460548Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.462552Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.462620Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.468396Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.472351Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.472424Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:19:54.475221Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-04-04T17:19:54.477256Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-04-04T17:19:54.477365Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-04-04T17:19:54.480485Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-04-04T17:19:54.483067Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-04-04T17:19:54.483147Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2025-04-04T17:19:54.484158Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-04-04T17:19:54.484570Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-04-04T17:19:54.484814Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-04-04T17:19:54.485090Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-04-04T17:19:54.485421Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-04-04T17:19:54.485812Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TxUsage::WriteToTopic_Demo_38 [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] >> TPartitionTests::AfterRestart_2 [GOOD] >> TxUsage::WriteToTopic_Demo_26 [GOOD] >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] >> TFlatTest::SelectRangeBothLimit [GOOD] >> TxUsage::WriteToTopic_Demo_39 >> TxUsage::WriteToTopic_Demo_27 >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21203, MsgBus: 12722 2025-04-04T17:19:53.677707Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508966270089875:2059];send_to=[0:7307199536658146131:7762515]; 
2025-04-04T17:19:53.677798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bb6/r3tmp/tmp1Tdv6O/pdisk_1.dat 2025-04-04T17:19:54.076142Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:54.082077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:54.082190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:54.084447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21203, node 1 2025-04-04T17:19:54.199100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:54.199122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:54.199131Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:54.199268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12722 TClient is connected to server localhost:12722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:54.702276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:54.728711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:19:54.860810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:55.027501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:55.109724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:56.713874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508979154993552:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:56.713959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:56.936350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:56.969966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.002596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.070601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.141124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.174569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.216514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508983449961366:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.216604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.216888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508983449961371:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.220581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:57.232446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508983449961373:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:57.306726Z node 1 :TX_PROXY ERROR: Actor# [1:7489508983449961426:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1627, MsgBus: 30176 2025-04-04T17:19:53.756486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508966610909999:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:53.756538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bb4/r3tmp/tmpkPHfgz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1627, node 1 2025-04-04T17:19:54.199757Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:19:54.200021Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:19:54.206436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:54.206542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:54.214350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:54.236282Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:54.250011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:54.250031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:54.250042Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:54.250170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30176 TClient is connected to server localhost:30176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:54.779479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:54.795249Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:54.805175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:54.939701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:55.111846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:55.189702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:56.892346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508979495813655:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:56.892676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.087374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.117566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.156093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.224859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.255298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.292415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.390777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508983790781470:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.390852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.391070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508983790781475:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.395362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:57.407043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508983790781477:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:57.473504Z node 1 :TX_PROXY ERROR: Actor# [1:7489508983790781532:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> BasicStatistics::ServerlessGlobalIndex >> TSettingsValidation::TestDifferentDedupParams [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::AfterRestart_2 [GOOD] Test command err: 2025-04-04T17:19:24.747945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508842529760603:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:24.750306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:19:24.826814Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508843797195277:2268];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:24.827059Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:19:24.957737Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ea/r3tmp/tmpKHS5uN/pdisk_1.dat 2025-04-04T17:19:24.979204Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:19:25.211928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:25.246292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:25.246392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:25.247457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:25.247505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:25.253967Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:19:25.254155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:25.254592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3767, node 1 2025-04-04T17:19:25.458783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0014ea/r3tmp/yandex3cJozs.tmp 2025-04-04T17:19:25.458812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0014ea/r3tmp/yandex3cJozs.tmp 2025-04-04T17:19:25.459067Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0014ea/r3tmp/yandex3cJozs.tmp 2025-04-04T17:19:25.459203Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:25.802952Z INFO: TTestServer started on Port 27859 
GrpcPort 3767 TClient is connected to server localhost:27859 PQClient connected to localhost:3767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:26.146471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:26.225989Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:19:26.251636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:28.032485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508859709630733:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.032638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.032720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508859709630746:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.032266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508860977064645:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.032271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508860977064657:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.032367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.041738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:28.050877Z node 2 :TX_PROXY ERROR: Actor# [2:7489508860977064660:2172] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:19:28.073787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508860977064659:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:28.074379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508859709630748:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:28.144118Z node 1 :TX_PROXY ERROR: Actor# [1:7489508859709630842:2760] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:28.161499Z node 2 :TX_PROXY ERROR: Actor# [2:7489508860977064686:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:28.481973Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508859709630852:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:28.481804Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489508860977064693:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:28.482084Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTgxMmNiY2EtNjM4M2JiZDEtZmZmMzJlMWUtN2I4MWIxNTU=, ActorId: [2:7489508860977064643:2312], ActorState: ExecuteState, TraceId: 01jr0x148x37mj16683cmsrsd4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:28.485133Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:28.493742Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDU0OGUxNWEtMzAwYjE2NTItMzZiYjM4MGMtMzRlYzM5YTA=, ActorId: [1:7489508859709630731:2337], ActorState: ExecuteState, TraceId: 01jr0x148x9s8reyvhejhvmpe8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:28.494104Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:28.580186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.684890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:28.818421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", tru ... 
= $Partition; 2025-04-04T17:19:54.840485Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-04-04T17:19:54.840604Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [3:7489508969694841966:3940], Recipient [3:7489508956809939068:3332]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7489508969694841965:3940] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-04T17:19:54.840695Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [3:7489508969694841965:3940], Recipient [3:7489508956809939068:3332]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2025-04-04T17:19:54.840760Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [3:7489508956809939068:3332], Recipient [3:7489508969694841965:3940]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-04T17:19:54.840786Z node 3 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-04T17:19:54.840853Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [3:7489508969694841965:3940], Recipient [3:7489508956809939068:3332]: NActors::TEvents::TEvPoison 2025-04-04T17:19:54.841316Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [3:7489508909565296688:2069], Recipient [3:7489508969694841965:3940]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-04T17:19:54.841343Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:19:54.843947Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [3:7489508909565296914:2279], Recipient [3:7489508969694841965:3940]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=3&id=YzlmNzdkNDgtN2U5NWI4YS1mM2YxMDBhYy04MzE4ODk0MA==" NodeId: 3 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-04T17:19:54.843974Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:19:55.001781Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [3:7489508909565296914:2279], Recipient [3:7489508969694841965:3940]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=YzlmNzdkNDgtN2U5NWI4YS1mM2YxMDBhYy04MzE4ODk0MA==" PreparedQuery: "920540de-5fa9c8da-f37aa659-3354ed6b" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jr0x1yk5bn1y9s8ayat2v9q2" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1743787194656 } 
items { uint64_value: 1743787194656 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 86 2025-04-04T17:19:55.001949Z node 3 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-04-04T17:19:55.001963Z node 3 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2025-04-04T17:19:55.002153Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [3:7489508969694842000:3940], Recipient [3:7489508956809939068:3332]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7489508969694841965:3940] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-04T17:19:55.002206Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [3:7489508969694841965:3940], Recipient [3:7489508956809939068:3332]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-04-04T17:19:55.002323Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [3:7489508956809939068:3332], Recipient [3:7489508969694841965:3940]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-04T17:19:55.002354Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2025-04-04T17:19:55.002614Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [3:7489508969694841965:3940], Recipient [3:7489508956809939068:3332]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 2025-04-04T17:19:55.218192Z node 3 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [3:7489508909565296914:2279], Recipient [3:7489508969694841965:3940]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=YzlmNzdkNDgtN2U5NWI4YS1mM2YxMDBhYy04MzE4ODk0MA==" PreparedQuery: "70cfe0f4-c6e8da83-462980ce-a670618b" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 93 2025-04-04T17:19:55.218258Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T17:19:55.218286Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2025-04-04T17:19:55.218308Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489508969694841965:3940] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2025-04-04T17:19:55.468250Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976710715. 
Ctx: { TraceId: 01jr0x1yws057ef0mhsrc7zwcw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjdmZGE1OWUtZWIxNDExNWItNDZmNmJjYjMtYzQ2YTQ3ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:19:55.935753Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:55.935795Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:57.825617Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:57.825726Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:19:57.854606Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:19:57.857428Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:19:57.000000Z 2025-04-04T17:19:57.857512Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\310\204\304\216\3402" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2025-04-04T17:19:58.686057Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:58.686138Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:19:58.701137Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [6:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:19:58.703382Z node 6 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing 
EndWriteTimestamp from keys completed. Value 2025-04-04T17:19:58.000000Z 2025-04-04T17:19:58.703444Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [6:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-04-04T17:19:52.894219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508961252746317:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:52.894280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e52/r3tmp/tmpHBRG7t/pdisk_1.dat 2025-04-04T17:19:53.231038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:53.297764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:53.297869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:53.299849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23521 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:19:53.489863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:53.501604Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:53.527067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... insert finished 9642 usec 8329 usec 9157 usec 8629 usec 9263 usec 6938 usec 6742 usec 12240 usec 15255 usec 9882 usec 2025-04-04T17:19:56.093240Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508978189378394:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:56.093337Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e52/r3tmp/tmp9HnSVm/pdisk_1.dat 2025-04-04T17:19:56.189764Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:56.230922Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:56.231018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:56.232374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2084 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:19:56.416323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:56.441800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> KqpScripting::ScanQueryTruncate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27581, MsgBus: 65528 2025-04-04T17:19:54.648598Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508970998936572:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:54.650446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bb2/r3tmp/tmpRY0WQz/pdisk_1.dat 2025-04-04T17:19:55.042770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:55.056572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:55.056669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:55.057941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27581, node 1 2025-04-04T17:19:55.134783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:55.134804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:55.134814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:55.134892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65528 TClient is connected to server localhost:65528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:55.629635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:55.654087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:55.775838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:55.916912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:55.993915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:57.746326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508983883840110:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:57.746462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.018123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.053127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.080881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.110110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.136547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.167061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.252146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508988178807924:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.252235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.252463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508988178807929:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.255637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:58.266859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508988178807931:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:58.321567Z node 1 :TX_PROXY ERROR: Actor# [1:7489508988178807984:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Balancing::Balancing_OneTopic_TopicApi >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> TPQTest::TestWritePQCompact ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 15894, MsgBus: 10741 2025-04-04T17:19:49.794256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508951733232844:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:49.803655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c0/r3tmp/tmp00vxnc/pdisk_1.dat 2025-04-04T17:19:50.236276Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:50.255250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:50.255366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:50.256685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15894, node 1 2025-04-04T17:19:50.378844Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:50.378884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:50.378892Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:50.379006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10741 TClient is connected to server localhost:10741 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:50.895791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.917382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:50.933694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:51.074289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:51.229855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:51.306886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:53.056630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508968913103662:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.056767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.317704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.353256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.386584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.426019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.457333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.491579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.547532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508968913104171:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.547605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.547845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508968913104176:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:53.552184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:53.564572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508968913104179:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:53.665842Z node 1 :TX_PROXY ERROR: Actor# [1:7489508968913104234:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:54.753185Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508973208071804:2496], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-04T17:19:54.753675Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjY1NmMzN2YtNWQxM2Y2M2EtZGI4M2U5MjEtNDJmODI0Yjg=, ActorId: [1:7489508973208071802:2495], ActorState: ExecuteState, TraceId: 01jr0x1yb8fp8jtbccmpazhecb, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:746: Error: Scan query should have a single result set., code: 2029 2025-04-04T17:19:54.793665Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508951733232844:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:54.793722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:54.850258Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508973208071852:2510], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 2025-04-04T17:19:54.851741Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg3NDE5YTEtZDg4YjRmYzYtODI2Y2M0YWItYTJkZDcyZWI=, ActorId: [1:7489508973208071850:2509], ActorState: ExecuteState, TraceId: 01jr0x1yetd83knwfctz6wk4h5, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 22988, MsgBus: 26209 2025-04-04T17:19:55.705750Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508975877027502:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:55.705817Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019c0/r3tmp/tmpaQnVrV/pdisk_1.dat 2025-04-04T17:19:55.778931Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:55.816395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:55.816490Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22988, node 2 2025-04-04T17:19:55.817758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:55.859368Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:55.859390Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:55.859396Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:55.859521Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26209 TClient is connected to server localhost:26209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:56.274394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:56.281420Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:56.296991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:56.374636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:56.508238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:56.572680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:58.644080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508988761931167:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.644193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.678394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.710739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.737205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.763962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.794321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.823380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.862834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508988761931676:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.862915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508988761931681:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.862933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.866063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:58.874456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508988761931683:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:58.970801Z node 2 :TX_PROXY ERROR: Actor# [2:7489508988761931738:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:59.922342Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787199910, txId: 281474976715671] shutting down >> KqpScripting::StreamExecuteYqlScriptData >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> TxUsage::WriteToTopic_Demo_7 [GOOD] >> KqpScanArrowFormat::SingleKey >> TLocksTest::Range_EmptyKey [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService-UseSink >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-04-04T17:19:22.502115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508835256890530:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:22.509486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e79/r3tmp/tmp5IBldK/pdisk_1.dat 2025-04-04T17:19:22.829665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:22.829779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:22.831466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:22.835955Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:2850 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:23.086879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:23.100617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:23.118957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:23.229695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:23.269499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e79/r3tmp/tmpzflxvV/pdisk_1.dat 2025-04-04T17:19:25.783931Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:25.872363Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:25.877524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:25.877625Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:25.879076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24576 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:26.056873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:26.078936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.150097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:26.225464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:29.262896Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508865297917152:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:29.263905Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e79/r3tmp/tmpYffQxD/pdisk_1.dat 2025-04-04T17:19:29.377208Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:29.419014Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:29.419082Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:29.421066Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1265 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:19:29.587501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:29.608993Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:29.687381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:29.760343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:32.901844Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508875939594362:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e79/r3tmp/tmp9NcLN7/pdisk_1.dat 2025-04-04T17:19:32.947003Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:19:33.027311Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:33.047920Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:33.048004Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:33.053189Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23480 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:33.263707Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:33.290300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:33.368271Z n ... necting -> Connected TClient is connected to server localhost:24287 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:19:44.967282Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:44.985378Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:19:44.989997Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:45.074224Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:45.127177Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:19:48.819544Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489508945940269902:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:48.819617Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e79/r3tmp/tmpNGsCzE/pdisk_1.dat 2025-04-04T17:19:48.952279Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:48.991924Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:48.992015Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:48.993434Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6151 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:49.228388Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-04-04T17:19:49.258150Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:19:49.338295Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.409571Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:53.349833Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489508967139395900:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:53.350696Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e79/r3tmp/tmpEAxVMT/pdisk_1.dat 2025-04-04T17:19:53.476444Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:53.505735Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:53.505812Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:53.507662Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17483 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:53.771984Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:53.779209Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:53.799208Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:53.880935Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:53.960466Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.822622Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489508983920514673:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:57.822702Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e79/r3tmp/tmpAWvHEn/pdisk_1.dat 2025-04-04T17:19:57.963804Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:57.977347Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:57.977460Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:57.980687Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28481 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:58.261788Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:58.284118Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:19:58.289793Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:58.377586Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:58.437826Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... >> LocalPartition::Restarts [GOOD] >> LocalPartition::DescribeBadPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11709, MsgBus: 30303 2025-04-04T17:19:49.009877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508950130381139:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:49.010057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bb9/r3tmp/tmpDGl0c3/pdisk_1.dat 2025-04-04T17:19:49.328145Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11709, node 1 2025-04-04T17:19:49.375012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:49.375122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:49.387106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:49.424453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:49.424480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:49.424495Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:49.424605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30303 TClient is connected to server localhost:30303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:49.928375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:49.943533Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:49.955032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.092898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.254408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.333441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:51.828362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508958720317507:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:51.828520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:52.045609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.108031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.138390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.168763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.199366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.236124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:52.286501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508963015285314:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:52.286594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:52.286919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508963015285319:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:52.290497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:52.300811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508963015285321:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:52.355662Z node 1 :TX_PROXY ERROR: Actor# [1:7489508963015285374:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:53.335897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.410981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:19:53.452359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:54.010048Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508950130381139:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:54.010126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27773, MsgBus: 61194 2025-04-04T17:19:56.454081Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508981365091400:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:56.454165Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bb9/r3tmp/tmpkrPKko/pdisk_1.dat 2025-04-04T17:19:56.585877Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:56.617135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:56.617226Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:56.618754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27773, node 2 2025-04-04T17:19:56.686570Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:56.686592Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:56.686599Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:56.686718Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61194 TClient is connected to server localhost:61194 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:57.121548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.126266Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:19:57.133024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.208321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.361773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.442882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:59.455656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508994249995086:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:59.455778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:59.473546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:59.500640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:59.529158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:59.557815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:59.587066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:59.653883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:59.735612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508994249995606:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:59.735706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:59.735926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489508994249995611:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:59.739000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:59.748335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489508994249995613:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:19:59.811431Z node 2 :TX_PROXY ERROR: Actor# [2:7489508994249995665:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:00.582910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:00.658039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:00.695927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:01.472675Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489508981365091400:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:01.482147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TxUsage::WriteToTopic_Demo_16 [GOOD] >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] >> TxUsage::WriteToTopic_Demo_17 >> KqpQueryPerf::Replace+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2025-04-04T17:19:07.736103Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:07.819316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:07.837756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:07.837982Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:07.844181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:07.844343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:07.844508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:07.844579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:07.844667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:07.844735Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:07.844798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:07.844875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:07.844933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:07.845059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:07.845132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:07.845208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:07.866151Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:07.866372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:07.866421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:07.866559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:07.866711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:07.866787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:07.866828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:07.866898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:07.866938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:07.866964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:07.866998Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:07.867148Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:07.867194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:07.867221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:07.867253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:07.867349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:07.867415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:07.867487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:07.867544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:07.867611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:07.867634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:07.867666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:07.867700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:07.867726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:07.867744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:07.868118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-04T17:19:07.868227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-04T17:19:07.868317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-04-04T17:19:07.868390Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-04T17:19:07.868585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:07.868645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:07.868694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:07.868899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:07.868954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:07.868996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:07.869170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:07.869227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:07.869254Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:07.869422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:07.869464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:07.869491Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:07.869629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:07.869674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:07.869724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
p_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=b8e7308-117911f0-b5be74bf-9b25aea1,;}; 2025-04-04T17:20:03.679138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:829;event=skip_compaction;reason=disabled; 2025-04-04T17:20:03.679190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T17:20:03.679252Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:20:03.679303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:03.679345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:03.679427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.407000s; 2025-04-04T17:20:03.679478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:20:03.933243Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:278:2291];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1; 2025-04-04T17:20:03.972477Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184 2025-04-04T17:20:03.972763Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1 2025-04-04T17:20:03.985577Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1 2025-04-04T17:20:03.985751Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=263;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:767;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=b8e7308-117911f0-b5be74bf-9b25aea1,;}; 2025-04-04T17:20:03.987145Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:1:574112:0]; 2025-04-04T17:20:03.987272Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:2:592928:0]; GC for channel 3 deletes blobs: GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: [9437184:3:83:4:0:5870200:0] Added portions: 151 152 2025-04-04T17:20:04.010849Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T17:20:04.011103Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[264] (CS::INDEXATION) apply at tablet 9437184 2025-04-04T17:20:04.013254Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save 
Batch GenStep: 3:86 Blob count: 2 2025-04-04T17:20:04.013362Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:20:04.030166Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-04T17:20:04.030287Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;fline=with_appended.cpp:65;portions=75,76,;task_id=b8e7308-117911f0-b5be74bf-9b25aea1; 2025-04-04T17:20:04.030557Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::b8e7308-117911f0-b5be74bf-9b25aea1; 2025-04-04T17:20:04.030623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:20:04.030694Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:20:04.030760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=columnshard_impl.cpp:829;event=skip_compaction;reason=disabled; 2025-04-04T17:20:04.030809Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T17:20:04.030880Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:20:04.030933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:04.030981Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:04.031068Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.395500s; 2025-04-04T17:20:04.031126Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=b8e7308-117911f0-b5be74bf-9b25aea1;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:20:04.031231Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:85:3:0:5870200:0] 2025-04-04T17:20:04.031288Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:86 Blob count: 2 2025-04-04T17:20:04.031390Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 
2025-04-04T17:20:04.032410Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=b8e7308-117911f0-b5be74bf-9b25aea1;mem=5963210;cpu=0; 2025-04-04T17:20:04.033962Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-04-04T17:20:04.034043Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] execute at tablet 9437184 2025-04-04T17:20:04.034418Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-04T17:20:04.034643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:211;event=finished_tx;tx_id=1043; 2025-04-04T17:20:04.046868Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] complete at tablet 9437184 2025-04-04T17:20:04.046956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-04T17:20:04.047053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=5870200; 2025-04-04T17:20:04.047162Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::c1da96a-117911f0-a9472609-21f4fd51; 2025-04-04T17:20:04.047201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-04T17:20:04.047279Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:752;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=c1da96a-117911f0-a9472609-21f4fd51; 2025-04-04T17:20:04.047328Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:618;event=start_changes;type=CS::INDEXATION;task_id=c1da96a-117911f0-a9472609-21f4fd51; 2025-04-04T17:20:04.047423Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:278:2291];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=c1da96a-117911f0-a9472609-21f4fd51;type=CS::INDEXATION;priority=0;; 2025-04-04T17:20:04.047624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:278:2291];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=c1da96a-117911f0-a9472609-21f4fd51;type=CS::INDEXATION;priority=0;; 2025-04-04T17:20:04.047659Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:278:2291];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=c1da96a-117911f0-a9472609-21f4fd51;mem=5963210;cpu=0; 2025-04-04T17:20:04.047692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:278:2291];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=c1da96a-117911f0-a9472609-21f4fd51;task_id=46;mem=5963210;cpu=0; 2025-04-04T17:20:04.047828Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=c1da96a-117911f0-a9472609-21f4fd51;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=c1da96a-117911f0-a9472609-21f4fd51; Added portions: 153 154 2025-04-04T17:20:04.583577Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=c1da96a-117911f0-a9472609-21f4fd51;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-04T17:20:04.583879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:278:2291];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> TLocksTest::CK_Range_GoodLock [GOOD] >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns >> KqpNewEngine::SimpleUpsertSelect >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16720, MsgBus: 16389 2025-04-04T17:19:55.793177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508977680413752:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:55.793523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001baf/r3tmp/tmpQlNghq/pdisk_1.dat 2025-04-04T17:19:56.073663Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:56.080974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:56.081107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:56.083697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16720, node 1 2025-04-04T17:19:56.161784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:56.161812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:56.161820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:56.161949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16389 TClient is connected to server localhost:16389 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:56.711304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:56.723424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:56.733848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:56.873404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.038359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.106418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:58.526268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508990565317225:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.526402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.741732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.771137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.806733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.838377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.864829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.894479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:19:58.977373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508990565317740:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.977463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.977482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508990565317745:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:58.980356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:19:58.989013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508990565317747:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:19:59.052248Z node 1 :TX_PROXY ERROR: Actor# [1:7489508994860285096:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11549, MsgBus: 31928 2025-04-04T17:20:00.751487Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508996800631269:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:00.751558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001baf/r3tmp/tmpJ8VVE4/pdisk_1.dat 2025-04-04T17:20:00.861224Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11549, node 2 2025-04-04T17:20:00.893870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:00.893948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:00.895225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:00.902751Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:00.902768Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:00.902775Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:00.902877Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31928 TClient is connected to server localhost:31928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:01.304805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:01.319944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:01.371190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:01.521558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:01.602389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.755521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509009685534922:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:03.755900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:03.775289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.806255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.833887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.865285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.894440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.929826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.976877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509009685535437:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:03.976950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:03.977105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509009685535442:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:03.980531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:03.990303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509009685535444:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:04.087462Z node 2 :TX_PROXY ERROR: Actor# [2:7489509013980502794:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> KqpPragma::Auth >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-04-04T17:19:26.901482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508852280829704:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:26.901925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e78/r3tmp/tmp3ro5t1/pdisk_1.dat 2025-04-04T17:19:27.360251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:27.383905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:27.384015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:27.386261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8618 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:27.641618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:27.658567Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:27.677343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:27.826893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:27.894037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.166322Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508866574702009:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:30.166416Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e78/r3tmp/tmpT6NT63/pdisk_1.dat 2025-04-04T17:19:30.280154Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:30.308065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:30.308161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:30.309436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18254 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:30.481062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.486997Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.508309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:30.590136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:30.660259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e78/r3tmp/tmp3Wn8L5/pdisk_1.dat 2025-04-04T17:19:33.635976Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:33.687930Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:33.715513Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:33.715601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:33.717174Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26302 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting...2025-04-04T17:19:33.892394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.898494Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:33.909221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:33.983497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.038982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:37.240796Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508897277493654:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:37.308059Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e78/r3tmp/tmpkqSm7D/pdisk_1.dat 2025-04-04T17:19:37.395715Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:37.412585Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:37.412716Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:37.414461Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15466 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:37.612424Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard ... SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:49.083623Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.091444Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:49.105462Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.169430Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.236870Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:52.732815Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489508962483389625:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:52.732943Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e78/r3tmp/tmpu0tVvq/pdisk_1.dat 2025-04-04T17:19:52.910598Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:52.931716Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:52.931816Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:52.933658Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28606 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:53.164597Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:53.184789Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:19:53.190009Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:53.265943Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:53.355085Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.286720Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489508983260095679:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:57.286805Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e78/r3tmp/tmppakeMc/pdisk_1.dat 2025-04-04T17:19:57.414068Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:57.432756Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:57.432861Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:57.434599Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64823 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:57.695104Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.703149Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.721417Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.786729Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:57.862208Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:01.514690Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509002043032517:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:01.514787Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e78/r3tmp/tmpR7Xpn1/pdisk_1.dat 2025-04-04T17:20:01.674185Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:01.707389Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:01.707497Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:01.709258Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29078 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:01.988691Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:01.995392Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:02.014442Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:02.081654Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:02.145280Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
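Note on the recurring `Resource pool default not found or you don't have access permissions` warnings in the blocks above: on a freshly started test cluster the KQP workload service probes `/Root/.metadata/workload_manager/pools/default` before the pool has been auto-created, so TPoolFetcherActor reports NOT_FOUND, TPoolCreatorActor schedules a retry after the `Transaction ... completed, doublechecking` message, and the later TX_PROXY `path exist, request accepts it` line shows the concurrent creation (ESchemeOpCreateResourcePool) resolving harmlessly. As a minimal sketch only, such a pool could also be created explicitly; the YQL below is illustrative and its WITH settings are assumptions, not values taken from this log:

    -- Hypothetical YQL sketch: explicit creation of the pool the fetcher probes for.
    -- In these tests the pool is created automatically; setting names are assumed
    -- workload-manager options and should be checked against the YDB docs.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );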
>> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2 >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> KqpNotNullColumns::UpdateNotNullPk >> KqpQueryPerf::Update+QueryService+UseSink >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg >> TxUsage::WriteToTopic_Demo_39 [GOOD] >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart [GOOD] >> TxUsage::SessionAbort >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> TxUsage::WriteToTopic_Demo_40 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24987, MsgBus: 1871 2025-04-04T17:20:06.153289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509021819634305:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:06.153446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001baa/r3tmp/tmp5IKTWW/pdisk_1.dat 2025-04-04T17:20:06.441541Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:06.460724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:06.460860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:06.462972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24987, node 1 2025-04-04T17:20:06.548578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:06.548605Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:06.548628Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:06.548743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1871 TClient is connected to server localhost:1871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:20:07.063016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:07.090728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:07.230751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:20:07.359842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:07.441000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.009014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509034704537968:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.009130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.242465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.293499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.323367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.345911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.373627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.405428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.450262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509034704538480:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.450347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.450509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509034704538485:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.453842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:09.462340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509034704538487:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:09.527412Z node 1 :TX_PROXY ERROR: Actor# [1:7489509034704538541:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TListAllTopicsTests::ListLimitAndPaging [GOOD] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5778, MsgBus: 25833 2025-04-04T17:20:02.364434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509006890923864:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:02.365057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bae/r3tmp/tmp2aU3O4/pdisk_1.dat 2025-04-04T17:20:02.718073Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5778, node 1 2025-04-04T17:20:02.750419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:02.750538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:02.752197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:02.802079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:02.802109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:02.802120Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:02.802256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25833 TClient is connected to server localhost:25833 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:03.324687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.338947Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:20:03.350126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.489678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:03.639126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.703705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:05.246047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509019775827514:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.246185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.419505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.445036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.490147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.517421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.548018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.580775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.623004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509019775828023:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.623081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.623353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509019775828028:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.626934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:05.636557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509019775828030:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:05.729791Z node 1 :TX_PROXY ERROR: Actor# [1:7489509019775828084:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22981, MsgBus: 7524 2025-04-04T17:20:07.396972Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509026470837835:2131];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:07.398757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bae/r3tmp/tmpqfUEES/pdisk_1.dat 2025-04-04T17:20:07.511339Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:07.530225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:07.530309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:07.531806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22981, node 2 2025-04-04T17:20:07.587225Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:07.587252Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:07.587265Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:07.587400Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7524 TClient is connected to server localhost:7524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:08.008807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:08.020805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:08.068862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:08.228426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:08.288567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:10.316540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509039355741416:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.316633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.349578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.376553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.406402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.433423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.459558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.488656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.564706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509039355741932:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.564798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.564863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509039355741937:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.568695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:10.576544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509039355741939:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:10.629565Z node 2 :TX_PROXY ERROR: Actor# [2:7489509039355741993:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMeteringSink::FlushStorageV1 [GOOD] Test command err: 2025-04-04T17:19:40.630496Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508911813376662:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:40.633793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:19:40.833395Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e6/r3tmp/tmpJHeUZ2/pdisk_1.dat 2025-04-04T17:19:41.061440Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19588, node 1 2025-04-04T17:19:41.094347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:41.094450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:41.096413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:41.190884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0014e6/r3tmp/yandexnnseDK.tmp 2025-04-04T17:19:41.190909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0014e6/r3tmp/yandexnnseDK.tmp 2025-04-04T17:19:41.191061Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0014e6/r3tmp/yandexnnseDK.tmp 2025-04-04T17:19:41.191194Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:41.240475Z INFO: TTestServer started on Port 11357 GrpcPort 19588 TClient is connected to server localhost:11357 PQClient connected to localhost:19588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:41.506116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:41.536063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:41.684043Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:43.538689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508924698279197:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:43.538689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508924698279211:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:43.538792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:43.542467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:43.554944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508924698279234:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:43.696175Z node 1 :TX_PROXY ERROR: Actor# [1:7489508924698279300:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:43.724444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:43.743774Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508924698279308:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:43.745638Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTQ2ZDg3ZjYtZTAxZjAwMGUtYTVhZWVhZC1hMDc4NWRkMQ==, ActorId: [1:7489508924698279193:2334], ActorState: ExecuteState, TraceId: 01jr0x1kd88cx43qzyd9jk1g49, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:43.747542Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:43.768769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:43.843843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:19:44.078980Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0x1ksh1f2r3g42f8dbq9yb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE1OTMxMDQtNjlkOGIwYTItNmNkNTI2MGMtNmFmYWIwMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489508928993246883:2626] 2025-04-04T17:19:45.626819Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508911813376662:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:45.626898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:50.037897Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-04T17:19:50.043540Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-04-04T17:19:50.083145Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:50.083531Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:50.083940Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:50.083941Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:50.084122Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:50.084140Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:50.084157Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:50.084175Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:50.084199Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:50.084221Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:50.084537Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508954763051043:2840] connected; active server actors: 1 2025-04-04T17:19:50.084941Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-04T17:19:50.085505Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-04-04T17:19:50.085632Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:50.085747Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:50.085808Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] doesn't have tx info 2025-04-04T17:19:50.085821Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:50.085832Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] no config, start with empty partitions and default config 2025-04-04T17:19:50.085845Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:50.085867Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:50.085869Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] BALANCER INIT DONE for topic2: (0, 72075186224037892) 2025-04-04T17:19:50.085886Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037894] doesn't have tx writes info 2025-04-04T17:19:50.085950Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508954763051042:2839], now have 1 active actors on pipe 2025-04-04T17:19:50.085954Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][] pipe [1:7489508954763051055:2848] connected; active server actors: 1 2025-04-04T17:19:50.086064Z node 1 :PERSQUEUE_READ_BAL ... :0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:20:11.535309Z node 3 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 0, State: StateInit] bootstrapping 0 [3:7489509045557081003:2498] 2025-04-04T17:20:11.536227Z node 3 :PERSQUEUE DEBUG: [dir1/topic2:0:Initializer] Initializing completed. 
2025-04-04T17:20:11.536267Z node 3 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 0, State: StateInit] init complete for topic 'dir1/topic2' partition 0 generation 1 [3:7489509045557081003:2498] 2025-04-04T17:20:11.536297Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateInit] SYNC INIT topic dir1/topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:20:11.536322Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] Process pending events. Count 1 2025-04-04T17:20:11.536598Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvProposePartitionConfigResult Step 1743787211579, TxId 281474976715677, Partition 0 2025-04-04T17:20:11.536625Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] Handle TEvProposePartitionConfigResult 2025-04-04T17:20:11.536643Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] Partition responses 1/1 2025-04-04T17:20:11.536657Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state CALCULATING 2025-04-04T17:20:11.536671Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State CALCULATING 2025-04-04T17:20:11.536688Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State CALCULATING FrontTxId 281474976715677 2025-04-04T17:20:11.536702Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 1, Expected 1 2025-04-04T17:20:11.536721Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState CALCULATED 2025-04-04T17:20:11.536737Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from CALCULATING to CALCULATED 2025-04-04T17:20:11.537066Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] save tx TxId: 281474976715677 State: CALCULATED MinStep: 1743787211502 MaxStep: 18446744073709551615 Step: 1743787211579 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic2" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir1/topic2" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7489509002607406517 RawX2: 12884904026 } Partitions { Partition { PartitionId: 0 } } 2025-04-04T17:20:11.537209Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:20:11.539732Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:20:11.539756Z node 3 :PERSQUEUE DEBUG: [PQ: 
72075186224037896] Try execute txs with state CALCULATED 2025-04-04T17:20:11.539772Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State CALCULATED 2025-04-04T17:20:11.539792Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State CALCULATED FrontTxId 281474976715677 2025-04-04T17:20:11.539809Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState WAIT_RS 2025-04-04T17:20:11.539833Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from CALCULATED to WAIT_RS 2025-04-04T17:20:11.539874Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-04T17:20:11.539896Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] HaveParticipantsDecision 1 2025-04-04T17:20:11.539962Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState EXECUTING 2025-04-04T17:20:11.539982Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from WAIT_RS to EXECUTING 2025-04-04T17:20:11.539994Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 0, Expected 1 2025-04-04T17:20:11.540044Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743787211579, TxId 281474976715677 2025-04-04T17:20:11.540284Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:20:11.541538Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:11.541659Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvTxCommitDone Step 1743787211579, TxId 281474976715677, Partition 0 2025-04-04T17:20:11.541684Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state EXECUTING 2025-04-04T17:20:11.541701Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State EXECUTING 2025-04-04T17:20:11.541717Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State EXECUTING FrontTxId 281474976715677 2025-04-04T17:20:11.541731Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 1, Expected 1 2025-04-04T17:20:11.541754Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId: 281474976715677 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-04T17:20:11.541780Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] complete TxId 281474976715677 2025-04-04T17:20:11.542045Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic2" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } 
TopicPath: "/Root/dir1/topic2" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-04-04T17:20:11.542105Z node 3 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:11.542167Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete partitions for TxId 281474976715677 2025-04-04T17:20:11.542187Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState EXECUTED 2025-04-04T17:20:11.542201Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from EXECUTING to EXECUTED 2025-04-04T17:20:11.542541Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] save tx TxId: 281474976715677 State: EXECUTED MinStep: 1743787211502 MaxStep: 18446744073709551615 Step: 1743787211579 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic2" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir1/topic2" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7489509002607406517 RawX2: 12884904026 } Partitions { Partition { PartitionId: 0 } } 2025-04-04T17:20:11.542706Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:20:11.544682Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:20:11.544710Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state EXECUTED 2025-04-04T17:20:11.544725Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State EXECUTED 2025-04-04T17:20:11.544742Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State EXECUTED FrontTxId 281474976715677 2025-04-04T17:20:11.544758Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:20:11.544787Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState WAIT_RS_ACKS 2025-04-04T17:20:11.544803Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:20:11.544823Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] PredicateAcks: 0/0 2025-04-04T17:20:11.544832Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 
2025-04-04T17:20:11.544843Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] PredicateAcks: 0/0 2025-04-04T17:20:11.544857Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] add an TxId 281474976715677 to the list for deletion 2025-04-04T17:20:11.544875Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState DELETING 2025-04-04T17:20:11.544893Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete key for TxId 281474976715677 2025-04-04T17:20:11.544935Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:20:11.546336Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:20:11.546375Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state DELETING 2025-04-04T17:20:11.546407Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State DELETING 2025-04-04T17:20:11.546428Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete TxId 281474976715677 >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 22346, MsgBus: 5269 2025-04-04T17:20:02.143313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509005092952168:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:02.143911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019bb/r3tmp/tmp55iHsw/pdisk_1.dat 2025-04-04T17:20:02.499995Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22346, node 1 2025-04-04T17:20:02.553152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:02.553280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:02.555059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:02.603641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:02.603667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:02.603678Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:02.603828Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5269 TClient is connected to server localhost:5269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:03.111424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.131963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:20:03.140873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.297871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.450390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.516544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:05.046734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509017977855830:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.046840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.331526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.376589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.403614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.430516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.461355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.493610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.538404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509017977856340:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.538494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.538614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509017977856345:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.542167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:05.552398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509017977856347:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:05.654739Z node 1 :TX_PROXY ERROR: Actor# [1:7489509017977856401:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:06.887379Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787206896, txId: 281474976710671] shutting down 2025-04-04T17:20:07.143647Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509005092952168:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:07.143727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5903, MsgBus: 64018 2025-04-04T17:20:07.752996Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509026993380396:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:07.753065Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019bb/r3tmp/tmpPCy6lC/pdisk_1.dat 2025-04-04T17:20:07.844539Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5903, node 2 2025-04-04T17:20:07.879639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:07.879785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:07.882211Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:07.922114Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:07.922139Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:07.922144Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:07.922233Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64018 TClient is connected to server localhost:64018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:08.338558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:08.346727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:08.419827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:08.572399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:08.650789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:10.926343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509039878284035:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.926431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.959589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.990878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.018508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.085716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.113326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.153353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.233319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509044173251848:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.233366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.233558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509044173251853:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.236140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:11.245035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509044173251855:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:11.313737Z node 2 :TX_PROXY ERROR: Actor# [2:7489509044173251908:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:12.408872Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787212440, txId: 281474976715671] shutting down 2025-04-04T17:20:12.546091Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787212573, txId: 281474976715673] shutting down >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWriteSplit >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink >> KqpNotNullColumns::UpsertNotNullPk >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20538, MsgBus: 23097 2025-04-04T17:20:09.944926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509036014559738:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:09.945061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ba4/r3tmp/tmpegK0Wv/pdisk_1.dat 2025-04-04T17:20:10.257545Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20538, node 1 2025-04-04T17:20:10.333564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:10.333631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:10.335707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:10.347988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:10.348005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:10.348011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:10.348115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23097 TClient is connected to server localhost:23097 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:10.815061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:10.836160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:10.959110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:11.085829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:11.158357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:20:12.897088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509048899463421:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:12.897227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.130990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.163903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.189010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.221880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.249457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.316220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.362524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509053194431233:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.362621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.363193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509053194431238:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.367166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:13.377709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509053194431240:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:13.449337Z node 1 :TX_PROXY ERROR: Actor# [1:7489509053194431293:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TColumnShardTestReadWrite::CompactionGC [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::StaleRO >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2025-04-04T17:19:17.532213Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:17.614766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:17.637135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:17.637377Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:17.644903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:17.645109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:17.645329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:17.645426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:17.645541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:17.645643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:17.645756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:17.645860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:17.645974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:17.646100Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.646538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:17.646695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:17.683728Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:17.683930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:17.683975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:17.684101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:17.684252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:17.684307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:17.684346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:17.684410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:17.684452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:17.684478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:17.684494Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:17.684638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:17.684681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:17.684727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:17.684762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:17.684850Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:17.684913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:17.684961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:17.684987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:17.685072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:17.685115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:17.685145Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:17.685205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:17.685234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:17.685253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:17.685553Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=32; 2025-04-04T17:19:17.685630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-04T17:19:17.685693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-04T17:19:17.685754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-04T17:19:17.685922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:17.685987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:17.686018Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:17.686176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-04T17:19:17.686214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.686271Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:17.686404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:17.686446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:17.686473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:17.686622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:17.686662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:17.686694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:17.686804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:17.686882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:17.686929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
ress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=1264d01e-117911f0-b1fc7851-5bc591fa,;}; 2025-04-04T17:20:15.134103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:829;event=skip_compaction;reason=disabled; 2025-04-04T17:20:15.134151Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T17:20:15.134213Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:20:15.134282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:15.134326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:15.134406Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.413000s; 2025-04-04T17:20:15.134456Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:20:15.377052Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1; 2025-04-04T17:20:15.417896Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184 2025-04-04T17:20:15.418135Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-04-04T17:20:15.431565Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-04-04T17:20:15.431721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=270;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:767;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=1264d01e-117911f0-b1fc7851-5bc591fa,;}; 2025-04-04T17:20:15.432835Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:1:574112:0]; 2025-04-04T17:20:15.432942Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:2:592928:0]; GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: GC for channel 3 deletes blobs: [9437184:2:85:3:0:5870200:0] Added portions: 151 152 2025-04-04T17:20:15.452321Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T17:20:15.452676Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[271] (CS::INDEXATION) apply at tablet 9437184 2025-04-04T17:20:15.455907Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:88 Blob 
count: 2 2025-04-04T17:20:15.456023Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:20:15.470584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-04-04T17:20:15.470663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;fline=with_appended.cpp:65;portions=75,76,;task_id=1264d01e-117911f0-b1fc7851-5bc591fa; 2025-04-04T17:20:15.470948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::1264d01e-117911f0-b1fc7851-5bc591fa; 2025-04-04T17:20:15.471007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:20:15.471071Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:20:15.471116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=columnshard_impl.cpp:829;event=skip_compaction;reason=disabled; 2025-04-04T17:20:15.471164Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=0; 2025-04-04T17:20:15.471232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:20:15.471283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:15.471326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:20:15.471406Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.401500s; 2025-04-04T17:20:15.471459Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1264d01e-117911f0-b1fc7851-5bc591fa;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:20:15.471560Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:87:2:0:5870200:0] 2025-04-04T17:20:15.471614Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:88 Blob count: 2 2025-04-04T17:20:15.471708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:20:15.472601Z node 
1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=1264d01e-117911f0-b1fc7851-5bc591fa;mem=5963210;cpu=0; 2025-04-04T17:20:15.473881Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-04-04T17:20:15.473953Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] execute at tablet 9437184 2025-04-04T17:20:15.474757Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-04T17:20:15.474986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:211;event=finished_tx;tx_id=1043; 2025-04-04T17:20:15.493549Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] complete at tablet 9437184 2025-04-04T17:20:15.493680Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0; 2025-04-04T17:20:15.493828Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=5870200; 2025-04-04T17:20:15.493965Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::12f04d60-117911f0-aee9ee0b-2927ce49; 2025-04-04T17:20:15.494019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-04-04T17:20:15.494142Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:752;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49; 2025-04-04T17:20:15.494210Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:618;event=start_changes;type=CS::INDEXATION;task_id=12f04d60-117911f0-aee9ee0b-2927ce49; 2025-04-04T17:20:15.494438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49;type=CS::INDEXATION;priority=0;; 2025-04-04T17:20:15.494595Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49;type=CS::INDEXATION;priority=0;; 2025-04-04T17:20:15.494629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49;mem=5963210;cpu=0; 2025-04-04T17:20:15.494660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49;task_id=46;mem=5963210;cpu=0; 2025-04-04T17:20:15.494808Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49; Added 
portions: 153 154
2025-04-04T17:20:16.049909Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=12f04d60-117911f0-aee9ee0b-2927ce49;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();;
2025-04-04T17:20:16.050053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2;
Compactions happened: 14
Indexations happened: 31
Cleanups happened: 1
Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33
Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpNotNullColumns::UpdateNotNullPkPg [GOOD]
>> KqpNotNullColumns::UpdateNotNull
>> TxUsage::WriteToTopic_Demo_8 [GOOD]
>> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD]
>> TxUsage::WriteToTopic_Demo_17 [GOOD]
>> TPQTest::TestWriteTimeStampEstimate [GOOD]
>> TPQTest::TestWriteTimeLag
>> TPQTabletTests::Multiple_PQTablets_1
>> TxUsage::WriteToTopic_Demo_9
>> TPQTabletTests::Multiple_PQTablets_1 [GOOD]
>> TPQTabletTests::Multiple_PQTablets_2
>> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD]
>> TPQTabletTests::Multiple_PQTablets_2 [GOOD]
>> TxUsage::WriteToTopic_Demo_18_RestartNo
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 11865, MsgBus: 7860
2025-04-04T17:20:02.793571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509004814284569:2263];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:20:02.794510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bab/r3tmp/tmpTqcTF4/pdisk_1.dat
2025-04-04T17:20:03.162804Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11865, node 1
2025-04-04T17:20:03.217091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:20:03.217770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:20:03.220622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:20:03.238880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:20:03.238903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:20:03.238910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:20:03.239014Z
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7860 TClient is connected to server localhost:7860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:03.749586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.771916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:03.908473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:04.048531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:04.118714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:05.636335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509017699188040:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.636442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.862005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.890566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.945010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:05.975370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.008629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.042049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.085343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509021994155847:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:06.085434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:06.085567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509021994155852:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:06.089213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:06.098108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509021994155854:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:06.182140Z node 1 :TX_PROXY ERROR: Actor# [1:7489509021994155908:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:07.040475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:07.072710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:07.107400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:07.792277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509004814284569:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:07.792360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18048, MsgBus: 15914 2025-04-04T17:20:10.155038Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509041883679394:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:10.155134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bab/r3tmp/tmpaPYNt9/pdisk_1.dat 2025-04-04T17:20:10.243801Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18048, node 2 2025-04-04T17:20:10.280350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:10.280415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:10.281450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:10.298887Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:10.298906Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:10.298911Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:10.298997Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15914 TClient is connected to server localhost:15914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:10.704805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:10.711905Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:10.718128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:10.798627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:10.959078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.032277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.484532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509054768583046:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.484629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.526920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.596689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.635753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.673999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.711655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.783870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.877204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509054768583567:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.877290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.877434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509054768583572:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.880693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:13.890993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509054768583574:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:20:13.971300Z node 2 :TX_PROXY ERROR: Actor# [2:7489509054768583628:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:20:14.784019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-04-04T17:20:14.826646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
2025-04-04T17:20:14.872556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-04-04T17:20:15.155417Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509041883679394:2064];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:20:15.155466Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpScripting::ScriptingCreateAndAlterTableTest
>> TPQTest::TestWriteTimeLag [GOOD]
>> TPQTestInternal::RestoreKeys [GOOD]
>> TPQTabletTests::DropTablet_And_Tx
>> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD]
>> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink
>> KqpNotNullColumns::UpsertNotNullPk [GOOD]
>> KqpNotNullColumns::UpsertNotNullPkPg
>> TPQTabletTests::DropTablet_And_Tx [GOOD]
>> KqpNotNullColumns::AlterDropNotNullColumn [GOOD]
>> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns
>> TPQTabletTests::DropTablet_Before_Write
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134]
Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134]
2025-04-04T17:20:02.103182Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-04T17:20:02.103252Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169]
Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169]
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST
Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061]
Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR
2025-04-04T17:20:02.122871Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-04T17:20:02.146497Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId
12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:20:02.147561Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:20:02.150426Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:02.152640Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-04T17:20:02.154682Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:02.179559Z node 1 :PERSQUEUE INFO: new Cookie default|afa7f642-a6371e99-8d2ff700-a96fdfc7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:02.303623Z node 1 :PERSQUEUE INFO: new Cookie default|33a0e0e0-cb58a982-d74822e2-509e8dc9_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:02.391572Z node 1 :PERSQUEUE INFO: new Cookie default|f7bafb18-c315cc6d-eebccc65-96eadd95_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:02.446449Z node 1 :PERSQUEUE INFO: new Cookie default|24b0d345-d34c2fbc-613155cd-ce0c40b7_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:02.455170Z node 1 :PERSQUEUE INFO: new Cookie default|a4585a91-f9daca67-d3f2fc8c-8409f782_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 
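Each "new Cookie default|..._N generated for partition 0 ... owner default" entry above hands a writer an owner cookie whose numeric suffix grows monotonically (_0 through _5). A sketch of how such a per-owner counter could be kept follows; it is purely illustrative (TOwnerCookieRegistry is not a YDB class), and assumes the partition uses the counter to fence out a stale writer once a newer session has registered.

#include <cstdint>
#include <cstdio>
#include <map>
#include <string>

// Hypothetical helper, not YDB's actual PQ code: issues owner cookies in the
// "owner|session_counter" shape seen in the log above.
class TOwnerCookieRegistry {
public:
    std::string NextCookie(const std::string& owner, const std::string& sessionId) {
        // The per-owner counter grows across sessions, so a cookie from an
        // older registration compares lower and can be rejected as stale.
        return owner + "|" + sessionId + "_" + std::to_string(Counters_[owner]++);
    }
private:
    std::map<std::string, std::uint64_t> Counters_;
};

int main() {
    TOwnerCookieRegistry registry;
    std::printf("%s\n", registry.NextCookie("default", "afa7f642-a6371e99").c_str());  // default|afa7f642-a6371e99_0
    std::printf("%s\n", registry.NextCookie("default", "33a0e0e0-cb58a982").c_str());  // default|33a0e0e0-cb58a982_1
}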
2025-04-04T17:20:02.461147Z node 1 :PERSQUEUE INFO: new Cookie default|89b08c-14b4655-d0863413-50c386b1_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:107:2057] recipient: [2:100:2134] 2025-04-04T17:20:02.960245Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:02.960331Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:153:2057] recipient: [2:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:178:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:02.976702Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:02.977510Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T17:20:02.978032Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2025-04-04T17:20:02.979678Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:02.980939Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2025-04-04T17:20:02.982059Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:03.002140Z node 2 :PERSQUEUE INFO: new Cookie default|783b90ce-a915fa73-9650d0c6-24c53fb6_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:03.125546Z node 2 :PERSQUEUE INFO: new Cookie default|9385c67f-345d1f55-748a5a8e-bd62f827_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:03.200537Z node 2 :PERSQUEUE INFO: new Cookie default|d27b32ff-168e4557-e2e0172a-222a4965_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:03.239910Z node 2 :PERSQUEUE INFO: new Cookie default|d2358464-e03bee75-7584c40c-7102a68a_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:20:03.248199Z node 2 :PERSQUEUE INFO: new Cookie default|edb447c1-bbeeb8c9-9666f5d2-6dfb02bc_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [2:106:2138]) on event NKikimr::TEvPersQueue::TEvRequest ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:294:2057] recipient: [2:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:296:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:298:2057] recipient: [2:297:2294] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:299:2295] sender: [2:300:2057] recipient: [2:297:2294] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:03.298484Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:03.298545Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:20:03.299381Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:348:2336] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:03.300905Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:349:2337] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:03.309705Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestam ... 
r1 readTimeStamp for offset 12 initiated queuesize 0 startOffset 12 ReadingTimestamp 0 rrg 16 2025-04-04T17:20:19.045450Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another1 send read request for offset 12 initiated queuesize 0 startOffset 12 ReadingTimestamp 1 rrg 16 2025-04-04T17:20:19.045804Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:19.046072Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-04T17:20:19.046633Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 2 Topic 'topic' partition 0 user another1 offset 12 count 1 size 1024000 endOffset 20 max time lag 0ms effective offset 12 2025-04-04T17:20:19.046885Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 2 added 1 blobs, size 6292734 count 6 last offset 13, current partition end offset: 20 2025-04-04T17:20:19.047988Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 2. Send blob request. 2025-04-04T17:20:19.048137Z node 13 :PERSQUEUE DEBUG: Got data from cache. Partition 0 offset 12 count 6 source 0 size 6292734 accessed 0 times before, last time 1970-01-01T00:00:00.000000Z 2025-04-04T17:20:19.048186Z node 13 :PERSQUEUE DEBUG: Reading cookie 2. All 1 blobs are from cache. 2025-04-04T17:20:19.048333Z node 13 :PERSQUEUE DEBUG: FormAnswer for 1 blobs 2025-04-04T17:20:19.049999Z node 13 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:20:19.051062Z node 13 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 0 size 512005 from pos 0 cbcount 1 2025-04-04T17:20:19.051245Z node 13 :PERSQUEUE DEBUG: FormAnswer processing batch offset 12 totakecount 6 count 1 size 24713 from pos 0 cbcount 1 2025-04-04T17:20:19.051480Z node 13 :PERSQUEUE DEBUG: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 12 partno 0 count 6 parts 12 2025-04-04T17:20:19.051581Z node 13 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user another1 readTimeStamp done, result 281 queuesize 0 startOffset 12 2025-04-04T17:20:19.054181Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:19.054392Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-04T17:20:19.054653Z node 13 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 16 actor [13:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 16 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 14 ReadRuleGenerations: 14 ReadRuleGenerations: 16 ReadRuleGenerations: 15 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 14 Important: false } Consumers { Name: "aaa" Generation: 14 Important: false } Consumers { Name: "another1" Generation: 16 Important: true } Consumers { Name: "important" Generation: 15 Important: true } 2025-04-04T17:20:19.055311Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [13:599:2550], now have 1 active actors on pipe 2025-04-04T17:20:19.056117Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [13:602:2552], now have 1 active actors on pipe 2025-04-04T17:20:19.056204Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:20:19.056536Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 17(current 16) received from actor [13:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 17 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 14 ReadRuleGenerations: 14 ReadRuleGenerations: 16 ReadRuleGenerations: 15 ReadRuleGenerations: 17 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 14 Important: false } Consumers { Name: "aaa" Generation: 14 Important: false } Consumers { Name: "another1" Generation: 16 Important: true } Consumers { Name: "important" Generation: 15 Important: true } Consumers { Name: "another" Generation: 17 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:19.061513Z node 13 :PERSQUEUE 
DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 17 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 14 ReadRuleGenerations: 14 ReadRuleGenerations: 16 ReadRuleGenerations: 15 ReadRuleGenerations: 17 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 14 Important: false } Consumers { Name: "aaa" Generation: 14 Important: false } Consumers { Name: "another1" Generation: 16 Important: true } Consumers { Name: "important" Generation: 15 Important: true } Consumers { Name: "another" Generation: 17 Important: false } 2025-04-04T17:20:19.061605Z node 13 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:19.061870Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 17 done 2025-04-04T17:20:19.062222Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 17 done 2025-04-04T17:20:19.062509Z node 13 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:20:19.062725Z node 13 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:19.067001Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:19.067371Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-04T17:20:19.068513Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:19.068714Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-04-04T17:20:19.069014Z node 13 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 17 actor [13:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 17 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 14 ReadRuleGenerations: 14 ReadRuleGenerations: 16 ReadRuleGenerations: 15 ReadRuleGenerations: 17 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 14 Important: false } Consumers { Name: "aaa" Generation: 14 Important: false } Consumers { Name: "another1" Generation: 16 Important: true } Consumers { Name: "important" Generation: 15 Important: true } Consumers { Name: "another" Generation: 17 Important: false } 2025-04-04T17:20:19.069762Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [13:616:2563], now have 1 active actors on pipe 2025-04-04T17:20:19.070658Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [13:619:2565], now have 1 active actors on pipe 2025-04-04T17:20:19.070846Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:20:19.070915Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-04T17:20:19.071046Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:20:19.071530Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [13:621:2567], now have 1 active actors on pipe 2025-04-04T17:20:19.071657Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:20:19.071706Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-04T17:20:19.071808Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:20:19.072310Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [13:623:2569], now have 1 active actors on pipe 2025-04-04T17:20:19.072383Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:20:19.072431Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client 
message batch for topic 'topic' partition 0
2025-04-04T17:20:19.072520Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
2025-04-04T17:20:19.072978Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [13:625:2571], now have 1 active actors on pipe
2025-04-04T17:20:19.073128Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId:
2025-04-04T17:20:19.073179Z node 13 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0
2025-04-04T17:20:19.073270Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0
>> Balancing::Balancing_OneTopic_TopicApi [GOOD]
>> Balancing::Balancing_OneTopic_PQv1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD]
Test command err:
Trying to start YDB, gRPC: 27329, MsgBus: 20590
2025-04-04T17:20:07.847532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509026944768040:2072];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:20:07.847986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019b7/r3tmp/tmpRM3EaN/pdisk_1.dat
2025-04-04T17:20:08.207895Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 27329, node 1
2025-04-04T17:20:08.267349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:20:08.267458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:20:08.269812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:20:08.302644Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:20:08.302665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:20:08.302671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:20:08.302792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20590
TClient is connected to server localhost:20590
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
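The repeated "Resource pool default not found or you don't have access permissions" warnings that surround this test output are a bootstrap race: the default workload-manager pool is created lazily on first use, so an early fetch can land before the pool exists, and the service schedules a retry (the "Scheduled retry for error" and "doublechecking" entries). Below is a minimal sketch of that retry-with-backoff pattern; EStatus and FetchWithRetry are hypothetical names, not YDB's actual workload-service code.

#include <chrono>
#include <functional>
#include <thread>

// Hypothetical sketch: retry a fetch that can transiently return NotFound
// while the default pool is still being created.
enum class EStatus { Success, NotFound, Error };

EStatus FetchWithRetry(const std::function<EStatus()>& fetchPool,
                       int maxAttempts = 5,
                       std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
    EStatus st = EStatus::Error;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        st = fetchPool();
        if (st != EStatus::NotFound) {
            break;  // success, or an error that retrying will not fix
        }
        std::this_thread::sleep_for(delay);
        delay *= 2;  // exponential backoff between attempts
    }
    return st;
}

int main() {
    int calls = 0;
    // Simulate the race: the pool "appears" on the third attempt.
    auto fetch = [&calls]() { return ++calls < 3 ? EStatus::NotFound : EStatus::Success; };
    return FetchWithRetry(fetch) == EStatus::Success ? 0 : 1;
}

Because the condition clears itself once pool creation commits, the warnings are expected noise in these runs rather than test failures.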
2025-04-04T17:20:08.811135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:08.856925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:20:08.867021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:08.994688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.142508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.213251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:10.635079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509039829671707:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.635248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:10.834021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.864837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.919789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.986395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.013300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.042860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.077160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509044124639516:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.077223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.077373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509044124639521:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.080595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:11.088867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509044124639523:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:11.161664Z node 1 :TX_PROXY ERROR: Actor# [1:7489509044124639576:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:12.091766Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509048419607138:2493], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2025-04-04T17:20:12.092117Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWJlYTFhOTEtYzhmYjY4ZDEtZTlmY2I3NDctZTBhOTdhYWY=, ActorId: [1:7489509048419607129:2488], ActorState: ExecuteState, TraceId: 01jr0x2f962666j1jh11nfzywd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 4069, MsgBus: 5712 2025-04-04T17:20:12.795992Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509047226189518:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:12.796075Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019b7/r3tmp/tmp0DfMts/pdisk_1.dat 2025-04-04T17:20:12.891917Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4069, node 2 2025-04-04T17:20:12.928091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:12.928244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:12.933824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:12.955209Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:12.955241Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:12.955249Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:12.955354Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5712 TClient is connected to server localhost:5712 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:20:13.344844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.358070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:13.434652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:13.590144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.670905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.622378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509060111093163:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:15.622476Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:15.662056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.695246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.727765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.767329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.812416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.880409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.935404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509060111093681:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:15.935493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:15.935673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509060111093686:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:15.939348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:15.949248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509060111093688:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:16.036749Z node 2 :TX_PROXY ERROR: Actor# [2:7489509064406061037:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:17.182158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.796418Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509047226189518:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:17.796488Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:18.174036Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787218187, txId: 281474976715675] shutting down >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink >> TPQTabletTests::Huge_ProposeTransacton >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> KqpSqlIn::SecondaryIndex_PgKey >> TxUsage::WriteToTopic_Demo_11 [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] >> LocalPartition::DescribeBadPartition [GOOD] >> LocalPartition::DiscoveryServiceBadPort >> TxUsage::WriteToTopic_Demo_12 >> TxUsage::SessionAbort [GOOD] >> KqpNotNullColumns::UpdateNotNull [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> TLocksTest::CK_BrokenLock [GOOD] >> TxUsage::WriteToTopic_Demo_27 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19263, MsgBus: 22496 2025-04-04T17:20:15.392337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509062457298538:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:15.392377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001b9e/r3tmp/tmpgZZwXb/pdisk_1.dat 2025-04-04T17:20:15.778055Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19263, node 1 2025-04-04T17:20:15.809945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:15.810043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:15.814966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:15.884391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:15.884429Z 
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:15.884486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:15.884632Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22496 TClient is connected to server localhost:22496 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:16.495251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:16.523017Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:16.541211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:16.681445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:16.841860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:16.926230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:18.627473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509075342202205:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:18.627611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:18.915068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:18.942721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:18.975333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:19.000510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:19.030860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:19.066385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:19.114321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509079637170010:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:19.114402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:19.114566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509079637170015:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:19.118293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:19.129717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509079637170017:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:19.215005Z node 1 :TX_PROXY ERROR: Actor# [1:7489509079637170071:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:20.392692Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509062457298538:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:20.392789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8089, MsgBus: 23251 2025-04-04T17:20:06.574471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509023941834521:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:06.576592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ba7/r3tmp/tmp3pyB6k/pdisk_1.dat 2025-04-04T17:20:06.887968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8089, node 1 2025-04-04T17:20:06.954017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:06.954287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:06.972016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:06.997430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:06.997456Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:06.997472Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:06.997609Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23251 TClient is connected to server localhost:23251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:20:07.518267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:07.542749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:07.678860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:07.843095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:07.922596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.572335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509036826738060:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.572446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.764589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.795762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.824627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.851364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.882998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.914493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.957879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509036826738570:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.957964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.957997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509036826738575:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.961824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:09.971898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509036826738577:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:10.048353Z node 1 :TX_PROXY ERROR: Actor# [1:7489509041121705927:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:10.809294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.882523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.960093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.570793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509023941834521:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:11.570870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5134, MsgBus: 19447 2025-04-04T17:20:13.803999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509051722715030:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:13.804040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ba7/r3tmp/tmpsRm0wM/pdisk_1.dat 2025-04-04T17:20:13.920321Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5134, node 2 2025-04-04T17:20:13.949129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:13.949236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:13.951329Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:13.970745Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:13.970772Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:13.970781Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:13.970926Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19447 TClient is connected to server localhost:19447 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:14.367382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:14.386414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:14.477255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:14.641055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:14.721096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:17.206585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509068902585918:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.206702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.249732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.288707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.358754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.427338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.461708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.498526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.542409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509068902586433:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.542493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.542640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509068902586438:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.545919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:17.554320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509068902586440:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:17.622198Z node 2 :TX_PROXY ERROR: Actor# [2:7489509068902586493:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:18.447949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:18.496116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:18.573647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:18.804191Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509051722715030:2125];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:18.804263Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Upsert+QueryService+UseSink >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2 [GOOD] >> TxUsage::WriteToTopic_Demo_28 >> KqpNewEngine::StaleRO [GOOD] >> KqpNewEngine::SqlInFromCompact >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-04-04T17:19:43.336632Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508924259350379:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:43.337600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e74/r3tmp/tmpLJsoXP/pdisk_1.dat 2025-04-04T17:19:43.714046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:43.730970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:43.731100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:43.739921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23582 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:43.967667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:43.983609Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:43.996861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:44.136743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:44.207831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:46.467942Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508939069610587:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:46.467998Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e74/r3tmp/tmpzsGiIX/pdisk_1.dat 2025-04-04T17:19:46.559627Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:46.604850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:46.604950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:46.608694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9855 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:46.735362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:46.744874Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:46.760237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:46.816416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:46.889729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.583354Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508950897556417:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:49.583400Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e74/r3tmp/tmpUZqGNw/pdisk_1.dat 2025-04-04T17:19:49.698488Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:49.727050Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:49.727124Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:49.735656Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24359 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:49.889870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:49.906415Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:49.977197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:50.034538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:52.859140Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489508964469024641:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:52.866793Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e74/r3tmp/tmpwarxQ3/pdisk_1.dat 2025-04-04T17:19:53.015447Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:53.042780Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:53.042869Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:53.044367Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13741 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:53.251557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDo ... UCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:05.250188Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:05.263440Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:05.337023Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:05.395526Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:08.887265Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489509030952039146:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:08.888412Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e74/r3tmp/tmprZGXCi/pdisk_1.dat 2025-04-04T17:20:09.010974Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:09.025130Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:09.025212Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:09.029894Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15875 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:09.268201Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.273615Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.289736Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.357106Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:20:09.411779Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.167653Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489509054825423782:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:13.167731Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e74/r3tmp/tmpiNAFHQ/pdisk_1.dat 2025-04-04T17:20:13.287129Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:13.320790Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:13.320908Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:13.323110Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22391 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:13.563727Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.570198Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.589739Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.680623Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.746729Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:17.607114Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509071236849565:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:17.607168Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e74/r3tmp/tmpjdJQsO/pdisk_1.dat 2025-04-04T17:20:17.874745Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:17.874871Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:17.876396Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:17.876804Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62346 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:18.211692Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:18.222555Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:18.238586Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:20:18.245148Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:18.314231Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:18.377809Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
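The "WaitRootIsUp 'dc-1'" / "TClient::Ls" exchanges above are the test harness polling the scheme root until the freshly started server answers. A minimal sketch of the same polling pattern via the public YDB C++ SDK follows; it is an illustration, not the test's code. Header paths and exact signatures are taken from the in-tree SDK and may differ between releases, and the port is simply the one printed for node 8 above (a real setup would use the server's gRPC endpoint).

// Sketch of the "WaitRootIsUp" polling seen in the logs above.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

#include <chrono>
#include <iostream>
#include <thread>

bool WaitRootIsUp(NYdb::NScheme::TSchemeClient& client, const TString& root,
                  int attempts = 30) {
    for (int i = 0; i < attempts; ++i) {
        // DescribePath keeps failing until the root path is actually served.
        auto result = client.DescribePath(root).GetValueSync();
        if (result.IsSuccess()) {
            return true; // mirrors "WaitRootIsUp 'dc-1' success." in the log
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(200));
    }
    return false;
}

int main() {
    // localhost:15875 is the port printed for node 8 above (assumption: the
    // same endpoint also serves SDK traffic in this illustration).
    auto config = NYdb::TDriverConfig()
                      .SetEndpoint("localhost:15875")
                      .SetDatabase("/dc-1");
    NYdb::TDriver driver(config);
    NYdb::NScheme::TSchemeClient scheme(driver);
    std::cout << (WaitRootIsUp(scheme, "/dc-1") ? "root is up" : "timed out") << "\n";
    driver.Stop(true);
}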
>> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD]
>> KqpNotNullColumns::Describe
>> KqpNewEngine::PkSelect1
|92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets]
>> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD]
>> TColumnEngineTestLogs::IndexWriteOverload [GOOD]
>> TxUsage::WriteToTopic_Demo_40 [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3
|92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD]
>> KqpScripting::SecondaryIndexes
>> TxUsage::WriteToTopic_Demo_41
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest
>> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 19749, MsgBus: 5168
2025-04-04T17:20:13.854121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509053439260667:2266];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:20:13.854850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ba2/r3tmp/tmpUXNqWL/pdisk_1.dat
2025-04-04T17:20:14.200128Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:20:14.204417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:20:14.204514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:20:14.208770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 19749, node 1
2025-04-04T17:20:14.286290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:20:14.286315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:20:14.286329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:20:14.286479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5168
TClient is connected to server localhost:5168
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:20:14.823694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:14.847015Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:20:14.857153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:15.014057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:15.181785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:15.252912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:20:16.940794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509066324164146:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:16.940946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.162915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.200626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.230299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.259731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.289136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.323443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.370804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509070619131954:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.370885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.370977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509070619131959:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.374954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:17.384146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509070619131961:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:17.453149Z node 1 :TX_PROXY ERROR: Actor# [1:7489509070619132014:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:18.851317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509053439260667:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:18.851392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19070, MsgBus: 15009 2025-04-04T17:20:19.600005Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509077632490110:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:19.600097Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ba2/r3tmp/tmpHto5Ug/pdisk_1.dat 2025-04-04T17:20:19.702355Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:19.737935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:19.738034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:19.743768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19070, node 2 2025-04-04T17:20:19.778972Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:19.779000Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:19.779008Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:19.779138Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15009 TClient is connected to server localhost:15009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:20:20.204659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:20.219709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:20.281249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:20.446702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:20.529035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:22.781274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509090517393752:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:22.781375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:22.820525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.850834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.920069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.960015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:23.029880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:23.065742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:23.113461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509094812361564:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.113547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.113720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509094812361569:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.117637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:23.130978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509094812361571:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:23.213881Z node 2 :TX_PROXY ERROR: Actor# [2:7489509094812361625:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:24.600680Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509077632490110:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:24.600762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:143;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; 
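In the KqpQueryPerf::MultiDeleteFromTable block above, the workload service first fails to fetch the "default" resource pool (NOT_FOUND), creates it, schedules a retry on "Transaction ... completed, doublechecking", and finally gets the benign "path exist, request accepts it" from TX_PROXY. A create-if-missing sketch of that flow is below; CREATE RESOURCE POOL is YDB YQL, but routing it through ExecuteSchemeQuery and the CONCURRENT_QUERY_LIMIT value shown are assumptions of this sketch, not confirmed by the log.

// Create-if-missing sketch for the "default" resource pool.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <iostream>

int main() {
    auto config = NYdb::TDriverConfig()
                      .SetEndpoint("localhost:19749") // gRPC port from the node 1 log above
                      .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NTable::TTableClient client(driver);

    // RetryOperationSync re-runs the lambda on retryable statuses, much like
    // the workload service's "Scheduled retry ... doublechecking" loop above.
    auto status = client.RetryOperationSync([](NYdb::NTable::TSession session) {
        return session.ExecuteSchemeQuery(R"(
            CREATE RESOURCE POOL default WITH (CONCURRENT_QUERY_LIMIT = -1);
        )").GetValueSync();
    });

    // A "path exist" error on a second attempt is benign, as in the TX_PROXY
    // line above: the doublecheck merely finds the pool already created.
    std::cout << (status.IsSuccess() ? "pool ready" : "create failed") << "\n";
    driver.Stop(true);
}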
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232 ... 
onent=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
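Each of the serialize/parsing entries above records just two quantities per event: the serialized blob size and the column count (e.g. "size=448;columns=4"). For a sense of where such numbers come from, here is a minimal Apache Arrow sketch that serializes a small record batch and reports the same two values. This is illustrative only; YDB's native.cpp serializer is its own format, and Arrow IPC is merely a stand-in chosen for this sketch.

#include <arrow/api.h>
#include <arrow/ipc/api.h>

#include <iostream>

arrow::Status Run() {
    // Build a one-column batch of four int64 values.
    arrow::Int64Builder builder;
    ARROW_RETURN_NOT_OK(builder.AppendValues({1, 2, 3, 4}));
    std::shared_ptr<arrow::Array> column;
    ARROW_RETURN_NOT_OK(builder.Finish(&column));

    auto schema = arrow::schema({arrow::field("x", arrow::int64())});
    auto batch = arrow::RecordBatch::Make(schema, column->length(), {column});

    // Serialize and report size/column count, in the spirit of the log above.
    ARROW_ASSIGN_OR_RAISE(auto buffer,
        arrow::ipc::SerializeRecordBatch(*batch, arrow::ipc::IpcWriteOptions::Defaults()));
    std::cout << "event=serialize;size=" << buffer->size()
              << ";columns=" << batch->num_columns() << "\n";
    return arrow::Status::OK();
}

int main() {
    auto st = Run();
    if (!st.ok()) { std::cerr << st.ToString() << "\n"; return 1; }
}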
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; >> KqpYql::EvaluateExpr1 >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::UpdateOnNotNull |92.8%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpRanges::WhereInSubquery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-04T17:18:51.340913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-04T17:18:51.340983Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-04T17:18:51.341164Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-04-04T17:18:51.341210Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-04-04T17:18:51.341266Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-04-04T17:18:51.341306Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-04T17:18:51.341614Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-04-04T17:18:51.341651Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-04T17:18:51.341731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.342142Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2068] 2025-04-04T17:18:51.342178Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2025-04-04T17:18:51.342309Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.342466Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:42:2068] 2025-04-04T17:18:51.342504Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2025-04-04T17:18:51.342579Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.342713Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2068] 2025-04-04T17:18:51.342750Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2025-04-04T17:18:51.342788Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.342842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2025-04-04T17:18:51.342899Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-04-04T17:18:51.342962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2025-04-04T17:18:51.342995Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-04-04T17:18:51.343047Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2025-04-04T17:18:51.343079Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-04-04T17:18:51.343153Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2068] 2025-04-04T17:18:51.343226Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:39:2068] 2025-04-04T17:18:51.343272Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.343330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2068] 2025-04-04T17:18:51.343381Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/tenant] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-04-04T17:18:51.343573Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 103 2025-04-04T17:18:51.343611Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-04T17:18:51.343689Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-04T17:18:51.343886Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2025-04-04T17:18:51.343947Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-04-04T17:18:51.344001Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:38:2068] 2025-04-04T17:18:51.344044Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new 
state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-04-04T17:18:51.793958Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-04-04T17:18:51.794014Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Successful handshake: owner# 800, generation# 1 2025-04-04T17:18:51.794145Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-04-04T17:18:51.794173Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2025-04-04T17:18:51.794208Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-04-04T17:18:51.794288Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-04T17:18:51.794417Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-04-04T17:18:51.794445Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-04-04T17:18:51.794572Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:18:51.794898Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2068] 2025-04-04T17:18:51.794926Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2025-04-04T17:18:51.794995Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.795112Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:42:2068] 2025-04-04T17:18:51.795137Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2025-04-04T17:18:51.795204Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.795318Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2068] 2025-04-04T17:18:51.795338Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2025-04-04T17:18:51.795368Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Subscribe: subscriber# [3:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:18:51.795426Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2025-04-04T17:18:51.795601Z node 3 
:SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2068] 2025-04-04T17:18:51.795657Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2025-04-04T17:18:51.795690Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:42:2068] 2025-04-04T17:18:51.795721Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2025-04-04T17:18:51.795745Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2068] 2025-04-04T17:18:51.795810Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:38:2068] 2025-04-04T17:18:51.795863Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:39:2068] 2025-04-04T17:18:51.795920Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:37:2068][/root/tenant] Set up state: owner# [3:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:18:51.795973Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/ ... omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-04-04T17:20:26.337067Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-04-04T17:20:26.337125Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2025-04-04T17:20:26.337233Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-04-04T17:20:26.337259Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2025-04-04T17:20:26.337312Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-04-04T17:20:26.337337Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2025-04-04T17:20:26.337461Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-04-04T17:20:26.337489Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2025-04-04T17:20:26.337621Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:26.338020Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] 
Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2068] 2025-04-04T17:20:26.338051Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:20:26.338129Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:20:26.338290Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:42:2068] 2025-04-04T17:20:26.338314Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:20:26.338354Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:20:26.338476Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:43:2068] 2025-04-04T17:20:26.338498Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:20:26.338531Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:20:26.338598Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2025-04-04T17:20:26.338648Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2068] 2025-04-04T17:20:26.338690Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2025-04-04T17:20:26.338728Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:42:2068] 2025-04-04T17:20:26.338778Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2025-04-04T17:20:26.338816Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:43:2068] 2025-04-04T17:20:26.338882Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2068] 2025-04-04T17:20:26.338947Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:39:2068] 2025-04-04T17:20:26.338989Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:37:2068][/Root/Tenant/table_inside] Set up state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:26.339042Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# 
[397:40:2068] 2025-04-04T17:20:26.339078Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-04-04T17:20:26.813825Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-04-04T17:20:26.813888Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2025-04-04T17:20:26.814007Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-04-04T17:20:26.814038Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2025-04-04T17:20:26.814091Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-04-04T17:20:26.814116Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2025-04-04T17:20:26.814261Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-04-04T17:20:26.814292Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2025-04-04T17:20:26.814453Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:26.814894Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2068] 2025-04-04T17:20:26.814932Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:20:26.815013Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:20:26.815153Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:42:2068] 2025-04-04T17:20:26.815175Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:20:26.815220Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:20:26.815333Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2068] 2025-04-04T17:20:26.815354Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: 
path# /Root/Tenant/table_inside 2025-04-04T17:20:26.815388Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-04-04T17:20:26.815456Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2025-04-04T17:20:26.815505Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2068] 2025-04-04T17:20:26.815546Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2025-04-04T17:20:26.815584Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:42:2068] 2025-04-04T17:20:26.815623Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2025-04-04T17:20:26.815659Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2068] 2025-04-04T17:20:26.815728Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2068] 2025-04-04T17:20:26.815798Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:39:2068] 2025-04-04T17:20:26.815839Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:37:2068][/Root/Tenant/table_inside] Set up state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:26.815890Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2068] 2025-04-04T17:20:26.815927Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() >> KqpScanArrowInChanels::AggregateByColumn [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.8%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpRanges::DateKeyPredicate |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] |92.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNotNullColumns::Describe [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 17001, MsgBus: 14376 2025-04-04T17:20:02.804494Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509007351357116:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:02.804628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e8e/r3tmp/tmpf0hKkv/pdisk_1.dat 2025-04-04T17:20:03.264283Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:03.278145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:03.278995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:03.291994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17001, node 1 2025-04-04T17:20:03.542878Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:03.542927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:03.542938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:03.543078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14376 TClient is connected to server localhost:14376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:04.259932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:04.326666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:04.476817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:20:04.631007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:04.703684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:05.867697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509020236260581:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:05.867827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:06.358671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.391364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.416838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.443322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.474796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.542285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:06.600717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509024531228393:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:06.600784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:06.600905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509024531228398:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:06.606393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:06.621130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509024531228400:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:06.692621Z node 1 :TX_PROXY ERROR: Actor# [1:7489509024531228453:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:07.806781Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509007351357116:2264];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:07.806839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:08.063929Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787208079, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11359, MsgBus: 63264 2025-04-04T17:20:08.793674Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509032418811937:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:08.793734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e8e/r3tmp/tmpkgWblA/pdisk_1.dat 2025-04-04T17:20:08.884856Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11359, node 2 2025-04-04T17:20:08.929132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:08.929408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:08.935436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:08.957042Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:08.957069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:08.957079Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:08.957198Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63264 TClient is connected to server localhost:63264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:09.346051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.350584Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:09.362911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.437010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.576324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594 ... pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.650147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.690791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.723442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.754849Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.784253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.823691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.872470Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489509071137648501:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.872560Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.872801Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489509071137648506:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:17.877477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:17.888770Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489509071137648508:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:17.955196Z node 3 :TX_PROXY ERROR: Actor# [3:7489509071137648562:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:19.340374Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489509058252744367:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:19.340454Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:20.006112Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787219405, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 4391, MsgBus: 26731 2025-04-04T17:20:20.888712Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509082533633467:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:20.888800Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e8e/r3tmp/tmpcRovzD/pdisk_1.dat 2025-04-04T17:20:20.998209Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:21.033722Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:21.033815Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:21.038914Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4391, node 4 2025-04-04T17:20:21.088929Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:21.088953Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:21.088962Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:21.089073Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26731 TClient is connected to server localhost:26731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:21.576063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:21.593331Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:21.679842Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:21.910520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:22.009372Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.488576Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509099713504418:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.488681Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.538023Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.575313Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.612510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.650214Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.736555Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.777284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.831657Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509099713504931:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.831743Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.831876Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509099713504936:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.835206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:24.846554Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489509099713504938:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:24.921355Z node 4 :TX_PROXY ERROR: Actor# [4:7489509099713504993:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:25.892851Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489509082533633467:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:25.892951Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:27.360984Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787226573, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6192, MsgBus: 4713 2025-04-04T17:20:23.043428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509096450066813:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:23.047957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001b9a/r3tmp/tmpPgdPJY/pdisk_1.dat 2025-04-04T17:20:23.428018Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6192, node 1 2025-04-04T17:20:23.460595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:23.460693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:23.468069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:23.519362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:23.519387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:23.519398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:23.519572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4713 TClient is connected to server localhost:4713 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:24.062136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.082934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.223411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.380579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.464463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:26.312765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509109334970314:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:26.312878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:26.572729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.608349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.648668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.690539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.724813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.753897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.826617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509109334970827:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:26.826694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:26.826777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509109334970832:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:26.830196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:26.841496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509109334970834:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:26.929912Z node 1 :TX_PROXY ERROR: Actor# [1:7489509109334970889:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNotNullColumns::UpsertNotNull >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpSqlIn::SecondaryIndex_PgKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> KqpNewEngine::PkSelect1 [GOOD] >> KqpNewEngine::PkSelect2 >> KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPartitionTests::IncorrectRange >> TxUsage::WriteToTopic_Demo_12 [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo [GOOD] >> KqpYql::InsertIgnore >> TPartitionTests::IncorrectRange [GOOD] >> TxUsage::WriteToTopic_Demo_9 [GOOD] >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::Discard >> TPartitionTests::GetPartitionWriteInfoSuccess >> TxUsage::Sinks_Oltp_WriteToTopic_5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2025-04-04T17:20:18.362582Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:20:18.369959Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:20:18.370382Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:20:18.370448Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:20:18.370486Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:20:18.370533Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:20:18.370589Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:18.370642Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:20:18.408821Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:205:2211], now have 1 active actors on pipe 2025-04-04T17:20:18.408977Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:20:18.425815Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:18.430991Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-04T17:20:18.431153Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:18.432197Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:18.432419Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:20:18.432891Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:20:18.433322Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:213:2217] 2025-04-04T17:20:18.434359Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
2025-04-04T17:20:18.434425Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:213:2217] 2025-04-04T17:20:18.434481Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:20:18.435346Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:20:18.435489Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-04T17:20:18.435551Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:20:18.435612Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-04-04T17:20:18.435638Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-04-04T17:20:18.435890Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:20:18.435933Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:18.436128Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:18.444312Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:18.444828Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:220:2222], now have 1 active actors on pipe 2025-04-04T17:20:18.445634Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:223:2224], now have 1 active actors on pipe 2025-04-04T17:20:18.446612Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 4294969487 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-04T17:20:18.446691Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-04T17:20:18.446793Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-04T17:20:18.446858Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:20:18.446906Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-04T17:20:18.446950Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:20:18.446983Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-04T17:20:18.447108Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 4294969487 } Partitions { } 2025-04-04T17:20:18.447193Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:18.450725Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:20:18.450795Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:20:18.450825Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-04T17:20:18.450853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-04T17:20:18.451072Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 4294969487 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-04T17:20:18.451105Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-04T17:20:18.451196Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-04-04T17:20:18.451268Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:20:18.451315Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-04T17:20:18.451368Z node 1 :PERSQUEUE DEBUG: 
[PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:20:18.451421Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-04T17:20:18.451580Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 4294969487 } Partitions { } 2025-04-04T17:20:18.451674Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:18.456534Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:20:18.456606Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:20:18.456661Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARING 2025-04-04T17:20:18.456702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARED 2025-04-04T17:20:18.464017Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67891 AckTo { RawX1: 176 RawX2: 4294969487 } } Step: 100 2025-04-04T17:20:18.464106Z node 1 :P ... aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2496" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2497" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2498" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2499" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 176 RawX2: 25769805967 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-04T17:20:30.189009Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:20:30.245043Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse 
(WRITE_TX_COOKIE) 2025-04-04T17:20:30.245125Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-04T17:20:30.245160Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-04T17:20:30.245200Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-04T17:20:30.245252Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-04T17:20:30.245299Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-04T17:20:30.245397Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-04T17:20:30.245450Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2700, MsgBus: 9768 2025-04-04T17:20:16.512066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509064116436648:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:16.512107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001b9b/r3tmp/tmpS9zXhs/pdisk_1.dat 2025-04-04T17:20:16.897028Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2700, node 1 2025-04-04T17:20:16.971575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:16.971708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:16.973850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:16.984264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:16.984282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:16.984287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:16.984369Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9768 TClient is connected to server localhost:9768 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:17.528313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:17.552167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:17.705445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:17.898368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:17.972747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:19.652107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509077001340322:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:19.652235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:19.863094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:19.916397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:19.949636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:20.020696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:20.049205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:20.081177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:20.125623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509081296308128:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:20.125699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:20.125891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509081296308133:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:20.128782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:20.137680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509081296308135:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:20.236563Z node 1 :TX_PROXY ERROR: Actor# [1:7489509081296308188:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:21.179700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:21.256018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:21.335798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:21.512283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509064116436648:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:21.512368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1552, MsgBus: 6942 2025-04-04T17:20:24.222505Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509099838649244:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:24.222571Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001b9b/r3tmp/tmp4sqXSg/pdisk_1.dat 2025-04-04T17:20:24.319759Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:24.372606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:24.372700Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:24.374903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1552, node 2 2025-04-04T17:20:24.434904Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:24.434925Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:24.434930Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:24.435036Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6942 TClient is connected to server localhost:6942 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:24.885681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.899303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.958117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:25.130184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:25.210847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:27.406473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509112723552905:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:27.406549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:27.443590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:27.476179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:27.511660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:27.550785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:27.619552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:27.692514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:27.753330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509112723553422:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:27.753434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509112723553427:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:27.753512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:27.756936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:27.768130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509112723553429:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:27.840104Z node 2 :TX_PROXY ERROR: Actor# [2:7489509112723553482:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:28.763706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.806305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.895991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:29.222499Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509099838649244:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:29.222570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TxUsage::WriteToTopic_Demo_46 >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TPartitionTests::GetPartitionWriteInfoError >> KqpRanges::WhereInSubquery [GOOD] >> KqpReturning::ReturningTwice >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction [GOOD] >> KqpScripting::SecondaryIndexes [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TLocksTest::Range_CorrectDot [GOOD] >> TPartitionTests::FailedTxsDontBlock >> TxUsage::ReadRuleGeneration >> TxUsage::WriteToTopic_Demo_28 [GOOD] >> KqpRanges::DateKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 2080, MsgBus: 27686 2025-04-04T17:20:19.443991Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509080083562888:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:19.444994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019b2/r3tmp/tmppNx6Bp/pdisk_1.dat 2025-04-04T17:20:19.749857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2080, node 1 2025-04-04T17:20:19.832431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:19.832572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-04T17:20:19.834098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:19.847307Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:19.847328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:19.847336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:19.847433Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27686 TClient is connected to server localhost:27686 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:20.403037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:20.451317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:20.592457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:20.767975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:20.837695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:22.440009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509092968466480:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:22.440091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:22.728935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.771561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.805857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.843382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.875898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.914521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:22.997833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509092968466996:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:22.997944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:22.998189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509092968467001:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.001440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:23.011825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509092968467003:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:23.104196Z node 1 :TX_PROXY ERROR: Actor# [1:7489509097263434354:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:24.076719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.443652Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509080083562888:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:24.443730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:24.622630Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787224655, txId: 281474976710673] shutting down 2025-04-04T17:20:24.700693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.756787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.774495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.791770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.847500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.861727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T17:20:25.226533Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787225264, txId: 281474976710682] shutting down 2025-04-04T17:20:25.324495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-04-04T17:20:25.401529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-04-04T17:20:25.751323Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787225789, txId: 281474976710687] shutting down Trying to start YDB, gRPC: 21127, MsgBus: 9075 2025-04-04T17:20:26.541155Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509108814177127:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:26.541439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019b2/r3tmp/tmpyq6BKx/pdisk_1.dat 2025-04-04T17:20:26.665959Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:26.669732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:26.669824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:26.674791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21127, node 2 2025-04-04T17:20:26.718551Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:26.718577Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:26.718584Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:26.718706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9075 TClient is connected to server localhost:9075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:27.217909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:27.225211Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:27.232981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:27.305688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:27.477449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:27.555974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:29.922615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509121699080764:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:29.922712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:29.967588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.000328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.035027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.066626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.105382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.152374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.241138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509125994048578:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.241230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.241632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509125994048583:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.245267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:30.262095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509125994048585:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:30.350137Z node 2 :TX_PROXY ERROR: Actor# [2:7489509125994048640:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:31.411755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:31.495479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:31.540686Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509108814177127:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:31.540755Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:31.569358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD] Test command err: Trying to start YDB, gRPC: 14524, MsgBus: 63310 2025-04-04T17:20:06.554403Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509020993431246:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:06.555920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f7/r3tmp/tmpROcOdy/pdisk_1.dat 2025-04-04T17:20:06.918262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:06.966732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:06.966926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14524, node 1 2025-04-04T17:20:06.968725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:07.099121Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:07.099165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:07.099174Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:07.099299Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63310 TClient is connected to server localhost:63310 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:07.796108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.412037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509033878333797:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.412165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.832455Z node 1 :TX_PROXY ERROR: Actor# [1:7489509033878333820:2307] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-04-04T17:20:09.863805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509033878333828:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.863937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.873485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5102, MsgBus: 13125 2025-04-04T17:20:10.675062Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509041585484284:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:10.675417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f7/r3tmp/tmpVWa3u4/pdisk_1.dat 2025-04-04T17:20:10.771984Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5102, node 2 2025-04-04T17:20:10.807057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:10.807145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:10.809284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:10.819494Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:10.819512Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:10.819518Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:10.819614Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13125 TClient is connected to server localhost:13125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:11.203168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:11.224228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:11.307133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:11.460558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.549281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.662799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509054470387941:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.662909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.727952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.762273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.800613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.872535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.902221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.938203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.995746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509054470388455:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.995905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:13.996271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509054470388460:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:14.000565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:14.011193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509054470388462:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:14.113015Z node 2 :TX_PROXY ERROR: Actor# [2:7489509058765355814:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accep ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7995, MsgBus: 24805 2025-04-04T17:20:19.979724Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509077211652630:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:19.979821Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f7/r3tmp/tmpy7i8NC/pdisk_1.dat 2025-04-04T17:20:20.101749Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:20.136209Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:20.136299Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7995, node 4 2025-04-04T17:20:20.138209Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:20.181481Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:20.181507Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:20.181518Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:20.181660Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24805 TClient is connected to server localhost:24805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:20.715573Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:20.722312Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:23.538517Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509094391522474:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.538620Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.561274Z node 4 :TX_PROXY ERROR: Actor# [4:7489509094391522495:2304] txid# 281474976715658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-04-04T17:20:23.578890Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509094391522503:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.578996Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.658173Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10898, MsgBus: 25156 2025-04-04T17:20:24.672794Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489509100602546749:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:24.672881Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f7/r3tmp/tmpBicMFO/pdisk_1.dat 2025-04-04T17:20:24.788126Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:24.801328Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:24.801651Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:24.803608Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10898, node 5 2025-04-04T17:20:24.849957Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:24.849980Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:24.849990Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:24.850116Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25156 TClient is connected to server localhost:25156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:20:25.409163Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.559352Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509117782416584:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:28.559445Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:28.575838Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64272, MsgBus: 31128 2025-04-04T17:20:29.618139Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509120650816108:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:29.618186Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f7/r3tmp/tmpSblbl4/pdisk_1.dat 2025-04-04T17:20:29.751212Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:29.766862Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:29.766973Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:29.768517Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64272, node 6 2025-04-04T17:20:29.842917Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:29.842945Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:29.842955Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:29.843104Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31128 TClient is connected to server localhost:31128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:30.404583Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
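The TX_PROXY error earlier in this block ("It is not allowed to create not null data column: Value") is the scheme shard refusing a CREATE TABLE that declares NOT NULL on a column outside the primary key. A minimal sketch of a YQL statement that provokes it, assuming a row table whose name is hypothetical rather than taken from this log:

    CREATE TABLE `/Root/TestNotNull` (
        Key Uint64,
        Value String NOT NULL,  -- NOT NULL on a non-key data column is what gets rejected
        PRIMARY KEY (Key)
    );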
2025-04-04T17:20:30.411150Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:33.843619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> KqpSqlIn::KeySuffix >> TxUsage::WriteToTopic_Demo_29 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-04-04T17:19:54.796583Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508971550967547:2159];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:54.796963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3f/r3tmp/tmp3o5tpw/pdisk_1.dat 2025-04-04T17:19:55.183971Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:55.212729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:55.212798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:55.214909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1163 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:19:55.445632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:55.471690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:19:55.569163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:19:55.611536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:57.914936Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508984088581056:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:57.915087Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3f/r3tmp/tmp2Tm2Kt/pdisk_1.dat 2025-04-04T17:19:58.043527Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:58.061783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:58.061891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:58.064556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2900 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:19:58.245187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:58.266199Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:19:58.271208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:58.329012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:58.408420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:00.956900Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489508996354936362:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:00.973953Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3f/r3tmp/tmp6jayPD/pdisk_1.dat 2025-04-04T17:20:01.112463Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:01.129290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:01.129367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:01.135154Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6765 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:01.313109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:01.319491Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:01.333476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:20:01.396576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:01.448096Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:04.487407Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509015761009381:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:04.487461Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3f/r3tmp/tmpT7bou3/pdisk_1.dat 2025-04-04T17:20:04.608558Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:04.619259Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:04.619345Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:04.623243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26524 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:04.798124Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDoma ... 0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:15.875162Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20904 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:20:16.188643Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:16.210216Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:20:16.215486Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:16.304892Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:16.367461Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:20.431453Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489509082475992745:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3f/r3tmp/tmpTdFmax/pdisk_1.dat 2025-04-04T17:20:20.481451Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:20:20.553036Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:20.577081Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:20.577195Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:20.579436Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15838 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:20.816973Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:20:20.840935Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:20.929382Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:20.986398Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:25.169147Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489509103294434307:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:25.169207Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3f/r3tmp/tmpUVla6F/pdisk_1.dat 2025-04-04T17:20:25.341276Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:25.344897Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:25.345002Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:25.347107Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28909 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:25.597283Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:25.622588Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:25.699090Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:25.759842Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:30.125695Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509125715310163:2144];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:30.147605Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e3f/r3tmp/tmpso2Bja/pdisk_1.dat 2025-04-04T17:20:30.334200Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:30.346957Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:30.347075Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:30.352910Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13268 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:20:30.648183Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:30.656485Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:30.670049Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:30.798901Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:30.865863Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast >> KqpYql::Discard [GOOD] >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] >> TxUsage::WriteToTopic_Demo_41 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 29707, MsgBus: 5429 2025-04-04T17:20:27.015589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509115317163841:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:27.016387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019ae/r3tmp/tmpU31Yow/pdisk_1.dat 2025-04-04T17:20:27.369723Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29707, node 1 2025-04-04T17:20:27.438408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:27.438619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:27.452481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:27.473940Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:27.473973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:27.473986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:27.474127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5429 TClient is connected to server localhost:5429 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:28.017849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:28.066088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:28.225578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:28.399159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:28.480883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:30.024384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509128202067493:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.024567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.260000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.330173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.370907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.405152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.434886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.468068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.522053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509128202068007:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.522157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.522491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509128202068012:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:30.526695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:30.536825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509128202068014:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:30.610508Z node 1 :TX_PROXY ERROR: Actor# [1:7489509128202068068:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15319, MsgBus: 6922 2025-04-04T17:20:32.552783Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509133810677357:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:32.553296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019ae/r3tmp/tmp5V7rvq/pdisk_1.dat 2025-04-04T17:20:32.682185Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:32.713887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:32.713966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:32.715206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15319, node 2 2025-04-04T17:20:32.776052Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:32.776081Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:32.776099Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:32.776268Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6922 TClient is connected to server localhost:6922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:20:33.246288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:20:33.260099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
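The resource-pool sequence just above repeats for every freshly created test database in this log: the workload service first fails to fetch the pool ("Resource pool default not found"), a TPoolCreatorActor then creates /Root/.metadata/workload_manager/pools/default itself, and a concurrent creation attempt ends with "path exist, request accepts it", which is the idempotent-success path rather than a real failure. For comparison, a user-defined pool is declared with YQL of roughly this shape; the pool name and limits below are illustrative assumptions, not values from this log:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed illustrative limit
        QUEUE_SIZE = 100              -- assumed illustrative queue depth
    );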
2025-04-04T17:20:33.336849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:33.508658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:33.589605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:35.833993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509146695581014:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.834063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.871835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.904758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.938016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.970528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.000193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.034225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.110395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509150990548823:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.110456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.110562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509150990548828:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.115175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:36.127591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509150990548830:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:36.223611Z node 2 :TX_PROXY ERROR: Actor# [2:7489509150990548885:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:37.266106Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489509155285516445:2492], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2025-04-04T17:20:37.266415Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM0NTAxNGMtYzNhMjQ2ZGMtYmY3NGI3OTctYzM1OWNiOTU=, ActorId: [2:7489509155285516438:2488], ActorState: ExecuteState, TraceId: 01jr0x37tdfzyrfdewkh2dsz9s, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TPartitionTests::FailedTxsDontBlock [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled >> TPartitionTests::GetUsedStorage >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> TxUsage::WriteToTopic_Demo_42 >> TPartitionTests::GetUsedStorage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD] Test command err: Trying to start YDB, gRPC: 10247, MsgBus: 13146 2025-04-04T17:20:09.143746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509035573937932:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:09.153463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e7/r3tmp/tmplP0WFr/pdisk_1.dat 2025-04-04T17:20:09.460736Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10247, node 1 2025-04-04T17:20:09.519384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:09.519579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:09.521082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:09.528749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:09.528782Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:09.528803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:09.528930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13146 TClient is connected to server localhost:13146 WaitRootIsUp 'Root'... 
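Two of the KQP compile rejections in this stretch reduce to one-line queries. The code 2008 error above is KQP refusing the YQL DISCARD construct in YDB data queries, and the KiUpdateTable errors a few lines below come from an UPDATE that assigns to a primary key column. Hedged sketches, assuming a table `/Root/Test` with primary key Key (names are illustrative, not from this log):

    DISCARD SELECT Key, Value FROM `/Root/Test`;        -- fails: DISCARD not supported in YDB queries, code 2008

    UPDATE `/Root/Test` SET Key = 1 WHERE Value = "a";  -- fails: Cannot update primary key column: Key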
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:09.973981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:11.768141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509044163873041:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.768228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:12.011318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:12.144737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509048458840441:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:12.144856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:12.144968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509048458840446:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:12.150019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:12.162405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509048458840448:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:20:12.247369Z node 1 :TX_PROXY ERROR: Actor# [1:7489509048458840501:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:12.520861Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509048458840563:2362], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2025-04-04T17:20:12.521265Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjA2ODllMGMtNTQ1NTg2MmItMmZhOWJhZjctMjNiYzg0NjQ=, ActorId: [1:7489509044163873038:2328], ActorState: ExecuteState, TraceId: 01jr0x2fppa0brjaqw4bvz8nze, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:20:12.541707Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509048458840572:2366], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2025-04-04T17:20:12.541992Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjA2ODllMGMtNTQ1NTg2MmItMmZhOWJhZjctMjNiYzg0NjQ=, ActorId: [1:7489509044163873038:2328], ActorState: ExecuteState, TraceId: 01jr0x2fqg9f6m35ym85whf21n, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 3107, MsgBus: 1128 2025-04-04T17:20:13.296600Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509051325034634:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:13.296769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e7/r3tmp/tmpNGJpgj/pdisk_1.dat 2025-04-04T17:20:13.427999Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:13.430768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:13.430863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:13.432954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3107, node 2 2025-04-04T17:20:13.498453Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:13.498475Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:13.498483Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:13.498594Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1128 TClient is connected to server localhost:1128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:13.917068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:13.925910Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:16.389673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509064209937175:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:16.389747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:16.400632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:16.459208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509064209937275:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:16.459301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:16.459518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509064209937280:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:16.462716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:16.471635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509064209937282:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { ... node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:28.182861Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:28.182984Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18820 TClient is connected to server localhost:18820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:28.692914Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:31.443904Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509128691607833:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:31.444018Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:31.466076Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:31.580116Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509128691607933:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:31.580237Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:31.580708Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509128691607938:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:31.590401Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:31.622029Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489509128691607940:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:20:31.689757Z node 5 :TX_PROXY ERROR: Actor# [5:7489509128691607992:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:32.105890Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7489509132986575362:2366], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:20:32.106567Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OWE1OGFkYzMtOTJkNzQwYmItZjRlMDBiN2EtMWJiNjVkZDg=, ActorId: [5:7489509128691607815:2329], ActorState: ExecuteState, TraceId: 01jr0x32tv6qnp0en2a9ens2c9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 61402, MsgBus: 7426 2025-04-04T17:20:32.901801Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509135417367128:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:32.901856Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e7/r3tmp/tmpfrZpnK/pdisk_1.dat 2025-04-04T17:20:33.078657Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:33.105809Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:33.105918Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:33.108119Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61402, node 6 2025-04-04T17:20:33.170922Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:33.170949Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:33.170958Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:33.171101Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7426 TClient is connected to server localhost:7426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:33.812405Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
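The two GENERIC_ERROR compile results in this test are expected negative cases rather than infrastructure failures: KiUpdateTable rejects any UPDATE that assigns to a primary-key column, and KiWriteTable rejects a row literal whose struct type cannot be coerced to the table scheme (here 'Value': Null into non-optional String, that is, a NOT NULL column). A minimal YQL sketch that reproduces both diagnostics; the table name and definition are illustrative, inferred only from the Struct<'Key':Uint64?,'Value':String> target scheme in the error text:

    -- Assumed table shape (hypothetical, reconstructed from the error):
    --   CREATE TABLE `/Root/KeyValue` (Key Uint64, Value String NOT NULL, PRIMARY KEY (Key));

    -- 1) ":1:63: Error: Cannot update primary key column: Key"
    UPDATE `/Root/KeyValue` SET Key = 10 WHERE Value = "One";

    -- 2) ":1:45: Error: Failed to convert 'Value': Null to String"
    --    The row literal has type Struct<'Key':Int32,'Value':Null>, and Null
    --    is not coercible to the non-optional String column:
    UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1, NULL);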
2025-04-04T17:20:33.823023Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:20:37.098100Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509156892204263:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.105267Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.110820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:37.199939Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509156892204366:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.200061Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.200564Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509156892204371:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.204287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:37.213359Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509156892204373:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:20:37.316024Z node 6 :TX_PROXY ERROR: Actor# [6:7489509156892204424:2395] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:37.902392Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509135417367128:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:37.902474Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:37.955012Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:20:37.968515Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489509156892204498:2366], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-04-04T17:20:37.969285Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZmNmNjQzZmItYTc0ZTMyOWMtYjQxMGM0OTktZjk4OGJhZTA=, ActorId: [6:7489509156892204245:2330], ActorState: ExecuteState, TraceId: 01jr0x38az1fp4nhwp21m7mxq5, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> LocalPartition::DiscoveryServiceBadPort [GOOD] >> LocalPartition::DiscoveryServiceBadNodeId >> KqpReturning::ReturningTwice [GOOD] >> KqpReturning::ReplaceSerial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-04-04T17:20:31.915927Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:31.916013Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:31.936970Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:31.938810Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:20:31.000000Z 2025-04-04T17:20:31.938873Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\216\306\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\216\306\216\3402" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\230\216\306\216\3402" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } 
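The earlier BAD_REQUEST (Tried to insert NULL value into NOT NULL column: Value, Execution code 1060) is the runtime twin of the compile-time Null-to-String conversion error: the offending value is statically well-typed, so type annotation passes, and the NULL is only caught by the check that guards NOT NULL columns, evaluated here by TKqpLiteralExecuter during constant folding (hence "TKqpEnsure failed" in the KQP_EXECUTER line). One plausible YQL shape that fails this way, against the same hypothetical table as above:

    -- Statically String? (optional), so the type annotator accepts it:
    UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (2, CAST(NULL AS String));
    -- Constant folding then evaluates the NOT NULL guard and reports:
    --   Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031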
2025-04-04T17:20:32.727797Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:32.727888Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:20:32.743873Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:20:32.744138Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:20:32.744388Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [2:177:2192] 2025-04-04T17:20:32.745138Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2025-04-04T17:20:32.745193Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [2:177:2192] 2025-04-04T17:20:32.745246Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:20:32.745292Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Process pending events. Count 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:32.745471Z node 2 :PERSQUEUE INFO: new Cookie owner1|2cefc6ee-60699750-9ee9e8ad-4920b9d6_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-04-04T17:20:32.745589Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {2, {0, 10}, 100001} Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:20:32.745922Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-04-04T17:20:32.746913Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-04T17:20:32.747525Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:20:32.779389Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:32.779534Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-04-04T17:20:32.779635Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 2025-04-04T17:20:33.076782Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-04-04T17:20:33.077710Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2025-04-04T17:20:33.078127Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:33.111075Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:33.111199Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-04-04T17:20:33.111266Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX 2025-04-04T17:20:33.319769Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2025-04-04T17:20:33.321925Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2025-04-04T17:20:33.322371Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:33.354563Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:33.354669Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2025-04-04T17:20:33.354740Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 6, partNo: 0, Offset: 102 is stored on disk Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured ... 
plete: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Create distr tx with id = 3 and act no: 4 Create immediate tx with id = 5 and act no: 6 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 6 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Send disk status response with cookie: 0 Got batch complete: 2 Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 5 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 
5 Got batch complete: 10 Send disk status response with cookie: 0 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 3 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Send disk status response with cookie: 0 Wait immediate tx complete 10 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2025-04-04T17:20:39.690491Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:20:39.690568Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:39.707679Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:20:39.709297Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 
100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:20:39.709363Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [5:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3 [GOOD] >> KqpRanges::DeleteNotFullScan+UseSink [GOOD] >> KqpRanges::CastKeyBounds >> TxUsage::Sinks_Oltp_WriteToTopic_5 [GOOD] >> KqpNewEngine::PkRangeSelect3 [GOOD] >> KqpNewEngine::PkRangeSelect4 >> TExtSubDomainTest::GenericCases >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDrop >> TxUsage::Sinks_Oltp_WriteToTopics_1 >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false >> KqpYql::JsonCast [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 5612, MsgBus: 21595 2025-04-04T17:20:32.332264Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509132707879558:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:32.332317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019aa/r3tmp/tmpB1XXrB/pdisk_1.dat 2025-04-04T17:20:32.794816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:32.794934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:32.796981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:32.818542Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5612, node 1 2025-04-04T17:20:32.893289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:32.893311Z node 
1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:32.893318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:32.893437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21595 TClient is connected to server localhost:21595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:33.499296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:33.522754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:33.683953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:33.834232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:33.902460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:35.520809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509145592783224:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.520957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.807701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.839098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.874601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.945630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.978626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.018824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.063927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509149887751032:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.063986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509149887751037:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.063994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.067456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:36.078069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509149887751039:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:36.182760Z node 1 :TX_PROXY ERROR: Actor# [1:7489509149887751093:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
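The repeating "Resource pool default not found or you don't have access permissions" warnings and the TX_PROXY "path exist, request accepts it" error above are one benign bootstrap sequence, not a failure: on the first queries each TPoolFetcherActor looks up /Root/.metadata/workload_manager/pools/default before it has been auto-provisioned, several TPoolCreatorActor instances then race to create it ("Scheduled retry ... doublechecking"), and the losers simply observe that the path already exists. The auto-provisioned entity corresponds roughly to a statement like the sketch below; the parameter names and values are illustrative only, not the defaults the service actually applies:

    -- Hypothetical equivalent of the auto-created default pool:
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- illustrative: no concurrency limit
        QUEUE_SIZE = -1               -- illustrative: no queue bound
    );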
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. 2025-04-04T17:20:37.333221Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509132707879558:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:37.333305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1394, MsgBus: 9265 2025-04-04T17:20:37.948096Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509157383294235:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:37.948207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019aa/r3tmp/tmpVgqN9Z/pdisk_1.dat 2025-04-04T17:20:38.097809Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:38.101572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:38.101636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:38.103524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1394, node 2 2025-04-04T17:20:38.162794Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:38.162820Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:38.162827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:38.162944Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9265 TClient is connected to server localhost:9265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:38.577377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
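The intent-determination error above (code 1040) is another expected negative case: SQLite-style conflict clauses are rejected before type annotation even runs. A sketch of the rejected statement together with the supported YQL write verbs; the table and values are hypothetical, and note that the alternatives overwrite on conflict rather than skip, so they are not semantically identical to INSERT OR IGNORE:

    -- Rejected: ":3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr."
    INSERT OR IGNORE INTO `/Root/KeyValue` (Key, Value) VALUES (1, "One");

    -- Supported blind-write alternatives:
    UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1, "One");   -- insert or update
    REPLACE INTO `/Root/KeyValue` (Key, Value) VALUES (1, "One");  -- insert or replace the whole row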
2025-04-04T17:20:38.601831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:38.677447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:38.842068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:38.921030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:41.056788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509174563165189:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:41.056919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:41.095363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.129911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.161314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.192307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.221013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.253124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.295557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509174563165699:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:41.295633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:41.296075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509174563165704:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:41.300330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:41.310420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509174563165706:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:41.393556Z node 2 :TX_PROXY ERROR: Actor# [2:7489509174563165760:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] >> KqpSqlIn::KeySuffix [GOOD] >> KqpSqlIn::KeySuffix_OnlyTail >> TxUsage::WriteToTopic_Demo_46 [GOOD] >> KqpWorkload::STOCK [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Float_Limit4 >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink >> TxUsage::WriteToTopic_Demo_47 >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDrop [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> KqpReturning::ReplaceSerial [GOOD] >> KqpReturning::ReturningSerial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 22817, MsgBus: 19228 2025-04-04T17:19:43.039453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508925098084616:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:43.039525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bbf/r3tmp/tmpitCNTP/pdisk_1.dat 2025-04-04T17:19:43.368936Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22817, node 1 2025-04-04T17:19:43.410138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:43.410351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:43.418640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:19:43.444956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:43.444979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:43.444986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:43.445100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19228 TClient is connected to server localhost:19228 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:19:43.925151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:45.704240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508933688019870:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:45.704363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:45.892353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.013112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.478005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:19:46.825771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508937982991210:2632], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:46.825855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:46.825880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508937982991215:2635], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:46.828906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:19:46.838022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508937982991217:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:19:46.921630Z node 1 :TX_PROXY ERROR: Actor# [1:7489508937982991363:5110] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:48.040487Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508925098084616:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:48.040578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:58.362423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:58.362462Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.629116s took: 0.629777s took: 0.629190s took: 0.630684s took: 0.635153s took: 0.633124s took: 0.646013s took: 0.644198s took: 0.643785s took: 0.640068s took: 6.348175s took: 6.348217s took: 6.374086s took: 6.400521s took: 6.426457s took: 6.425170s took: 6.428109s took: 6.427457s took: 6.427225s took: 6.478108s took: 6.051855s 2025-04-04T17:20:43.616167Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzUwZjAzYjYtOTA5YWJhYTctZTczNTEzZjAtYzgwNDQzZWY=, ActorId: [1:7489509157026336951:5368], ActorState: ExecuteState, TraceId: 01jr0x38rk3h7p3cvsyjcpsje2, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T17:20:43.620126Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2VkMjIzYzgtYzdmYzdiM2ItYmE4ZGQ5NjAtZWEwYTk0MTg=, ActorId: [1:7489509157026336955:5370], ActorState: ExecuteState, TraceId: 01jr0x38s3ftr2t7s2hzvrbqb7, Create QueryResponse for error on request, msg: 2025-04-04T17:20:43.620799Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTI1MWU0MmMtODU1NmEwMzQtYmE4ZDY2NWYtZDc4MTczNGI=, ActorId: [1:7489509157026336948:5367], ActorState: ExecuteState, TraceId: 01jr0x38vz6j2e692nv98wqza9, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T17:20:43.624754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTA5YjYxNzMtNTJjZGQ4YTktYTZmNmRhNGQtNjZmYzZiMjc=, ActorId: [1:7489509157026336954:5369], ActorState: ExecuteState, TraceId: 01jr0x38wp9n3t6sx7gsm0bnpx, Create QueryResponse for error on request, msg: 2025-04-04T17:20:43.625615Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmUyN2Q5NzEtZGNiZGI1OS0xYWIxNDdhYy0yYTFiOWIxZg==, ActorId: [1:7489509157026336939:5358], ActorState: ExecuteState, TraceId: 01jr0x38trbwjkq661nx9jjn68, Create QueryResponse for error on request, msg: 2025-04-04T17:20:43.626315Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2QzNWU2MmUtNWI0NjM0OTQtNzRjMTNkNzgtZWQwNjM4ZjQ=, ActorId: [1:7489509157026336945:5364], ActorState: ExecuteState, TraceId: 01jr0x38wp9vd3anwn258xypna, Create QueryResponse for error on request, msg: 2025-04-04T17:20:43.627545Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjRjMzE5OWItM2U5MzFiOTEtZGVkMTcyYjItZjVmMzUwM2Y=, ActorId: [1:7489509157026336938:5357], ActorState: ExecuteState, TraceId: 01jr0x38wpeya000kdm0awjzs3, Create QueryResponse for error on request, msg: 2025-04-04T17:20:43.629018Z node 1 :TX_DATASHARD ERROR: Complete 
[1743787243597 : 281474976711336] from 72075186224037897 at tablet 72075186224037897, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.630761Z node 1 :TX_DATASHARD ERROR: Complete [1743787243597 : 281474976711336] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.631765Z node 1 :TX_DATASHARD ERROR: Complete [1743787243618 : 281474976711338] from 72075186224037913 at tablet 72075186224037913, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.632501Z node 1 :TX_DATASHARD ERROR: Complete [1743787243632 : 281474976711341] from 72075186224037920 at tablet 72075186224037920, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.633881Z node 1 :TX_DATASHARD ERROR: Complete [1743787243618 : 281474976711338] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.633931Z node 1 :TX_DATASHARD ERROR: Complete [1743787243632 : 281474976711341] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.633970Z node 1 :TX_DATASHARD ERROR: Complete [1743787243633 : 281474976711339] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.634037Z node 1 :TX_DATASHARD ERROR: Complete [1743787243633 : 281474976711339] from 72075186224037901 at tablet 72075186224037901, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.634994Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMyMTQ5MjktZDc2MTM0OTQtMWQ2N2JiOTktNmVhZDQzNw==, ActorId: [1:7489509157026336946:5365], ActorState: ExecuteState, TraceId: 01jr0x38sy36w4s2ehpgn2n739, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T17:20:43.637115Z node 1 :TX_DATASHARD ERROR: Complete [1743787243646 : 281474976711340] from 72075186224037920 at tablet 72075186224037920, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.637368Z node 1 :TX_DATASHARD ERROR: Complete [1743787243646 : 281474976711340] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:20:43.637730Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTI3YjgzYmUtMjM5NGU2MzMtYzMwYTkwMWEtMTNkOTJjMjU=, ActorId: [1:7489509157026336937:5356], ActorState: ExecuteState, TraceId: 01jr0x38xffxgbjgxhhzcmzmvz, Crea ... 
7:20:44.989180Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-04T17:20:44.989200Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-04-04T17:20:44.989216Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-04T17:20:44.992709Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-04T17:20:44.992745Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-04T17:20:44.992787Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-04T17:20:44.992807Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-04-04T17:20:44.999685Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-04T17:20:44.999725Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-04-04T17:20:44.999743Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-04T17:20:44.999759Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-04T17:20:44.999775Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-04T17:20:44.999792Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:20:44.999809Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-04T17:20:44.999856Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-04T17:20:44.999878Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-04T17:20:44.999895Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-04-04T17:20:44.999911Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-04T17:20:44.999972Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-04T17:20:45.000035Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-04T17:20:45.000059Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T17:20:45.006976Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-04-04T17:20:45.007068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 
2025-04-04T17:20:45.008739Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-04T17:20:45.008776Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-04-04T17:20:45.010381Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-04-04T17:20:45.010409Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-04T17:20:45.010430Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-04T17:20:45.011447Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-04T17:20:45.011549Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-04T17:20:45.011658Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-04T17:20:45.011681Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-04T17:20:45.013389Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-04T17:20:45.022267Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-04T17:20:45.026576Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:20:45.028787Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-04T17:20:45.028832Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-04-04T17:20:45.029626Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T17:20:45.156145Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2025-04-04T17:20:45.156271Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2025-04-04T17:20:45.156291Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-04-04T17:20:45.156305Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2025-04-04T17:20:45.156320Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-04-04T17:20:45.156333Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2025-04-04T17:20:45.156350Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2025-04-04T17:20:45.164139Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 
2025-04-04T17:20:45.164177Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2025-04-04T17:20:45.164192Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2025-04-04T17:20:45.164229Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-04-04T17:20:45.164249Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2025-04-04T17:20:45.164579Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2025-04-04T17:20:45.164628Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2025-04-04T17:20:45.167708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2025-04-04T17:20:45.167750Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2025-04-04T17:20:45.167773Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2025-04-04T17:20:45.167801Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2025-04-04T17:20:45.167825Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-04-04T17:20:45.167850Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2025-04-04T17:20:45.167875Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2025-04-04T17:20:45.167895Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2025-04-04T17:20:45.167943Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2025-04-04T17:20:45.167966Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-04-04T17:20:45.167991Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2025-04-04T17:20:45.168023Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2025-04-04T17:20:45.168049Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2025-04-04T17:20:45.168069Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2025-04-04T17:20:45.168102Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-04-04T17:20:45.168129Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2025-04-04T17:20:45.168176Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 
2025-04-04T17:20:45.168209Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2025-04-04T17:20:45.168240Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-04-04T17:20:45.168278Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2025-04-04T17:20:45.189978Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2025-04-04T17:20:45.190440Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2025-04-04T17:20:45.190466Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2025-04-04T17:20:45.190483Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2025-04-04T17:20:45.190500Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2025-04-04T17:20:45.190519Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found
>> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop [GOOD]
Test command err: 2025-04-04T17:20:43.137185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509183439905350:2192];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:43.137272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167a/r3tmp/tmp5vcmEi/pdisk_1.dat 2025-04-04T17:20:43.626716Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:43.646758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:43.647888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:43.652027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29597 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-04T17:20:43.887038Z node 1 :TX_PROXY DEBUG: actor# [1:7489509183439905468:2102] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:43.887082Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509183439905748:2258] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:43.890803Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509183439905493:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:43.890897Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509183439905493:2115], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:43.891167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:43.893385Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905166:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509183439905753:2259] 2025-04-04T17:20:43.893423Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905169:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509183439905754:2259] 2025-04-04T17:20:43.893467Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183439905166:2049] Subscribe: subscriber# [1:7489509183439905753:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:43.893471Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183439905169:2052] Subscribe: subscriber# [1:7489509183439905754:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:43.893534Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905172:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509183439905755:2259] 2025-04-04T17:20:43.893555Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183439905172:2055] Subscribe: subscriber# [1:7489509183439905755:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:43.893595Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905753:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183439905166:2049] 2025-04-04T17:20:43.893641Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905754:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183439905169:2052] 2025-04-04T17:20:43.893668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905755:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183439905172:2055] 2025-04-04T17:20:43.893711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183439905750:2259] 2025-04-04T17:20:43.893744Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905172:2055] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509183439905755:2259] 2025-04-04T17:20:43.893750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183439905751:2259] 2025-04-04T17:20:43.893764Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905166:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509183439905753:2259] 2025-04-04T17:20:43.893777Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905169:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509183439905754:2259] 2025-04-04T17:20:43.893808Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509183439905749:2259][/dc-1] Set up state: owner# [1:7489509183439905493:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:43.895324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183439905752:2259] 2025-04-04T17:20:43.895421Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509183439905749:2259][/dc-1] Path was already updated: owner# [1:7489509183439905493:2115], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:43.895496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905753:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905750:2259], cookie# 1 2025-04-04T17:20:43.895525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905754:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905751:2259], cookie# 1 2025-04-04T17:20:43.895538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905755:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905752:2259], cookie# 1 2025-04-04T17:20:43.896250Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905169:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905754:2259], cookie# 1 2025-04-04T17:20:43.896322Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905172:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905755:2259], cookie# 1 2025-04-04T17:20:43.896388Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905754:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183439905169:2052], cookie# 1 2025-04-04T17:20:43.896424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905755:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183439905172:2055], cookie# 1 2025-04-04T17:20:43.896474Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183439905751:2259], cookie# 1 2025-04-04T17:20:43.896496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:43.896539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183439905752:2259], cookie# 1 2025-04-04T17:20:43.896578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:43.896609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905166:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905753:2259], cookie# 1 2025-04-04T17:20:43.896627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905753:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183439905166:2049], cookie# 1 2025-04-04T17:20:43.896663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183439905750:2259], cookie# 1 2025-04-04T17:20:43.896682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Unexpected sync response: sender# [1:7489509183439905750:2259], cookie# 1 2025-04-04T17:20:43.954733Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509183439905493:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:43.956425Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509183439905493:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... t reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:20:44.251593Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509183439905493:2115], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7489509187734873109:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787244269 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7489509187734873109:2302] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787244269 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-04-04T17:20:44.251711Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905166:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [1:7489509187734873113:2302] 2025-04-04T17:20:44.251780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:20:44.251795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-04-04T17:20:44.251828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 TClient::Ls request: /dc-1 2025-04-04T17:20:44.251987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-04T17:20:44.252116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-04-04T17:20:44.252166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:20:44.252191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:20:44.252985Z node 1 :TX_PROXY DEBUG: actor# [1:7489509183439905468:2102] Handle TEvNavigate describe path /dc-1 2025-04-04T17:20:44.253010Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509187734873137:2321] HANDLE EvNavigateScheme /dc-1 2025-04-04T17:20:44.253101Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509183439905493:2115], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 
2025-04-04T17:20:44.253185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7489509183439905493:2115], cookie# 4 2025-04-04T17:20:44.253249Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905753:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905750:2259], cookie# 4 2025-04-04T17:20:44.253264Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905754:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905751:2259], cookie# 4 2025-04-04T17:20:44.253278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905755:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905752:2259], cookie# 4 2025-04-04T17:20:44.253317Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905166:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905753:2259], cookie# 4 2025-04-04T17:20:44.253341Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905169:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905754:2259], cookie# 4 2025-04-04T17:20:44.253357Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183439905172:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509183439905755:2259], cookie# 4 2025-04-04T17:20:44.253396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905753:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7489509183439905166:2049], cookie# 4 2025-04-04T17:20:44.253429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905754:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7489509183439905169:2052], cookie# 4 2025-04-04T17:20:44.253452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509183439905755:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7489509183439905172:2055], cookie# 4 2025-04-04T17:20:44.253476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7489509183439905750:2259], cookie# 4 2025-04-04T17:20:44.253519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:44.253535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7489509183439905751:2259], cookie# 4 2025-04-04T17:20:44.253551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:44.253575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7489509183439905752:2259], cookie# 4 2025-04-04T17:20:44.253585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509183439905749:2259][/dc-1] Unexpected sync response: sender# [1:7489509183439905752:2259], cookie# 4 2025-04-04T17:20:44.253620Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: 
HandleNotify: self# [1:7489509183439905493:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:20:44.253706Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509183439905493:2115], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489509183439905749:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787244241 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:44.253777Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509183439905493:2115], cacheItem# { Subscriber: { Subscriber: [1:7489509183439905749:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787244241 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-04T17:20:44.253913Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509187734873138:2322], recipient# [1:7489509187734873137:2321], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:20:44.253940Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509187734873137:2321] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:20:44.253997Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509187734873137:2321] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787244241 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... 
(TRUNCATED) 2025-04-04T17:20:44.254620Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509187734873137:2321] Handle TEvDescribeSchemeResult Forward to# [1:7489509187734873136:2320] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787244241 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480
>> TxUsage::WriteToTopic_Demo_29 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD]
Test command err: 2025-04-04T17:20:43.206573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509181776324188:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:43.206618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165c/r3tmp/tmppyyzuW/pdisk_1.dat 2025-04-04T17:20:43.784063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:43.826573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:43.826755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:43.836648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64896 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-04-04T17:20:44.036761Z node 1 :TX_PROXY DEBUG: actor# [1:7489509181776324407:2091] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:44.036815Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509186071292230:2437] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:44.036953Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509181776324449:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:44.036994Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509181776324449:2113], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:44.037242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:44.039285Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324137:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509186071292235:2438] 2025-04-04T17:20:44.039366Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509181776324137:2049] Subscribe: subscriber# [1:7489509186071292235:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.039453Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324143:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509186071292237:2438] 2025-04-04T17:20:44.039474Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509181776324143:2055] Subscribe: subscriber# [1:7489509186071292237:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.039536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292235:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509181776324137:2049] 2025-04-04T17:20:44.039560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292237:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509181776324143:2055] 2025-04-04T17:20:44.039623Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509186071292232:2438] 2025-04-04T17:20:44.039658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509186071292234:2438] 2025-04-04T17:20:44.039708Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509186071292231:2438][/dc-1] Set up state: owner# [1:7489509181776324449:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.039847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292235:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7489509186071292232:2438], cookie# 1 2025-04-04T17:20:44.039862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292236:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292233:2438], cookie# 1 2025-04-04T17:20:44.039878Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292237:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292234:2438], cookie# 1 2025-04-04T17:20:44.039925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324137:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509186071292235:2438] 2025-04-04T17:20:44.039971Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324137:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292235:2438], cookie# 1 2025-04-04T17:20:44.039997Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324143:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509186071292237:2438] 2025-04-04T17:20:44.040010Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324143:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292237:2438], cookie# 1 2025-04-04T17:20:44.042306Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324140:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509186071292236:2438] 2025-04-04T17:20:44.042361Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509181776324140:2052] Subscribe: subscriber# [1:7489509186071292236:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.042444Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324140:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292236:2438], cookie# 1 2025-04-04T17:20:44.042492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292235:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509181776324137:2049], cookie# 1 2025-04-04T17:20:44.042510Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292237:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509181776324143:2055], cookie# 1 2025-04-04T17:20:44.042553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292236:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509181776324140:2052] 2025-04-04T17:20:44.042574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292236:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509181776324140:2052], cookie# 1 2025-04-04T17:20:44.042610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509186071292232:2438], cookie# 1 2025-04-04T17:20:44.042630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:44.042645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509186071292234:2438], cookie# 1 
2025-04-04T17:20:44.042664Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:44.042701Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509186071292233:2438] 2025-04-04T17:20:44.042754Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509186071292231:2438][/dc-1] Path was already updated: owner# [1:7489509181776324449:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.042772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509186071292233:2438], cookie# 1 2025-04-04T17:20:44.042801Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Unexpected sync response: sender# [1:7489509186071292233:2438], cookie# 1 2025-04-04T17:20:44.042839Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324140:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509186071292236:2438] 2025-04-04T17:20:44.086494Z node 1 :TX_PROXY DEBUG: actor# [1:7489509181776324407:2091] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:20:44.094080Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:20:44.098159Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509181776324449:2113], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:44.098726Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509181776324449:2113], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathType ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509181776324449:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489509186071292438:2593] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787244360 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:44.896072Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509181776324449:2113], cacheItem# { Subscriber: { Subscriber: [1:7489509186071292438:2593] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787244360 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-04T17:20:44.896238Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509186071292563:2667], recipient# [1:7489509186071292562:2666], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:20:44.896265Z node 1 :TX_PROXY INFO: Actor# [1:7489509186071292562:2666] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-04T17:20:44.901993Z node 1 :TX_PROXY DEBUG: actor# [1:7489509181776324407:2091] Handle TEvNavigate describe path /dc-1 2025-04-04T17:20:44.902029Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509186071292565:2669] HANDLE EvNavigateScheme /dc-1 2025-04-04T17:20:44.902126Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509181776324449:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:44.902200Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# 
[1:7489509181776324449:2113], cookie# 4 2025-04-04T17:20:44.902265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292235:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292232:2438], cookie# 4 2025-04-04T17:20:44.902287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292236:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292233:2438], cookie# 4 2025-04-04T17:20:44.902302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292237:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292234:2438], cookie# 4 2025-04-04T17:20:44.902326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324137:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292235:2438], cookie# 4 2025-04-04T17:20:44.902350Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324140:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292236:2438], cookie# 4 2025-04-04T17:20:44.902373Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181776324143:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509186071292237:2438], cookie# 4 2025-04-04T17:20:44.902402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292235:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509181776324137:2049], cookie# 4 2025-04-04T17:20:44.902431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292236:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509181776324140:2052], cookie# 4 2025-04-04T17:20:44.902447Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509186071292237:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509181776324143:2055], cookie# 4 2025-04-04T17:20:44.902473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509186071292232:2438], cookie# 4 2025-04-04T17:20:44.902489Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:44.902503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509186071292233:2438], cookie# 4 2025-04-04T17:20:44.902519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:44.902537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509186071292234:2438], cookie# 4 2025-04-04T17:20:44.902546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509186071292231:2438][/dc-1] Unexpected sync response: sender# [1:7489509186071292234:2438], cookie# 4 2025-04-04T17:20:44.902584Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509181776324449:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:20:44.902645Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509181776324449:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489509186071292231:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787244325 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:44.902701Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509181776324449:2113], cacheItem# { Subscriber: { Subscriber: [1:7489509186071292231:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787244325 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-04T17:20:44.902851Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509186071292566:2670], recipient# [1:7489509186071292565:2669], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:20:44.902877Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509186071292565:2669] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:20:44.902998Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509186071292565:2669] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-04T17:20:44.903693Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509186071292565:2669] Handle TEvDescribeSchemeResult Forward to# [1:7489509186071292564:2668] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 66 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787244325 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: 
"storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787244325 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743787244360 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardI... (TRUNCATED) >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 3008, MsgBus: 23851 2025-04-04T17:20:06.545067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509023350309853:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:06.546097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f8/r3tmp/tmpOXBwJ9/pdisk_1.dat 2025-04-04T17:20:06.935672Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3008, node 1 2025-04-04T17:20:06.958946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:06.959084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:06.963363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:07.099448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:07.099482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:07.099493Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:07.099626Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23851 TClient is connected to server localhost:23851 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:07.804674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:09.492130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509036235212401:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.492228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.828907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:09.971199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509036235212506:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.971272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.971676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509036235212511:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:09.979476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:09.989420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509036235212513:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:20:10.044691Z node 1 :TX_PROXY ERROR: Actor# [1:7489509040530179860:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21103, MsgBus: 3199 2025-04-04T17:20:11.256241Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509043800149762:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:11.256335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f8/r3tmp/tmplVwSho/pdisk_1.dat 2025-04-04T17:20:11.406771Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:11.421228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:11.421315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:11.423147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21103, node 2 2025-04-04T17:20:11.501242Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:11.501266Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:11.501274Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:11.501403Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3199 TClient is connected to server localhost:3199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:11.978649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:11.984097Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:11.997312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:12.081928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:12.223449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:12.283739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:14.578426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509056685053426:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:14.578507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:14.624520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:14.660968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:14.728283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:14.769546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:14.808534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:14.843925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:14.904742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509056685053940:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:14.904836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.487233Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.531842Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.574991Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.644481Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.725504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.759060Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.832150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:35.899694Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509146815044061:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.899837Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.900221Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509146815044066:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:35.904442Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:35.913968Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489509146815044068:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:35.997205Z node 5 :TX_PROXY ERROR: Actor# [5:7489509146815044124:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:36.345766Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489509129635172601:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:36.345856Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25637, MsgBus: 10078 2025-04-04T17:20:39.593653Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509166611336797:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:39.593726Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014f8/r3tmp/tmpbZ13qB/pdisk_1.dat 2025-04-04T17:20:39.710199Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:39.769881Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:39.769993Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:39.771680Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25637, node 6 2025-04-04T17:20:39.835481Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:39.835500Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:39.835508Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:39.835627Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10078 TClient is connected to server localhost:10078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:20:40.487864Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:40.495618Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:40.509214Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:40.611578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:40.809554Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:40.898107Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:43.823536Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509183791207741:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:43.823665Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:43.874143Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:43.960735Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:44.005652Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:44.044170Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:44.087089Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:44.174936Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:44.273110Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509188086175559:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:44.273259Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:44.274111Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509188086175564:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:44.279119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:44.295143Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509188086175566:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:44.353124Z node 6 :TX_PROXY ERROR: Actor# [6:7489509188086175621:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:44.594144Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509166611336797:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:44.594228Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TxUsage::WriteToTopic_Demo_30 >> TExtSubDomainTest::GenericCases [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PruneEffectPartitions+UseSink >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false >> KqpRanges::CastKeyBounds [GOOD] >> TExtSubDomainTest::DeclareAndLs >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2025-04-04T17:20:43.349202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509183431932349:2279];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:43.349270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001668/r3tmp/tmpqzM6PR/pdisk_1.dat 2025-04-04T17:20:43.900255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:43.900401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:43.906147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:43.909339Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30047 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:20:44.103683Z node 1 :TX_PROXY DEBUG: actor# [1:7489509183431932359:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:44.103723Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509187726900130:2437] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:44.103833Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509183431932389:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:44.103888Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509183431932389:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:44.104070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:44.105905Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932037:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509187726900135:2438] 2025-04-04T17:20:44.105969Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932037:2050] Subscribe: subscriber# [1:7489509187726900135:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.106023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932040:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509187726900136:2438] 2025-04-04T17:20:44.106038Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932040:2053] Subscribe: subscriber# [1:7489509187726900136:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.106056Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932043:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509187726900137:2438] 2025-04-04T17:20:44.106080Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932043:2056] Subscribe: subscriber# [1:7489509187726900137:2438], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.106121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900135:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183431932037:2050] 2025-04-04T17:20:44.106151Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900136:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183431932040:2053] 2025-04-04T17:20:44.106167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900137:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183431932043:2056] 2025-04-04T17:20:44.106271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509187726900132:2438] 2025-04-04T17:20:44.106299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7489509187726900131:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509187726900133:2438] 2025-04-04T17:20:44.106353Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509187726900131:2438][/dc-1] Set up state: owner# [1:7489509183431932389:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.106446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509187726900134:2438] 2025-04-04T17:20:44.106492Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509187726900131:2438][/dc-1] Path was already updated: owner# [1:7489509183431932389:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.106535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900135:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187726900132:2438], cookie# 1 2025-04-04T17:20:44.106548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900136:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187726900133:2438], cookie# 1 2025-04-04T17:20:44.106557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900137:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187726900134:2438], cookie# 1 2025-04-04T17:20:44.106586Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932037:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509187726900135:2438] 2025-04-04T17:20:44.106609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932037:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187726900135:2438], cookie# 1 2025-04-04T17:20:44.106625Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932040:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509187726900136:2438] 2025-04-04T17:20:44.106637Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932040:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187726900136:2438], cookie# 1 2025-04-04T17:20:44.106650Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932043:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509187726900137:2438] 2025-04-04T17:20:44.106660Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932043:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187726900137:2438], cookie# 1 2025-04-04T17:20:44.110374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900135:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183431932037:2050], cookie# 1 2025-04-04T17:20:44.110401Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900136:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183431932040:2053], cookie# 1 2025-04-04T17:20:44.110415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187726900137:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183431932043:2056], cookie# 1 2025-04-04T17:20:44.110452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509187726900132:2438], cookie# 1 2025-04-04T17:20:44.110478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:44.110526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509187726900133:2438], cookie# 1 2025-04-04T17:20:44.110553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:44.110575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509187726900134:2438], cookie# 1 2025-04-04T17:20:44.110606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187726900131:2438][/dc-1] Unexpected sync response: sender# [1:7489509187726900134:2438], cookie# 1 2025-04-04T17:20:44.183542Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509183431932389:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:44.183939Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509183431932389:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... inInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:46.700204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509196316835535:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:46.700706Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932037:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7489509196316835546:3023] 2025-04-04T17:20:46.700727Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932037:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-04T17:20:46.700805Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932037:2050] Subscribe: subscriber# [1:7489509196316835546:3023], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:46.700840Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932040:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7489509196316835547:3023] 2025-04-04T17:20:46.700847Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932040:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-04T17:20:46.700864Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932040:2053] Subscribe: subscriber# [1:7489509196316835547:3023], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:46.700883Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932043:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7489509196316835548:3023] 2025-04-04T17:20:46.700890Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932043:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-04T17:20:46.700907Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183431932043:2056] Subscribe: subscriber# [1:7489509196316835548:3023], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:46.700935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509196316835546:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489509183431932037:2050] 2025-04-04T17:20:46.700964Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7489509196316835547:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489509183431932040:2053] 2025-04-04T17:20:46.700999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509196316835548:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489509183431932043:2056] 2025-04-04T17:20:46.701030Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509196316835535:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489509196316835543:3023] 2025-04-04T17:20:46.701068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509196316835535:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489509196316835544:3023] 2025-04-04T17:20:46.701092Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509196316835535:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7489509183431932389:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:46.701109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509196316835535:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7489509196316835545:3023] 2025-04-04T17:20:46.701128Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509196316835535:3023][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7489509183431932389:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:46.701149Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932037:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489509196316835546:3023] 2025-04-04T17:20:46.701162Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932040:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489509196316835547:3023] 2025-04-04T17:20:46.701174Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183431932043:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7489509196316835548:3023] 2025-04-04T17:20:46.701202Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509183431932389:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-04T17:20:46.701291Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509183431932389:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7489509196316835535:3023] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:46.701384Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509183431932389:2129], cacheItem# { Subscriber: { Subscriber: [1:7489509196316835535:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:46.701480Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509196316835549:3025], recipient# [1:7489509196316835526:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:47.352844Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509183431932389:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:47.352968Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509183431932389:2129], cacheItem# { Subscriber: { Subscriber: [1:7489509187726900163:2459] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:47.353041Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509200611802853:3029], recipient# [1:7489509200611802852:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:47.705710Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509183431932389:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:47.705913Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509183431932389:2129], cacheItem# { Subscriber: { Subscriber: [1:7489509196316835535:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:47.706014Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509200611802867:3030], recipient# [1:7489509200611802866:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TxUsage::ReadRuleGeneration [GOOD] >> KqpService::PatternCache [GOOD] >> KqpService::RangeCache+UseCache >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: Trying to start YDB, gRPC: 9021, MsgBus: 24809 2025-04-04T17:20:15.424484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509060307722025:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:15.424985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e3/r3tmp/tmpaRWlPY/pdisk_1.dat 2025-04-04T17:20:15.837896Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9021, node 1 2025-04-04T17:20:15.843194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:15.843389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:15.845365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:15.942849Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:15.942874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:15.942880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:15.942991Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24809 TClient is connected to server localhost:24809 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:16.534450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:18.501681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509073192624561:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:18.501794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:18.764284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:18.869611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509073192624663:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:18.869725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:18.869880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509073192624668:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:18.873561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:18.883525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509073192624670:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:20:18.970365Z node 1 :TX_PROXY ERROR: Actor# [1:7489509073192624721:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:19.085943Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509077487592056:2356], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2025-04-04T17:20:19.086218Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2NjNDBhOWItZmFkOGRjNDAtNWE0MGY1ZDQtYWJjNzMyYTA=, ActorId: [1:7489509073192624543:2329], ActorState: ExecuteState, TraceId: 01jr0x2p3s6v3709yjs6p25qe8, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-04T17:20:19.103985Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509077487592065:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:20:19.104249Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2NjNDBhOWItZmFkOGRjNDAtNWE0MGY1ZDQtYWJjNzMyYTA=, ActorId: [1:7489509073192624543:2329], ActorState: ExecuteState, TraceId: 01jr0x2p4nar399w1sx6wgskgt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 63168, MsgBus: 13919 2025-04-04T17:20:19.800277Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509077565096579:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:19.809968Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e3/r3tmp/tmpCSdNeZ/pdisk_1.dat 2025-04-04T17:20:19.968729Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:19.995417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:19.995530Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:19.997228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63168, node 2 2025-04-04T17:20:20.069802Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:20.069831Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:20.069840Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:20.069988Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13919 TClient is connected to server localhost:13919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
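The two compile failures above describe the same constraint from opposite sides: /Root/TestUpsertNotNullPk has a non-null Uint64 primary key, so an upsert that omits Key is rejected at compile time with code 2029 (PRECONDITION_FAILED), while an upsert that supplies Key as NULL fails the Null-to-Uint64 conversion with code 2031 (GENERIC_ERROR). A minimal sketch reproducing both with the ydb Python SDK, assuming a local endpoint and the table shape the errors imply:

    import ydb

    # Assumed connection details; the table path and its non-null Uint64 Key
    # column are inferred from the compile errors in the log above.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def run(session, query):
        # Single-statement data query committed in one transaction.
        session.transaction().execute(query, commit_tx=True)

    try:
        # Key omitted entirely -> "Missing key column in input", code 2029.
        pool.retry_operation_sync(
            run, None,
            'UPSERT INTO `/Root/TestUpsertNotNullPk` (Value) VALUES ("v");')
    except ydb.issues.PreconditionFailed as e:
        print("code 2029:", e)

    try:
        # Key present but NULL -> cannot convert Null to Uint64, code 2031.
        pool.retry_operation_sync(
            run, None,
            'UPSERT INTO `/Root/TestUpsertNotNullPk` (Key, Value) VALUES (NULL, "v");')
    except ydb.issues.GenericError as e:
        print("code 2031:", e)

    pool.stop()
    driver.stop()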
2025-04-04T17:20:20.523445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:20:20.532273Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:23.023489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509094744966414:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.023574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.037703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:23.133430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509094744966514:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.133574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.134557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509094744966519:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:23.138971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCr ... e 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509165154660001:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:39.967631Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:39.971351Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509165154660006:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:39.975398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:39.986691Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489509165154660008:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:40.072503Z node 5 :TX_PROXY ERROR: Actor# [5:7489509169449627359:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:40.883110Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489509147974788717:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:40.883192Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink","Stats":{"ComputeNodes":[{"Tasks":[{"EgressRows":3,"NodeId":5,"FinishTimeMs":1743787241353,"EgressBytes":39,"TaskId":1,"Host":"ghrun-mup5cka3jq","ComputeTimeUs":352}],"PeakMemoryUsageBytes":196608,"CpuTimeUs":3739}],"UseLlvm":"undefined","MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[1,1048576]},"Tasks":1,"Egress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}},"Name":"KqpTableSink","Egress":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Splits":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39}},"Push":{"Chunks":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}}}],"PhysicalStageId":0,"StageDurationUs":0,"EgressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"BaseTimeMs":1743787241353,"EgressBytes":{"Count":1,"Sum":39,"Max":39,"Min":39},"CpuTimeUs":{"Count":1,"Sum":3284,"Max":3284,"Min":3284,"History":[0,3284]}}}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":331982,"CpuTimeUs":328318},"ProcessCpuTimeUs":432,"TotalDurationUs":345952,"ResourcePoolId":"default","QueuedTimeUs":544},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":3.284,"A-Cpu":3.284,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Delete"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 16146, MsgBus: 11416 2025-04-04T17:20:42.290633Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509177512105874:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:42.290756Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e3/r3tmp/tmpIfc2Aq/pdisk_1.dat 2025-04-04T17:20:42.390900Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:42.415964Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:42.416066Z node 6 :HIVE WARN: 
HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:42.417702Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16146, node 6 2025-04-04T17:20:42.464671Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:42.464696Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:42.464707Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:42.464850Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11416 TClient is connected to server localhost:11416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:43.044369Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:43.073806Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:43.162016Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:43.355739Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:43.447767Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:46.453331Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509194691976818:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:46.453444Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:46.505575Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:46.546859Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:46.587789Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:46.672992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:46.714745Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:46.761799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:46.852583Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509194691977334:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:46.852718Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:46.854258Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509194691977339:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:46.859409Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:46.870123Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509194691977341:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:46.927752Z node 6 :TX_PROXY ERROR: Actor# [6:7489509194691977397:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:47.292294Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509177512105874:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:47.292359Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 >> TExtSubDomainTest::DeclareAndLs [GOOD] >> TS3WrapperTests::AbortUnknownUpload >> TS3WrapperTests::GetUnknownObject [GOOD] >> TS3WrapperTests::AbortUnknownUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2025-04-04T17:18:58.073265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508732275758195:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.073321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ca3/r3tmp/tmpvFuRLs/pdisk_1.dat 2025-04-04T17:18:58.236708Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:18:58.435409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:58.441425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.441544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.443473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23535, node 1 2025-04-04T17:18:58.584041Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000ca3/r3tmp/yandexggcmsE.tmp 2025-04-04T17:18:58.584069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ca3/r3tmp/yandexggcmsE.tmp 2025-04-04T17:18:58.584264Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000ca3/r3tmp/yandexggcmsE.tmp 2025-04-04T17:18:58.584395Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799130Z INFO: TTestServer started on Port 2860 GrpcPort 23535 TClient is connected to server localhost:2860 PQClient connected to localhost:23535 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:59.041333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.078717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:00.602490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508740865693481:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.602503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508740865693461:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.602648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.606332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.611263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508740865693517:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.611330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.616463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508740865693485:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:00.696892Z node 1 :TX_PROXY ERROR: Actor# [1:7489508740865693541:2447] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.978360Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508740865693557:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:00.984937Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjRlZTUyYzAtZjNmZGYwZC1mNzc2M2Q2MC02MTM0ODBiMQ==, ActorId: [1:7489508740865693453:2336], ActorState: ExecuteState, TraceId: 01jr0x09f7aysqgmd1b7d0d103, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:00.987573Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.023698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.054447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.164361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508745160661134:2630] 2025-04-04T17:19:03.074686Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508732275758195:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.074779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.400165Z :WithoutPartitionWithRestart INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.413706Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.427592Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508770930465239:2819] connected; active server actors: 1 2025-04-04T17:19:07.428050Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-04T17:19:07.428651Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.428782Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.429246Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.434005Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.434256Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.434702Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.434860Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.434885Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.434899Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.434926Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.434965Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.435008Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.440374Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.440410Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508770930465253:2466], now have 1 active actors on pipe 2025-04-04T17:19:07.471981Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508770930465238:2818], now have 1 active actors on pipe 2025-04-04T17:19:07.499271Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508732275758468 RawX2: 4294969479 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test ... 
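The proposed tablet config above, although truncated, pins down the topic's limits: one partition (TotalPartitions: 1), a 86400-second retention window (LifetimeSeconds), and a 1048576 bytes-per-second per-partition write quota (WriteSpeedInBytesPerSecond). A hedged sketch of an equivalent topic declaration through the ydb Python SDK; the endpoint is a placeholder and the keyword names follow the current SDK, so they may differ between versions:

    import datetime
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
    driver.wait(timeout=5)
    driver.topic_client.create_topic(
        "test-topic",
        min_active_partitions=1,                             # TotalPartitions: 1
        retention_period=datetime.timedelta(seconds=86400),  # LifetimeSeconds: 86400
        partition_write_speed_bytes_per_second=1048576,      # WriteSpeedInBytesPerSecond
    )
    driver.stop()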
17903640882709759851_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-04-04T17:20:48.400108Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid f7084a3c-18ae3c61-43074e77-a939d447 has messages 1 2025-04-04T17:20:48.400173Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 read done: guid# f7084a3c-18ae3c61-43074e77-a939d447, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 201 2025-04-04T17:20:48.400202Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 response to read: guid# f7084a3c-18ae3c61-43074e77-a939d447 2025-04-04T17:20:48.400338Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 Process answer. Aval parts: 0 2025-04-04T17:20:48.400553Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] Got ReadResponse, serverBytesSize = 201, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428599 2025-04-04T17:20:48.400702Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428599 2025-04-04T17:20:48.401007Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2025-04-04T17:20:48.401064Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] Returning serverBytesSize = 201 to budget 2025-04-04T17:20:48.401102Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] In ContinueReadingDataImpl, ReadSizeBudget = 201, ReadSizeServerDelta = 52428599 2025-04-04T17:20:48.401350Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-04T17:20:48.401467Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 grpc read done: success# 1, data# { read_request { bytes_size: 201 } } 2025-04-04T17:20:48.401536Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 got read request: guid# 29fd2947-47165e94-a3a1b0d-ef4352d 2025-04-04T17:20:48.401550Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (3-3) 2025-04-04T17:20:48.401653Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] The application data is transferred to the client. Number of messages 1, size 9 bytes 2025-04-04T17:20:48.401715Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] Returning serverBytesSize = 0 to budget 0 1 2025-04-04T17:20:48.401865Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] Commit offsets [3, 4). 
Partition stream id: 1 2025-04-04T17:20:48.402293Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 3 end: 4 } } } } 2025-04-04T17:20:48.402497Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) committing to position 4 prev 3 end 4 by cookie 2 2025-04-04T17:20:48.402577Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-04T17:20:48.402618Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-04T17:20:48.402712Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 offset is set to 4 (startOffset 0) session consumer-1_9_2_17903640882709759851_v1 2025-04-04T17:20:48.402836Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:20:48.403813Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 readTimeStamp for offset 4 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2025-04-04T17:20:48.403877Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:20:48.403903Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-04T17:20:48.404001Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-04-04T17:20:48.404039Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) commit done to position 4 endOffset 4 with cookie 2 2025-04-04T17:20:48.404062Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 4 2025-04-04T17:20:48.404517Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 4 } } 2025-04-04T17:20:49.271727Z node 9 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:20:49.272725Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0 describe result for acl check 2025-04-04T17:20:49.383547Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2025-04-04T17:20:49.383616Z :INFO: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:20:49.394610Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 checking auth because of timeout 2025-04-04T17:20:49.394698Z node 9 
:PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 auth for : consumer-1 2025-04-04T17:20:49.395305Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 Handle describe topics response 2025-04-04T17:20:49.395396Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 auth is DEAD 2025-04-04T17:20:49.395492Z node 9 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 auth ok: topics# 1, initDone# 1 2025-04-04T17:20:50.382929Z :INFO: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] Closing read session. Close timeout: 0.000000s 2025-04-04T17:20:50.383018Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2025-04-04T17:20:50.383075Z :INFO: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2000 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:20:50.383226Z :NOTICE: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:20:50.383287Z :DEBUG: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] [] Abort session to cluster 2025-04-04T17:20:50.383971Z :NOTICE: [/Root] [/Root] [5a1247cc-58ce0fce-30253ada-de8573f2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:20:50.385016Z node 9 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 grpc read done: success# 0, data# { } 2025-04-04T17:20:50.385133Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-04T17:20:50.385054Z node 9 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 grpc read failed 2025-04-04T17:20:50.385082Z node 9 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 grpc closed 2025-04-04T17:20:50.385198Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T17:20:50.385265Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T17:20:50.385745Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:20:50.385796Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:20:50.385118Z node 9 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_9_2_17903640882709759851_v1 is DEAD 2025-04-04T17:20:50.386318Z node 9 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [9:7489509201951019871:2532] disconnected; active server actors: 1 2025-04-04T17:20:50.386343Z node 9 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [9:7489509201951019871:2532] client consumer-1 disconnected session consumer-1_9_2_17903640882709759851_v1 2025-04-04T17:20:50.386481Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session consumer-1_9_2_17903640882709759851_v1 2025-04-04T17:20:50.386519Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [9:7489509201951019874:2535] destroyed 2025-04-04T17:20:50.386609Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer-1_9_2_17903640882709759851_v1 2025-04-04T17:20:50.386881Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0 grpc read done: success: 0 data: 2025-04-04T17:20:50.386907Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0 grpc read failed 2025-04-04T17:20:50.386950Z node 9 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0 2025-04-04T17:20:50.386971Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|79d688c4-f1438d7b-64924b87-affec493_0 is DEAD 2025-04-04T17:20:50.387439Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:20:50.387752Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [9:7489509193361085094:2493] destroyed 2025-04-04T17:20:50.387808Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
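The session trace above is one complete consumer round trip: the reader receives the message at offset 3, commits [3, 4) (acknowledged through commit_offset_request / partitions_committed_offsets), and is then closed with a zero timeout, which the SDK reports as the ABORTED "Close with zero timeout" status rather than a failure. A sketch of the same sequence with the ydb Python SDK, reusing the topic and consumer names from the log and assuming a local endpoint:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
    driver.wait(timeout=5)

    reader = driver.topic_client.reader("test-topic", consumer="consumer-1")
    msg = reader.receive_message()   # the message at offset 3 in the log above
    print(msg.data)
    reader.commit_with_ack(msg)      # commits [3, 4), mirroring commit_offset_request
    reader.close()                   # tears the session down, as in the close above
    driver.stop()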
2025-04-04T17:20:50.474960Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:20:50.474995Z node 9 :IMPORT WARN: Table profiles were not loaded ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2025-04-04T17:20:52.338839Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 39C5CE30-08F2-4E3C-A1F6-0B830FBF1FB0, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:29435 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7D6E8AD2-6310-4672-A178-D5668F5EED73 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-04T17:20:52.366752Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 39C5CE30-08F2-4E3C-A1F6-0B830FBF1FB0, response# No response body. ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2025-04-04T17:20:52.468222Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 455AAC71-DB7B-47A9-93C7-576067241A4C, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:18367 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 95A46CAC-BA38-4517-9C24-9D29CE14560F amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2025-04-04T17:20:52.474169Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 455AAC71-DB7B-47A9-93C7-576067241A4C, response# >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-04-04T17:20:49.430326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509209784171717:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:49.435557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164e/r3tmp/tmpxJ7AQ8/pdisk_1.dat 2025-04-04T17:20:49.732612Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:49.736662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:49.736781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:49.740074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14583 WaitRootIsUp 'dc-1'... 
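Both S3 wrapper tests earlier in this block pass because [GOOD] asserts the request/response exchange, not object existence: the ranged GET of a missing key comes back with no body, and the abort of an unknown multipart upload is still answered by the mock. A sketch of the same two calls with boto3; the port is taken from the GetObject log (the abort test ran against a separate mock on another port) and the credentials are placeholders:

    import boto3
    from botocore.exceptions import ClientError

    s3 = boto3.client(
        "s3",
        endpoint_url="http://localhost:29435",  # mock server port from the GET log
        aws_access_key_id="test",               # placeholder credentials
        aws_secret_access_key="test",
    )

    try:
        # GET /TEST/key with range: bytes=0-3 -- the key does not exist.
        s3.get_object(Bucket="TEST", Key="key", Range="bytes=0-3")
    except ClientError as e:
        print("get_object:", e.response["Error"]["Code"])

    try:
        # DELETE /TEST/key?uploadId=uploadId -- the upload was never started;
        # real S3 would answer NoSuchUpload, the mock above simply accepts it.
        s3.abort_multipart_upload(Bucket="TEST", Key="key", UploadId="uploadId")
    except ClientError as e:
        print("abort_multipart_upload:", e.response["Error"]["Code"])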
TClient::Ls request: dc-1 2025-04-04T17:20:49.922984Z node 1 :TX_PROXY DEBUG: actor# [1:7489509209784171929:2087] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:49.923035Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509209784172245:2257] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:49.923341Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509209784171975:2111], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:49.923433Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509209784171975:2111], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:49.923643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:49.931443Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171667:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509209784172250:2258] 2025-04-04T17:20:49.931520Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509209784171667:2049] Subscribe: subscriber# [1:7489509209784172250:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:49.931581Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171670:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509209784172251:2258] 2025-04-04T17:20:49.931605Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509209784171670:2052] Subscribe: subscriber# [1:7489509209784172251:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:49.931652Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172250:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509209784171667:2049] 2025-04-04T17:20:49.931675Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172251:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509209784171670:2052] 2025-04-04T17:20:49.931706Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509209784172247:2258] 2025-04-04T17:20:49.931728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509209784172248:2258] 2025-04-04T17:20:49.931781Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509209784172246:2258][/dc-1] Set up state: owner# [1:7489509209784171975:2111], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:49.931899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172250:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7489509209784172247:2258], cookie# 1 2025-04-04T17:20:49.931915Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172251:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172248:2258], cookie# 1 2025-04-04T17:20:49.931926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172252:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172249:2258], cookie# 1 2025-04-04T17:20:49.931946Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171667:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509209784172250:2258] 2025-04-04T17:20:49.931996Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171667:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172250:2258], cookie# 1 2025-04-04T17:20:49.932031Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171670:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509209784172251:2258] 2025-04-04T17:20:49.932056Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171670:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172251:2258], cookie# 1 2025-04-04T17:20:49.932289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171673:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509209784172252:2258] 2025-04-04T17:20:49.932355Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509209784171673:2055] Subscribe: subscriber# [1:7489509209784172252:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:49.932400Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171673:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172252:2258], cookie# 1 2025-04-04T17:20:49.932432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172250:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509209784171667:2049], cookie# 1 2025-04-04T17:20:49.932452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172251:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509209784171670:2052], cookie# 1 2025-04-04T17:20:49.932492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172252:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509209784171673:2055] 2025-04-04T17:20:49.932523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172252:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509209784171673:2055], cookie# 1 2025-04-04T17:20:49.933372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509209784172247:2258], cookie# 1 2025-04-04T17:20:49.933410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:49.933437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509209784172248:2258], cookie# 1 
2025-04-04T17:20:49.933464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:49.933512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509209784172249:2258] 2025-04-04T17:20:49.933575Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509209784172246:2258][/dc-1] Path was already updated: owner# [1:7489509209784171975:2111], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:49.933598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509209784172249:2258], cookie# 1 2025-04-04T17:20:49.933623Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Unexpected sync response: sender# [1:7489509209784172249:2258], cookie# 1 2025-04-04T17:20:49.933654Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171673:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509209784172252:2258] 2025-04-04T17:20:50.001405Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509209784171975:2111], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:50.001753Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509209784171975:2111], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... SyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-04-04T17:20:50.164404Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509209784171975:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489509214079139597:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787250205 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:50.164504Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509209784171975:2111], cacheItem# { Subscriber: { Subscriber: [1:7489509214079139597:2299] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787250205 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-04-04T17:20:50.164727Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509214079139604:2300], recipient# [1:7489509214079139596:2298], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:20:50.164803Z node 1 :TX_PROXY INFO: Actor# [1:7489509214079139596:2298] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-04T17:20:50.167775Z node 1 :TX_PROXY DEBUG: actor# [1:7489509209784171929:2087] Handle TEvNavigate describe path /dc-1 2025-04-04T17:20:50.167828Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509214079139606:2302] HANDLE EvNavigateScheme /dc-1 2025-04-04T17:20:50.167929Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509209784171975:2111], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:50.168057Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7489509209784172246:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7489509209784171975:2111], cookie# 4 2025-04-04T17:20:50.168150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172250:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172247:2258], cookie# 4 2025-04-04T17:20:50.168190Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172251:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172248:2258], cookie# 4 2025-04-04T17:20:50.168212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172252:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172249:2258], cookie# 4 2025-04-04T17:20:50.168238Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171670:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172251:2258], cookie# 4 2025-04-04T17:20:50.168244Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171667:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172250:2258], cookie# 4 2025-04-04T17:20:50.168262Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509209784171673:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509209784172252:2258], cookie# 4 2025-04-04T17:20:50.168302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172251:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509209784171670:2052], cookie# 4 2025-04-04T17:20:50.168317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172250:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509209784171667:2049], cookie# 4 2025-04-04T17:20:50.168331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509209784172252:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509209784171673:2055], cookie# 4 2025-04-04T17:20:50.168363Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509209784172248:2258], cookie# 4 2025-04-04T17:20:50.168389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:50.168403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509209784172247:2258], cookie# 4 2025-04-04T17:20:50.168421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:50.168452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509209784172249:2258], cookie# 4 2025-04-04T17:20:50.168464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509209784172246:2258][/dc-1] Unexpected sync response: sender# [1:7489509209784172249:2258], cookie# 4 2025-04-04T17:20:50.168567Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509209784171975:2111], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T17:20:50.168641Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509209784171975:2111], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489509209784172246:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787250184 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:50.168745Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509209784171975:2111], cacheItem# { Subscriber: { Subscriber: [1:7489509209784172246:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787250184 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-04T17:20:50.169004Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509214079139607:2303], recipient# [1:7489509214079139606:2302], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:20:50.169075Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509214079139606:2302] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:20:50.169148Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509214079139606:2302] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-04T17:20:50.169946Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509214079139606:2302] Handle TEvDescribeSchemeResult Forward to# [1:7489509214079139605:2301] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 66 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787250184 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 
Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787250184 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787250205 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" Path... (TRUNCATED) >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> TS3WrapperTests::UploadUnknownPart >> TS3WrapperTests::UploadUnknownPart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T17:20:25.633649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:20:25.637107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:20:25.637189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
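Note on the subscriber trace above: the "Sync is in progress: ... size# 3, half# 1, successes# 1" / "Sync is done: ... successes# 2" / "Unexpected sync response" lines show the scheme board subscriber counting replies from its three replica connections and declaring the sync finished once a strict majority has answered; the straggler's reply is then discarded. A minimal sketch of that quorum accounting follows, assuming simplified names (TSyncState, Done) rather than the actual subscriber types, and ignoring version tracking and partial responses:

    // Hedged sketch of the scheme board subscriber's sync quorum.
    // TSyncState/Done are illustrative names, not the real YDB types.
    #include <cstddef>
    #include <cstdio>

    struct TSyncState {
        size_t Size = 3;      // replica subscribers (size# 3)
        size_t Half = 1;      // Size / 2, as printed (half# 1)
        size_t Successes = 0; // successful TEvSyncVersionResponse count
        size_t Failures = 0;  // failed replicas ("faulires#" in the log)

        // True once a strict majority has answered either way.
        bool Done() const {
            return Successes > Half || Failures > Half;
        }
    };

    int main() {
        TSyncState sync;
        // Replies arrive one by one; the reply that lands after quorum
        // is reported as "Unexpected sync response" in the log.
        for (size_t i = 0; i < 3; ++i) {
            if (sync.Done()) {
                std::printf("Unexpected sync response\n");
                continue;
            }
            ++sync.Successes;
            std::printf(sync.Done() ? "Sync is done: successes# %zu\n"
                                    : "Sync is in progress: successes# %zu\n",
                        sync.Successes);
        }
    }

With Size 3 and Half 1 this reproduces the logged sequence exactly: in-progress after the first reply, done after the second, unexpected for the third.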
2025-04-04T17:20:25.637231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:20:25.637324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:20:25.637363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:20:25.637428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:20:25.638526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:20:25.638871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:20:25.716736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:20:25.716800Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T17:20:25.733441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:20:25.733881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:20:25.734068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:20:25.741189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:20:25.741367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:20:25.741945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:25.742612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:20:25.746042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:25.747462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:20:25.747541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:25.747737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:20:25.747793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:20:25.747836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:20:25.747965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T17:20:25.757355Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T17:20:25.891972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:20:25.892172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:25.892410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:20:25.892659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:20:25.892711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:25.903468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:25.903622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:20:25.903848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:25.903914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:20:25.903947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:20:25.904000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:20:25.907175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:25.907232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:20:25.907283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:20:25.910813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:25.910866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:25.910910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:25.910982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:20:25.915308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:20:25.920045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:20:25.920250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:20:25.921255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:25.921411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:20:25.921460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:25.921733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:20:25.921791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:25.921952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:20:25.922041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:20:25.924850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:20:25.924900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:20:25.925086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:25.925138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:20:25.925583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:25.925635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:20:25.925802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:20:25.925858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:25.925907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:20:25.925937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:25.925971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:20:25.926011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:25.926044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
perationId: 1003:2, at schemeshard: 72057594046678944 2025-04-04T17:20:53.171465Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:20:53.171667Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-04-04T17:20:53.171760Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-04T17:20:53.171783Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-04T17:20:53.171815Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-04-04T17:20:53.171848Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-04-04T17:20:53.171878Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-04-04T17:20:53.173326Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.173409Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.173436Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-04T17:20:53.173594Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.173638Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.173659Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-04-04T17:20:53.173680Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-04-04T17:20:53.173705Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-04-04T17:20:53.173765Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-04-04T17:20:53.174757Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-04-04T17:20:53.174819Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:20:53.175043Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:20:53.175155Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-04T17:20:53.175186Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-04T17:20:53.175218Z 
node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-04-04T17:20:53.175246Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-04T17:20:53.175279Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-04-04T17:20:53.175311Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-04-04T17:20:53.175352Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-04-04T17:20:53.175380Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-04-04T17:20:53.175470Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:20:53.175509Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-04-04T17:20:53.175539Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-04-04T17:20:53.175568Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:20:53.175591Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-04-04T17:20:53.175614Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-04-04T17:20:53.175653Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-04T17:20:53.176865Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.178200Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.178325Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.178378Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.189200Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.189777Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-04-04T17:20:53.193808Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 344 RawX2: 111669152024 } TabletId: 72075186233409546 State: 4 2025-04-04T17:20:53.193899Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-04-04T17:20:53.196105Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:20:53.196626Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-04-04T17:20:53.196840Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
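The repeated "DecrementPathDbRefCount reason ... was N" entries above, followed by "TTxCleanDroppedPaths ... PersistRemovePath", reflect a reference-counting teardown: each dependent object (table record, publication, tx state, shard) holds a reference on its path, and the decrement that drops the count to zero queues the dropped path for persistent removal. A rough sketch of that pattern, with illustrative (not actual) names:

    // Hedged sketch of DecrementPathDbRefCount / TTxCleanDroppedPaths.
    // Container choices and function names are made up for illustration.
    #include <cstdio>
    #include <map>
    #include <queue>

    using TPathId = unsigned;

    std::map<TPathId, int> RefCounts;
    std::queue<TPathId> DroppedPathCandidates;

    void DecrementPathDbRefCount(TPathId pathId, const char* reason) {
        int left = --RefCounts[pathId];
        // The log prints the count *before* the decrement ("was N").
        std::printf("DecrementPathDbRefCount reason %s for pathId %u was %d\n",
                    reason, pathId, left + 1);
        if (left == 0)
            DroppedPathCandidates.push(pathId); // picked up by CleanDroppedPaths
    }

    void CleanDroppedPaths() {
        // Analogue of "TTxCleanDroppedPaths Execute, N paths in candidate queue"
        while (!DroppedPathCandidates.empty()) {
            std::printf("PersistRemovePath for PathId# %u\n",
                        DroppedPathCandidates.front());
            DroppedPathCandidates.pop();
        }
    }

    int main() {
        RefCounts[5] = 2;
        DecrementPathDbRefCount(5, "remove txstate target path"); // was 2
        DecrementPathDbRefCount(5, "shard deleted");              // was 1
        CleanDroppedPaths();                                      // removes path 5
    }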
2025-04-04T17:20:53.197111Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-04-04T17:20:53.200064Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:20:53.200113Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-04T17:20:53.200191Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T17:20:53.200232Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:20:53.200273Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:20:53.203898Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:20:53.203965Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-04-04T17:20:53.204359Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-04-04T17:20:53.204615Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-04-04T17:20:53.204650Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-04-04T17:20:53.205535Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-04-04T17:20:53.205625Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-04-04T17:20:53.205658Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:630:2556] 2025-04-04T17:20:53.211604Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 347 RawX2: 111669152026 } TabletId: 72075186233409547 State: 4 2025-04-04T17:20:53.211692Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-04-04T17:20:53.213679Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:20:53.214193Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2025-04-04T17:20:53.214410Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:53.214663Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 
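The shard teardown logged above follows a fixed handshake: the datashard reports State: 4 (Offline) via TEvStateChanged, the schemeshard frees the shard, Hive acknowledges TEvDeleteTablet with Status: OK, and only then is the shardIdx deleted and the pipe closed. A compact sketch of that sequence as a linear state machine; the enum values and Step function are hypothetical labels for the logged transitions, not YDB types:

    // Hedged sketch of the shard deletion handshake seen in the log.
    #include <cstdio>

    enum class EShardState {
        Online,
        Offline,        // "datashard informs about state changing ... Offline"
        FreeRequested,  // "Free shard <owner>:<idx> hive <hive> at ss <ss>"
        HiveDeleted,    // "TEvDeleteTablet ... Status: OK" from Hive
        Gone            // "Deleted shardIdx", "Close pipe to deleted shardIdx"
    };

    EShardState Step(EShardState s) {
        switch (s) {
            case EShardState::Online:        return EShardState::Offline;
            case EShardState::Offline:       return EShardState::FreeRequested;
            case EShardState::FreeRequested: return EShardState::HiveDeleted;
            default:                         return EShardState::Gone;
        }
    }

    int main() {
        const char* names[] = {"Online", "Offline", "FreeRequested",
                               "HiveDeleted", "Gone"};
        EShardState s = EShardState::Online;
        while (s != EShardState::Gone) {
            s = Step(s);
            std::printf("shard 72075186233409547 -> %s\n",
                        names[static_cast<int>(s)]);
        }
    }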
2025-04-04T17:20:53.215101Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:20:53.215142Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:20:53.215207Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:20:53.221424Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:20:53.221503Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-04-04T17:20:53.222133Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-04-04T17:20:53.222534Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-04T17:20:53.222599Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TxUsage::WriteToTopic_Demo_42 [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2025-04-04T17:20:53.905141Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D5F0B6E6-09A0-4110-A2F8-F8E833E1275F, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:7790 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6211623B-74AC-41C1-AA02-4C881DF83D90 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2025-04-04T17:20:53.911356Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D5F0B6E6-09A0-4110-A2F8-F8E833E1275F, response# |93.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-04T17:20:43.331250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509181507573119:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:43.331351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166c/r3tmp/tmpe1UXl1/pdisk_1.dat 2025-04-04T17:20:43.902673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:43.902844Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:43.903199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:43.908809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4152 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:20:44.138541Z node 1 :TX_PROXY DEBUG: actor# [1:7489509181507573207:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:44.138587Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509185802540964:2426] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:44.138693Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509181507573238:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:44.138732Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509181507573238:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:44.138919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:44.141072Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572885:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509185802540969:2427] 2025-04-04T17:20:44.141139Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509181507572885:2050] Subscribe: subscriber# [1:7489509185802540969:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.141209Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572888:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509185802540970:2427] 2025-04-04T17:20:44.141245Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509181507572888:2053] Subscribe: subscriber# [1:7489509185802540970:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.141273Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572891:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509185802540971:2427] 2025-04-04T17:20:44.141285Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509181507572891:2056] Subscribe: subscriber# [1:7489509185802540971:2427], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.141323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540969:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509181507572885:2050] 2025-04-04T17:20:44.141340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540970:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509181507572888:2053] 2025-04-04T17:20:44.141373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540971:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7489509181507572891:2056] 2025-04-04T17:20:44.141404Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509185802540966:2427] 2025-04-04T17:20:44.141405Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572885:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509185802540969:2427] 2025-04-04T17:20:44.141421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509185802540967:2427] 2025-04-04T17:20:44.141437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572888:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509185802540970:2427] 2025-04-04T17:20:44.141452Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572891:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509185802540971:2427] 2025-04-04T17:20:44.141457Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509185802540965:2427][/dc-1] Set up state: owner# [1:7489509181507573238:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.141542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509185802540968:2427] 2025-04-04T17:20:44.141698Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509185802540965:2427][/dc-1] Path was already updated: owner# [1:7489509181507573238:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.141735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540969:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509185802540966:2427], cookie# 1 2025-04-04T17:20:44.141745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540970:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509185802540967:2427], cookie# 1 2025-04-04T17:20:44.141753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540971:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509185802540968:2427], cookie# 1 2025-04-04T17:20:44.141771Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572885:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509185802540969:2427], cookie# 1 2025-04-04T17:20:44.141785Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572888:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509185802540970:2427], cookie# 1 2025-04-04T17:20:44.141798Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509181507572891:2056] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509185802540971:2427], cookie# 1 2025-04-04T17:20:44.141814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540969:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509181507572885:2050], cookie# 1 2025-04-04T17:20:44.141839Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540970:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509181507572888:2053], cookie# 1 2025-04-04T17:20:44.141848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509185802540971:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509181507572891:2056], cookie# 1 2025-04-04T17:20:44.141870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509185802540966:2427], cookie# 1 2025-04-04T17:20:44.141881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:44.141891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509185802540967:2427], cookie# 1 2025-04-04T17:20:44.141901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:44.141912Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509185802540968:2427], cookie# 1 2025-04-04T17:20:44.141922Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509185802540965:2427][/dc-1] Unexpected sync response: sender# [1:7489509185802540968:2427], cookie# 1 2025-04-04T17:20:44.189910Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509181507573238:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:44.191485Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509181507573238:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... nager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509203501468536:2056] 2025-04-04T17:20:51.690455Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509216386371707:2794][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509216386371716:2794] 2025-04-04T17:20:51.690510Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509216386371707:2794][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509216386371718:2794] 2025-04-04T17:20:51.690540Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489509216386371707:2794][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7489509203501468865:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:51.690568Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468530:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509216386371722:2795] 2025-04-04T17:20:51.690583Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468530:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509216386371723:2794] 2025-04-04T17:20:51.690598Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468536:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509216386371726:2795] 2025-04-04T17:20:51.690611Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468536:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509216386371727:2794] 2025-04-04T17:20:51.690650Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489509203501468865:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-04T17:20:51.690699Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489509203501468865:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489509216386371707:2794] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:51.690774Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509203501468865:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509216386371707:2794] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:51.690896Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509216386371729:2797], recipient# [3:7489509216386371704:2318], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:51.690999Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468533:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7489509216386371724:2795] 2025-04-04T17:20:51.691011Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489509203501468533:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-04T17:20:51.691042Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489509203501468533:2053] Subscribe: subscriber# [3:7489509216386371724:2795], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:51.691112Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468533:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7489509216386371725:2794] 2025-04-04T17:20:51.691120Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489509203501468533:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-04-04T17:20:51.691130Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489509216386371724:2795][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509203501468533:2053] 2025-04-04T17:20:51.691135Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489509203501468533:2053] Subscribe: subscriber# [3:7489509216386371725:2794], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:51.691164Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468533:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509216386371713:2793] 2025-04-04T17:20:51.691177Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468533:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509216386371724:2795] 2025-04-04T17:20:51.691183Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][3:7489509216386371715:2795][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509216386371720:2795] 2025-04-04T17:20:51.691201Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489509216386371725:2794][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509203501468533:2053] 2025-04-04T17:20:51.691220Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509216386371707:2794][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509216386371717:2794] 2025-04-04T17:20:51.691228Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489509216386371715:2795][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7489509203501468865:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:51.691246Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489509216386371707:2794][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7489509203501468865:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:51.691255Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509203501468533:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509216386371725:2794] 2025-04-04T17:20:51.844895Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509203501468865:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:51.845012Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509203501468865:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509207796437052:2780] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:51.845165Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509216386371731:2798], recipient# [3:7489509216386371730:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:52.691248Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509203501468865:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:52.691403Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509203501468865:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509216386371715:2795] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:52.691493Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509220681339047:2802], recipient# [3:7489509220681339046:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TS3WrapperTests::CopyPartUpload >> TS3WrapperTests::CompleteUnknownUpload |93.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TxUsage::WriteToTopic_Demo_43 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-04T17:20:50.826176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509213913746576:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:50.826273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163c/r3tmp/tmpnlfrLt/pdisk_1.dat 2025-04-04T17:20:51.193080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:51.193211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:51.210592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:51.243943Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7709 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:20:51.409298Z node 1 :TX_PROXY DEBUG: actor# [1:7489509213913746798:2092] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:51.409389Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509218208714615:2439] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:51.409532Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509218208714139:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:51.409611Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509218208714139:2116], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:51.409794Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:51.412065Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746527:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509218208714620:2440] 2025-04-04T17:20:51.412079Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746530:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509218208714621:2440] 2025-04-04T17:20:51.412131Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509213913746530:2052] Subscribe: subscriber# [1:7489509218208714621:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:51.412130Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509213913746527:2049] Subscribe: subscriber# [1:7489509218208714620:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:51.412203Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746533:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509218208714622:2440] 2025-04-04T17:20:51.412242Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509213913746533:2055] Subscribe: subscriber# [1:7489509218208714622:2440], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:51.412249Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714620:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509213913746527:2049] 2025-04-04T17:20:51.412277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746527:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509218208714620:2440] 2025-04-04T17:20:51.412324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714621:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509213913746530:2052] 2025-04-04T17:20:51.412355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714622:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509213913746533:2055] 2025-04-04T17:20:51.412398Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746530:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# 
[1:7489509218208714621:2440] 2025-04-04T17:20:51.412407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509218208714617:2440] 2025-04-04T17:20:51.412424Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746533:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509218208714622:2440] 2025-04-04T17:20:51.412435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509218208714618:2440] 2025-04-04T17:20:51.412492Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509218208714616:2440][/dc-1] Set up state: owner# [1:7489509218208714139:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:51.412601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509218208714619:2440] 2025-04-04T17:20:51.412664Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509218208714616:2440][/dc-1] Path was already updated: owner# [1:7489509218208714139:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:51.412724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714620:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714617:2440], cookie# 1 2025-04-04T17:20:51.412750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714621:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714618:2440], cookie# 1 2025-04-04T17:20:51.412769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714622:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714619:2440], cookie# 1 2025-04-04T17:20:51.412795Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746527:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714620:2440], cookie# 1 2025-04-04T17:20:51.412836Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746530:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714621:2440], cookie# 1 2025-04-04T17:20:51.412881Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746533:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714622:2440], cookie# 1 2025-04-04T17:20:51.412920Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714620:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509213913746527:2049], cookie# 1 2025-04-04T17:20:51.412957Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7489509218208714621:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509213913746530:2052], cookie# 1 2025-04-04T17:20:51.412980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714622:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509213913746533:2055], cookie# 1 2025-04-04T17:20:51.413023Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509218208714617:2440], cookie# 1 2025-04-04T17:20:51.413043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:51.413071Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509218208714618:2440], cookie# 1 2025-04-04T17:20:51.413096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:51.413120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509218208714619:2440], cookie# 1 2025-04-04T17:20:51.413141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Unexpected sync response: sender# [1:7489509218208714619:2440], cookie# 1 2025-04-04T17:20:51.465114Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509218208714139:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:51.465502Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509218208714139:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509218208714139:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489509218208714810:2589] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787251703 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:52.220620Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509218208714139:2116], cacheItem# { Subscriber: { Subscriber: [1:7489509218208714810:2589] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743787251703 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-04-04T17:20:52.220782Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509222503682248:2676], recipient# [1:7489509222503682247:2675], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:20:52.220809Z node 1 :TX_PROXY INFO: Actor# [1:7489509222503682247:2675] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-04-04T17:20:52.224665Z node 1 :TX_PROXY DEBUG: actor# [1:7489509213913746798:2092] Handle TEvNavigate describe path /dc-1 2025-04-04T17:20:52.224703Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509222503682250:2678] HANDLE EvNavigateScheme /dc-1 2025-04-04T17:20:52.224805Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509218208714139:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:52.224879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7489509218208714139:2116], cookie# 4 2025-04-04T17:20:52.224927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714620:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714617:2440], cookie# 4 2025-04-04T17:20:52.224946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714621:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714618:2440], cookie# 4 2025-04-04T17:20:52.224960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714622:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714619:2440], cookie# 4 2025-04-04T17:20:52.224984Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746527:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714620:2440], cookie# 4 2025-04-04T17:20:52.225006Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746530:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714621:2440], cookie# 4 2025-04-04T17:20:52.225023Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509213913746533:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509218208714622:2440], cookie# 4 2025-04-04T17:20:52.225044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714620:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509213913746527:2049], cookie# 4 2025-04-04T17:20:52.225059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714621:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509213913746530:2052], cookie# 4 2025-04-04T17:20:52.225072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509218208714622:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509213913746533:2055], cookie# 4 2025-04-04T17:20:52.225363Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509218208714617:2440], cookie# 4 2025-04-04T17:20:52.225410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:52.225427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509218208714618:2440], cookie# 4 2025-04-04T17:20:52.225442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:52.225467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7489509218208714619:2440], cookie# 4 2025-04-04T17:20:52.225477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509218208714616:2440][/dc-1] Unexpected sync response: sender# [1:7489509218208714619:2440], cookie# 4 2025-04-04T17:20:52.225575Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509218208714139:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 
PathId: Partial: 0 } 2025-04-04T17:20:52.225688Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509218208714139:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489509218208714616:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787251675 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:52.225784Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489509218208714139:2116], cacheItem# { Subscriber: { Subscriber: [1:7489509218208714616:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787251675 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-04-04T17:20:52.225996Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489509222503682251:2679], recipient# [1:7489509222503682250:2678], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:20:52.226046Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509222503682250:2678] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:20:52.226122Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509222503682250:2678] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-04T17:20:52.226656Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509222503682250:2678] Handle TEvDescribeSchemeResult Forward to# [1:7489509222503682249:2677] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 66 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787251675 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 
72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787251675 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787251703 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardI... (TRUNCATED) >> TS3WrapperTests::CopyPartUpload [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-04-04T17:20:55.054663Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A5A224A7-1997-4AA6-AC03-6E9C3F0BD76F, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:20591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C534B10A-55CA-40D6-A158-546CBA1B3B7C amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-04T17:20:55.061394Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A5A224A7-1997-4AA6-AC03-6E9C3F0BD76F, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-04T17:20:55.061744Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0AAF1735-66D1-476F-AB79-97BBC913F06B, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:20591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DEE8E8BC-2A13-405A-BCD5-031373B84E17 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-04-04T17:20:55.065779Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0AAF1735-66D1-476F-AB79-97BBC913F06B, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-04-04T17:20:55.066103Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 
581610E0-82FC-4683-9266-43CDCF2AAC6E, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 04E0257C-A454-4796-9862-D8772F76BA39 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-04-04T17:20:55.068953Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 581610E0-82FC-4683-9266-43CDCF2AAC6E, response# UploadPartCopyResult { } 2025-04-04T17:20:55.069354Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 28F0179B-B737-4B51-A29B-C5774DABC43A, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EEDBF24B-C012-4F3D-BC21-9506146EDB79 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2025-04-04T17:20:55.076056Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 28F0179B-B737-4B51-A29B-C5774DABC43A, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-04-04T17:20:55.077256Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 44921665-97F8-46A3-BB9B-3DA72D83EE60, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:20591 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1B459197-D20E-462F-8AB1-74493C388B34 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-04-04T17:20:55.079990Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 44921665-97F8-46A3-BB9B-3DA72D83EE60, response# GetObjectResult { } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2025-04-04T17:20:55.013362Z node 1 :S3_WRAPPER NOTICE: Request: uuid# AFF555A1-7A04-4F26-B84D-AD8D106528B6, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:5590 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EDD5D5C2-0AE9-4C3F-8A3C-10D949976D73 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2025-04-04T17:20:55.020984Z node 1 :S3_WRAPPER NOTICE: Response: uuid# AFF555A1-7A04-4F26-B84D-AD8D106528B6, response# |93.0%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/wrappers/ut/unittest >> KqpNewEngine::PruneEffectPartitions+UseSink [GOOD] >> KqpNewEngine::PrecomputeKey >> TS3WrapperTests::MultipartUpload |93.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> TS3WrapperTests::MultipartUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-04T17:20:43.276485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509183928279575:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:43.276581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166d/r3tmp/tmpT4wMwS/pdisk_1.dat 2025-04-04T17:20:43.846361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:43.871196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:43.871332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:43.878267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11397 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:20:44.083155Z node 1 :TX_PROXY DEBUG: actor# [1:7489509183928279816:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:44.083205Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509188223247580:2430] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:44.083320Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509183928279857:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:44.083376Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509183928279857:2129], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:44.083647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:44.090088Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279503:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509188223247585:2431] 2025-04-04T17:20:44.090088Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279509:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509188223247587:2431] 2025-04-04T17:20:44.090162Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183928279509:2056] Subscribe: subscriber# [1:7489509188223247587:2431], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.090162Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183928279503:2050] Subscribe: subscriber# [1:7489509188223247585:2431], path# /dc-1, domainOwnerId# 
72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.090230Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279506:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509188223247586:2431] 2025-04-04T17:20:44.090264Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247587:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183928279509:2056] 2025-04-04T17:20:44.090264Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509183928279506:2053] Subscribe: subscriber# [1:7489509188223247586:2431], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.090305Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279509:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509188223247587:2431] 2025-04-04T17:20:44.090341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247585:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183928279503:2050] 2025-04-04T17:20:44.090370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247586:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509183928279506:2053] 2025-04-04T17:20:44.090412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509188223247584:2431] 2025-04-04T17:20:44.090440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509188223247582:2431] 2025-04-04T17:20:44.090496Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509188223247581:2431][/dc-1] Set up state: owner# [1:7489509183928279857:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.090541Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279503:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509188223247585:2431] 2025-04-04T17:20:44.090559Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279506:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509188223247586:2431] 2025-04-04T17:20:44.090639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509188223247583:2431] 2025-04-04T17:20:44.094323Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509188223247581:2431][/dc-1] Path was already updated: owner# [1:7489509183928279857:2129], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, 
LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.094437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247585:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509188223247582:2431], cookie# 1 2025-04-04T17:20:44.094455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247586:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509188223247583:2431], cookie# 1 2025-04-04T17:20:44.094469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247587:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509188223247584:2431], cookie# 1 2025-04-04T17:20:44.094516Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279503:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509188223247585:2431], cookie# 1 2025-04-04T17:20:44.094566Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279506:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509188223247586:2431], cookie# 1 2025-04-04T17:20:44.094588Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509183928279509:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509188223247587:2431], cookie# 1 2025-04-04T17:20:44.094621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247585:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183928279503:2050], cookie# 1 2025-04-04T17:20:44.094633Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247586:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183928279506:2053], cookie# 1 2025-04-04T17:20:44.094645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509188223247587:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509183928279509:2056], cookie# 1 2025-04-04T17:20:44.094670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509188223247582:2431], cookie# 1 2025-04-04T17:20:44.094695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:44.094708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509188223247583:2431], cookie# 1 2025-04-04T17:20:44.094732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:44.094756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509188223247584:2431], cookie# 1 2025-04-04T17:20:44.094770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509188223247581:2431][/dc-1] Unexpected sync response: sender# [1:7489509188223247584:2431], cookie# 1 2025-04-04T17:20:44.182549Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509183928279857:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] 
DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:44.182877Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509183928279857:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 
pool_classifiers 2025-04-04T17:20:52.690135Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509207892168907:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7489509220777072334:3004] 2025-04-04T17:20:52.690142Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489509207892168907:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-04-04T17:20:52.690147Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489509207892168904:2053] Subscribe: subscriber# [3:7489509220777072333:3004], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:52.690177Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7489509207892168907:2056] Subscribe: subscriber# [3:7489509220777072334:3004], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:52.690207Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489509220777072332:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509207892168901:2050] 2025-04-04T17:20:52.690263Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489509220777072333:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509207892168904:2053] 2025-04-04T17:20:52.690276Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509207892168901:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509220777072332:3004] 2025-04-04T17:20:52.690283Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489509220777072334:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509207892168907:2056] 2025-04-04T17:20:52.690294Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509207892168904:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509220777072333:3004] 2025-04-04T17:20:52.690307Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509207892168907:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509220777072334:3004] 2025-04-04T17:20:52.690319Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509220777072316:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509220777072329:3004] 2025-04-04T17:20:52.690347Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509220777072316:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509220777072330:3004] 2025-04-04T17:20:52.690372Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489509220777072316:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# 
[3:7489509207892169239:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:52.690412Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509220777072316:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509220777072331:3004] 2025-04-04T17:20:52.690445Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489509220777072316:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7489509207892169239:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:52.690452Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489509207892169239:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-04T17:20:52.690507Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489509207892169239:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489509220777072316:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:52.690558Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509207892169239:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509220777072316:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:52.690641Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509220777072336:3006], recipient# [3:7489509220777072315:2324], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:52.881721Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509207892169239:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:52.881848Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: 
FillEntry for TNavigate: self# [3:7489509207892169239:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509212187137283:2665] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:52.881952Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509220777072338:3007], recipient# [3:7489509220777072337:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:53.690785Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509207892169239:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:53.690917Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509207892169239:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509220777072316:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:53.691003Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509225072039654:3011], recipient# [3:7489509225072039653:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:53.882815Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509207892169239:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:53.882961Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# 
[3:7489509207892169239:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509212187137283:2665] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:53.883071Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509225072039656:3012], recipient# [3:7489509225072039655:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> Balancing::Balancing_ManyTopics_TopicApi [GOOD]
>> Balancing::Balancing_ManyTopics_PQv1
>> TS3WrapperTests::HeadUnknownObject [GOOD]
>> TS3WrapperTests::GetObject
>> TS3WrapperTests::HeadObject
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD]
Test command err: 2025-04-04T17:20:56.436763Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 84040347-58AF-43AB-BDD3-ACE7CEE352E5, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:10443 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CD8F77A6-840B-4E39-824B-B451710F5098 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-04-04T17:20:56.444455Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 84040347-58AF-43AB-BDD3-ACE7CEE352E5, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-04-04T17:20:56.444953Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 8EDFCC52-9239-400B-B9DA-4142CC698551, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10443 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3EEC4276-9D8D-43C7-9F45-8E4EE8D7921C amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2025-04-04T17:20:56.448067Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 8EDFCC52-9239-400B-B9DA-4142CC698551, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-04T17:20:56.448380Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 78F0463A-FD36-457E-8566-E8CB85177CDF, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:10443 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A15C3A39-00B6-440C-AE25-4537F531B771 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2025-04-04T17:20:56.451870Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 78F0463A-FD36-457E-8566-E8CB85177CDF, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-04T17:20:56.452162Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0A414FFD-4996-4669-8AF3-CD6B4D63C10C, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:10443 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 247F2BAD-2E8B-4046-A042-C365B9CAA39F amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-04T17:20:56.454533Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0A414FFD-4996-4669-8AF3-CD6B4D63C10C, response# GetObjectResult { }
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD]
Test command err: 2025-04-04T17:20:56.594391Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 1F360BCD-E184-4E25-A7A3-6B2A7488FBCC, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:7077 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F376FF8D-C025-471D-AD3B-DB65F5BE3BC8 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-04T17:20:56.601301Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 1F360BCD-E184-4E25-A7A3-6B2A7488FBCC, response# No response body.
>> TS3WrapperTests::GetObject [GOOD]
>> TS3WrapperTests::HeadObject [GOOD]
|93.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
|93.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
>> TS3WrapperTests::PutObject
>> TTxDataShardMiniKQL::WriteAndReadMany [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err: 2025-04-04T17:20:46.666533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509196400490885:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:46.666587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001657/r3tmp/tmpGQFzLI/pdisk_1.dat 2025-04-04T17:20:47.088548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:47.092491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:47.092602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:47.103896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25491 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T17:20:47.252133Z node 1 :TX_PROXY DEBUG: actor# [1:7489509196400491129:2115] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:47.252203Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509200695458900:2438] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:47.252409Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509196400491152:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:47.252514Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509196400491152:2128], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:47.252782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:47.254970Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490810:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509200695458905:2439] 2025-04-04T17:20:47.255036Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509196400490810:2050] Subscribe: subscriber# [1:7489509200695458905:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:47.255139Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490813:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509200695458906:2439] 2025-04-04T17:20:47.255165Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509196400490813:2053] Subscribe: subscriber# [1:7489509200695458906:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:47.255191Z node
1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490816:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509200695458907:2439] 2025-04-04T17:20:47.255205Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509196400490816:2056] Subscribe: subscriber# [1:7489509200695458907:2439], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:47.255308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458905:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509196400490810:2050] 2025-04-04T17:20:47.255335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458906:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509196400490813:2053] 2025-04-04T17:20:47.255354Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458907:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509196400490816:2056] 2025-04-04T17:20:47.255426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509200695458902:2439] 2025-04-04T17:20:47.255456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509200695458903:2439] 2025-04-04T17:20:47.255541Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509200695458901:2439][/dc-1] Set up state: owner# [1:7489509196400491152:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:47.255671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509200695458904:2439] 2025-04-04T17:20:47.255713Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509200695458901:2439][/dc-1] Path was already updated: owner# [1:7489509196400491152:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:47.255748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458905:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509200695458902:2439], cookie# 1 2025-04-04T17:20:47.255763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458906:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509200695458903:2439], cookie# 1 2025-04-04T17:20:47.255776Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458907:2439][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509200695458904:2439], cookie# 1 2025-04-04T17:20:47.255802Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490810:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509200695458905:2439] 2025-04-04T17:20:47.255839Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490810:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509200695458905:2439], cookie# 1 2025-04-04T17:20:47.255861Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490813:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509200695458906:2439] 2025-04-04T17:20:47.255881Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490813:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509200695458906:2439], cookie# 1 2025-04-04T17:20:47.255915Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490816:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509200695458907:2439] 2025-04-04T17:20:47.255930Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509196400490816:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509200695458907:2439], cookie# 1 2025-04-04T17:20:47.259194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458905:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509196400490810:2050], cookie# 1 2025-04-04T17:20:47.259224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458906:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509196400490813:2053], cookie# 1 2025-04-04T17:20:47.259266Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509200695458907:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509196400490816:2056], cookie# 1 2025-04-04T17:20:47.259302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509200695458902:2439], cookie# 1 2025-04-04T17:20:47.259346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:47.259366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509200695458903:2439], cookie# 1 2025-04-04T17:20:47.259389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:47.259423Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509200695458904:2439], cookie# 1 2025-04-04T17:20:47.259437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509200695458901:2439][/dc-1] Unexpected sync response: sender# [1:7489509200695458904:2439], cookie# 1 2025-04-04T17:20:47.314938Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509196400491152:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { 
Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:47.315261Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509196400491152:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 
r# [3:7489509218160437540:2056] 2025-04-04T17:20:55.289783Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509235340308177:2935][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509235340308181:2935] 2025-04-04T17:20:55.289810Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489509235340308177:2935][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7489509222455405168:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:55.289816Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509235340308179:2937][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509235340308186:2937] 2025-04-04T17:20:55.289832Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509235340308177:2935][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7489509235340308182:2935] 2025-04-04T17:20:55.289851Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509235340308179:2937][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509235340308187:2937] 2025-04-04T17:20:55.289886Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7489509235340308179:2937][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7489509222455405168:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:55.289911Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509235340308179:2937][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7489509235340308188:2937] 2025-04-04T17:20:55.289921Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489509235340308177:2935][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7489509222455405168:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:55.289935Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489509235340308179:2937][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7489509222455405168:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:55.289961Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437537:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308196:2936] 2025-04-04T17:20:55.289962Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437534:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308195:2936] 2025-04-04T17:20:55.289976Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437537:2053] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308184:2935] 2025-04-04T17:20:55.289980Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437534:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308183:2935] 2025-04-04T17:20:55.289990Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437537:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308190:2937] 2025-04-04T17:20:55.289994Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437534:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308189:2937] 2025-04-04T17:20:55.290016Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7489509235340308197:2936][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7489509218160437540:2056] 2025-04-04T17:20:55.290043Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7489509235340308178:2936][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7489509235340308194:2936] 2025-04-04T17:20:55.290044Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489509222455405168:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-04T17:20:55.290065Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7489509235340308178:2936][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7489509222455405168:2126], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:55.290082Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437540:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308185:2935] 2025-04-04T17:20:55.290097Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437540:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308191:2937] 2025-04-04T17:20:55.290110Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7489509218160437540:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7489509235340308197:2936] 2025-04-04T17:20:55.290120Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489509222455405168:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489509235340308178:2936] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:55.290207Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509222455405168:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509235340308178:2936] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable 
RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:55.290236Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489509222455405168:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-04-04T17:20:55.290297Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489509222455405168:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489509235340308177:2935] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:55.290355Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509222455405168:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509235340308177:2935] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:55.290410Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7489509222455405168:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-04-04T17:20:55.290457Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7489509222455405168:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7489509235340308179:2937] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:20:55.290515Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509222455405168:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509235340308179:2937] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:20:55.290585Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509235340308198:2938], recipient# [3:7489509235340308166:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:55.290680Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509235340308199:2939], recipient# [3:7489509235340308176:2324], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD]
Test command err: 2025-04-04T17:20:57.009919Z node 1 :S3_WRAPPER NOTICE: Request: uuid# BDB10A97-792B-44D5-8928-D37A7CC095DA, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:6807 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E359558C-6B18-438D-9B7C-6F58022B1A9E amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-04T17:20:57.015030Z node 1 :S3_WRAPPER NOTICE: Response: uuid# BDB10A97-792B-44D5-8928-D37A7CC095DA, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-04T17:20:57.015417Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 5E1DFE88-6F36-4305-9789-885340416409, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:6807 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F942F568-DF1E-4DE9-8B91-35AEC21978B0 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-04T17:20:57.018226Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 5E1DFE88-6F36-4305-9789-885340416409, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 }
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD]
Test command err: 2025-04-04T17:20:57.012576Z node 1 :S3_WRAPPER NOTICE: Request: uuid# CE425D34-3474-4975-AE26-69F0C43DBA6D, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:1194 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EAA0BBFF-17EF-4ABF-B81D-6BD2210DC5A7 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-04T17:20:57.020113Z node 1 :S3_WRAPPER NOTICE: Response: uuid# CE425D34-3474-4975-AE26-69F0C43DBA6D, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-04-04T17:20:57.020482Z node 1 :S3_WRAPPER NOTICE: Request: uuid# EF326E88-5D72-4F54-B265-6FF5B5878DDC, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:1194 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3A4646CC-4E89-46CC-9847-4BBA75647C70 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-04-04T17:20:57.024730Z node 1 :S3_WRAPPER NOTICE: Response: uuid# EF326E88-5D72-4F54-B265-6FF5B5878DDC, response# GetObjectResult { }
>> TS3WrapperTests::PutObject [GOOD]
>> KqpReturning::ReturningSerial [GOOD]
>> KqpReturning::ReturningColumnsOrder
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD]
Test command err: 2025-04-04T17:20:57.641907Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A1FD3EB7-4CE3-4599-9375-CB59BB2572EE, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:18474 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 083A8FFC-C016-4C0E-A87E-2648E86EE791 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-04-04T17:20:57.647274Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A1FD3EB7-4CE3-4599-9375-CB59BB2572EE, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc }
>> TS3WrapperTests::AbortMultipartUpload
>> LocalPartition::DiscoveryServiceBadNodeId [GOOD]
>> LocalPartition::DescribeHang
>> TS3WrapperTests::AbortMultipartUpload [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5
>> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true
>> TxUsage::WriteToTopic_Demo_30 [GOOD]
>> KqpMergeCn::TopSortBy_String_Limit3 [GOOD]
>> KqpMergeCn::TopSortBy_Utf8_Limit2
>> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD]
>> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In
------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD]
Test command err: 2025-04-04T17:20:58.493085Z node 1 :S3_WRAPPER NOTICE: Request: uuid# C9E533D9-5C78-4D36-9066-48BE627C59FB, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:14803 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A9835B1B-4AA0-4175-BE84-842AE154AFF2 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-04-04T17:20:58.499252Z node 1 :S3_WRAPPER NOTICE: Response: uuid# C9E533D9-5C78-4D36-9066-48BE627C59FB, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-04-04T17:20:58.499702Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 781FED46-ACE4-4633-BF3F-8A7A759F51B0, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14803 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 09FC40D7-CF85-46FA-ACE4-F46A78963B8D amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2025-04-04T17:20:58.502401Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 781FED46-ACE4-4633-BF3F-8A7A759F51B0, response# AbortMultipartUploadResult { } 2025-04-04T17:20:58.502721Z node 1 :S3_WRAPPER NOTICE: Request: uuid# E297014B-441F-410A-8DA2-1C01469CA260, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:14803 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1EA1D90A-04BE-4958-98B0-4457A191164A amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-04-04T17:20:58.505569Z node 1 :S3_WRAPPER NOTICE: Response: uuid# E297014B-441F-410A-8DA2-1C01469CA260, response# No response body.
|93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
>> TxUsage::WriteToTopic_Demo_31
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD]
Test command err: 2025-04-04T17:18:06.991677Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:18:07.091767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:18:07.091822Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:07.098944Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:18:07.099337Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:18:07.099665Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:18:07.112065Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:18:07.149039Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:18:07.149160Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:18:07.152163Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:18:07.152228Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:18:07.152270Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:18:07.154484Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:18:07.154807Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:18:07.154881Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:18:07.227995Z node 1 :TX_DATASHARD DEBUG:
TDataShard::TTxInitRestored::Complete 2025-04-04T17:18:07.256021Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:18:07.256170Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:18:07.256248Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:18:07.256280Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:18:07.256309Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:18:07.256341Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.256485Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.256532Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.256772Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:18:07.256859Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:18:07.256964Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.256992Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:18:07.257033Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:18:07.257067Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:18:07.257093Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:18:07.257141Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:18:07.257184Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:18:07.257275Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.257308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.257353Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:18:07.260786Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:18:07.260853Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:18:07.260926Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:18:07.261181Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:18:07.261251Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:18:07.261315Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 
9437184 2025-04-04T17:18:07.261356Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.261393Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:18:07.261427Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:18:07.261456Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.266525Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:18:07.266585Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:18:07.266663Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:18:07.266698Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.266739Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:18:07.266767Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:18:07.266800Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:18:07.266849Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.266888Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:18:07.286740Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:18:07.286838Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:18:07.286879Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:18:07.286912Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:18:07.294558Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:18:07.295115Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295164Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:18:07.295205Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:18:07.295349Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:18:07.295384Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:18:07.295522Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:18:07.295562Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.295594Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:18:07.295627Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:18:07.299102Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 
at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:18:07.299180Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:18:07.299393Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299425Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:18:07.299477Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:18:07.299519Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:18:07.299551Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:18:07.299584Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:18:07.299618Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:18:07.299654Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.299687Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:18:07.299729Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:18:07.299768Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:18:07.299936Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:18:07.299976Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:18:07.300002Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:18:07.300025Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:18:07.300048Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:18:07.300095Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:18:07.300119Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:18:07.300147Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:18:07.300181Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:18:07.300236Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:18:07.300269Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:18:07.300298Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:18:07.300334Z node 1 :TX_DATA ... 
0:45.547028Z node 3 :TX_DATASHARD DEBUG: tx 10 at 9437184 restored its data 2025-04-04T17:20:45.780653Z node 3 :TX_DATASHARD TRACE: Operation [0:10] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-04-04T17:20:45.781604Z node 3 :TX_DATASHARD DEBUG: tx 10 released its data 2025-04-04T17:20:45.781663Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Restart 2025-04-04T17:20:45.918002Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:20:45.918086Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit ExecuteDataTx 2025-04-04T17:20:45.918911Z node 3 :TX_DATASHARD DEBUG: tx 10 at 9437184 restored its data 2025-04-04T17:20:45.928206Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:10] execution 2025-04-04T17:20:45.928415Z node 3 :TX_DATASHARD DEBUG: tx 10 released its data 2025-04-04T17:20:45.928466Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Restart 2025-04-04T17:20:46.273139Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:20:46.273230Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit ExecuteDataTx 2025-04-04T17:20:46.274020Z node 3 :TX_DATASHARD DEBUG: tx 10 at 9437184 restored its data 2025-04-04T17:20:47.374738Z node 3 :TX_DATASHARD TRACE: Executed operation [0:10] at tablet 9437184 with status COMPLETE 2025-04-04T17:20:47.374877Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:10] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:20:47.374954Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2025-04-04T17:20:47.375001Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:20:47.375042Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit FinishPropose 2025-04-04T17:20:47.375082Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit FinishPropose 2025-04-04T17:20:47.375138Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 10 at tablet 9437184 send to client, exec latency: 11 ms, propose latency: 11 ms, status: COMPLETE 2025-04-04T17:20:47.375219Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2025-04-04T17:20:47.375251Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2025-04-04T17:20:47.375283Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2025-04-04T17:20:47.375317Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2025-04-04T17:20:47.375370Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2025-04-04T17:20:47.375398Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2025-04-04T17:20:47.375428Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2025-04-04T17:20:47.406810Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:20:47.406899Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2025-04-04T17:20:47.406970Z node 3 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:20:51.288316Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:98:2133], Recipient [3:233:2226]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 98 RawX2: 12884904021 } 2025-04-04T17:20:51.288391Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-04-04T17:20:51.288727Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:512:2487], Recipient [3:233:2226]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:20:51.288776Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:20:51.288820Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:511:2486], serverId# [3:512:2487], sessionId# [0:0:0] 2025-04-04T17:20:51.289064Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:98:2133], Recipient [3:233:2226]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 98 RawX2: 12884904021 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2025-04-04T17:20:51.289125Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:20:51.289213Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:20:51.289944Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-04-04T17:20:51.320363Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-04T17:20:51.320462Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-04-04T17:20:51.320512Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:20:51.320559Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:20:51.320631Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-04-04T17:20:51.320702Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2025-04-04T17:20:51.320747Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-04T17:20:51.320777Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-04-04T17:20:51.320803Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-04-04T17:20:51.320837Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-04T17:20:51.329108Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-04T17:20:51.329314Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-04T17:20:51.329361Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-04T17:20:51.555891Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:20:51.555958Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-04T17:20:51.556543Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-04T17:20:51.583455Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-04-04T17:20:51.583736Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-04T17:20:51.583786Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-04T17:20:51.584329Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:20:51.584395Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-04T17:20:51.585078Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-04T17:20:51.791754Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-04-04T17:20:51.792662Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-04T17:20:51.792707Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-04T17:20:51.919089Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:20:51.919181Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-04T17:20:51.919941Z node 3 :TX_DATASHARD DEBUG: tx 11 at 
9437184 restored its data 2025-04-04T17:20:51.928880Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-04-04T17:20:51.929104Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-04-04T17:20:51.929162Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-04-04T17:20:52.352163Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:20:52.352243Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-04-04T17:20:52.353038Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-04-04T17:20:53.562287Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-04-04T17:20:53.562398Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-04-04T17:20:53.562467Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-04T17:20:53.562508Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-04-04T17:20:53.562546Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2025-04-04T17:20:53.562585Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-04-04T17:20:53.562635Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 12 ms, propose latency: 12 ms, status: COMPLETE 2025-04-04T17:20:53.562711Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2025-04-04T17:20:53.562741Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-04-04T17:20:53.562773Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-04-04T17:20:53.562817Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-04-04T17:20:53.562864Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-04-04T17:20:53.562889Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-04-04T17:20:53.562917Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2025-04-04T17:20:53.594334Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:20:53.594408Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-04-04T17:20:53.594467Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |93.1%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
|93.1%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log}
|93.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log}
>> TxUsage::Sinks_Oltp_WriteToTopics_1 [GOOD]
>> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD]
>> KqpSqlIn::KeyTypeMissmatch_Int
>> TxUsage::Sinks_Oltp_WriteToTopics_2
>> KqpNewEngine::PrecomputeKey [GOOD]
>> KqpNewEngine::PrimaryView
>> YdbSdkSessionsPool::WaitQueue1
>> YdbSdkSessionsPool::CustomPlan
>> YdbSdkSessionsPool::StressTestSync10
>> YdbSdkSessionsPool::StressTestAsync1
>> YdbSdkSessionsPool::Get1Session
>> YdbSdkSessionsPool::WaitQueue10
>> YdbSdkSessionsPool::RunSmallPlan
>> YdbSdkSessionsPool::StressTestAsync10
>> YdbSdkSessionsPool::PeriodicTask10
>> YdbSdkSessionsPool::StressTestSync1
>> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD]
>> TxUsage::WriteToTopic_Demo_47 [GOOD]
>> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TxUsage::WriteToTopic_Demo_19_RestartNo
>> TxUsage::WriteToTopic_Demo_48
>> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
2025-04-04T17:20:43.295003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509182859433729:2211];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:20:43.295058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001676/r3tmp/tmp64YSLM/pdisk_1.dat
2025-04-04T17:20:43.931212Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:20:43.946087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:20:43.946225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:20:43.962689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:18499
WaitRootIsUp 'dc-1'...
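The trace that follows walks a scheme-board path lookup for 'dc-1': the main subscriber spawns one replica actor per scheme-board replica, each replica acks the subscription with TEvNotify, and a TEvSyncVersionRequest is fanned out under cookie# 1. With size# 3 and half# 1, the sync is declared done as soon as the second success arrives, and the late third reply is deliberately dropped as an 'Unexpected sync response'. A minimal sketch of that majority rule, under invented names (TSyncState, OnSyncResponse) rather than the real actor classes:

    #include <cstddef>
    #include <iostream>

    // Placeholder for the subscriber's per-cookie sync bookkeeping; the real
    // implementation lives in the scheme-board subscriber actor.
    struct TSyncState {
        size_t Size;           // number of replicas, size# 3 in the trace
        size_t Successes = 0;
        size_t Failures = 0;
        bool Done = false;

        size_t Half() const { return Size / 2; }  // half# 1 for size# 3

        // Returns true when this response completes the sync.
        bool OnSyncResponse(bool ok) {
            if (Done) {
                std::cout << "Unexpected sync response\n";  // the third reply in the log
                return false;
            }
            ok ? ++Successes : ++Failures;
            if (Successes > Half()) {  // strict majority of the replicas answered
                Done = true;
                std::cout << "Sync is done: successes# " << Successes << "\n";
                return true;
            }
            std::cout << "Sync is in progress: successes# " << Successes << "\n";
            return false;
        }
    };

    int main() {
        TSyncState sync{/*Size=*/3};
        sync.OnSyncResponse(true);  // replica 1: in progress (1 of 3)
        sync.OnSyncResponse(true);  // replica 2: done, majority reached
        sync.OnSyncResponse(true);  // replica 3: arrives late, ignored
    }

Waiting only for a strict majority keeps lookups fast while one replica is slow, which is exactly the shape of the three-reply exchange logged below.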
TClient::Ls request: dc-1 2025-04-04T17:20:44.145970Z node 1 :TX_PROXY DEBUG: actor# [1:7489509182859433816:2113] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:44.146027Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509187154401593:2433] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:44.146175Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509182859433841:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:44.146236Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509182859433841:2128], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:44.146518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:44.148446Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433510:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509187154401600:2434] 2025-04-04T17:20:44.148516Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509182859433510:2056] Subscribe: subscriber# [1:7489509187154401600:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.148650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401600:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509182859433510:2056] 2025-04-04T17:20:44.148689Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509187154401597:2434] 2025-04-04T17:20:44.148725Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433510:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509187154401600:2434] 2025-04-04T17:20:44.148747Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433504:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509187154401598:2434] 2025-04-04T17:20:44.148786Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509182859433504:2050] Subscribe: subscriber# [1:7489509187154401598:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.148832Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433507:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509187154401599:2434] 2025-04-04T17:20:44.148847Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509182859433507:2053] Subscribe: subscriber# [1:7489509187154401599:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:44.148877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401598:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509182859433504:2050] 2025-04-04T17:20:44.148893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401599:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509182859433507:2053] 2025-04-04T17:20:44.148919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509187154401595:2434] 2025-04-04T17:20:44.148966Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509187154401594:2434][/dc-1] Set up state: owner# [1:7489509182859433841:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.149107Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509187154401596:2434] 2025-04-04T17:20:44.149152Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509187154401594:2434][/dc-1] Path was already updated: owner# [1:7489509182859433841:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:44.149204Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401598:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187154401595:2434], cookie# 1 2025-04-04T17:20:44.149218Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401599:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187154401596:2434], cookie# 1 2025-04-04T17:20:44.149231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401600:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187154401597:2434], cookie# 1 2025-04-04T17:20:44.150359Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433504:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509187154401598:2434] 2025-04-04T17:20:44.150425Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433504:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187154401598:2434], cookie# 1 2025-04-04T17:20:44.150496Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433507:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509187154401599:2434] 2025-04-04T17:20:44.150513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433507:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187154401599:2434], cookie# 1 2025-04-04T17:20:44.150529Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509182859433510:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509187154401600:2434], cookie# 1 2025-04-04T17:20:44.150567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401598:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509182859433504:2050], cookie# 1 2025-04-04T17:20:44.150583Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7489509187154401599:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509182859433507:2053], cookie# 1 2025-04-04T17:20:44.150597Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509187154401600:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509182859433510:2056], cookie# 1 2025-04-04T17:20:44.150639Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509187154401595:2434], cookie# 1 2025-04-04T17:20:44.150662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:44.150684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509187154401596:2434], cookie# 1 2025-04-04T17:20:44.150710Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:44.150742Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509187154401597:2434], cookie# 1 2025-04-04T17:20:44.150758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509187154401594:2434][/dc-1] Unexpected sync response: sender# [1:7489509187154401597:2434], cookie# 1 2025-04-04T17:20:44.224534Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509182859433841:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:44.224998Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509182859433841:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... nfo }] } 2025-04-04T17:21:00.434734Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509226397867150:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509243577737554:3000] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:00.434807Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509256462640187:3555], recipient# [3:7489509256462640186:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:00.914480Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509226397867150:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:00.914629Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509226397867150:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509230692835575:2983] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:00.914734Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509256462640189:3556], recipient# [3:7489509256462640188:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:00.920004Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509226397867150:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:00.920154Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509226397867150:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509230692835575:2983] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:00.920294Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509256462640191:3557], recipient# [3:7489509256462640190:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:01.435719Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509226397867150:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:01.435883Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509226397867150:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509243577737554:3000] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:01.436018Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509260757607507:3561], recipient# [3:7489509260757607506:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:01.915413Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509226397867150:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:01.915591Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509226397867150:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509230692835575:2983] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:01.915703Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509260757607509:3562], recipient# [3:7489509260757607508:2342], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:01.920953Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509226397867150:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:01.921099Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509226397867150:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509230692835575:2983] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:01.921200Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509260757607511:3563], recipient# [3:7489509260757607510:2343], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:02.436607Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509226397867150:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:02.436762Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509226397867150:2126], cacheItem# { Subscriber: { Subscriber: [3:7489509243577737554:3000] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:02.436874Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509265052574831:3567], recipient# [3:7489509265052574830:2344], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::Random >> AsyncIndexChangeCollector::InsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 9689351498459517554 Reassign# 7 -- VSlotId { NodeId: 8 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 7 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 8 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:28:0] Put# [1:1:2:0:0:8:0] Put# [1:1:3:0:0:14:0] Put# [1:1:4:0:0:18:0] 2025-04-04T17:17:52.317932Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:7:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:17:52.320205Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:7:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2839226462941385637] 2025-04-04T17:17:52.330660Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:7:0]: THullOsirisActor: RESURRECT: id# [1:1:1:0:0:28:3] 2025-04-04T17:17:52.330747Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:7:0]: THullOsirisActor: RESURRECT: id# [1:1:2:0:0:8:4] 2025-04-04T17:17:52.330788Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:7:0]: THullOsirisActor: RESURRECT: id# [1:1:3:0:0:14:5] 2025-04-04T17:17:52.330816Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:7:0]: THullOsirisActor: RESURRECT: id# [1:1:4:0:0:18:5] 2025-04-04T17:17:52.331122Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:7:0]: THullOsirisActor: FINISH: BlobsResurrected# 4 PartsResurrected# 4 Put# [1:1:5:0:0:99:0] Put# [1:1:6:0:0:50:0] Put# [1:1:7:0:0:42:0] Put# [1:1:8:0:0:2:0] Put# [1:1:9:0:0:24:0] Put# [1:1:10:0:0:73:0] Put# [1:1:11:0:0:28:0] Put# [1:1:12:0:0:50:0] Put# [1:1:13:0:0:14:0] Put# [1:1:14:0:0:48:0] Put# [1:1:15:0:0:82:0] Put# [1:1:16:0:0:21:0] Put# [1:1:17:0:0:68:0] Put# [1:1:18:0:0:38:0] Put# [1:1:19:0:0:84:0] Put# [1:1:20:0:0:85:0] Put# 
[1:1:21:0:0:42:0] Put# [1:1:22:0:0:48:0] Put# [1:1:23:0:0:42:0] Put# [1:1:24:0:0:9:0] Put# [1:1:25:0:0:95:0] Put# [1:1:26:0:0:86:0] Put# [1:1:27:0:0:95:0] Put# [1:1:28:0:0:87:0] Put# [1:1:29:0:0:41:0] Put# [1:1:30:0:0:34:0] Put# [1:1:31:0:0:34:0] Put# [1:1:32:0:0:88:0] Put# [1:1:33:0:0:38:0] Put# [1:1:34:0:0:43:0] Put# [1:1:35:0:0:30:0] Put# [1:1:36:0:0:6:0] Put# [1:1:37:0:0:53:0] Put# [1:1:38:0:0:22:0] Put# [1:1:39:0:0:42:0] Put# [1:1:40:0:0:75:0] Put# [1:1:41:0:0:92:0] Put# [1:1:42:0:0:72:0] Put# [1:1:43:0:0:42:0] Put# [1:1:44:0:0:26:0] Put# [1:1:45:0:0:32:0] Put# [1:1:46:0:0:98:0] Put# [1:1:47:0:0:9:0] Put# [1:1:48:0:0:40:0] Put# [1:1:49:0:0:96:0] Put# [1:1:50:0:0:39:0] Put# [1:1:51:0:0:20:0] Put# [1:1:52:0:0:84:0] Put# [1:1:53:0:0:99:0] Put# [1:1:54:0:0:88:0] Put# [1:1:55:0:0:81:0] Put# [1:1:56:0:0:15:0] Put# [1:1:57:0:0:82:0] Put# [1:1:58:0:0:44:0] Put# [1:1:59:0:0:39:0] Put# [1:1:60:0:0:97:0] Put# [1:1:61:0:0:53:0] Put# [1:1:62:0:0:96:0] Put# [1:1:63:0:0:50:0] Put# [1:1:64:0:0:2:0] Put# [1:1:65:0:0:14:0] Put# [1:1:66:0:0:96:0] Put# [1:1:67:0:0:94:0] Put# [1:1:68:0:0:68:0] Put# [1:1:69:0:0:70:0] Put# [1:1:70:0:0:58:0] Put# [1:1:71:0:0:27:0] Put# [1:1:72:0:0:53:0] Put# [1:1:73:0:0:95:0] Put# [1:1:74:0:0:44:0] Put# [1:1:75:0:0:41:0] Put# [1:1:76:0:0:20:0] Put# [1:1:77:0:0:2:0] Put# [1:1:78:0:0:46:0] Put# [1:1:79:0:0:67:0] Put# [1:1:80:0:0:72:0] Put# [1:1:81:0:0:35:0] Put# [1:1:82:0:0:86:0] Put# [1:1:83:0:0:18:0] Put# [1:1:84:0:0:24:0] Put# [1:1:85:0:0:96:0] Put# [1:1:86:0:0:84:0] Put# [1:1:87:0:0:59:0] Put# [1:1:88:0:0:84:0] Put# [1:1:89:0:0:22:0] Put# [1:1:90:0:0:96:0] Put# [1:1:91:0:0:84:0] Put# [1:1:92:0:0:21:0] Put# [1:1:93:0:0:5:0] Put# [1:1:94:0:0:24:0] Put# [1:1:95:0:0:58:0] Put# [1:1:96:0:0:36:0] Put# [1:1:97:0:0:82:0] Put# [1:1:98:0:0:5:0] Put# [1:1:99:0:0:71:0] Put# [1:1:100:0:0:83:0] Put# [1:1:101:0:0:21:0] Put# [1:1:102:0:0:77:0] Put# [1:1:103:0:0:8:0] Put# [1:1:104:0:0:65:0] Put# [1:1:105:0:0:34:0] Put# [1:1:106:0:0:81:0] Put# [1:1:107:0:0:43:0] Put# [1:1:108:0:0:35:0] Put# [1:1:109:0:0:49:0] Put# [1:1:110:0:0:16:0] Put# [1:1:111:0:0:93:0] Put# [1:1:112:0:0:29:0] Put# [1:1:113:0:0:66:0] Put# [1:1:114:0:0:39:0] Put# [1:1:115:0:0:95:0] Put# [1:1:116:0:0:16:0] Put# [1:1:117:0:0:10:0] Put# [1:1:118:0:0:97:0] Put# [1:1:119:0:0:11:0] Put# [1:1:120:0:0:43:0] Put# [1:1:121:0:0:59:0] Put# [1:1:122:0:0:60:0] Put# [1:1:123:0:0:61:0] Put# [1:1:124:0:0:84:0] Put# [1:1:125:0:0:91:0] Put# [1:1:126:0:0:74:0] Put# [1:1:127:0:0:28:0] Put# [1:1:128:0:0:4:0] Put# [1:1:129:0:0:3:0] Put# [1:1:130:0:0:10:0] Put# [1:1:131:0:0:45:0] Put# [1:1:132:0:0:70:0] Put# [1:1:133:0:0:97:0] Put# [1:1:134:0:0:48:0] Put# [1:1:135:0:0:94:0] Put# [1:1:136:0:0:59:0] Put# [1:1:137:0:0:31:0] Put# [1:1:138:0:0:55:0] Put# [1:1:139:0:0:15:0] Put# [1:1:140:0:0:51:0] Put# [1:1:141:0:0:6:0] Put# [1:1:142:0:0:5:0] Put# [1:1:143:0:0:99:0] Put# [1:1:144:0:0:63:0] Put# [1:1:145:0:0:99:0] Put# [1:1:146:0:0:53:0] Put# [1:1:147:0:0:89:0] Put# [1:1:148:0:0:21:0] Put# [1:1:149:0:0:41:0] Put# [1:1:150:0:0:42:0] Put# [1:1:151:0:0:91:0] Put# [1:1:152:0:0:41:0] Put# [1:1:153:0:0:99:0] Put# [1:1:154:0:0:75:0] Put# [1:1:155:0:0:76:0] Put# [1:1:156:0:0:15:0] Put# [1:1:157:0:0:15:0] Put# [1:1:158:0:0:77:0] Put# [1:1:159:0:0:12:0] Put# [1:1:160:0:0:77:0] Put# [1:1:161:0:0:83:0] Put# [1:1:162:0:0:12:0] Put# [1:1:163:0:0:80:0] Put# [1:1:164:0:0:30:0] Put# [1:1:165:0:0:70:0] Put# [1:1:166:0:0:94:0] Put# [1:1:167:0:0:15:0] Put# [1:1:168:0:0:87:0] Put# [1:1:169:0:0:82:0] Put# [1:1:170:0:0:67:0] Put# [1:1:171:0:0:21:0] Put# [1:1:172:0:0:89:0] 
Put# [1:1:173:0:0:88:0] Put# [1:1:174:0:0:20:0] Put# [1:1:175:0:0:37:0] Put# [1:1:176:0:0:53:0] Put# [1:1:177:0:0:1:0] Put# [1:1:178:0:0:27:0] Put# [1:1:179:0:0:15:0] Put# [1:1:180:0:0:71:0] Put# [1:1:181:0:0:24:0] Put# [1:1:182:0:0:88:0] Put# [1:1:183:0:0:85:0] Put# [1:1:184:0:0:10:0] Put# [1:1:185:0:0:50:0] Put# [1:1:186:0:0:29:0] Put# [1:1:187:0:0:31:0] Put# [1:1:188:0:0:63:0] Put# [1:1:189:0:0:79:0] Put# [1:1:190:0:0:62:0] Put# [1:1:191:0:0:49:0] Put# [1:1:192:0:0:58:0] Put# [1:1:193:0:0:93:0] Put# [1:1:194:0:0:39:0] Put# [1:1:195:0:0:54:0] Put# [1:1:196:0:0:9:0] Put# [1:1:197:0:0:85:0] Put# [1:1:198:0:0:2:0] Put# [1:1:199:0:0:78:0] Put# [1:1:200:0:0:45:0] Put# [1:1:201:0:0:29:0] Put# [1:1:202:0:0:60:0] Put# [1:1:203:0:0:75:0] Put# [1:1:204:0:0:12:0] Put# [1:1:205:0:0:5:0] Put# [1:1:206:0:0:35:0] Put# [1:1:207:0:0:9:0] Put# [1:1:208:0:0:99:0] Put# [1:1:209:0:0:15:0] Put# [1:1:210:0:0:9:0] Put# [1:1:211:0:0:95:0] Put# [1:1:212:0:0:98:0] Put# [1:1:213:0:0:81:0] Put# [1:1:214:0:0:68:0] Put# [1:1:215:0:0:89:0] Put# [1:1:216:0:0:6:0] Put# [1:1:217:0:0:58:0] Put# [1:1:218:0:0:92:0] Put# [1:1:219:0:0:67:0] Put# [1:1:220:0:0:76:0] Put# [1:1:221:0:0:16:0] Put# [1:1:222:0:0:73:0] Put# [1:1:223:0:0:7:0] Put# [1:1:224:0:0:73:0] Put# [1:1:225:0:0:90:0] Put# [1:1:226:0:0:78:0] Put# [1:1:227:0:0:32:0] Put# [1:1:228:0:0:18:0] Put# [1:1:229:0:0:75:0] Put# [1:1:230:0:0:89:0] Put# [1:1:231:0:0:31:0] Put# [1:1:232:0:0:37:0] Put# [1:1:233:0:0:11:0] Put# [1:1:234:0:0:78:0] Put# [1:1:235:0:0:7:0] Put# [1:1:236:0:0:98:0] Put# [1:1:237:0:0:36:0] Put# [1:1:238:0:0:40:0] Put# [1:1:239:0:0:79:0] Put# [1:1:240:0:0:79:0] Put# [1:1:241:0:0:26:0] Put# [1:1:242:0:0:82:0] Put# [1:1:243:0:0:83:0] Put# [1:1:244:0:0:83:0] Put# [1:1:245:0:0:48:0] Put# [1:1:246:0:0:56:0] Put# [1:1:247:0:0:30:0] Put# [1:1:248:0:0:78:0] Put# [1:1:249:0:0:63:0] Put# [1:1:250:0:0:4:0] Put# [1:1:251:0:0:50:0] Put# [1:1:252:0:0:21:0] Put# [1:1:253:0:0:87:0] Put# [1:1:254:0:0:75:0] Put# [1:1:255:0:0:15:0] Put# [1:1:256:0:0:8:0] Put# [1:1:257:0:0:77:0] Put# [1:1:258:0:0:6:0] Put# [1:1:259:0:0:18:0] Put# [1:1:260:0:0:12:0] Put# [1:1:261:0:0:69:0] Put# [1:1:262:0:0:67:0] Put# [1:1:263:0:0:24:0] Put# [1:1:264:0:0:90:0] Put# [1:1:265:0:0:5:0] Put# [1:1:266:0:0:23:0] Put# [1:1:267:0:0:1:0] Put# [1:1:268:0:0:49:0] Put# [1:1:269:0:0:63:0] Put# [1:1:270:0:0:71:0] Put# [1:1:271:0:0:23:0] Put# [1:1:272:0:0:3:0] Put# [1:1:273:0:0:43:0] Put# [1:1:274:0:0:92:0] Put# [1:1:275:0:0:68:0] Put# [1:1:276:0:0:83:0] Put# [1:1:277:0:0:30:0] Put# [1:1:278:0:0:79:0] Put# [1:1:279:0:0:33:0] Put# [1:1:280:0:0:74:0] Put# [1:1:281:0:0:65:0] Put# [1:1:282:0:0:84:0] Put# [1:1:283:0:0:29:0] Put# [1:1:284:0:0:80:0] Put# [1:1:285:0:0:94:0] Put# [1:1:286:0:0:31:0] Put# [1:1:287:0:0:10:0] Put# [1:1:288:0:0:75:0] Put# [1:1:289:0:0:65:0] Put# [1:1:290:0:0:66:0] Put# [1:1:291:0:0:18:0] Put# [1:1:292:0:0:59:0] Put# [1:1:293:0:0:3:0] Put# [1:1:294:0:0:81:0] Put# [1:1:295:0:0:71:0] Put# [1:1:296:0:0:50:0] Put# [1:1:297:0:0:90:0] Put# [1:1:298:0:0:87:0] Put# [1:1:299:0:0:72:0] Put# [1:1:300:0:0:74:0] Put# [1:1:301:0:0:75:0] Put# [1:1:302:0:0:60:0] Put# [1:1:303:0:0:6:0] Put# [1:1:304:0:0:68:0] Put# [1:1:305:0:0:88:0] Put# [1:1:306:0:0:12:0] Put# [1:1:307:0:0:61:0] Put# [1:1:308:0:0:37:0] Put# [1:1:309:0:0:21:0] Put# [1:1:310:0:0:83:0] Put# [1:1:311:0:0:99:0] Put# [1:1:312:0:0:54:0] Put# [1:1:313:0:0:65:0] Put# [1:1:314:0:0:66:0] Put# [1:1:315:0:0:97:0] Put# [1:1:316:0:0:47:0] Put# [1:1:317:0:0:57:0] Put# [1:1:318:0:0:33:0] Put# [1:1:319:0:0:46:0] Put# [1:1:320:0:0:7:0] Put# 
[1:1:321:0:0:51:0] Put# [1:1:322:0:0:35:0] Put# [1:1:323:0:0:29:0] Put# [1:1:324:0:0:24:0] Put# [1:1:325:0:0:5:0] Put# [1:1:326:0:0:18:0] Put# [1:1:327:0:0:64:0] Put# [1:1:328:0:0:81:0] Put# [1:1:329:0:0:71:0] Put# [1:1:330:0:0:39:0] Put# [1:1:331:0:0:36:0] Put# [1:1:332:0:0:100:0] Put# [1:1:333:0:0:62:0] Put# [1:1:334:0:0:34:0] Put# [1:1:335:0:0:11:0] Put# [1:1:336:0:0:47:0] Put# [1:1:337:0:0:99:0] Put# [1:1:338:0:0:83:0] Put# [1:1:339:0:0:52:0] Put# [1:1:340:0:0:47:0] Put# [1:1:341:0:0:3:0] Put# [1:1:342:0:0:22:0] Put# [1:1:343:0:0:33:0] Put# [1:1:344:0:0:79:0] Put# [1:1:345:0:0:1:0] Put# [1:1:346:0:0:5:0] Put# [1:1:347:0:0:28:0] Put# [1:1:348:0:0:39:0] Put# [1:1:349:0:0:7:0] Put# [1:1:350:0:0:83:0] Put# [1:1:351:0:0:13:0] Put# [1:1:352:0:0:92:0] Put# [1:1:353:0:0:87:0] Put# [1:1:354:0:0:16:0] Put# [1:1:355:0:0:73:0] Put# [1:1:356:0:0:66:0] Put# [1:1:357:0:0:75:0] Put# [1:1:358:0:0:69:0] Put# [1:1:359:0:0:86:0] Put# [1:1:360:0:0:8:0] Put# [1:1:361:0:0:44:0] Put# [1:1:362:0:0:71:0] Put# [1:1:363:0:0:21:0] Put# [1:1:364:0:0:48:0] Put# [1:1:365:0:0:60:0] Put# [1:1:366:0:0:8:0] Put# [1:1:367:0:0:21:0] Put# [1:1:368:0:0:69:0] Put# [1:1:369:0:0:35:0] Put# [1:1:370:0:0:40:0] Put# [1:1:371:0:0:65:0] Put# [1:1:372:0:0:23:0] Put# [1:1:373:0:0:52:0] Put# [1:1:374:0:0:27:0] Put# [1:1:375:0:0:6:0] Put# [1:1:376:0:0:68:0] Put# [1:1:377:0:0:33:0] Put# [1:1:378:0:0:16:0] Put# [1:1:379:0:0:36:0] Put# [1:1:380:0:0:89:0] Put# [1:1:381:0:0:93:0] Put# [1:1:382:0:0:45:0] Put# [1:1:383:0:0:87:0] Put# [1:1:384:0:0:18:0] Put# [1:1:385:0:0:14:0] Put# [1:1:386:0:0:45:0] Put# [1:1:387:0:0:45:0] Put# [1:1:388:0:0:100:0] Put# [1:1:389:0:0:32:0] Put# [1:1:390:0:0:19:0] Put# [1:1:391:0:0:14:0] Put# [1:1:392:0:0:34:0] Put# [1:1:393:0:0:5:0] Put# [1:1:394:0:0:78:0] Put# [1:1:395:0:0:4:0] Put# [1:1:396:0:0:4:0] Put# [1:1:397:0:0:78:0] Put# [1:1:398:0:0:6:0] Put# [1:1:399:0:0:52:0] Put# [1:1:400:0:0:24:0] Put# [1:1:401:0:0:78:0] Put# [1:1:402:0:0:40:0] Put# [1:1:403:0:0:60:0] Put# [1:1:404:0:0:48:0] Put# [1:1:405:0:0:64:0] Put# [1:1:406:0:0:50:0] Put# [1:1:407:0:0:33:0] Put# [1:1:408:0:0:1:0] Put# [1:1:409:0:0:80:0] Put# [1:1:410:0:0:66:0] Put# [1:1:411:0:0:46:0] Put# [1:1:412:0:0:6:0] Put# [1:1:413:0:0:29:0] Put# [1:1:414:0:0:1:0] Put# [1:1:415:0:0:61:0] Put# [1:1:416:0:0:5:0] Put# [1:1:417:0:0:23:0] Put# [1:1:418:0:0:21:0] Put# [1:1:419:0:0:14:0] Put# [1:1:420:0:0:69:0] Put# [1:1:421:0:0:43:0] Put# [1:1:422:0:0:87:0] Put# [1:1:423:0:0:41:0] Put# [1:1:424:0:0:93:0] Put# [1:1:425:0:0:33:0] Put# [1:1:426:0:0:75:0] Put# [1:1:427:0:0:92:0] Put# [1:1:428:0:0:31:0] Put# [1:1:429:0:0:42:0] Put# [1:1:430:0:0:53:0] Put# [1:1:431:0:0:94:0] Put# [1:1:432:0:0:33:0] Put# [1:1:433:0:0:35:0] Put# [1:1:434:0:0:88:0] Put# [1:1:435:0:0:57:0] Put# [1:1:436:0:0:36:0] Put# [1:1:437:0:0:63:0] Put# [1:1:438:0:0:20:0] Put# [1:1:439:0:0:49:0] Put# [1:1:440:0:0:11:0] Put# [1:1:441:0:0:95:0] Put# [1:1:442:0:0:72:0] Put# [1:1:443:0:0:95:0] Put# [1:1:444:0:0:27:0] Put# [1:1:445:0:0:51:0] Put# [1:1:446:0:0:62:0] Put# [1:1:447:0:0:68:0] ... 
:0:5:0] Put# [1:4:9520:0:0:85:0] Put# [1:4:9521:0:0:32:0] Put# [1:4:9522:0:0:85:0] Put# [1:4:9523:0:0:17:0] Put# [1:4:9524:0:0:84:0] Put# [1:4:9525:0:0:15:0] Put# [1:4:9526:0:0:89:0] Put# [1:4:9527:0:0:71:0] Put# [1:4:9528:0:0:60:0] Put# [1:4:9529:0:0:51:0] Put# [1:4:9530:0:0:27:0] Put# [1:4:9531:0:0:37:0] Put# [1:4:9532:0:0:42:0] Put# [1:4:9533:0:0:77:0] Put# [1:4:9534:0:0:33:0] Put# [1:4:9535:0:0:81:0] Put# [1:4:9536:0:0:3:0] Put# [1:4:9537:0:0:91:0] Put# [1:4:9538:0:0:3:0] Put# [1:4:9539:0:0:82:0] Put# [1:4:9540:0:0:3:0] Put# [1:4:9541:0:0:45:0] Put# [1:4:9542:0:0:35:0] Put# [1:4:9543:0:0:13:0] Put# [1:4:9544:0:0:70:0] Put# [1:4:9545:0:0:75:0] Put# [1:4:9546:0:0:98:0] Put# [1:4:9547:0:0:50:0] Put# [1:4:9548:0:0:79:0] Put# [1:4:9549:0:0:77:0] Put# [1:4:9550:0:0:68:0] Put# [1:4:9551:0:0:67:0] Put# [1:4:9552:0:0:32:0] Put# [1:4:9553:0:0:98:0] Put# [1:4:9554:0:0:56:0] Put# [1:4:9555:0:0:88:0] Put# [1:4:9556:0:0:95:0] Put# [1:4:9557:0:0:30:0] Put# [1:4:9558:0:0:18:0] Put# [1:4:9559:0:0:79:0] Put# [1:4:9560:0:0:69:0] Put# [1:4:9561:0:0:37:0] Put# [1:4:9562:0:0:9:0] Put# [1:4:9563:0:0:58:0] Put# [1:4:9564:0:0:83:0] Put# [1:4:9565:0:0:81:0] Put# [1:4:9566:0:0:59:0] Put# [1:4:9567:0:0:88:0] Put# [1:4:9568:0:0:50:0] Put# [1:4:9569:0:0:27:0] Put# [1:4:9570:0:0:86:0] Put# [1:4:9571:0:0:38:0] Put# [1:4:9572:0:0:51:0] Put# [1:4:9573:0:0:43:0] Put# [1:4:9574:0:0:86:0] Put# [1:4:9575:0:0:46:0] Put# [1:4:9576:0:0:18:0] Put# [1:4:9577:0:0:30:0] Put# [1:4:9578:0:0:76:0] Put# [1:4:9579:0:0:73:0] Put# [1:4:9580:0:0:74:0] Put# [1:4:9581:0:0:86:0] Put# [1:4:9582:0:0:95:0] Put# [1:4:9583:0:0:54:0] Put# [1:4:9584:0:0:83:0] Put# [1:4:9585:0:0:43:0] Put# [1:4:9586:0:0:57:0] Put# [1:4:9587:0:0:51:0] Put# [1:4:9588:0:0:96:0] Put# [1:4:9589:0:0:47:0] Put# [1:4:9590:0:0:91:0] Put# [1:4:9591:0:0:46:0] Put# [1:4:9592:0:0:50:0] Put# [1:4:9593:0:0:28:0] Put# [1:4:9594:0:0:28:0] Put# [1:4:9595:0:0:43:0] Put# [1:4:9596:0:0:64:0] Put# [1:4:9597:0:0:62:0] Put# [1:4:9598:0:0:13:0] Put# [1:4:9599:0:0:99:0] Put# [1:4:9600:0:0:55:0] Put# [1:4:9601:0:0:33:0] Put# [1:4:9602:0:0:74:0] Put# [1:4:9603:0:0:16:0] Put# [1:4:9604:0:0:46:0] Put# [1:4:9605:0:0:20:0] Put# [1:4:9606:0:0:68:0] Put# [1:4:9607:0:0:23:0] Put# [1:4:9608:0:0:89:0] Put# [1:4:9609:0:0:4:0] Put# [1:4:9610:0:0:46:0] Put# [1:4:9611:0:0:98:0] Put# [1:4:9612:0:0:49:0] Put# [1:4:9613:0:0:47:0] Put# [1:4:9614:0:0:91:0] Put# [1:4:9615:0:0:75:0] Put# [1:4:9616:0:0:10:0] Put# [1:4:9617:0:0:55:0] Put# [1:4:9618:0:0:44:0] Put# [1:4:9619:0:0:51:0] Put# [1:4:9620:0:0:98:0] Put# [1:4:9621:0:0:82:0] Put# [1:4:9622:0:0:21:0] Put# [1:4:9623:0:0:79:0] Put# [1:4:9624:0:0:20:0] Put# [1:4:9625:0:0:33:0] Put# [1:4:9626:0:0:54:0] Put# [1:4:9627:0:0:90:0] Put# [1:4:9628:0:0:72:0] Put# [1:4:9629:0:0:93:0] Put# [1:4:9630:0:0:93:0] Put# [1:4:9631:0:0:35:0] Put# [1:4:9632:0:0:1:0] Put# [1:4:9633:0:0:33:0] Put# [1:4:9634:0:0:48:0] Put# [1:4:9635:0:0:54:0] Put# [1:4:9636:0:0:91:0] Put# [1:4:9637:0:0:50:0] Put# [1:4:9638:0:0:36:0] Put# [1:4:9639:0:0:95:0] Put# [1:4:9640:0:0:66:0] Put# [1:4:9641:0:0:28:0] Put# [1:4:9642:0:0:70:0] Put# [1:4:9643:0:0:56:0] Put# [1:4:9644:0:0:94:0] Put# [1:4:9645:0:0:42:0] Put# [1:4:9646:0:0:76:0] Put# [1:4:9647:0:0:1:0] Put# [1:4:9648:0:0:94:0] Put# [1:4:9649:0:0:79:0] Put# [1:4:9650:0:0:90:0] Put# [1:4:9651:0:0:81:0] Put# [1:4:9652:0:0:24:0] Put# [1:4:9653:0:0:14:0] Put# [1:4:9654:0:0:50:0] Put# [1:4:9655:0:0:35:0] Put# [1:4:9656:0:0:93:0] Put# [1:4:9657:0:0:2:0] Put# [1:4:9658:0:0:28:0] Put# [1:4:9659:0:0:72:0] Put# [1:4:9660:0:0:33:0] Put# [1:4:9661:0:0:24:0] Put# 
[1:4:9662:0:0:20:0] Put# [1:4:9663:0:0:45:0] Put# [1:4:9664:0:0:95:0] Put# [1:4:9665:0:0:26:0] Put# [1:4:9666:0:0:99:0] Put# [1:4:9667:0:0:31:0] Put# [1:4:9668:0:0:75:0] Put# [1:4:9669:0:0:11:0] Put# [1:4:9670:0:0:34:0] Put# [1:4:9671:0:0:82:0] Put# [1:4:9672:0:0:33:0] Put# [1:4:9673:0:0:80:0] Put# [1:4:9674:0:0:10:0] Put# [1:4:9675:0:0:98:0] Put# [1:4:9676:0:0:48:0] Put# [1:4:9677:0:0:4:0] Put# [1:4:9678:0:0:18:0] Put# [1:4:9679:0:0:10:0] Put# [1:4:9680:0:0:66:0] Put# [1:4:9681:0:0:51:0] Put# [1:4:9682:0:0:56:0] Put# [1:4:9683:0:0:40:0] Put# [1:4:9684:0:0:66:0] Put# [1:4:9685:0:0:24:0] Put# [1:4:9686:0:0:11:0] Put# [1:4:9687:0:0:57:0] Put# [1:4:9688:0:0:62:0] Put# [1:4:9689:0:0:22:0] Put# [1:4:9690:0:0:64:0] Put# [1:4:9691:0:0:66:0] Put# [1:4:9692:0:0:67:0] Put# [1:4:9693:0:0:7:0] Put# [1:4:9694:0:0:83:0] Put# [1:4:9695:0:0:89:0] Put# [1:4:9696:0:0:37:0] Put# [1:4:9697:0:0:74:0] Put# [1:4:9698:0:0:69:0] Put# [1:4:9699:0:0:68:0] Put# [1:4:9700:0:0:59:0] Put# [1:4:9701:0:0:87:0] Put# [1:4:9702:0:0:8:0] Put# [1:4:9703:0:0:97:0] Put# [1:4:9704:0:0:65:0] Put# [1:4:9705:0:0:47:0] Put# [1:4:9706:0:0:77:0] Put# [1:4:9707:0:0:61:0] Put# [1:4:9708:0:0:8:0] Put# [1:4:9709:0:0:67:0] Put# [1:4:9710:0:0:20:0] Put# [1:4:9711:0:0:86:0] Put# [1:4:9712:0:0:7:0] Put# [1:4:9713:0:0:90:0] Put# [1:4:9714:0:0:66:0] Put# [1:4:9715:0:0:59:0] Put# [1:4:9716:0:0:27:0] Put# [1:4:9717:0:0:65:0] Put# [1:4:9718:0:0:96:0] Put# [1:4:9719:0:0:86:0] Put# [1:4:9720:0:0:27:0] Put# [1:4:9721:0:0:49:0] Put# [1:4:9722:0:0:25:0] Put# [1:4:9723:0:0:67:0] Put# [1:4:9724:0:0:65:0] Put# [1:4:9725:0:0:88:0] Put# [1:4:9726:0:0:87:0] Put# [1:4:9727:0:0:97:0] Put# [1:4:9728:0:0:7:0] Put# [1:4:9729:0:0:80:0] Put# [1:4:9730:0:0:92:0] Put# [1:4:9731:0:0:91:0] Put# [1:4:9732:0:0:66:0] Put# [1:4:9733:0:0:12:0] Put# [1:4:9734:0:0:39:0] Put# [1:4:9735:0:0:80:0] Put# [1:4:9736:0:0:14:0] Put# [1:4:9737:0:0:58:0] Put# [1:4:9738:0:0:29:0] Put# [1:4:9739:0:0:73:0] Put# [1:4:9740:0:0:15:0] Put# [1:4:9741:0:0:17:0] Put# [1:4:9742:0:0:13:0] Put# [1:4:9743:0:0:85:0] Put# [1:4:9744:0:0:62:0] Put# [1:4:9745:0:0:17:0] Put# [1:4:9746:0:0:99:0] Put# [1:4:9747:0:0:2:0] Put# [1:4:9748:0:0:74:0] Put# [1:4:9749:0:0:13:0] Put# [1:4:9750:0:0:41:0] Put# [1:4:9751:0:0:56:0] Put# [1:4:9752:0:0:51:0] Put# [1:4:9753:0:0:89:0] Put# [1:4:9754:0:0:30:0] Put# [1:4:9755:0:0:22:0] Put# [1:4:9756:0:0:4:0] Put# [1:4:9757:0:0:80:0] Put# [1:4:9758:0:0:81:0] Put# [1:4:9759:0:0:33:0] Put# [1:4:9760:0:0:25:0] Put# [1:4:9761:0:0:66:0] Put# [1:4:9762:0:0:82:0] Put# [1:4:9763:0:0:58:0] Put# [1:4:9764:0:0:37:0] Put# [1:4:9765:0:0:47:0] Put# [1:4:9766:0:0:79:0] Put# [1:4:9767:0:0:68:0] Put# [1:4:9768:0:0:7:0] Put# [1:4:9769:0:0:17:0] Put# [1:4:9770:0:0:73:0] Put# [1:4:9771:0:0:22:0] Put# [1:4:9772:0:0:90:0] Put# [1:4:9773:0:0:99:0] Put# [1:4:9774:0:0:83:0] Put# [1:4:9775:0:0:8:0] Put# [1:4:9776:0:0:62:0] Put# [1:4:9777:0:0:55:0] Put# [1:4:9778:0:0:5:0] Put# [1:4:9779:0:0:8:0] Put# [1:4:9780:0:0:100:0] Put# [1:4:9781:0:0:67:0] Put# [1:4:9782:0:0:40:0] Put# [1:4:9783:0:0:46:0] Put# [1:4:9784:0:0:95:0] Put# [1:4:9785:0:0:89:0] Put# [1:4:9786:0:0:97:0] Put# [1:4:9787:0:0:36:0] Put# [1:4:9788:0:0:41:0] Put# [1:4:9789:0:0:1:0] Put# [1:4:9790:0:0:59:0] Put# [1:4:9791:0:0:74:0] Put# [1:4:9792:0:0:5:0] Put# [1:4:9793:0:0:3:0] Put# [1:4:9794:0:0:86:0] Put# [1:4:9795:0:0:51:0] Put# [1:4:9796:0:0:54:0] Put# [1:4:9797:0:0:93:0] Put# [1:4:9798:0:0:61:0] Put# [1:4:9799:0:0:95:0] Put# [1:4:9800:0:0:35:0] Put# [1:4:9801:0:0:32:0] Put# [1:4:9802:0:0:3:0] Put# [1:4:9803:0:0:100:0] Put# 
[1:4:9804:0:0:55:0] Put# [1:4:9805:0:0:56:0] Put# [1:4:9806:0:0:67:0] Put# [1:4:9807:0:0:2:0] Put# [1:4:9808:0:0:67:0] Put# [1:4:9809:0:0:98:0] Put# [1:4:9810:0:0:10:0] Put# [1:4:9811:0:0:21:0] Put# [1:4:9812:0:0:82:0] Put# [1:4:9813:0:0:29:0] Put# [1:4:9814:0:0:93:0] Put# [1:4:9815:0:0:93:0] Put# [1:4:9816:0:0:91:0] Put# [1:4:9817:0:0:100:0] Put# [1:4:9818:0:0:15:0] Put# [1:4:9819:0:0:62:0] Put# [1:4:9820:0:0:53:0] Put# [1:4:9821:0:0:82:0] Put# [1:4:9822:0:0:29:0] Put# [1:4:9823:0:0:96:0] Put# [1:4:9824:0:0:70:0] Put# [1:4:9825:0:0:39:0] Put# [1:4:9826:0:0:20:0] Put# [1:4:9827:0:0:74:0] Put# [1:4:9828:0:0:72:0] Put# [1:4:9829:0:0:100:0] Put# [1:4:9830:0:0:79:0] Put# [1:4:9831:0:0:34:0] Put# [1:4:9832:0:0:71:0] Put# [1:4:9833:0:0:53:0] Put# [1:4:9834:0:0:85:0] Put# [1:4:9835:0:0:65:0] Put# [1:4:9836:0:0:5:0] Put# [1:4:9837:0:0:88:0] Put# [1:4:9838:0:0:91:0] Put# [1:4:9839:0:0:65:0] Put# [1:4:9840:0:0:78:0] Put# [1:4:9841:0:0:22:0] Put# [1:4:9842:0:0:10:0] Put# [1:4:9843:0:0:13:0] Put# [1:4:9844:0:0:30:0] Put# [1:4:9845:0:0:65:0] Put# [1:4:9846:0:0:90:0] Put# [1:4:9847:0:0:69:0] Put# [1:4:9848:0:0:82:0] Put# [1:4:9849:0:0:68:0] Put# [1:4:9850:0:0:81:0] Put# [1:4:9851:0:0:17:0] Put# [1:4:9852:0:0:86:0] Put# [1:4:9853:0:0:88:0] Put# [1:4:9854:0:0:34:0] Put# [1:4:9855:0:0:7:0] Put# [1:4:9856:0:0:22:0] Put# [1:4:9857:0:0:74:0] Put# [1:4:9858:0:0:23:0] Put# [1:4:9859:0:0:89:0] Put# [1:4:9860:0:0:73:0] Put# [1:4:9861:0:0:97:0] Put# [1:4:9862:0:0:39:0] Put# [1:4:9863:0:0:54:0] Put# [1:4:9864:0:0:48:0] Put# [1:4:9865:0:0:73:0] Put# [1:4:9866:0:0:69:0] Put# [1:4:9867:0:0:57:0] Put# [1:4:9868:0:0:68:0] Put# [1:4:9869:0:0:86:0] Put# [1:4:9870:0:0:70:0] Put# [1:4:9871:0:0:81:0] Put# [1:4:9872:0:0:56:0] Put# [1:4:9873:0:0:51:0] Put# [1:4:9874:0:0:27:0] Put# [1:4:9875:0:0:27:0] Put# [1:4:9876:0:0:96:0] Put# [1:4:9877:0:0:70:0] Put# [1:4:9878:0:0:59:0] Put# [1:4:9879:0:0:39:0] Put# [1:4:9880:0:0:59:0] Put# [1:4:9881:0:0:49:0] Put# [1:4:9882:0:0:61:0] Put# [1:4:9883:0:0:67:0] Put# [1:4:9884:0:0:8:0] Put# [1:4:9885:0:0:81:0] Put# [1:4:9886:0:0:1:0] Put# [1:4:9887:0:0:57:0] Put# [1:4:9888:0:0:75:0] Put# [1:4:9889:0:0:16:0] Put# [1:4:9890:0:0:19:0] Put# [1:4:9891:0:0:79:0] Put# [1:4:9892:0:0:8:0] Put# [1:4:9893:0:0:98:0] Put# [1:4:9894:0:0:43:0] Put# [1:4:9895:0:0:87:0] Put# [1:4:9896:0:0:5:0] Put# [1:4:9897:0:0:60:0] Put# [1:4:9898:0:0:75:0] Put# [1:4:9899:0:0:10:0] Put# [1:4:9900:0:0:8:0] Put# [1:4:9901:0:0:51:0] Put# [1:4:9902:0:0:68:0] Put# [1:4:9903:0:0:58:0] Put# [1:4:9904:0:0:56:0] Put# [1:4:9905:0:0:10:0] Put# [1:4:9906:0:0:13:0] Put# [1:4:9907:0:0:57:0] Put# [1:4:9908:0:0:96:0] Put# [1:4:9909:0:0:73:0] Put# [1:4:9910:0:0:96:0] Put# [1:4:9911:0:0:40:0] Put# [1:4:9912:0:0:58:0] Put# [1:4:9913:0:0:5:0] Put# [1:4:9914:0:0:14:0] Put# [1:4:9915:0:0:86:0] Put# [1:4:9916:0:0:94:0] Put# [1:4:9917:0:0:94:0] Put# [1:4:9918:0:0:48:0] Put# [1:4:9919:0:0:88:0] Put# [1:4:9920:0:0:74:0] Put# [1:4:9921:0:0:30:0] Put# [1:4:9922:0:0:72:0] Put# [1:4:9923:0:0:78:0] Put# [1:4:9924:0:0:28:0] Put# [1:4:9925:0:0:61:0] Put# [1:4:9926:0:0:7:0] Put# [1:4:9927:0:0:44:0] Put# [1:4:9928:0:0:86:0] Put# [1:4:9929:0:0:75:0] Put# [1:4:9930:0:0:25:0] Put# [1:4:9931:0:0:54:0] Put# [1:4:9932:0:0:49:0] Put# [1:4:9933:0:0:87:0] Put# [1:4:9934:0:0:25:0] Put# [1:4:9935:0:0:42:0] Put# [1:4:9936:0:0:72:0] Put# [1:4:9937:0:0:97:0] Put# [1:4:9938:0:0:52:0] Put# [1:4:9939:0:0:72:0] Put# [1:4:9940:0:0:89:0] Put# [1:4:9941:0:0:63:0] Put# [1:4:9942:0:0:74:0] Put# [1:4:9943:0:0:43:0] Put# [1:4:9944:0:0:38:0] Put# [1:4:9945:0:0:89:0] Put# 
[1:4:9946:0:0:57:0] Put# [1:4:9947:0:0:92:0] Put# [1:4:9948:0:0:71:0] Put# [1:4:9949:0:0:34:0] Put# [1:4:9950:0:0:88:0] Put# [1:4:9951:0:0:40:0] Put# [1:4:9952:0:0:27:0] Put# [1:4:9953:0:0:11:0] Put# [1:4:9954:0:0:62:0] Put# [1:4:9955:0:0:94:0] Put# [1:4:9956:0:0:12:0] Put# [1:4:9957:0:0:37:0] Put# [1:4:9958:0:0:16:0] Put# [1:4:9959:0:0:11:0] Put# [1:4:9960:0:0:44:0] Put# [1:4:9961:0:0:27:0] Put# [1:4:9962:0:0:58:0] Put# [1:4:9963:0:0:37:0] Put# [1:4:9964:0:0:91:0] Put# [1:4:9965:0:0:29:0] Put# [1:4:9966:0:0:21:0] Put# [1:4:9967:0:0:48:0] Put# [1:4:9968:0:0:72:0] Put# [1:4:9969:0:0:30:0] Put# [1:4:9970:0:0:35:0] Put# [1:4:9971:0:0:61:0] Put# [1:4:9972:0:0:60:0] Put# [1:4:9973:0:0:21:0] Put# [1:4:9974:0:0:12:0] Put# [1:4:9975:0:0:13:0] Put# [1:4:9976:0:0:54:0] Put# [1:4:9977:0:0:12:0] Put# [1:4:9978:0:0:37:0] Put# [1:4:9979:0:0:33:0] Put# [1:4:9980:0:0:97:0] Put# [1:4:9981:0:0:81:0] Put# [1:4:9982:0:0:67:0] Put# [1:4:9983:0:0:52:0] Put# [1:4:9984:0:0:68:0] Put# [1:4:9985:0:0:93:0] Put# [1:4:9986:0:0:86:0] Put# [1:4:9987:0:0:18:0] Put# [1:4:9988:0:0:78:0] Put# [1:4:9989:0:0:5:0] Put# [1:4:9990:0:0:15:0] Put# [1:4:9991:0:0:52:0] Put# [1:4:9992:0:0:49:0] Put# [1:4:9993:0:0:67:0] Put# [1:4:9994:0:0:7:0] Put# [1:4:9995:0:0:65:0] Put# [1:4:9996:0:0:5:0] Put# [1:4:9997:0:0:53:0] Put# [1:4:9998:0:0:16:0] Put# [1:4:9999:0:0:17:0] Put# [1:4:10000:0:0:1:0] >> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 |93.1%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> YdbSdkSessionsPool::RunSmallPlan [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> YdbSdkSessionsPool::WaitQueue1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2025-04-04T17:21:03.171746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509266374576367:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.171823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ab9/r3tmp/tmph1fA5a/pdisk_1.dat 2025-04-04T17:21:03.704972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.705125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.713042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:03.740138Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19745, node 1 2025-04-04T17:21:03.808905Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.808937Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.951484Z node 1 :NET_CLASSIFIER WARN: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.951507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.951513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.951630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.345053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-04-04T17:19:16.196650Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:16.275562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:16.293742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:16.293947Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:16.300481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:16.300724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:16.300966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:16.301079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:16.301191Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:16.301298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:16.301412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:16.301528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:16.301637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:16.301777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.301871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:16.301997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:16.329132Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:16.329379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:16.329436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:16.329611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.329752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:16.329827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:16.329865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:16.329944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:16.329998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:16.330035Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:16.330090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:16.330312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.330395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:16.330447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:16.330475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:16.330576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:16.330670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:16.330728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:16.330778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:16.330851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:16.330884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:16.330917Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:16.330959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:16.330989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:16.331015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:16.331361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-04T17:19:16.331479Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=62; 2025-04-04T17:19:16.331546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-04T17:19:16.331606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-04-04T17:19:16.331771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:16.331838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:16.331868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:16.332046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:16.332095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.332136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.332306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:16.332352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:16.332389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:16.332532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:16.332566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:16.332609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:16.332731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:16.332765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:16.332834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
6.427272Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-04-04T17:21:06.427340Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-04-04T17:21:06.427408Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-04-04T17:21:06.427476Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-04-04T17:21:06.427537Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-04-04T17:21:06.427598Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-04-04T17:21:06.427662Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-04-04T17:21:06.427723Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-04-04T17:21:06.427785Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-04-04T17:21:06.427867Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-04-04T17:21:06.427948Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-04-04T17:21:06.428014Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-04-04T17:21:06.428077Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-04-04T17:21:06.428139Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-04-04T17:21:06.428207Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-04-04T17:21:06.428263Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-04-04T17:21:06.428325Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-04-04T17:21:06.428431Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-04-04T17:21:06.428502Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-04-04T17:21:06.428550Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-04-04T17:21:06.428598Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-04-04T17:21:06.428641Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-04-04T17:21:06.428698Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-04-04T17:21:06.428743Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-04-04T17:21:06.428785Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-04-04T17:21:06.428835Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-04-04T17:21:06.428887Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-04-04T17:21:06.428940Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-04-04T17:21:06.428986Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-04-04T17:21:06.429029Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-04-04T17:21:06.429076Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-04-04T17:21:06.429118Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-04-04T17:21:06.429163Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-04-04T17:21:06.429214Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-04-04T17:21:06.429267Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-04-04T17:21:06.429310Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-04-04T17:21:06.429356Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-04-04T17:21:06.429405Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-04-04T17:21:06.429457Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-04-04T17:21:06.429505Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-04-04T17:21:06.429548Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-04-04T17:21:06.429605Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-04-04T17:21:06.429658Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-04-04T17:21:06.429705Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-04-04T17:21:06.429751Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-04-04T17:21:06.429797Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-04-04T17:21:06.429845Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-04-04T17:21:06.429896Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-04-04T17:21:06.429949Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-04-04T17:21:06.430007Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-04-04T17:21:06.430062Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-04-04T17:21:06.430112Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-04-04T17:21:06.430160Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-04-04T17:21:06.430206Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-04-04T17:21:06.430286Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-04-04T17:21:06.430358Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-04-04T17:21:06.430416Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-04-04T17:21:06.430481Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-04-04T17:21:06.430537Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-04-04T17:21:06.430586Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-04-04T17:21:06.430629Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-04-04T17:21:06.430674Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-04-04T17:21:06.430723Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-04-04T17:21:06.430772Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-04-04T17:21:06.430817Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-04-04T17:21:06.430885Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-04-04T17:21:06.430948Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-04-04T17:21:06.431006Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-04-04T17:21:06.431054Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-04-04T17:21:06.431098Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-04-04T17:21:06.431144Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-04-04T17:21:06.431186Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-04-04T17:21:06.431230Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-04-04T17:21:06.431291Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-04-04T17:21:06.431343Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-04-04T17:21:06.431394Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-04-04T17:21:06.431452Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-04-04T17:21:06.431514Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-04-04T17:21:06.431582Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-04-04T17:21:06.431639Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-04-04T17:21:06.431711Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-04-04T17:21:06.431773Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-04-04T17:21:06.431837Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-04-04T17:21:06.431887Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-04-04T17:21:06.871006Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T17:21:06.871902Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-04-04T17:21:06.971574Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-04-04T17:21:06.978313Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2025-04-04T17:21:03.143808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509266123710039:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.144388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a86/r3tmp/tmpQDHPbU/pdisk_1.dat 2025-04-04T17:21:03.616125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.617623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.631148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:03.643450Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2529, node 1 2025-04-04T17:21:03.835170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.835200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.835213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.835359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.321083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2025-04-04T17:21:03.244260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509266832116213:2278];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.244346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a9f/r3tmp/tmpY2Qsi6/pdisk_1.dat 2025-04-04T17:21:03.699298Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.700505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.700629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 2568, node 1 2025-04-04T17:21:03.757480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:03.758692Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.758712Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.822818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.822858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.822867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.822976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15114 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.324234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> TxUsage::WriteToTopic_Demo_31 [GOOD] >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD] >> KqpSqlIn::PhasesCount >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1 >> TxUsage::WriteToTopic_Demo_43 [GOOD] >> KqpService::RangeCache+UseCache [GOOD] >> LocalPartition::DescribeHang [GOOD] >> LocalPartition::DiscoveryHang >> TxUsage::WriteToTopic_Demo_32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-04-04T17:20:49.416313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509206953136850:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:49.417378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001647/r3tmp/tmpmdff4U/pdisk_1.dat 2025-04-04T17:20:49.841199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:49.848078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:49.848548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:49.861750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26341 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T17:20:50.066630Z node 1 :TX_PROXY DEBUG: actor# [1:7489509206953137080:2114] Handle TEvNavigate describe path dc-1 2025-04-04T17:20:50.066682Z node 1 :TX_PROXY DEBUG: Actor# [1:7489509211248104859:2447] HANDLE EvNavigateScheme dc-1 2025-04-04T17:20:50.066821Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489509206953137103:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:20:50.066867Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489509206953137103:2127], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T17:20:50.067088Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T17:20:50.068968Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136766:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509211248104866:2448] 2025-04-04T17:20:50.069022Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509206953136766:2056] Subscribe: subscriber# [1:7489509211248104866:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:50.069090Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104866:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509206953136766:2056] 2025-04-04T17:20:50.069129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509211248104863:2448] 2025-04-04T17:20:50.069192Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136766:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509211248104866:2448] 2025-04-04T17:20:50.069215Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136760:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509211248104864:2448] 2025-04-04T17:20:50.069235Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509206953136760:2050] Subscribe: subscriber# [1:7489509211248104864:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:50.069259Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136763:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489509211248104865:2448] 2025-04-04T17:20:50.069274Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489509206953136763:2053] Subscribe: subscriber# [1:7489509211248104865:2448], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T17:20:50.069355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104864:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509206953136760:2050] 2025-04-04T17:20:50.069406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104865:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509206953136763:2053] 2025-04-04T17:20:50.069433Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509211248104861:2448] 2025-04-04T17:20:50.069506Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489509211248104860:2448][/dc-1] Set up state: owner# [1:7489509206953137103:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:50.069606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489509211248104862:2448] 2025-04-04T17:20:50.069651Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489509211248104860:2448][/dc-1] Path was already updated: owner# [1:7489509206953137103:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T17:20:50.069690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104864:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509211248104861:2448], cookie# 1 2025-04-04T17:20:50.069704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104865:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509211248104862:2448], cookie# 1 2025-04-04T17:20:50.069716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104866:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509211248104863:2448], cookie# 1 2025-04-04T17:20:50.070324Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136760:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509211248104864:2448] 2025-04-04T17:20:50.070365Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136760:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509211248104864:2448], cookie# 1 2025-04-04T17:20:50.070395Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136763:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489509211248104865:2448] 2025-04-04T17:20:50.070424Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136763:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509211248104865:2448], cookie# 1 2025-04-04T17:20:50.070456Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489509206953136766:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489509211248104866:2448], cookie# 1 2025-04-04T17:20:50.070493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104864:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509206953136760:2050], cookie# 1 2025-04-04T17:20:50.070509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7489509211248104865:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509206953136763:2053], cookie# 1 2025-04-04T17:20:50.070554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489509211248104866:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509206953136766:2056], cookie# 1 2025-04-04T17:20:50.070592Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509211248104861:2448], cookie# 1 2025-04-04T17:20:50.070620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T17:20:50.070636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509211248104862:2448], cookie# 1 2025-04-04T17:20:50.070656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T17:20:50.070686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489509211248104863:2448], cookie# 1 2025-04-04T17:20:50.070700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489509211248104860:2448][/dc-1] Unexpected sync response: sender# [1:7489509211248104863:2448], cookie# 1 2025-04-04T17:20:50.135207Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489509206953137103:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T17:20:50.135649Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489509206953137103:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... nfo }] } 2025-04-04T17:21:06.114119Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509249642020906:2127], cacheItem# { Subscriber: { Subscriber: [3:7489509262526924208:3197] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:06.114219Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509279706794455:4065], recipient# [3:7489509279706794454:2348], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:06.939732Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509249642020906:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:06.939896Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509249642020906:2127], cacheItem# { Subscriber: { Subscriber: [3:7489509249642021987:2963] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:06.940021Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509279706794469:4066], recipient# [3:7489509279706794468:2349], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:06.944199Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509249642020906:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:06.944310Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509249642020906:2127], cacheItem# { Subscriber: { Subscriber: [3:7489509249642021987:2963] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:06.944380Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509279706794471:4067], recipient# [3:7489509279706794470:2350], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:07.114281Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509249642020906:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:07.114402Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509249642020906:2127], cacheItem# { Subscriber: { Subscriber: [3:7489509262526924208:3197] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:07.114514Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509284001761775:4071], recipient# [3:7489509284001761774:2351], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:07.941886Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509249642020906:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:07.942061Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509249642020906:2127], cacheItem# { Subscriber: { Subscriber: [3:7489509249642021987:2963] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:07.942221Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509284001761789:4072], recipient# [3:7489509284001761788:2352], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:07.944775Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509249642020906:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:07.944948Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509249642020906:2127], cacheItem# { Subscriber: { Subscriber: [3:7489509249642021987:2963] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:07.945148Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509284001761791:4073], recipient# [3:7489509284001761790:2353], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:08.116005Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489509249642020906:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:21:08.116159Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489509249642020906:2127], cacheItem# { Subscriber: { Subscriber: [3:7489509262526924208:3197] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:21:08.116275Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489509288296729095:4077], recipient# [3:7489509288296729094:2354], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TxUsage::WriteToTopic_Demo_44 >> KqpSqlIn::KeyTypeMissmatch_Int [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Str >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> DataShardVolatile::DistributedWrite >> KqpWorkload::KV [GOOD] |93.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 14719, MsgBus: 29503 2025-04-04T17:13:48.075030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489507401231465982:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:13:48.075161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a5/r3tmp/tmpB4hix1/pdisk_1.dat 2025-04-04T17:13:48.518898Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:13:48.524150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:13:48.524241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:13:48.527858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14719, node 1 2025-04-04T17:13:48.710862Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:13:48.710891Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:13:48.710897Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:13:48.711004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29503 TClient is connected to server localhost:29503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:13:49.432746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.469090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:13:49.607087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.829585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:49.908648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:13:51.328194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414116369657:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.328308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.566395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.611401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.650221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.680325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.713155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.748127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:13:51.803763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414116370165:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.803825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.804073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489507414116370170:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:13:51.808131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:13:51.819294Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:13:51.819492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489507414116370172:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:13:51.876361Z node 1 :TX_PROXY ERROR: Actor# [1:7489507414116370225:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:13:52.702929Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTMzM2MzMzItNWQwY2U0MmEtN2IyOWY4YTEtZTk4ODRmMzc=, ActorId: [1:7489507414116369630:2403], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:13:52.702979Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTMzM2MzMzItNWQwY2U0MmEtN2IyOWY4YTEtZTk4ODRmMzc=, ActorId: [1:7489507414116369630:2403], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:13:52.702994Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTMzM2MzMzItNWQwY2U0MmEtN2IyOWY4YTEtZTk4ODRmMzc=, ActorId: [1:7489507414116369630:2403], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:13:52.703009Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTMzM2MzMzItNWQwY2U0MmEtN2IyOWY4YTEtZTk4ODRmMzc=, ActorId: [1:7489507414116369630:2403], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:13:52.703066Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTMzM2MzMzItNWQwY2U0MmEtN2IyOWY4YTEtZTk4ODRmMzc=, ActorId: [1:7489507414116369630:2403], ActorState: unknown state, Session actor destroyed 2025-04-04T17:13:52.708740Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTYwMmRkNzQtNWRiM2MyZDYtYTM1ODNjZGItYWMxNmJmMTM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTYwMmRkNzQtNWRiM2MyZDYtYTM1ODNjZGItYWMxNmJmMTM= 2025-04-04T17:13:52.708929Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTYwMmRkNzQtNWRiM2MyZDYtYTM1ODNjZGItYWMxNmJmMTM=, ActorId: [1:7489507418411337779:2488], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:13:52.725371Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjQ5MTA2YzUtNmVlOTFlZWItNzkwNTg2NGQtYzU0YjE2MWY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjQ5MTA2YzUtNmVlOTFlZWItNzkwNTg2NGQtYzU0YjE2MWY= 2025-04-04T17:13:52.725502Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjQ5MTA2YzUtNmVlOTFlZWItNzkwNTg2NGQtYzU0YjE2MWY=, ActorId: [1:7489507418411337782:2490], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:13:52.736926Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Mjc0YzEzMDUtMmRlY2E1MjUtY2U2MzU4ZGUtYzEwMzg5YTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Mjc0YzEzMDUtMmRlY2E1MjUtY2U2MzU4ZGUtYzEwMzg5YTI= 2025-04-04T17:13:52.737113Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Mjc0YzEzMDUtMmRlY2E1MjUtY2U2MzU4ZGUtYzEwMzg5YTI=, ActorId: [1:7489507418411337784:2492], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:13:52.745884Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWI0NjBmZWMtYmRhZDIyMi0zYzRlYWI0ZS0yNzc5MmEwOA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NWI0NjBmZWMtYmRhZDIyMi0zYzRlYWI0ZS0yNzc5MmEwOA== 2025-04-04T17:13:52.746379Z node 1 :KQP_SESSION DEBUG: 
SessionId: ydb://session/3?node_id=1&id=NWI0NjBmZWMtYmRhZDIyMi0zYzRlYWI0ZS0yNzc5MmEwOA==, ActorId: [1:7489507418411337788:2494], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:13:52.754703Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDAwY2IwMGItZGI5YjI0MTQtYWU2Yjk0NDEtMmI5ZGE5Mzc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDAwY2IwMGItZGI5YjI0MTQtYWU2Yjk0NDEtMmI5ZGE5Mzc= 2025-04-04T17:13:52.754996Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDAwY2IwMGItZGI5YjI0MTQtYWU2Yjk0NDEtMmI5ZGE5Mzc=, ActorId: [1:7489507418411337790:2496], ActorState: unknown state, session actor bootstrapped 2025-04-04T17:13:52.764685Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdhYzkxZWMtOGYwMTJkYjctNzhmODBhZWUtMjlkMzEyNjI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTdhYzkxZWMtOGYwMTJkYjctNzhmODBhZWUtMjlkMzEyNjI= 2025-04-04T17:13:52.765176Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTdhYzkxZWMtOGYwMTJkYjctNzhmODBhZWUtMjlkMzEyNjI=, ActorId: [1:74895074184113 ... /pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.100381Z node 7 :TX_PROXY ERROR: Actor# [7:7489508737611335157:2429] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.111359Z node 7 :TX_PROXY ERROR: Actor# [7:7489508737611335168:2437] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.119501Z node 7 :TX_PROXY ERROR: Actor# [7:7489508737611335176:2443] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.124091Z node 7 :TX_PROXY ERROR: Actor# [7:7489508737611335184:2449] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.128001Z node 7 :TX_PROXY ERROR: Actor# [7:7489508737611335192:2455] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.131415Z node 7 :TX_PROXY ERROR: Actor# [7:7489508737611335200:2461] txid# 281474976715675, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.132254Z node 7 :TX_PROXY ERROR: Actor# 
[7:7489508737611335205:2465] txid# 281474976715676, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.135068Z node 7 :TX_PROXY ERROR: Actor# [7:7489508737611335212:2471] txid# 281474976715677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.827400Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489508716136497767:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:00.827570Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:10.951525Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:10.951558Z node 7 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 31976, MsgBus: 3182 2025-04-04T17:20:50.529894Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489509210196151834:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:50.529998Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018a5/r3tmp/tmpL3i6eq/pdisk_1.dat 2025-04-04T17:20:50.668695Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:50.696988Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:50.697114Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:50.698560Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31976, node 8 2025-04-04T17:20:50.775054Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:50.775088Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:50.775100Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:50.775304Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3182 TClient is connected to server localhost:3182 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:51.533132Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:51.555943Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:51.648889Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:51.900344Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:51.990913Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:55.530236Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7489509210196151834:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:55.530356Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:55.947055Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489509231670990108:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:55.947248Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:56.019002Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:56.060761Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:56.101690Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:56.218568Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:56.263662Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:56.310508Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:56.399303Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489509235965957921:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:56.399441Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:56.399473Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7489509235965957926:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:56.406354Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:56.421655Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7489509235965957928:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:20:56.507938Z node 8 :TX_PROXY ERROR: Actor# [8:7489509235965957983:3462] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:05.644225Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:21:05.644259Z node 8 :IMPORT WARN: Table profiles were not loaded took: 10.986958s took: 10.989012s took: 10.990428s took: 10.990901s took: 10.991566s took: 10.994590s took: 10.995035s took: 10.995725s took: 10.996020s took: 10.997443s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 9455, MsgBus: 8612 2025-04-04T17:19:34.556290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508886196949190:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:34.556380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bc7/r3tmp/tmpCmHA1z/pdisk_1.dat 2025-04-04T17:19:34.926388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:34.945835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:34.945957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:34.948134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9455, node 1 2025-04-04T17:19:35.041633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:35.041652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:35.041658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:35.041751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8612 TClient is connected to server localhost:8612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:19:35.544291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:35.564585Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:19:37.589535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508899081851745:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.589660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:37.799420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:19:38.303462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508903376820636:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:38.303547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:38.303698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508903376820641:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:38.306661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:19:38.316752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-04T17:19:38.317192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508903376820643:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:19:38.392000Z node 1 :TX_PROXY ERROR: Actor# [1:7489508903376820724:3413] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:39.557012Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508886196949190:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:39.557088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:19:49.922340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:49.922380Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.111253s took: 0.111153s took: 0.113178s took: 0.114053s took: 0.114399s took: 0.113933s took: 0.114827s took: 0.115974s took: 0.116297s took: 0.119181s took: 0.325756s took: 0.331598s took: 0.333239s took: 0.333100s took: 0.334277s took: 0.334982s took: 0.337877s took: 0.339911s took: 0.340163s took: 0.340845s took: 0.134411s took: 0.137310s took: 0.137296s took: 0.140358s took: 0.140371s took: 0.141411s took: 0.141255s took: 0.141906s took: 0.145383s took: 0.144808s took: 0.024233s took: 0.026567s took: 0.025554s took: 0.027721s took: 0.029200s took: 0.027080s took: 0.031317s took: 0.033340s took: 0.035306s took: 0.037967s took: 0.192142s took: 0.192768s took: 0.193954s took: 0.211778s took: 0.213938s took: 0.213515s took: 0.216271s took: 0.216892s took: 0.217181s took: 0.216813s 2025-04-04T17:21:10.230565Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:21:10.230598Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-04T17:21:10.230609Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-04T17:21:10.230620Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-04-04T17:21:10.230634Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-04-04T17:21:10.230646Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-04-04T17:21:10.230657Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-04-04T17:21:10.230673Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-04T17:21:10.230692Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-04-04T17:21:10.230712Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-04-04T17:21:10.230734Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-04-04T17:21:10.243700Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-04T17:21:10.243740Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-04T17:21:10.243757Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-04T17:21:10.243777Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-04-04T17:21:10.243794Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-04T17:21:10.243811Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-04T17:21:10.243829Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:21:10.243846Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-04T17:21:10.243861Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-04T17:21:10.243878Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-04T17:21:10.243892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-04-04T17:21:10.243906Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-04-04T17:21:10.243924Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-04-04T17:21:10.243940Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-04T17:21:10.243956Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-04-04T17:21:10.243980Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T17:21:10.244004Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-04T17:21:10.244023Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-04-04T17:21:10.244039Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-04-04T17:21:10.244054Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-04-04T17:21:10.244075Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-04-04T17:21:10.244090Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-04T17:21:10.244119Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-04-04T17:21:10.244141Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-04-04T17:21:10.244156Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-04-04T17:21:10.244171Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-04-04T17:21:10.244186Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-04-04T17:21:10.244204Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-04-04T17:21:10.265447Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> KqpNewEngine::PrimaryView [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] >> KqpReturning::Random [GOOD] |93.2%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TPersQueueMirrorer::TestBasicRemote ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T17:20:30.562288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:20:30.562393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:20:30.562514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:20:30.562558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:20:30.562605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:20:30.562639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:20:30.562715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:20:30.562811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:20:30.563133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:20:30.650630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:20:30.650694Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T17:20:30.665777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:20:30.666269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:20:30.666463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:20:30.673993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:20:30.674189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-04T17:20:30.674844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:30.675439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:20:30.678613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:30.679882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:20:30.679952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:30.680129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:20:30.680182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:20:30.680223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:20:30.680340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T17:20:30.688985Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T17:20:30.817719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:20:30.817936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:30.818186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:20:30.818434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:20:30.818493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:30.820695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:30.820809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:20:30.820948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:30.820991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-04-04T17:20:30.821016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:20:30.821051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:20:30.822572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:30.822613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:20:30.822639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:20:30.824206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:30.824250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:30.824289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:30.824360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:20:30.827908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:20:30.832118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:20:30.832311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:20:30.833204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:30.833351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:20:30.833397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:30.833682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:20:30.833731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:30.833896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:20:30.833983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:20:30.836269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-04T17:20:30.836337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:20:30.836532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:30.836575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:20:30.837000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:30.837047Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:20:30.837157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:20:30.837214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:30.837257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:20:30.837293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:30.837330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:20:30.837368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:30.837404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... nBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 
1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:21:12.260759Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:21:12.261031Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 302us result status StatusSuccess 2025-04-04T17:21:12.261876Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:21:12.273095Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:810:2626] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T17:21:12.273215Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:731:2626] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T17:21:12.273373Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:810:2626] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743787272246555 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743787272246555 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743787272246555 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:21:12.276301Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:810:2626] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-04T17:21:12.276413Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:731:2626] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: Trying to start YDB, gRPC: 27567, MsgBus: 24703 
2025-04-04T17:20:24.817582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509099487238810:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:24.817646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014d5/r3tmp/tmpDxZwRP/pdisk_1.dat 2025-04-04T17:20:25.229858Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:25.234880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:25.234994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:25.238200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27567, node 1 2025-04-04T17:20:25.321989Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:25.322019Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:25.322028Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:25.322170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24703 TClient is connected to server localhost:24703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:25.824924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:25.854277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:25.991118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.144874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:26.217261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:28.021089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509116667109529:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:28.021208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:28.319270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.356274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.387143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.410537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.439481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.515257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.599331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509116667110046:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:28.599495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:28.599906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509116667110051:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:28.603877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:28.620939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509116667110054:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:28.715589Z node 1 :TX_PROXY ERROR: Actor# [1:7489509116667110108:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:29.822412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509099487238810:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:29.822486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13618, MsgBus: 62556 2025-04-04T17:20:30.733744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509128063507231:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:30.733817Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014d5/r3tmp/tmpE6kzvQ/pdisk_1.dat 2025-04-04T17:20:30.885439Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:30.896802Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:30.896892Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:30.898413Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13618, node 2 2025-04-04T17:20:30.967856Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:30.967880Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:30.967890Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:30.968001Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62556 TClient is connected to server localhost:62556 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:20:31.391486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:20:31.408927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:31.485465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:31.684200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:31.762502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:33.938463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... lf is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:59.815131Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:59.850620Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:59.888612Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:59.926125Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:00.006299Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509255528428836:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:00.006464Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:00.006556Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509255528428841:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:00.010129Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:00.019659Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509255528428843:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:00.110460Z node 6 :TX_PROXY ERROR: Actor# [6:7489509255528428900:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:00.980307Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509234053590057:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:00.980372Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8094, MsgBus: 8985 2025-04-04T17:21:02.657611Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509265732651812:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:02.657702Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014d5/r3tmp/tmpv0NMCY/pdisk_1.dat 2025-04-04T17:21:02.770780Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:02.800693Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:02.800769Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:02.802402Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8094, node 7 2025-04-04T17:21:02.855863Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:02.855893Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:02.855902Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:02.856041Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8985 TClient is connected to server localhost:8985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:21:03.480510Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:03.489808Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:21:03.506518Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:03.632290Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:21:03.874841Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:04.015185Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:07.197473Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489509287207490085:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:07.197598Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:07.228056Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:07.262562Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:07.298175Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:07.334685Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:07.373720Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:07.449017Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:07.500645Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489509287207490600:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:07.500747Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:07.500763Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489509287207490605:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:07.505489Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:07.517374Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489509287207490607:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:07.605075Z node 7 :TX_PROXY ERROR: Actor# [7:7489509287207490660:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:07.657826Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489509265732651812:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:07.657915Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:08.881698Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:21:08.923853Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:21:08.968657Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::Random [GOOD] Test command err: Trying to start YDB, gRPC: 15591, MsgBus: 5479 2025-04-04T17:20:28.184905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509119023786663:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:28.184961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cf/r3tmp/tmpWLgtwm/pdisk_1.dat 2025-04-04T17:20:28.605412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:28.605750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:28.609942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15591, node 1 2025-04-04T17:20:28.639372Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:20:28.639404Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:20:28.641033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:28.718919Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:28.718958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:28.718972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:28.719100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5479 TClient is connected to server localhost:5479 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:29.308757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:29.347091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:29.506530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:29.690155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:29.762714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:31.688318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509131908690320:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:31.688598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:31.940566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:31.974168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:32.007395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:32.038805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:32.067893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:32.108118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:32.147295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509136203658128:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:32.147397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:32.148243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509136203658133:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:32.152240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:32.169875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509136203658135:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:32.246573Z node 1 :TX_PROXY ERROR: Actor# [1:7489509136203658191:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:33.187949Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509119023786663:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:33.199307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Warning: At function: Filter, At function: Coalesce
:4:50: Warning: At function: SqlIn
:4:50: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 2868, MsgBus: 19602 2025-04-04T17:20:34.436507Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509141531584198:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:34.436570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cf/r3tmp/tmpwCuVv5/pdisk_1.dat 2025-04-04T17:20:34.581448Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2868, node 2 2025-04-04T17:20:34.601003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:34.601087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:34.603345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:34.638589Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:34.638614Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:34.638621Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:34.638740Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19602 TClient is connected to server localhost:19602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:35.060359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:35.069573Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:35.076955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at ... 
access permissions } 2025-04-04T17:21:01.805066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:01.840705Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:01.874431Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:01.911633Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:01.945581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:01.980173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:02.024623Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509265740121345:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:02.024713Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509265740121350:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:02.024724Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:02.027809Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:02.039063Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489509265740121352:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:02.142295Z node 5 :TX_PROXY ERROR: Actor# [5:7489509265740121406:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:03.130862Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489509248560249872:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.130941Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:03.272381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 [[[2];["321"]];[["111"];[2]]] Trying to start YDB, gRPC: 4261, MsgBus: 11466 2025-04-04T17:21:05.445895Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509278012575187:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:05.445976Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cf/r3tmp/tmpuBjW4G/pdisk_1.dat 2025-04-04T17:21:05.576042Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:05.611368Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:05.611485Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:05.613168Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4261, node 6 2025-04-04T17:21:05.676920Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:05.676953Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:05.676963Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:05.677080Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11466 TClient is connected to server localhost:11466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:21:06.298884Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:21:06.316928Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:06.396897Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:06.567314Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:06.637693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:09.855040Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509295192446148:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:09.855126Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:09.902572Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:09.953888Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:09.988801Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.028924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.067649Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.107319Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.188512Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509299487413957:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:10.188602Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:10.188631Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509299487413962:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:10.193855Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:10.204274Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509299487413964:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:10.302024Z node 6 :TX_PROXY ERROR: Actor# [6:7489509299487414020:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:10.446338Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509278012575187:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:10.446412Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:11.563466Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> ControlImplementationTests::TestControlWrapperBounds [GOOD] >> ControlImplementationTests::TestTControl [GOOD] >> ControlImplementationTests::TestRegisterSharedControl [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> ControlImplementationTests::TestControlWrapperBounds [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> ControlImplementationTests::TestTControl [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> ControlImplementationTests::TestRegisterSharedControl [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> ControlImplementationTests::TestRegisterLocalControl [GOOD] >> IcbAsActorTests::TestHttpPostReaction [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> YdbSdkSessionsPool::CustomPlan [GOOD] >> YdbSdkSessionsPool::FailTest >> ControlImplementationTests::TestControlWrapperAsI64 >> YdbSdkSessionsPool::StressTestSync10 [GOOD] >> YdbSdkSessionsPool::StressTestSync1 [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> ControlImplementationTests::TestRegisterLocalControl [GOOD] |93.2%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] |93.2%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartNo [GOOD] >> DataShardTxOrder::RandomPoints_DelayData [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> ControlImplementationTests::TestControlWrapperAsI64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD] >> ControlImplementationTests::TestParallelRegisterSharedControl >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit Test command err: 2025-04-04T17:21:03.199819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509267682648604:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.200287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a90/r3tmp/tmpsJInlE/pdisk_1.dat 2025-04-04T17:21:03.636506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.658182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.658307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.662230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1599, node 1 2025-04-04T17:21:03.871247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.871273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.871281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.871386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.373021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:21:08.192666Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509267682648604:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:08.192759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk Test command err: 2025-04-04T17:21:03.216236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509268616842681:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.216271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a94/r3tmp/tmpNgNRA1/pdisk_1.dat 2025-04-04T17:21:03.801269Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.817708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.817836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.827184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22840, node 1 2025-04-04T17:21:03.915048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.915079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.915084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.915175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.303275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:21:08.216753Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509268616842681:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:08.216858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] Test command err: Trying to start YDB, gRPC: 18533, MsgBus: 22050 2025-04-04T17:20:30.212852Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509124858204791:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:30.213185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cd/r3tmp/tmp7ynNN3/pdisk_1.dat 2025-04-04T17:20:30.572591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:30.595924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:30.596024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18533, node 1 2025-04-04T17:20:30.600823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:30.647309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:30.647333Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:30.647354Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:30.647500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22050 TClient is connected to server localhost:22050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:31.246381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:33.402230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509137743107186:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:33.402350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:33.669939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:33.788737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509137743107289:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:33.788829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:33.795338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509137743107294:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:33.799170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:33.810661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509137743107296:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:20:33.910585Z node 1 :TX_PROXY ERROR: Actor# [1:7489509137743107348:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:34.051760Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509142038074683:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-04T17:20:34.052733Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjcyYjNjMTgtY2Q2ODEyZjctMmJiYTIzMDEtZmIxMzgyYTI=, ActorId: [1:7489509137743107183:2329], ActorState: ExecuteState, TraceId: 01jr0x34q8fze70sjfnbvjn16a, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:20:34.075761Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509142038074692:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:20:34.076171Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjcyYjNjMTgtY2Q2ODEyZjctMmJiYTIzMDEtZmIxMzgyYTI=, ActorId: [1:7489509137743107183:2329], ActorState: ExecuteState, TraceId: 01jr0x34re9tjcdfh52pnx3es1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 16925, MsgBus: 9382 2025-04-04T17:20:34.825809Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509144114979559:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:34.825871Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cd/r3tmp/tmpzMFxFk/pdisk_1.dat 2025-04-04T17:20:34.954907Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:34.978311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:34.978408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:34.980010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16925, node 2 2025-04-04T17:20:35.050017Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:35.050060Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:35.050091Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:35.050209Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9382 TClient is connected to server localhost:9382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:35.523445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:37.983937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509156999882096:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.984025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.992979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:38.053159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509161294849491:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:38.053243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:38.053438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509161294849497:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:38.056980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:20:38.066972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TP ... disk_1.dat 2025-04-04T17:20:54.765038Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:54.784128Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:54.784234Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:54.786712Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11652, node 5 2025-04-04T17:20:54.838824Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:54.838848Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:54.838856Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:54.839003Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16536 TClient is connected to server localhost:16536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:55.336820Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:57.984141Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509243414158953:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:57.984141Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509243414158945:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:57.984231Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:57.988130Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:20:58.000192Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489509243414158959:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:20:58.091257Z node 5 :TX_PROXY ERROR: Actor# [5:7489509247709126306:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:58.214760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:20:59.631813Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489509230529256402:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:59.631903Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:05.554534Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0x41xaddymbpabr9axkpg9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NTQ1MWFjNmUtZGNiNzMyMzctYjgxZWI3YTgtYWY1MjdmZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:21:05.554732Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NTQ1MWFjNmUtZGNiNzMyMzctYjgxZWI3YTgtYWY1MjdmZDM=, ActorId: [5:7489509269183963685:2530], ActorState: ExecuteState, TraceId: 01jr0x41xaddymbpabr9axkpg9, Create QueryResponse for error on request, msg: 2025-04-04T17:21:07.214127Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jr0x43h0b6cwcpnjv5p8zdr5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDIwMjg2MDUtNDg1NWEzZTYtNDRiODA4YWEtYjNiOTgzMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
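
The compile errors earlier in this test's output (code 2032, "Missing not null column in input: Value", and code 2031, the failed conversion of Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>) and the runtime TKqpEnsure failure just above all come from NOT NULL column enforcement in KQP. A minimal YQL sketch that would trigger the same checks; the table name is illustrative, not taken from the test:

-- /Root/NotNullDemo is a hypothetical table, not one used by the test suite.
CREATE TABLE `/Root/NotNullDemo` (
    Key Uint64,
    Value String NOT NULL,
    PRIMARY KEY (Key)
);

-- Rejected at compile time with code 2032: Value is NOT NULL but absent from the input.
UPSERT INTO `/Root/NotNullDemo` (Key) VALUES (1);

-- Rejected with code 2031: an explicit NULL cannot be converted to the non-optional String column.
UPSERT INTO `/Root/NotNullDemo` (Key, Value) VALUES (1, NULL);

-- Accepted: every NOT NULL column is initialized.
UPSERT INTO `/Root/NotNullDemo` (Key, Value) VALUES (1, "ok");
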
2025-04-04T17:21:07.214449Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NDIwMjg2MDUtNDg1NWEzZTYtNDRiODA4YWEtYjNiOTgzMTQ=, ActorId: [5:7489509277773898382:2556], ActorState: ExecuteState, TraceId: 01jr0x43h0b6cwcpnjv5p8zdr5, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 25028, MsgBus: 16498 2025-04-04T17:21:07.920528Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509283168528095:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:07.920597Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014cd/r3tmp/tmpym2xhZ/pdisk_1.dat 2025-04-04T17:21:08.044690Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:08.074753Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:08.074878Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:08.077128Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25028, node 6 2025-04-04T17:21:08.128760Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:08.128789Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:08.128797Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:08.129004Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16498 TClient is connected to server localhost:16498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:08.707824Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:11.980524Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509300348397941:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:11.980613Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:11.995058Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:21:12.055010Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509304643365388:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:12.055092Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:12.055121Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509304643365393:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:12.058507Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:21:12.066519Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509304643365395:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:21:12.141179Z node 6 :TX_PROXY ERROR: Actor# [6:7489509304643365446:2430] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:12.920596Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509283168528095:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:12.920691Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSqlIn::PhasesCount [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> ControlImplementationTests::TestParallelRegisterSharedControl [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit |93.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSqlIn::KeyTypeMissmatch_Str [GOOD] >> KqpSqlIn::InWithCast >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock |93.3%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-04-04T17:17:44.091378Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:17:44.161450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:17:44.161500Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:44.167838Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:17:44.168253Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:17:44.168533Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:17:44.178015Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:17:44.214476Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:17:44.214552Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:17:44.216037Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:17:44.216094Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:17:44.216147Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:17:44.216428Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:17:44.216680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:17:44.216738Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:17:44.267653Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 
2025-04-04T17:17:44.296014Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:17:44.296143Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:17:44.296226Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:17:44.296272Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:17:44.296310Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:17:44.296336Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:44.296458Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:44.296498Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:44.296734Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:17:44.296812Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:17:44.296894Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:44.296928Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:17:44.296958Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:17:44.296987Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:17:44.297010Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:17:44.297042Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:17:44.297071Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:17:44.297137Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.297162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.297198Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:17:44.299138Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:17:44.299178Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:17:44.299242Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:17:44.299354Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:17:44.299388Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:17:44.299431Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 
2025-04-04T17:17:44.299459Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:44.299482Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:17:44.299524Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:17:44.299549Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:44.299756Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:17:44.299782Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:17:44.299804Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:17:44.299824Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:44.299853Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:17:44.299878Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:17:44.299909Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:17:44.299946Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:44.299962Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:17:44.311844Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:17:44.311914Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:17:44.311952Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:17:44.311990Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:17:44.312041Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:17:44.312528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.312574Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:17:44.312613Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:17:44.312741Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:17:44.312769Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:17:44.312865Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:17:44.312908Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:44.312946Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:17:44.312979Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:17:44.319312Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:17:44.319364Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:17:44.319514Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:44.319545Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:17:44.319577Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:17:44.319607Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:17:44.319631Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:17:44.319666Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:17:44.319690Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:17:44.319719Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:44.319747Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:17:44.319780Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:17:44.319807Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:17:44.319999Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:17:44.320039Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:17:44.320061Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:17:44.320084Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:17:44.320103Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:17:44.320140Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:17:44.320158Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:17:44.320188Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:17:44.320229Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:17:44.320282Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:17:44.320306Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:17:44.320333Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-04-04T17:17:44.320363Z node 1 :TX_D ...
est 9437184 consumer 9437184 txId 523 2025-04-04T17:21:15.940794Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-04-04T17:21:15.940834Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.940865Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-04-04T17:21:15.941013Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-04-04T17:21:15.941041Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.941063Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-04-04T17:21:15.941158Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-04-04T17:21:15.941182Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.941203Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-04-04T17:21:15.941296Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-04-04T17:21:15.941318Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.941337Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-04-04T17:21:15.941447Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-04-04T17:21:15.941472Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.941511Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-04-04T17:21:15.941596Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-04-04T17:21:15.941618Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.941638Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-04-04T17:21:15.941751Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-04-04T17:21:15.941774Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-04-04T17:21:15.941795Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-04-04T17:21:15.941869Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-04-04T17:21:15.941890Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.941910Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-04-04T17:21:15.941997Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-04-04T17:21:15.942020Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942038Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-04-04T17:21:15.942083Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-04-04T17:21:15.942101Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942121Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-04-04T17:21:15.942252Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-04-04T17:21:15.942287Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942307Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-04-04T17:21:15.942408Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-04-04T17:21:15.942431Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942453Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-04-04T17:21:15.942522Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-04-04T17:21:15.942543Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942562Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-04-04T17:21:15.942674Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-04-04T17:21:15.942696Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942713Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-04-04T17:21:15.942778Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-04-04T17:21:15.942799Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942819Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-04-04T17:21:15.942908Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-04-04T17:21:15.942929Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.942947Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-04-04T17:21:15.943045Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-04-04T17:21:15.943069Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.943089Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-04-04T17:21:15.943170Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-04-04T17:21:15.943192Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.943212Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2025-04-04T17:21:15.943329Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 515 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-04-04T17:21:15.943353Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.943374Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 515 2025-04-04T17:21:15.963692Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:21:15.963751Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-04-04T17:21:15.963802Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:98:2133], exec latency: 1 ms, propose latency: 2 ms 2025-04-04T17:21:15.963865Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-04T17:21:15.963910Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:21:15.964151Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:21:15.964182Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2025-04-04T17:21:15.964212Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [16:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:21:15.964235Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:21:15.964420Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:234:2227], Recipient [16:345:2313]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-04-04T17:21:15.964465Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:21:15.964499Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 31 25 31 17 29 30 31 28 29 31 24 31 4 31 3 11 22 12 11 7 30 12 - - 12 9 - 9 2 9 - - actual 31 25 31 17 29 30 31 28 29 31 24 31 4 31 3 11 22 12 11 7 30 12 - - 12 9 - 9 2 9 - - interm 30 25 30 17 29 30 24 28 29 29 24 24 4 12 3 11 22 12 11 7 30 12 - - 12 9 - 9 2 9 - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD] Test command err: Trying to start YDB, gRPC: 22516, MsgBus: 64147 2025-04-04T17:20:21.312181Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509085636547978:2230];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:21.312757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e2/r3tmp/tmp8EKuPI/pdisk_1.dat 2025-04-04T17:20:21.672591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:21.696417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:21.696522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:21.698377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22516, node 1 2025-04-04T17:20:21.793866Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:21.793896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:21.793904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:21.794068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64147 TClient is connected to server localhost:64147 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:22.337568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:22.355302Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:20:22.366636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:22.515703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:22.681867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:22.763876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:24.394668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509098521451467:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.394795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.675601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.710364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.742124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.776550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.818467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.862499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:24.919418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509098521451977:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.919506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.919997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509098521451982:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:24.924070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:24.938816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509098521451984:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:25.038965Z node 1 :TX_PROXY ERROR: Actor# [1:7489509102816419335:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:26.104580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.136381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.213841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.284518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:20:26.309767Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509085636547978:2230];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:26.309849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
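
The sequence above — TPoolFetcherActor NOT_FOUND warnings, the ESchemeOpCreateResourcePool suboperation, the "completed, doublechecking" retry, and the TX_PROXY "path exist, request accepts it" response — shows the workload service lazily creating the `default` resource pool on first query and treating the already-created path as success. For comparison, a minimal sketch of declaring a pool explicitly, assuming the server build supports CREATE RESOURCE POOL; the pool name and limits are illustrative, not taken from the test:

CREATE RESOURCE POOL illustrative_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10, -- at most 10 queries executing in the pool at once
    QUEUE_SIZE = 100             -- up to 100 queries may wait beyond that limit
);
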
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
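
Warning 1108 above concerns IN over nullable operands: legacy YQL IN can treat a NULL element as a plain non-match, while ANSI three-valued logic makes the comparison NULL when no element matches and the collection contains NULL. A minimal sketch of the suggested fix; the table and column names are illustrative, not taken from the test:

PRAGMA AnsiInForEmptyOrNullableItemsCollections;

SELECT Key, Value
FROM `/Root/IllustrativeTable`
WHERE Value IN ("a", "b", NULL); -- under the pragma, a non-matching row evaluates to NULL here (filtered out by WHERE), per ANSI semantics
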
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 29229, MsgBus: 11729 2025-04-04T17:20:30.418340Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509127233650271:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:30.418384Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e2/r3tmp/tmpJfIOV8/pdisk_1.dat 2025-04-04T17:20:30.523592Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:30.552781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:30.552908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29229, node 2 2025-04-04T17:20:30.556318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:30.588912Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:30.588933Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:30.588940Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:30.589050Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11729 TClient is connected to server localhost:11729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathS ... 4976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:03.192249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:03.232208Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:03.307279Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:03.381359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:03.455724Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:03.571855Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509266910714220:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:03.571997Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:03.572254Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509266910714225:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:03.583386Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:03.611106Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489509266910714227:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:03.675313Z node 5 :TX_PROXY ERROR: Actor# [5:7489509266910714281:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:04.265639Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489509249730842717:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:04.265722Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:05.041682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:21:05.086081Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:21:05.143155Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 65497, MsgBus: 20825 2025-04-04T17:21:09.706722Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509295246824831:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:09.706807Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014e2/r3tmp/tmpdMNPtN/pdisk_1.dat 2025-04-04T17:21:09.876975Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:09.880765Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:09.880869Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:09.883192Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65497, node 6 2025-04-04T17:21:09.940496Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:09.940521Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:09.940539Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:09.940725Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20825 TClient is connected to server localhost:20825 WaitRootIsUp 'Root'... 
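The SqlIn warning at the top of this excerpt (code 1108) is actionable rather than cosmetic: with a nullable operand on the left of IN, legacy YQL semantics can propagate NULL where ANSI SQL would return false. A minimal sketch of opting into the ANSI behavior the warning suggests, driven through the C++ SDK's table client; the table and column are hypothetical and the exact header path varies between SDK versions:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

// Runs one query with ANSI IN semantics enabled; returns the final status.
TStatus RunAnsiIn(TTableClient& client) {
    return client.RetryOperationSync([](TSession session) -> TStatus {
        // The pragma silences warning 1108 and makes IN handle empty or
        // nullable collections the ANSI way instead of the legacy way.
        const TString query = R"(
            PRAGMA AnsiInForEmptyOrNullableItemsCollections;
            SELECT * FROM `/Root/demo`          -- hypothetical table
            WHERE maybe_value IN (1, 2, 3);     -- hypothetical nullable column
        )";
        return session.ExecuteDataQuery(
            query,
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
    });
}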
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:10.553168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:21:10.570852Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.646639Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:10.875856Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:10.957564Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:13.725959Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509312426695798:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:13.726072Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:13.768536Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:13.836088Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:13.907029Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:13.945979Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:13.985051Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.026607Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.113962Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509316721663613:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:14.114063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:14.114140Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509316721663618:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:14.119834Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:14.132249Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509316721663620:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:21:14.200375Z node 6 :TX_PROXY ERROR: Actor# [6:7489509316721663675:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:14.706839Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509295246824831:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:14.706973Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> YdbSdkSessionsPool::FailTest [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> Compression::WriteRAW >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2025-04-04T17:21:03.159458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509266691907953:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.159550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a64/r3tmp/tmpkzuBQe/pdisk_1.dat 2025-04-04T17:21:03.622046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.622166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.631352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2701, node 1 2025-04-04T17:21:03.669676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.678648Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.682042Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.826958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-04-04T17:21:03.826980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.826986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.827081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.301704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:08.159577Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509266691907953:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:08.159633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:15.387753Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509318867646605:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:15.387802Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a64/r3tmp/tmpYnJyKU/pdisk_1.dat 2025-04-04T17:21:15.506967Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:15.542756Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:15.542920Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:15.545939Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22334, node 4 2025-04-04T17:21:15.592741Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:15.592776Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:15.592792Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:15.593008Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20820 WaitRootIsUp 'Root'... 
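The YdbSdkSessionsPool::FailTest output above exercises the table client's built-in session pool against an endpoint that comes and goes. A rough sketch of how that pool is configured and drawn from; the endpoint, database, and limit are placeholder assumptions, and settings names may differ between SDK versions:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    TDriver driver(TDriverConfig()
        .SetEndpoint("localhost:2136")   // assumption: local single-node YDB
        .SetDatabase("/Root"));

    // The client owns the session pool; MaxActiveSessions caps how many
    // sessions callers may hold simultaneously (the value is illustrative).
    TTableClient client(driver, TClientSettings()
        .SessionPoolSettings(TSessionPoolSettings().MaxActiveSessions(10)));

    // GetSession() draws from the pool, creating a session lazily; against
    // an unreachable server this is where FailTest-style errors surface.
    auto sessionResult = client.GetSession().GetValueSync();
    if (!sessionResult.IsSuccess()) {
        Cerr << sessionResult.GetIssues().ToString() << Endl;
    }

    driver.Stop(true);
    return 0;
}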
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:15.784088Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> TxUsage::WriteToTopic_Demo_32 [GOOD] >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> YdbTableSplit::SplitByLoadWithUpdates >> YdbTableSplit::MergeByNoLoadAfterSplit >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TxUsage::WriteToTopic_Demo_33 >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> LocalPartition::DiscoveryHang [GOOD] >> LocalPartition::WithoutPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-04-04T17:21:19.550262Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.550298Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.550384Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.550834Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.551570Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.562729Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.563178Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-04-04T17:21:19.564134Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:21:19.564573Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:21:19.564760Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-04T17:21:19.564898Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.564988Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.565024Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-04T17:21:19.565060Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:21:19.565086Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:21:19.566361Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.566394Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.566414Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.566721Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.567239Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.567368Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.567532Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-04-04T17:21:19.571338Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:21:19.571534Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T17:21:19.571862Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T17:21:19.572049Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T17:21:19.572132Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.572155Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:21:19.572178Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:21:19.572295Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-04-04T17:21:19.572320Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:21:19.572335Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T17:21:19.572346Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:21:19.572423Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-04-04T17:21:19.572476Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-04-04T17:21:19.572492Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-04-04T17:21:19.572514Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:21:19.572573Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-04-04T17:21:19.572594Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-04-04T17:21:19.572607Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-04-04T17:21:19.572639Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:21:19.572737Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-04-04T17:21:19.573981Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.573997Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.574014Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.574288Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.574696Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.574849Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.575049Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-04-04T17:21:19.575963Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:21:19.576194Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-04-04T17:21:19.576627Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-04-04T17:21:19.576897Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-04-04T17:21:19.577004Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.577036Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:21:19.577106Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-04-04T17:21:19.577131Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:21:19.577144Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:21:19.577191Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-04-04T17:21:19.577222Z :DEBUG: Take Data. Partition 1. 
Read: {0, 2} (3-3) 2025-04-04T17:21:19.577235Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-04-04T17:21:19.577269Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-04-04T17:21:19.577284Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-04-04T17:21:19.577297Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:21:21.578662Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-04-04T17:21:21.635942Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-04T17:21:21.636034Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-04T17:21:21.636101Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:21.650594Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:21.652401Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:21.652811Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-04-04T17:21:21.654189Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-04-04T17:21:21.765488Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-04-04T17:21:21.767358Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:21.768893Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:21:21.772060Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:21:21.773021Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-04-04T17:21:21.778426Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-04-04T17:21:21.779285Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-04-04T17:21:21.780005Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-04-04T17:21:21.780750Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-04-04T17:21:21.790442Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-04-04T17:21:21.791219Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-04-04T17:21:21.791281Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 10, size 10000000 bytes 2025-04-04T17:21:21.791560Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-04-04T17:21:21.802930Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-04-04T17:21:21.809877Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:21.809924Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:21.809970Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:21.810691Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:21.811160Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:21.811618Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:21.812040Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:21.812690Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-04-04T17:21:21.814468Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:21.814505Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:21.814541Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:21.814957Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:21.815532Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:21.815698Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:21.816533Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:21.816711Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:21.816824Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:21.816875Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:21:21.817815Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD] Test command err: Trying to start YDB, gRPC: 9250, MsgBus: 4287 2025-04-04T17:20:33.449922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509140979157394:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:33.452044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014be/r3tmp/tmpTabZ7t/pdisk_1.dat 2025-04-04T17:20:33.908156Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:33.922065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:33.922283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:33.925510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9250, node 1 2025-04-04T17:20:34.011741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:34.011771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:34.011778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:34.011907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4287 TClient is connected to server localhost:4287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:34.577213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:34.593396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
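The ReadSessionImplTest trace that ends just before the divider above (partition stream create/confirm, decompression tasks, "Commit offsets [x, y)", stream destroy) is the SDK's internal view of the persqueue read-session event loop. A rough sketch of the application-facing side of that loop; the topic name is taken from the trace, while the consumer name and the event/method details follow the public persqueue SDK as commonly documented, so treat them as assumptions:

#include <ydb/public/sdk/cpp/client/ydb_persqueue_public/persqueue.h>

#include <variant>

using namespace NYdb;
using namespace NYdb::NPersQueue;

void ReadLoop(TDriver& driver) {
    TPersQueueClient pqClient(driver);
    auto session = pqClient.CreateReadSession(TReadSessionSettings()
        .ConsumerName("shared/reader")        // assumption: consumer exists
        .AppendTopics(TString("TestTopic"))); // topic name from the trace

    while (auto event = session->GetEvent(/*block=*/true)) {
        if (auto* data = std::get_if<TReadSessionEvent::TDataReceivedEvent>(&*event)) {
            // By the time messages arrive here, the "Decompression task done"
            // steps from the trace have already produced uncompressed data.
            for (auto& msg : data->GetMessages()) {
                Cout << msg.GetData() << Endl;
            }
            data->Commit();      // emits "Commit offsets [x, y)" in the trace
        } else if (auto* create = std::get_if<TReadSessionEvent::TCreatePartitionStreamEvent>(&*event)) {
            create->Confirm();   // "Confirm partition stream create"
        } else if (auto* destroy = std::get_if<TReadSessionEvent::TDestroyPartitionStreamEvent>(&*event)) {
            destroy->Confirm();  // "Confirm partition stream destroy"
        } else if (std::get_if<TSessionClosedEvent>(&*event)) {
            break;               // terminal event; stop reading
        }
    }
}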
2025-04-04T17:20:34.613017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:20:34.733194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:34.881069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:34.968491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:36.551356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509153864060929:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.551486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:36.810287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.842142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.874010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:36.943012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:37.010004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:37.051259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:37.140329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509158159028749:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.140417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.140738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509158159028754:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:37.144568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:37.159059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509158159028756:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:37.231313Z node 1 :TX_PROXY ERROR: Actor# [1:7489509158159028812:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:38.180632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:38.449281Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509140979157394:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:38.449380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:20:38.872998Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787238900, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 4942, MsgBus: 18982 2025-04-04T17:20:39.660545Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509165692190831:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:39.660581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014be/r3tmp/tmpcUR7Ph/pdisk_1.dat 2025-04-04T17:20:39.821568Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4942, node 2 2025-04-04T17:20:39.875951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:39.876279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:39.879235Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:39.912820Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:39.912864Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:39.912872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:39.913005Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18982 TClient is connected to server localhost:18982 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:40.405448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:40.412363Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:20:40.428751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:40.513086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation pa ... oposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.249127Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.281561Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.314950Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.385501Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.441270Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509298718709169:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:10.441365Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:10.441415Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509298718709174:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:10.445420Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:10.456228Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509298718709176:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:10.512520Z node 6 :TX_PROXY ERROR: Actor# [6:7489509298718709229:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:11.404994Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509281538837696:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:11.405070Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:11.746959Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:21:12.705764Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787272738, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 23817, MsgBus: 15626 2025-04-04T17:21:13.559562Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509310639495425:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:13.559632Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014be/r3tmp/tmpyj939q/pdisk_1.dat 2025-04-04T17:21:13.671525Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:13.701524Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:13.701653Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:13.703929Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23817, node 7 2025-04-04T17:21:13.757035Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:13.757058Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:13.757065Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:13.757173Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15626 TClient is connected to server localhost:15626 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:14.283007Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:14.292822Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:14.368295Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:14.529890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:14.623694Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:17.261165Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489509327819366397:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:17.261327Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:17.306460Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:17.340783Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:17.375661Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:17.420366Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:17.463292Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:17.521858Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:17.574410Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489509327819366909:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:17.574531Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:17.574795Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489509327819366914:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:17.579344Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:17.591771Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489509327819366916:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:17.684644Z node 7 :TX_PROXY ERROR: Actor# [7:7489509327819366974:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:18.559639Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489509310639495425:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:18.559731Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:18.953171Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:21:20.264371Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787280242, txId: 281474976715673] shutting down >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-04-04T17:19:21.166934Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:21.261846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:21.281990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:21.282388Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:21.290518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:21.290758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:21.291007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:21.291141Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:21.291265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:21.291406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:21.291518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:21.291633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:21.291748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:21.291877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:21.291982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:21.292105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:21.313204Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:21.313442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:21.313493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:21.313663Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:21.313832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:21.313908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:21.313949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:21.314043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-04T17:19:21.314121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:21.314162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:21.314191Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:21.314371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:21.314423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:21.314475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:21.314506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:21.314611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:21.314685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:21.314732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:21.314761Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:21.314844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:21.314886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:21.314917Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:21.314959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:21.315002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:21.315037Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:21.315406Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-04T17:19:21.315484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-04T17:19:21.315534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-04-04T17:19:21.315587Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-04-04T17:19:21.315710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:21.315750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:21.315770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:21.315906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:21.315942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:21.315968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:21.316064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:21.316090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:21.316108Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:21.316235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:21.316261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:21.316280Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:21.316376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:21.316510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:21.316560Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO
_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_
BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:4
8;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-04T17:21:22.763791Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5923:7915];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=3; 2025-04-04T17:21:22.765088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5923:7915];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpSqlIn::InWithCast [GOOD] |93.3%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1 [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_3 >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl >> S3SettingsConversion::Port >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] >> S3SettingsConversion::FoldersStyleDeduction [GOOD] >> S3SettingsConversion::Port [GOOD] >> ColumnShardTiers::DSConfigs >> Secret::SimpleQueryService [GOOD] >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2 |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-04-04T17:21:08.637189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:08.637816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:08.637885Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b78/r3tmp/tmpr07Qql/pdisk_1.dat 2025-04-04T17:21:09.098198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:09.148629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:09.195437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:09.196098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:09.209119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:09.308015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:21:09.380710Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-04-04T17:21:09.381067Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:09.420951Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:09.421084Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:21:09.423810Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:21:09.423990Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:21:09.424074Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:21:09.425950Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:21:09.426178Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:21:09.426307Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:701:2577] in generation 1 2025-04-04T17:21:09.427756Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-04-04T17:21:09.427898Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:09.435582Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:09.435670Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:21:09.436708Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:21:09.436760Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T17:21:09.436799Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:21:09.437076Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:21:09.437155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:21:09.437217Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-04-04T17:21:09.448207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:21:09.466987Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:21:09.468373Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:21:09.468533Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-04T17:21:09.468566Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:21:09.468595Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:21:09.468622Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:09.469722Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:21:09.469766Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:21:09.469830Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:21:09.469893Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-04T17:21:09.469913Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:21:09.469930Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:21:09.469949Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:09.470385Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:21:09.470522Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:21:09.470619Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:09.470651Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:09.470727Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:21:09.470757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:09.470793Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:21:09.470849Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:21:09.472143Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-04-04T17:21:09.472196Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:21:09.472218Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:09.472240Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T17:21:09.472276Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:21:09.472404Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-04T17:21:09.473293Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:21:09.473398Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:21:09.473937Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-04-04T17:21:09.474138Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:21:09.474347Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T17:21:09.474394Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T17:21:09.476025Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:09.476104Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:09.487029Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:21:09.487226Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:21:09.487294Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:21:09.487321Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T17:21:09.638264Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:738:2617], serverId# [1:741:2620], sessionId# [0:0:0] 2025-04-04T17:21:09.638571Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-04-04T17:21:09.643292Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:21:09.643380Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:09.643708Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T17:21:09.643741Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:09.643887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:09.643932Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:21:09.644018Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:21:09.644277Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:21:09.644437Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-04T17:21:09.644559Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:21:09.644591Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:21:09.644640Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T17:21:09.644906Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:21:09.645015Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:21:09.646144Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:09.646263Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T17:21:09.649475Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T ... 2025-04-04T17:21:24.175539Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:24.175579Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:21:24.175613Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:21:24.175842Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:21:24.175981Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:21:24.176090Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:21:24.176112Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:21:24.176134Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T17:21:24.176293Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:21:24.176357Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:21:24.176448Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:24.176506Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T17:21:24.176924Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:21:24.177281Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:24.178402Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:21:24.178456Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:21:24.178831Z node 4 :TX_DATASHARD INFO: 
Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:21:24.179157Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:24.181286Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:21:24.181338Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:24.182504Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:21:24.182605Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:24.182662Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-04-04T17:21:24.182681Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:24.183271Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-04-04T17:21:24.183323Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:21:24.184278Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:24.184379Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:21:24.184439Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:21:24.184502Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:21:24.184553Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:21:24.184675Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:24.186070Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:21:24.186116Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:21:24.186145Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-04T17:21:24.186192Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:21:24.186229Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:21:24.186316Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:24.186664Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:24.186744Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:24.190353Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:21:24.190433Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:21:24.190587Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:21:24.191072Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:21:24.191404Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-04T17:21:24.191438Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T17:21:24.201027Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:786:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.201140Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:796:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.201524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.206880Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:24.262033Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:24.262157Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:24.411150Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:24.411251Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:24.414105Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:800:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:21:24.448465Z node 4 :TX_PROXY ERROR: Actor# [4:878:2712] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:24.642534Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0x4nq72vztyb3dpwhs9zq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzgwYTA4MGEtNjVkMzk2YzQtYjhlYjcwZDctM2EzYTkwMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:24.645784Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:957:2753], serverId# [4:958:2754], sessionId# [0:0:0] 2025-04-04T17:21:24.646136Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:21:24.649614Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0x4nq72vztyb3dpwhs9zq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzgwYTA4MGEtNjVkMzk2YzQtYjhlYjcwZDctM2EzYTkwMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:24.653597Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0x4nq72vztyb3dpwhs9zq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzgwYTA4MGEtNjVkMzk2YzQtYjhlYjcwZDctM2EzYTkwMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:24.654037Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:21:24.655327Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743787284655249 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T17:21:24.666537Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:21:24.666698Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-04T17:21:24.666837Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-04T17:21:24.666922Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:24.667923Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2025-04-04T17:21:24.668005Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:24.672857Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:974:2763], serverId# [4:975:2764], sessionId# [0:0:0] 2025-04-04T17:21:24.679050Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:976:2765], serverId# [4:977:2766], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> 
KqpSqlIn::InWithCast [GOOD] Test command err: Trying to start YDB, gRPC: 20357, MsgBus: 63277 2025-04-04T17:20:36.712528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509152097428057:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:36.712578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014b9/r3tmp/tmp5aoOIc/pdisk_1.dat 2025-04-04T17:20:37.071409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:37.104044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:37.104149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20357, node 1 2025-04-04T17:20:37.106547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:37.178718Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:37.178740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:37.178745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:37.178868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63277 TClient is connected to server localhost:63277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:37.651242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:37.684055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:37.824477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:37.999058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:20:38.087520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:39.815518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509164982331744:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:39.815740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:40.142187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:20:40.179518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:20:40.210757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:40.242839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:40.278722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:20:40.315101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:20:40.364080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509169277299550:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:40.364150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:40.364510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509169277299555:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:40.368878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:20:40.379483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509169277299557:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:20:40.442084Z node 1 :TX_PROXY ERROR: Actor# [1:7489509169277299611:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:41.411179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.444945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.483038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:41.712873Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509152097428057:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:41.712970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
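The warning above (code 1108) refers to YQL's legacy IN semantics over nullable or empty collections; the compiler re-emits it for every statement the test builds, which is why the same block recurs below. A minimal YQL sketch of the opt-in it recommends — the table name and literal values are illustrative assumptions, not taken from this test:

    -- Opt into ANSI three-valued IN semantics, as warning 1108 suggests.
    -- `/Root/TestTable` and the literals below are hypothetical.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM `/Root/TestTable`
    WHERE Value IN (1, 2, NULL);  -- nullable items now follow ANSI semantics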
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 12986, MsgBus: 6485 2025-04-04T17:20:45.635573Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509192425866021:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:45.635634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014b9/r3tmp/tmpecbzUb/pdisk_1.dat 2025-04-04T17:20:45.736903Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12986, node 2 2025-04-04T17:20:45.773031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:45.773134Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:45.774388Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:45.844753Z node 2 :NET_CLASSIF ... don't have access permissions } 2025-04-04T17:21:14.864642Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.900670Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.936167Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.970320Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:15.006871Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:15.043059Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:15.123332Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509318481408405:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:15.123453Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:15.123632Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489509318481408410:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:15.128311Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:15.140251Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489509318481408412:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:15.216187Z node 5 :TX_PROXY ERROR: Actor# [5:7489509318481408467:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:15.763213Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489509297006569643:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:15.763284Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
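The NOT_FOUND / scheduled-retry / "path exist" sequence above is the workload service bootstrapping its default pool: the fetcher misses /Root/.metadata/workload_manager/pools/default, a TPoolCreatorActor proposes ESchemeOpCreateResourcePool, and the subsequent TX_PROXY "path exist, request accepts it" message is the benign doublecheck, not a test failure. For reference, a hand-written pool would be declared roughly as in the sketch below; the pool name and settings are assumptions based on YDB's workload-manager DDL, not commands taken from this test:

    -- Hypothetical resource pool definition; settings are illustrative.
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );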
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 6879, MsgBus: 22074 2025-04-04T17:21:17.790648Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489509328836818270:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:17.790705Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014b9/r3tmp/tmpN0iqIA/pdisk_1.dat 2025-04-04T17:21:17.896155Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:17.928434Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:17.928557Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6879, node 6 2025-04-04T17:21:17.930697Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:17.965349Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:17.965378Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:17.965390Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:17.965568Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22074 TClient is connected to server localhost:22074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:18.445880Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:18.459801Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:18.550865Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:21:18.733611Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:18.853502Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:21.245902Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509346016689258:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:21.245981Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:21.303526Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:21.347558Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:21.384454Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:21.432287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:21.480434Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:21.555468Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:21.649428Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509346016689776:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:21.649540Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:21.649594Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489509346016689781:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:21.654624Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:21.665486Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489509346016689783:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:21.723222Z node 6 :TX_PROXY ERROR: Actor# [6:7489509346016689838:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:22.790994Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509328836818270:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:22.791094Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> Secret::Simple [GOOD] >> S3SettingsConversion::StyleDeduction [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-04-04T17:21:10.240850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:10.241275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:10.241328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b6f/r3tmp/tmpZqgLW3/pdisk_1.dat 2025-04-04T17:21:10.606528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.651785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:10.691372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:10.691546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:10.703202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:10.785049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:21:10.830320Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:676:2577] 2025-04-04T17:21:10.830647Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:10.877369Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:10.877538Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:21:10.879166Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:21:10.879245Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:21:10.879289Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:21:10.879632Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:21:10.879782Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:21:10.879871Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:701:2577] in generation 1 2025-04-04T17:21:10.881279Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:679:2579] 2025-04-04T17:21:10.881516Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:10.890887Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:10.890997Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:21:10.892290Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:21:10.892356Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T17:21:10.892394Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:21:10.892676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:21:10.892770Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:21:10.892827Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:710:2579] in generation 1 2025-04-04T17:21:10.903711Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:21:10.933780Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:21:10.933987Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:21:10.934114Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:713:2598] 2025-04-04T17:21:10.934167Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:21:10.934198Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:21:10.934230Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:10.934528Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:21:10.934563Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:21:10.934609Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:21:10.934682Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:714:2599] 2025-04-04T17:21:10.934705Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:21:10.934725Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:21:10.934744Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:10.935150Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:21:10.935275Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:21:10.935380Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:10.935418Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:10.935459Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:21:10.935493Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:10.935532Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:21:10.935580Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:21:10.936032Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2573], serverId# [1:687:2583], sessionId# [0:0:0] 2025-04-04T17:21:10.936099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:21:10.936125Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:10.936147Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T17:21:10.936191Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:21:10.936332Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-04T17:21:10.936555Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:21:10.936662Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:21:10.937066Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2574], serverId# [1:695:2589], sessionId# [0:0:0] 2025-04-04T17:21:10.937243Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:21:10.937378Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-04-04T17:21:10.937428Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-04-04T17:21:10.939205Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:10.939279Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:10.950197Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:21:10.950343Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:21:10.950434Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:21:10.950472Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-04-04T17:21:11.103841Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:738:2617], serverId# [1:741:2620], sessionId# [0:0:0] 2025-04-04T17:21:11.104138Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-04-04T17:21:11.112001Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:21:11.112085Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:11.112424Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-04-04T17:21:11.112460Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:11.112619Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:11.112658Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:21:11.112707Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:21:11.112887Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:21:11.113008Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-04-04T17:21:11.113128Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:21:11.113147Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:21:11.113182Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-04-04T17:21:11.113351Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:21:11.113429Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:21:11.114224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:11.114313Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-04-04T17:21:11.116142Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T ... 89 step# 1000} 2025-04-04T17:21:25.066016Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:21:25.066093Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:21:25.066124Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:21:25.066151Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037890 2025-04-04T17:21:25.066349Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:21:25.066464Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:21:25.066743Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:21:25.066789Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:25.066836Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:21:25.066903Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2025-04-04T17:21:25.067218Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:21:25.067560Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:25.071029Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:21:25.071102Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:21:25.071150Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-04-04T17:21:25.071223Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:21:25.071298Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 
72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:21:25.071414Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:25.072188Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:21:25.072229Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:25.073521Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:25.073568Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:21:25.073604Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:21:25.073653Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:21:25.073697Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:21:25.073766Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:25.073848Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-04-04T17:21:25.073877Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:21:25.074774Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-04-04T17:21:25.074830Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:21:25.075615Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:25.075676Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:25.075706Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T17:21:25.076073Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:21:25.076100Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T17:21:25.076124Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-04-04T17:21:25.076160Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:21:25.076190Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:21:25.076234Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:21:25.079667Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:21:25.080092Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:21:25.080504Z node 4 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-04-04T17:21:25.080556Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T17:21:25.081003Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:21:25.081427Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:21:25.081467Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:21:25.081822Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-04-04T17:21:25.081852Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-04T17:21:25.089533Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:837:2696], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.089619Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:846:2701], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.089680Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.093404Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:25.097517Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:25.097625Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T17:21:25.097954Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:25.249001Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:25.249116Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-04-04T17:21:25.249168Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-04-04T17:21:25.252415Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:851:2704], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:21:25.287519Z node 4 :TX_PROXY ERROR: Actor# [4:933:2755] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:25.382900Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0x4pk0chr24tdeah0pqrry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzA0OTk5NmItZjg4MTQwMzItYTc1NGNmOGEtN2Y5NzdkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.383469Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1034:2799], serverId# [4:1035:2800], sessionId# [0:0:0] 2025-04-04T17:21:25.383705Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:21:25.385124Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743787285385009 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T17:21:25.385289Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743787285385009 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-04-04T17:21:25.396414Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:21:25.396608Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-04-04T17:21:25.396670Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:21:25.401207Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1041:2805], serverId# [4:1042:2806], sessionId# [0:0:0] 2025-04-04T17:21:25.407180Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1043:2807], serverId# [4:1044:2808], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: 2025-04-04T17:19:04.501703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:04.502180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:04.502232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001542/r3tmp/tmphG0RIw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19547, node 1 TClient is connected to server localhost:16175 2025-04-04T17:19:05.473273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:05.521521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:05.528372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:05.528412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:05.528432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:05.528750Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:05.566510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:05.566632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:05.577881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-04T17:19:17.345000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:807:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:17.345130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:818:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:17.345213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:17.356216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:19:17.375588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:821:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:19:17.430142Z node 1 :TX_PROXY ERROR: Actor# [1:872:2715] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:17.771082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-04T17:19:18.998515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:19.446822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-04T17:19:20.318466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-04T17:19:21.069389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:21.549028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:19:22.983481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-04T17:19:23.429639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-04T17:19:40.428256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:40.428328Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-04T17:20:03.724513Z node 1 :KQP_EXECUTER ERROR: TxId: 
281474976715728. Ctx: { TraceId: 01jr0x26s8bgf2mkpv1f8tgrfx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRhNDVjZjEtMmMzMTRjMTItNDhjNjJkYmMtOWJlZGU0YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:27.577939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715749:0, at schemeshard: 72057594046644480 2025-04-04T17:20:28.791178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2025-04-04T17:20:30.501656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-04-04T17:20:31.012510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715770:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:44.656641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715785. Ctx: { TraceId: 01jr0x3erbf8c508vek93tsv8r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY1ZjM5N2YtMjg1NzIwOGMtM2ZiZTkyMzMtNTQ5MzkwY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-04T17:21:23.405906Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715831. Ctx: { TraceId: 01jr0x4mv3d4dvsk6v2c0b219r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmI4M2I3ZGYtNWRmMDVhMjgtN2NkZDM1My1lZGQwYzBkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2025-04-04T17:21:03.157912Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509267416120578:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.157986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a7f/r3tmp/tmpZ5Q1XT/pdisk_1.dat 2025-04-04T17:21:03.643711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.643813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.647264Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.652921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13666, node 1 2025-04-04T17:21:03.819135Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.819168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.819184Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.819334Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.425964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:21:08.158133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509267416120578:2079];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:08.158221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:18.644878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:21:18.644909Z node 1 :IMPORT WARN: Table profiles were not loaded |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TPersQueueMirrorer::ValidStartStream |93.4%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: 2025-04-04T17:19:07.109960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:07.110427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:07.110488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00143b/r3tmp/tmpqVkRYF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11091, node 1 TClient is connected to server localhost:13145 2025-04-04T17:19:07.649697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:07.687012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:07.690582Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:07.690638Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:07.690678Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:07.691094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:07.726993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:07.727138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:07.738951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-04T17:19:19.661623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:810:2676], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:19.661790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:19.664631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-04T17:19:19.825824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:927:2754], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:19.825943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:19.826170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:932:2759], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:19.830585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-04T17:19:19.959277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:934:2761], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:20.187383Z node 1 :TX_PROXY ERROR: Actor# [1:1031:2829] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:20.556456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:20.932040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-04T17:19:21.714897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-04T17:19:22.556790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:22.995489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:19:24.149559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-04-04T17:19:24.592599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: 2025-04-04T17:19:40.213393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:40.213472Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 2025-04-04T17:20:04.884433Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. 
Ctx: { TraceId: 01jr0x27wt408avbse69gf2jjv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGViMDhkMGItYWZmZGMwOWItZjhjMzA3OTctYTMzYzNhZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:28.543986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715757:0, at schemeshard: 72057594046644480 2025-04-04T17:20:29.888103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715766:0, at schemeshard: 72057594046644480 2025-04-04T17:20:32.216459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715779:0, at schemeshard: 72057594046644480 2025-04-04T17:20:32.794075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715782:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:46.084121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715797. Ctx: { TraceId: 01jr0x3g4hbcgyawcy1depys27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTBkNjY0NzItYmFhYmY4MmQtZmMzODIwYWMtODQ5MTFlMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 2025-04-04T17:21:25.313585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715847. Ctx: { TraceId: 01jr0x4pp143ras7zp0hw4q9k9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQ4NzVkNzQtYzhhOTc1NzEtYTg4YTdlYmMtY2U4YzNiN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2025-04-04T17:21:03.202272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509267632255551:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.207735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a7b/r3tmp/tmpotdTI5/pdisk_1.dat 2025-04-04T17:21:03.636929Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.638961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.639057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13907, node 1 2025-04-04T17:21:03.664526Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.664552Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:03.666530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:03.822778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.822807Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.822815Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.822948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11866 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.302217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:07.390530Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509285122898216:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:07.395634Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a7b/r3tmp/tmp1sprcQ/pdisk_1.dat 2025-04-04T17:21:07.523699Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:07.555203Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:07.555294Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:07.558204Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23924, node 4 2025-04-04T17:21:07.610699Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:07.610726Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:07.610733Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:07.610895Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:07.864962Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:12.389196Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489509285122898216:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:12.389263Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:22.508357Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:21:22.508389Z node 4 :IMPORT WARN: Table profiles were not loaded >> TxUsage::WriteToTopic_Demo_33 [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TxUsage::WriteToTopic_Demo_34 >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> ColumnShardTiers::TieringUsage |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> LocalPartition::WithoutPartition [GOOD] >> LocalPartition::WithoutPartitionDeadNode |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> 
DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write >> TPQTestSlow::TestWriteVeryBigMessage >> KqpSinkLocks::InvalidateOnCommit >> KqpTx::InteractiveTx >> KqpSinkMvcc::OlapNamedStatement >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> KqpSinkTx::SnapshotROInteractive2 >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> TPersQueueMirrorer::ValidStartStream [GOOD] >> TxUsage::WriteToTopic_Demo_48 [GOOD] >> TxUsage::WriteToTopic_Demo_34 [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2 [GOOD] >> TxUsage::WriteToTopic_Demo_50 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2025-04-04T17:20:01.310340Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509002663282062:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:01.310404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:20:01.507424Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001097/r3tmp/tmp4KF0XD/pdisk_1.dat 2025-04-04T17:20:01.746907Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:01.772500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:01.773284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:01.778655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12071, node 1 
2025-04-04T17:20:01.991024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001097/r3tmp/yandexxEQ9Ql.tmp 2025-04-04T17:20:01.991070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001097/r3tmp/yandexxEQ9Ql.tmp 2025-04-04T17:20:01.991350Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001097/r3tmp/yandexxEQ9Ql.tmp 2025-04-04T17:20:01.991490Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:20:02.269284Z INFO: TTestServer started on Port 3566 GrpcPort 12071 TClient is connected to server localhost:3566 PQClient connected to localhost:12071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:20:02.633836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:20:02.683723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:20:04.203192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509015548184621:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:04.203198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509015548184632:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:04.204332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:04.212494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:20:04.224853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509015548184636:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:20:04.282431Z node 1 :TX_PROXY ERROR: Actor# [1:7489509015548184700:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:20:04.667848Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509015548184708:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:20:04.674774Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzIxNzk3ODctZDE0MTA1YzctZjJkZGFkZjktMmRjYjVjNjQ=, ActorId: [1:7489509015548184619:2336], ActorState: ExecuteState, TraceId: 01jr0x27k84ewh50fmefskn6ey, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:20:04.677470Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:20:04.731375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:20:04.778589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:20:04.876317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489509019843152287:2635] 2025-04-04T17:20:06.310476Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509002663282062:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:06.310627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:20:11.155571Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:20:11.176468Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:20:11.184289Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489509045612956349:2790], Recipient [1:7489509002663282308:2177]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:20:11.184320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:20:11.184333Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:20:11.184371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489509045612956345:2787], Recipient [1:7489509002663282308:2177]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:20:11.184387Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:20:11.256664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:20:11.257101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.258233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:20:11.258307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:20:11.258355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-04T17:20:11.258426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-04T17:20:11.258445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 
3 2025-04-04T17:20:11.258465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-04-04T17:20:11.258488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 5 2025-04-04 ... 027495Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2025-04-04T17:21:42.027569Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 clientCommitOffset (empty maybe) clientReadOffset 5 2025-04-04T17:21:42.027620Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2025-04-04T17:21:42.027704Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1743787301867, sizeLag# 1179 2025-04-04T17:21:42.027724Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1TEvPartitionReady. Aval parts: 1 2025-04-04T17:21:42.027763Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 performing read request: guid# 2aeaf243-eda324a2-8b721709-cf67722a, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 1414, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-04T17:21:42.027879Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 1414 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 1 committedOffset 1 Guid 2aeaf243-eda324a2-8b721709-cf67722a 2025-04-04T17:21:42.027990Z node 7 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T17:21:42.028029Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-04-04T17:21:42.028134Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 1414 endOffset 10 max time lag 0ms effective offset 5 2025-04-04T17:21:42.028171Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 5, current partition end offset: 10 2025-04-04T17:21:42.028318Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 
2025-04-04T17:21:42.028344Z node 7 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:21:42.028476Z node 7 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2025-04-04T17:21:42.028911Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1743787301896 CreateTimestampMS: 1743787301892 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1743787301900 CreateTimestampMS: 1743787301892 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1743787301949 CreateTimestampMS: 1743787301892 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1743787301949 CreateTimestampMS: 1743787301892 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1743787301949 CreateTimestampMS: 1743787301892 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551581 RealReadOffset: 9 WaitQuotaTimeMs: 0 } Cookie: 5 } 2025-04-04T17:21:42.029095Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2025-04-04T17:21:42.029128Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid 2aeaf243-eda324a2-8b721709-cf67722a has messages 1 2025-04-04T17:21:42.029200Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 read done: guid# 2aeaf243-eda324a2-8b721709-cf67722a, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 681 2025-04-04T17:21:42.029239Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 response to read: guid# 2aeaf243-eda324a2-8b721709-cf67722a 2025-04-04T17:21:42.029437Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 Process answer. Aval parts: 0 2025-04-04T17:21:42.029881Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] Got ReadResponse, serverBytesSize = 681, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428119 2025-04-04T17:21:42.030060Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428119 2025-04-04T17:21:42.030374Z :DEBUG: [] Decompression task done. 
Partition/PartitionSessionId: 1 (5-9) 2025-04-04T17:21:42.030448Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] Returning serverBytesSize = 681 to budget 2025-04-04T17:21:42.030483Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] In ContinueReadingDataImpl, ReadSizeBudget = 681, ReadSizeServerDelta = 52428119 2025-04-04T17:21:42.030761Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-04T17:21:42.030931Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2025-04-04T17:21:42.030994Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6) 2025-04-04T17:21:42.031034Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (7-7) 2025-04-04T17:21:42.031066Z :DEBUG: [] Take Data. Partition 0. Read: {2, 1} (8-8) 2025-04-04T17:21:42.031103Z :DEBUG: [] Take Data. Partition 0. Read: {2, 2} (9-9) 2025-04-04T17:21:42.030990Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 grpc read done: success# 1, data# { read_request { bytes_size: 681 } } 2025-04-04T17:21:42.031157Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] The application data is transferred to the client. Number of messages 5, size 115 bytes 2025-04-04T17:21:42.031090Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 got read request: guid# f6d5cceb-ffff6279-e377744c-80f55d60 2025-04-04T17:21:42.031212Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] Returning serverBytesSize = 0 to budget 2025-04-04T17:21:42.031391Z :INFO: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] Closing read session. Close timeout: 0.000000s 2025-04-04T17:21:42.031448Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:1 2025-04-04T17:21:42.031494Z :INFO: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] Counters: { Errors: 0 CurrentSessionLifetimeMs: 37 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:21:42.031612Z :NOTICE: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:21:42.031657Z :DEBUG: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] [] Abort session to cluster 2025-04-04T17:21:42.032670Z :NOTICE: [] [] [b9d7581d-bf057a2f-595e4a1d-b3b8c4a7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:21:42.032802Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 grpc read done: success# 0, data# { } 2025-04-04T17:21:42.032824Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 grpc read failed 2025-04-04T17:21:42.032855Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 closed 2025-04-04T17:21:42.033327Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_11279994256323401408_v1 is DEAD 2025-04-04T17:21:42.033663Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_2_11279994256323401408_v1 2025-04-04T17:21:42.033699Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7489509436451254885:2533] destroyed 2025-04-04T17:21:42.033715Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_2_11279994256323401408_v1 2025-04-04T17:21:42.034073Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [7:7489509436451254883:2530] disconnected; active server actors: 1 2025-04-04T17:21:42.034828Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|3e140ab2-e9869071-e2e293d6-68a6d6cf_0] Write session: destroy 2025-04-04T17:21:42.034109Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [7:7489509436451254883:2530] client user disconnected session shared/user_7_2_11279994256323401408_v1 2025-04-04T17:21:42.253942Z node 7 :KQP_COMPUTE WARN: SelfId: [7:7489509436451254909:2535], TxId: 281474976710686, task: 1, CA Id [7:7489509436451254907:2535]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-04-04T17:21:42.287034Z node 7 :KQP_COMPUTE WARN: SelfId: [7:7489509436451254909:2535], TxId: 281474976710686, task: 1, CA Id [7:7489509436451254907:2535]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:21:42.335601Z node 7 :KQP_COMPUTE WARN: SelfId: [7:7489509436451254909:2535], TxId: 281474976710686, task: 1, CA Id [7:7489509436451254907:2535]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:21:42.391770Z node 7 :KQP_COMPUTE WARN: SelfId: [7:7489509436451254909:2535], TxId: 281474976710686, task: 1, CA Id [7:7489509436451254907:2535]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:21:42.501807Z node 7 :KQP_COMPUTE WARN: SelfId: [7:7489509436451254909:2535], TxId: 281474976710686, task: 1, CA Id [7:7489509436451254907:2535]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:21:42.626135Z node 7 :KQP_COMPUTE WARN: SelfId: [7:7489509436451254909:2535], TxId: 281474976710686, task: 1, CA Id [7:7489509436451254907:2535]. 
Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::WriteToTopic_Demo_34 [GOOD] Test command err: 2025-04-04T17:18:58.073220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508730866017557:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.073292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:58.237457Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea3/r3tmp/tmphHwmrw/pdisk_1.dat 2025-04-04T17:18:58.396295Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32444, node 1 2025-04-04T17:18:58.464826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.464982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.467174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:58.584246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000ea3/r3tmp/yandexWcgTnu.tmp 2025-04-04T17:18:58.584282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ea3/r3tmp/yandexWcgTnu.tmp 2025-04-04T17:18:58.584465Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000ea3/r3tmp/yandexWcgTnu.tmp 2025-04-04T17:18:58.584619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799129Z INFO: TTestServer started on Port 27481 GrpcPort 32444 TClient is connected to server localhost:27481 PQClient connected to localhost:32444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:59.033697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.070694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
waiting... waiting... 2025-04-04T17:19:00.546590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508739455952832:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.546756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508739455952845:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.546842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.555936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.567444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508739455952847:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:00.636511Z node 1 :TX_PROXY ERROR: Actor# [1:7489508739455952911:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.983247Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508739455952926:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:00.985035Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmYxMDI4MGYtMjI1ZmQxZmQtOWFiZTQzMTctYzM1ZTUyYzE=, ActorId: [1:7489508739455952830:2336], ActorState: ExecuteState, TraceId: 01jr0x09db77v4d939jzrphqj4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:00.992830Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.024736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.055322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.143603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508743750920502:2631] 2025-04-04T17:19:03.073105Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508730866017557:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.073196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.278461Z :WriteToTopic_Demo_3 INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.293221Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.329062Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.329253Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.340283Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.340657Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.341277Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.341303Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.341325Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.341387Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.341444Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.352605Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508769520724571:2791] connected; active server actors: 1 2025-04-04T17:19:07.352889Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-04T17:19:07.353298Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508769520724570:2790], now have 1 active actors on pipe 2025-04-04T17:19:07.354101Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.354282Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.354589Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.354632Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.366206Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508730866017824 RawX2: 4294969476 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } 2025-04-04T17:19:07.366933Z node 1 :PERSQUEUE ... 
written_in_tx { } } write_statistics { persisting_time { nanos: 1000000 } min_queue_wait_time { nanos: 48000000 } max_queue_wait_time { nanos: 48000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-04T17:21:44.740538Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0] PartitionId [0] Generation [1] OnAck: seqNo=3, txId=01jr0x59fn2hhfy60gy2rgssef, WriteCount=2, AckCount=2 2025-04-04T17:21:44.741779Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0] PartitionId [0] Generation [1] Write session: acknoledged message 3 2025-04-04T17:21:44.750593Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489509445700289100 RawX2: 4503651166980570 } TxId: 281474976715678 Data { Operations { PartitionId: 0 Path: "/Root/topic_A" SupportivePartition: 100001 } Op: Commit SendingShards: 72075186224037894 ReceivingShards: 72075186224037894 Immediate: true WriteId { NodeId: 12 KeyId: 281474976715677 } } 2025-04-04T17:21:44.750636Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PartitionId {0, {12, 281474976715677}, 100001} for WriteId {12, 281474976715677} 2025-04-04T17:21:44.750673Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976715678 has WriteId {12, 281474976715677} 2025-04-04T17:21:44.750691Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] immediate transaction 2025-04-04T17:21:44.750774Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {12, 281474976715677}, 100001}, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoRequest 2025-04-04T17:21:44.750867Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {12, 281474976715677}, 100001}, State: StateIdle] Send TEvPQ::TEvGetWriteInfoResponse 2025-04-04T17:21:44.750929Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-04-04T17:21:44.750985Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-04-04T17:21:44.751015Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=1, t.WriteInfo->BlobsFromHead.size=1 2025-04-04T17:21:44.751046Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Head=Offset 0 PartNo 0 PackedSize 16536 count 1 nextOffset 1 batches 1, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-04-04T17:21:44.751084Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] add key D0000100001_00000000000000000000_00000_0000000001_00013 2025-04-04T17:21:44.751542Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] PartitionedBlob.GetFormedBlobs().size=1 2025-04-04T17:21:44.751604Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] writing blob: topic 'topic_A' partition 0 old key D0000100001_00000000000000000000_00000_0000000001_00013 new key d0000000000_00000000000000000001_00000_0000000001_00013 size 7001240 WTime 1743787304751 2025-04-04T17:21:44.751727Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 3 partNo 0 2025-04-04T17:21:44.752560Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part 
blob complete sourceId '\0test-message_group_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 16546 count 1 nextOffset 3 batches 1 2025-04-04T17:21:44.752613Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-04-04T17:21:44.753310Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Add new write blob: topic 'topic_A' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 1 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 16536 WTime 1743787304751 2025-04-04T17:21:44.753538Z node 12 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:21:44.753613Z node 12 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 16536 2025-04-04T17:21:44.753661Z node 12 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 16536 2025-04-04T17:21:44.753677Z node 12 :PERSQUEUE DEBUG: CacheProxy. Rename blob from D0000100001_00000000000000000000_00000_0000000001_00013 to d0000000000_00000000000000000001_00000_0000000001_00013 2025-04-04T17:21:44.755273Z node 12 :PERSQUEUE DEBUG: Renaming head blob in L1. Old partition 100001 old offset 0 old count 1 new partition 0 new offset 1 new count 1 actorID [12:7489509445700288905:2487] 2025-04-04T17:21:44.755298Z node 12 :PERSQUEUE DEBUG: Duplicate blob in L1. Partition 0 offset 0 count 1 size 16536 actorID [12:7489509445700288905:2487] is actual 1 2025-04-04T17:21:44.755330Z node 12 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 16536 actorID [12:7489509445700288905:2487] 2025-04-04T17:21:44.755354Z node 12 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 16536 actorID [12:7489509445700288905:2487] 2025-04-04T17:21:44.755443Z node 12 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 0 offset 0 partno 0 count 1 parts 0 2025-04-04T17:21:44.755524Z node 12 :PERSQUEUE DEBUG: PQ Cache (L2). Renamed. old Tablet '72075186224037894' partition 100001 offset 0 partno 0 count 1 parts 13, new Tablet '72075186224037894' partition 0 offset 1 partno 0 count 1 parts 13 2025-04-04T17:21:44.755571Z node 12 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037894' partition 0 offset 0 partno 0 count 1 parts 0 size 16536 2025-04-04T17:21:44.755613Z node 12 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037894' partition 0 offset 2 partno 0 count 1 parts 0 size 16536 2025-04-04T17:21:44.758162Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 7016861 2025-04-04T17:21:44.759286Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvTransactionCompleted WriteId {12, 281474976715677} 2025-04-04T17:21:44.759319Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {12, 281474976715677}, 100001} 2025-04-04T17:21:44.759341Z node 12 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:21:44.759368Z node 12 :PERSQUEUE DEBUG: CacheProxy. 
Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00000|(+) to d0000000000_00000000000000000000_00000_0000000001_00000|(+) 2025-04-04T17:21:44.759412Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {12, 281474976715677}, 100001}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2025-04-04T17:21:44.759473Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976715677 LockNode: 12 Status: STATUS_NOT_FOUND 2025-04-04T17:21:44.759495Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxWriteInfo: WriteId {12, 281474976715677}, TxId 281474976715678, Status STATUS_SUBSCRIBED 2025-04-04T17:21:44.759515Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] there is already a transaction TxId 281474976715678 for WriteId {12, 281474976715677} 2025-04-04T17:21:44.759642Z node 12 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:21:44.759660Z node 12 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from D0000100001(+) to D0000100002(-) 2025-04-04T17:21:44.760816Z node 12 :PERSQUEUE DEBUG: Deleting head blob in L1. Partition 0 offset 0 count 1 actorID [12:7489509445700288905:2487] 2025-04-04T17:21:44.760895Z node 12 :PERSQUEUE DEBUG: Deleting head blob in L1. Partition 100001 offset 1 count 1 actorID [12:7489509445700288905:2487] 2025-04-04T17:21:44.760938Z node 12 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 0 offset 0 partno 0 count 1 parts 0 2025-04-04T17:21:44.760969Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:21:44.760994Z node 12 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100001 offset 1 partno 0 count 1 parts 0 2025-04-04T17:21:44.762059Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-04T17:21:44.762102Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T17:21:44.762150Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T17:21:44.761603Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {12, 281474976715677}, 100001} 2025-04-04T17:21:44.761686Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {12, 281474976715677} 2025-04-04T17:21:44.761723Z node 12 :PERSQUEUE WARN: [PQ: 72075186224037894] Unknown transaction 281474976715678 2025-04-04T17:21:44.761784Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:21:44.762592Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:21:44.762474Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:21:44.762637Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:21:44.763755Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0 grpc read done: success: 0 data: 2025-04-04T17:21:44.763778Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0 grpc read failed 2025-04-04T17:21:44.763824Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0 grpc closed 2025-04-04T17:21:44.763840Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|6122d514-a194c06-2e5666bb-7e48134_0 is DEAD 2025-04-04T17:21:44.764706Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:21:44.764760Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:21:44.764790Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:21:44.765097Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489509445700289072:2506] destroyed 2025-04-04T17:21:44.765127Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489509445700288999:2506] destroyed 2025-04-04T17:21:44.765150Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489509445700289002:2506] destroyed 2025-04-04T17:21:44.765188Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> KqpSinkTx::ExplicitTcl >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> KqpSnapshotRead::TestSnapshotExpiration-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-04-04T17:21:21.549541Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509345467607461:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:21.549681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bfe/r3tmp/tmpk5j6Wp/pdisk_1.dat 2025-04-04T17:21:21.990883Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:22.027093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:22.027203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:22.030845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16282, node 1 2025-04-04T17:21:22.189316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:22.189344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:22.189355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:22.189477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:22.662964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:5536 2025-04-04T17:21:24.656010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509358352510262:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.656134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.842365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:21:24.967306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509358352510435:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.967372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.976899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284953 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284953 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T17:21:25.033739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477835:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.033863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.033944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477855:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.034000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477856:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.034084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477859:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.034173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477861:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.034427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477865:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.034463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477858:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.035328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477883:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.035399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.035885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477887:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.036279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477891:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.037999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477918:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.038024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477920:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.038074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.038851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477927:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.038859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509362647477929:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:25.038951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... /Root 2025-04-04T17:21:44.994268Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725178. Ctx: { TraceId: 01jr0x5a0v64h05zsdsgd3dhyj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBhZjUwNjYtNjJlZGUxYzAtYTdiMTdlODYtNjRiZjJmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:44.994748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725177. Ctx: { TraceId: 01jr0x5a0tcc8az37p9zxjty15, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIyZWU4ZjctN2Y1NWY2MzgtZjI4NDI4ZGUtMWRlNjBmYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:44.995751Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725180. Ctx: { TraceId: 01jr0x5a0y6er6s8gzem1bnc1b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzliYWVlYmQtOTRkOThjMGMtODRlMGI0YjgtODk2YWYzODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:44.996142Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725181. Ctx: { TraceId: 01jr0x5a0y743zpzb4sqvvrprt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNjRhYjgtZTRjM2YxMjMtNjc5YzA4NTAtZWI1ZWUyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:44.996528Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725182. Ctx: { TraceId: 01jr0x5a106qa2p4xdxt7r5gbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc3ZTIzNmEtZDA5OGY0NTItMjI2YTc5MDktYTdmNDNjYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.001971Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725183. Ctx: { TraceId: 01jr0x5a103e0rdzzpjs11jx52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E4YWMxNDktNjk2ODQ2ZTEtYzkxZDJkNTgtNGZjOTA5Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.005997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725184. Ctx: { TraceId: 01jr0x5a192bvehde4kwd13f7h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRhNDU4YzMtYzIxMTFlNC03Zjg2MDZhMi0yNWJiN2U0YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.018268Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725185. Ctx: { TraceId: 01jr0x5a1f4bxg1z6wfq3hh7vw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI0MTM4NzItMWJjMGMxMjktOGY5MmEyMTQtYWE4NjE1NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.020343Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725186. Ctx: { TraceId: 01jr0x5a1f36pvg9v4w8mjsn8p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTljMmY2YzMtYzM4NjgyMDItZWM4ZjUzOWQtMThjODcyNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.020963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725187. Ctx: { TraceId: 01jr0x5a1gd6q836zn6xr5j4bt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIyZWU4ZjctN2Y1NWY2MzgtZjI4NDI4ZGUtMWRlNjBmYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.021392Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725188. 
Ctx: { TraceId: 01jr0x5a1gcaqjam467rbzk9rh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMxNDY5YjUtYzg4MzhmMzktZjI0MTMwYmUtZGJhZjc1OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.021798Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725190. Ctx: { TraceId: 01jr0x5a1pa9a5akrb774jg4av, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBhZjUwNjYtNjJlZGUxYzAtYTdiMTdlODYtNjRiZjJmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.021816Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725189. Ctx: { TraceId: 01jr0x5a1p2scdjw9w8y64xkbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzliYWVlYmQtOTRkOThjMGMtODRlMGI0YjgtODk2YWYzODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.022212Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725192. Ctx: { TraceId: 01jr0x5a1p3ne2vzndxdehnxqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E4YWMxNDktNjk2ODQ2ZTEtYzkxZDJkNTgtNGZjOTA5Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.022484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725191. Ctx: { TraceId: 01jr0x5a1p67rrxjk72pxyf0fn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNjRhYjgtZTRjM2YxMjMtNjc5YzA4NTAtZWI1ZWUyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.023860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725193. Ctx: { TraceId: 01jr0x5a1rdnra4z84pv3dctz9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc3ZTIzNmEtZDA5OGY0NTItMjI2YTc5MDktYTdmNDNjYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-04T17:21:45.030040Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725194. Ctx: { TraceId: 01jr0x5a208qfk24fvpm6qj4x6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRhNDU4YzMtYzIxMTFlNC03Zjg2MDZhMi0yNWJiN2U0YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284953 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T17:21:45.037459Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725195. Ctx: { TraceId: 01jr0x5a27e137xr7gst1qd0j1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI0MTM4NzItMWJjMGMxMjktOGY5MmEyMTQtYWE4NjE1NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.037914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725196. 
Ctx: { TraceId: 01jr0x5a293kxzbt0ttgdsxw28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTljMmY2YzMtYzM4NjgyMDItZWM4ZjUzOWQtMThjODcyNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.050447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725197. Ctx: { TraceId: 01jr0x5a2c1yzbjmk0bsanxd85, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIyZWU4ZjctN2Y1NWY2MzgtZjI4NDI4ZGUtMWRlNjBmYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.051058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725202. Ctx: { TraceId: 01jr0x5a2gbcw2eqss2kp8eb4h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc3ZTIzNmEtZDA5OGY0NTItMjI2YTc5MDktYTdmNDNjYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.051570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725203. Ctx: { TraceId: 01jr0x5a2gcna045pgk4x56s5e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBhZjUwNjYtNjJlZGUxYzAtYTdiMTdlODYtNjRiZjJmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.051986Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725198. Ctx: { TraceId: 01jr0x5a2c9szkjva0ycphqmtb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDMxNDY5YjUtYzg4MzhmMzktZjI0MTMwYmUtZGJhZjc1OGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.052417Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725200. Ctx: { TraceId: 01jr0x5a2f8jcx4rp6s50pfvyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAwNjRhYjgtZTRjM2YxMjMtNjc5YzA4NTAtZWI1ZWUyY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.052805Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725201. Ctx: { TraceId: 01jr0x5a2gcwkb4rp5mjhb4d2k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E4YWMxNDktNjk2ODQ2ZTEtYzkxZDJkNTgtNGZjOTA5Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:45.055901Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976725199. Ctx: { TraceId: 01jr0x5a2e8k2fjjaq4ve5kja5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzliYWVlYmQtOTRkOThjMGMtODRlMGI0YjgtODk2YWYzODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284953 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-04-04T17:21:45.814415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 281192 rowCount 4191 cpuUsage 0 2025-04-04T17:21:45.822425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 362702 rowCount 5295 cpuUsage 0 2025-04-04T17:21:45.915575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-04T17:21:45.915725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 4191, DataSize 281192 2025-04-04T17:21:45.915857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 5295, DataSize 362702 2025-04-04T17:21:45.916015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] Test command err: 2025-04-04T17:18:58.073185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508730674676120:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.073592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d36/r3tmp/tmpOKUuSJ/pdisk_1.dat 2025-04-04T17:18:58.262131Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:18:58.416628Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18246, node 1 2025-04-04T17:18:58.485164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.485257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.487019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:58.584060Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000d36/r3tmp/yandexV603rD.tmp 2025-04-04T17:18:58.584086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000d36/r3tmp/yandexV603rD.tmp 2025-04-04T17:18:58.584263Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000d36/r3tmp/yandexV603rD.tmp 2025-04-04T17:18:58.584405Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799191Z INFO: TTestServer started on Port 16947 GrpcPort 18246 TClient is connected to server localhost:16947 PQClient connected to localhost:18246 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:59.033749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:18:59.072243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:19:00.737992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508739264611397:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.737992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508739264611402:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.738108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.741131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.750319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508739264611411:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:00.944355Z node 1 :TX_PROXY ERROR: Actor# [1:7489508739264611475:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.984539Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508739264611490:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:00.985966Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTI1Y2E2MzgtMzExN2UyMzYtODEwODJkMDQtOWUwMjAwZDM=, ActorId: [1:7489508739264611394:2336], ActorState: ExecuteState, TraceId: 01jr0x09kv78djs9zhnkz1h20r, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:00.988149Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.023501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.053391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.135996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508743559579066:2631] 2025-04-04T17:19:03.071756Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508730674676120:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.071902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.377026Z :WriteToTopic_Demo_2 INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.388861Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.414917Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.415192Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.416065Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.416227Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.416249Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.416261Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.416289Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.416323Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.416366Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.450352Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508769329383143:2801], now have 1 active actors on pipe 2025-04-04T17:19:07.450377Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508769329383144:2802] connected; active server actors: 1 2025-04-04T17:19:07.450670Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-04T17:19:07.451488Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.451618Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.452148Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.452167Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.461787Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508730674676406 RawX2: 4294969485 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } 2025-04-04T17:19:07.461946Z node 1 :PERSQUEUE ... (0-0) 2025-04-04T17:21:44.998719Z :DEBUG: [/Root] Take Data. Partition 0. 
Read: {1, 0} (1-1) 2025-04-04T17:21:44.999050Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-04-04T17:21:44.999187Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 0} (3-3) 2025-04-04T17:21:44.999688Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 0} (4-4) 2025-04-04T17:21:44.999818Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 0} (5-5) 2025-04-04T17:21:44.999942Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 1} (6-6) 2025-04-04T17:21:45.000075Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 2} (7-7) 2025-04-04T17:21:45.000939Z :DEBUG: [/Root] Take Data. Partition 0. Read: {6, 0} (8-8) 2025-04-04T17:21:45.001074Z :DEBUG: [/Root] Take Data. Partition 0. Read: {6, 1} (9-9) 2025-04-04T17:21:45.001202Z :DEBUG: [/Root] Take Data. Partition 0. Read: {7, 0} (10-10) 2025-04-04T17:21:45.001339Z :DEBUG: [/Root] Take Data. Partition 0. Read: {8, 0} (11-11) 2025-04-04T17:21:45.001502Z :DEBUG: [/Root] Take Data. Partition 0. Read: {9, 0} (12-12) 2025-04-04T17:21:45.001632Z :DEBUG: [/Root] Take Data. Partition 0. Read: {9, 1} (13-13) 2025-04-04T17:21:45.001781Z :DEBUG: [/Root] Take Data. Partition 0. Read: {9, 2} (14-14) 2025-04-04T17:21:45.001919Z :DEBUG: [/Root] Take Data. Partition 0. Read: {9, 3} (15-15) 2025-04-04T17:21:45.004315Z :DEBUG: [/Root] Take Data. Partition 0. Read: {9, 4} (16-16) 2025-04-04T17:21:45.004451Z :DEBUG: [/Root] Take Data. Partition 0. Read: {10, 0} (17-17) 2025-04-04T17:21:45.004565Z :DEBUG: [/Root] Take Data. Partition 0. Read: {11, 0} (18-18) 2025-04-04T17:21:45.004668Z :DEBUG: [/Root] Take Data. Partition 0. Read: {11, 1} (19-19) 2025-04-04T17:21:45.004731Z :DEBUG: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] [] The application data is transferred to the client. Number of messages 20, size 2000000 bytes 2025-04-04T17:21:45.004790Z :DEBUG: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] [] Returning serverBytesSize = 0 to budget 0 20 2025-04-04T17:21:45.007584Z :DEBUG: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] [] Commit offsets [0, 20). 
Partition stream id: 1 2025-04-04T17:21:45.044698Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 grpc read done: success# 1, data# { read_request { bytes_size: 2002680 } } 2025-04-04T17:21:45.044899Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 got read request: guid# 24fd5970-2d7fd0c8-5f7049a0-5f8fcb01 2025-04-04T17:21:45.052805Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 20 } } } } 2025-04-04T17:21:45.053095Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 20 prev 0 end 20 by cookie 2 2025-04-04T17:21:45.053392Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-04-04T17:21:45.053427Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-04-04T17:21:45.053531Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 20 (startOffset 0) session test-consumer_12_1_12939548851105229625_v1 2025-04-04T17:21:45.053665Z node 12 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:21:45.054759Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 20 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:21:45.054823Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:21:45.054868Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2025-04-04T17:21:45.054982Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-04-04T17:21:45.055018Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 20 endOffset 20 with cookie 2 2025-04-04T17:21:45.055056Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 20 2025-04-04T17:21:45.058865Z :DEBUG: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 20 } } 2025-04-04T17:21:45.942375Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:19:20 2025-04-04T17:21:45.942461Z :INFO: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1007 BytesRead: 2000000 MessagesRead: 20 BytesReadCompressed: 2000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 
} 2025-04-04T17:21:45.952717Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 checking auth because of timeout 2025-04-04T17:21:45.952828Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 auth for : test-consumer 2025-04-04T17:21:45.954223Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 Handle describe topics response 2025-04-04T17:21:45.954350Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 auth is DEAD 2025-04-04T17:21:45.954440Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 auth ok: topics# 1, initDone# 1 2025-04-04T17:21:46.936609Z :INFO: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] Closing read session. Close timeout: 0.000000s 2025-04-04T17:21:46.936682Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:19:20 2025-04-04T17:21:46.936733Z :INFO: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2002 BytesRead: 2000000 MessagesRead: 20 BytesReadCompressed: 2000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:21:46.936844Z :NOTICE: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:21:46.936896Z :DEBUG: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] [] Abort session to cluster 2025-04-04T17:21:46.938093Z :NOTICE: [/Root] [/Root] [98a2e355-3d6e7e66-2d9ece4d-3efbafaf] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:21:46.938054Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 grpc read done: success# 0, data# { } 2025-04-04T17:21:46.938097Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 grpc read failed 2025-04-04T17:21:46.938133Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 grpc closed 2025-04-04T17:21:46.938176Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_12939548851105229625_v1 is DEAD 2025-04-04T17:21:46.939246Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2025-04-04T17:21:46.938418Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_12_1_12939548851105229625_v1 2025-04-04T17:21:46.939305Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0] PartitionId [0] Generation [2] Write session will now close 2025-04-04T17:21:46.938464Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489509445025446962:2549] destroyed 2025-04-04T17:21:46.938522Z node 12 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_12_1_12939548851105229625_v1 2025-04-04T17:21:46.939178Z node 12 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [12:7489509445025446959:2546] disconnected; active server actors: 1 2025-04-04T17:21:46.939349Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0] PartitionId [0] Generation [2] Write session: aborting 2025-04-04T17:21:46.939204Z node 12 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [12:7489509445025446959:2546] client test-consumer disconnected session test-consumer_12_1_12939548851105229625_v1 2025-04-04T17:21:46.940107Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2025-04-04T17:21:46.940177Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0] PartitionId [0] Generation [2] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-04T17:21:46.940237Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-04-04T17:21:46.940270Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0] PartitionId [0] Generation [2] Write session is aborting and will not restart 2025-04-04T17:21:46.940327Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0] PartitionId [0] Generation [2] Write session: destroy 2025-04-04T17:21:46.942138Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0 grpc read done: success: 0 data: 2025-04-04T17:21:46.947516Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0 grpc read failed 2025-04-04T17:21:46.947621Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0 grpc closed 
2025-04-04T17:21:46.947654Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|5ef34a18-73eb523e-80b3c15a-3490da21_0 is DEAD 2025-04-04T17:21:46.949613Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:21:46.949971Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489509440730479576:2522] destroyed 2025-04-04T17:21:46.950078Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:21:47.055608Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:21:47.055642Z node 12 :IMPORT WARN: Table profiles were not loaded >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> KqpLocks::DifferentKeyUpdate >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> KqpTx::InvalidateOnError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-04-04T17:21:19.500655Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.500688Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.500724Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.502659Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.521352Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.521576Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.524154Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.528002Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.528144Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.528257Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.528298Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:21:19.528931Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.528957Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.528976Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.529355Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-04T17:21:19.529995Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.530110Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.530310Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.530639Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.530736Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.530843Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.530897Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:21:19.531723Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.531750Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.531769Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.532081Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.532820Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.532970Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.533244Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.536258Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.536423Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.536505Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.536562Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:21:19.537558Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.537584Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.537638Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.537950Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.538562Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.538677Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.538910Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.548111Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.552771Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.552926Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-04-04T17:21:19.553009Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:21:19.554054Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.554080Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.554101Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.554484Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.555036Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.555170Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.555345Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.555766Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.555901Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.556025Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.556064Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T17:21:19.556700Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.556720Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.556748Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.557066Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.557690Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.557793Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.558029Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.558449Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.558558Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.558639Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.558677Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T17:21:19.560800Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.560820Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.560839Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.561181Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-04-04T17:21:19.561754Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.561868Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.562036Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.562635Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.562820Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.562955Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.563002Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-04-04T17:21:19.563923Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.563939Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.563968Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.564209Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:19.564952Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:19.565056Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.565216Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:19.566427Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.566746Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:19.566815Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:19.566847Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-04-04T17:21:19.571060Z :ReadSession INFO: Random seed for debugging is 1743787279571036 2025-04-04T17:21:19.927916Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509335225315436:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:19.928097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:19.968810Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509335859509005:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:19.968907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 got read request: guid# 7a18c45c-3a0824be-cccb4b8f-1a6ecab1 2025-04-04T17:21:38.167610Z :DEBUG: [/Root] [/Root] [bb571a85-cae5a366-fcce018c-1dfcaadb] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-04-04T17:21:38.167744Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2025-04-04T17:21:38.167837Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-04T17:21:38.167868Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-04-04T17:21:38.167900Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2025-04-04T17:21:38.168131Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:21:38.168158Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:21:38.168266Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_2607381100666664269_v1 2025-04-04T17:21:38.168405Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:21:38.170602Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:21:38.170647Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:21:38.170648Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2025-04-04T17:21:38.170843Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-04-04T17:21:38.170905Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2025-04-04T17:21:38.170940Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2025-04-04T17:21:38.171390Z :DEBUG: [/Root] [/Root] [bb571a85-cae5a366-fcce018c-1dfcaadb] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2025-04-04T17:21:38.259522Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0] Write session will now close 2025-04-04T17:21:38.259600Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0] Write session: aborting 2025-04-04T17:21:38.260539Z :INFO: 
[] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:21:38.260592Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0] Write session is aborting and will not restart 2025-04-04T17:21:38.260659Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0] Write session: destroy 2025-04-04T17:21:38.261580Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0 grpc read done: success: 0 data: 2025-04-04T17:21:38.261608Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0 grpc read failed 2025-04-04T17:21:38.261638Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0 grpc closed 2025-04-04T17:21:38.261651Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|adcb78f4-42444b3d-75be3bee-611e23ee_0 is DEAD 2025-04-04T17:21:38.262101Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:21:38.262495Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489509416829697101:2631] destroyed 2025-04-04T17:21:38.262531Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:21:40.873450Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-04-04T17:21:48.166405Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-04-04T17:21:48.263810Z :INFO: [/Root] [/Root] [bb571a85-cae5a366-fcce018c-1dfcaadb] Closing read session. Close timeout: 0.000000s 2025-04-04T17:21:48.263928Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-04-04T17:21:48.263985Z :INFO: [/Root] [/Root] [bb571a85-cae5a366-fcce018c-1dfcaadb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16445 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:21:48.264112Z :NOTICE: [/Root] [/Root] [bb571a85-cae5a366-fcce018c-1dfcaadb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:21:48.264169Z :DEBUG: [/Root] [/Root] [bb571a85-cae5a366-fcce018c-1dfcaadb] [dc1] Abort session to cluster 2025-04-04T17:21:48.264785Z :NOTICE: [/Root] [/Root] [bb571a85-cae5a366-fcce018c-1dfcaadb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:21:48.265458Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 grpc read done: success# 0, data# { } 2025-04-04T17:21:48.265516Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 grpc read failed 2025-04-04T17:21:48.265559Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 closed 2025-04-04T17:21:48.265650Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_2607381100666664269_v1 is DEAD 2025-04-04T17:21:48.266470Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7489509386764925512:2536] disconnected; active server actors: 1 2025-04-04T17:21:48.267405Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7489509386764925512:2536] client user disconnected session shared/user_1_1_2607381100666664269_v1 2025-04-04T17:21:48.267389Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_2607381100666664269_v1 2025-04-04T17:21:48.267432Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489509386764925515:2539] destroyed 2025-04-04T17:21:48.267502Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_2607381100666664269_v1 2025-04-04T17:21:48.773748Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489509459779370670:2735] TxId: 281474976710724. Ctx: { TraceId: 01jr0x5dcjchmkmr81f95b7dv5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU5MzAwYmItNThiNGFlMDYtMTBjZGY5MjgtNzU3NzNkNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-04T17:21:48.774707Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509459779370681:2742], TxId: 281474976710724, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDU5MzAwYmItNThiNGFlMDYtMTBjZGY5MjgtNzU3NzNkNWY=. TraceId : 01jr0x5dcjchmkmr81f95b7dv5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489509459779370670:2735], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:21:48.774721Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509459779370682:2743], TxId: 281474976710724, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NDU5MzAwYmItNThiNGFlMDYtMTBjZGY5MjgtNzU3NzNkNWY=. TraceId : 01jr0x5dcjchmkmr81f95b7dv5. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489509459779370670:2735], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:21:49.711770Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:49.711805Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:49.711844Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:49.712277Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:21:49.712944Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:21:49.713160Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:49.713437Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:21:49.714201Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:21:49.714659Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:21:49.714884Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-04T17:21:49.715006Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:21:49.715062Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:21:49.715100Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-04-04T17:21:49.715312Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:21:49.715368Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 22325, MsgBus: 10849 2025-04-04T17:21:40.840024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509428877092833:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:40.840435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000edf/r3tmp/tmptETNbO/pdisk_1.dat 2025-04-04T17:21:41.269555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:41.301696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:41.301826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:41.303440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22325, node 1 2025-04-04T17:21:41.430883Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:41.430927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:41.430938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:41.431090Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10849 TClient is connected to server localhost:10849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:42.189975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.233025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:21:42.404038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.562328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:42.636867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:43.964356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509441761996364:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.964474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.497310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.569154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.600408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.630726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.665069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.708233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.791154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509446056964178:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.791245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.791309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509446056964183:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.795930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:44.807443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509446056964185:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:21:44.884307Z node 1 :TX_PROXY ERROR: Actor# [1:7489509446056964240:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:45.838016Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509428877092833:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:45.838110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11649, MsgBus: 16251 2025-04-04T17:21:47.500767Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509455169616945:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:47.500831Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000edf/r3tmp/tmpW1shrS/pdisk_1.dat 2025-04-04T17:21:47.610532Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11649, node 2 2025-04-04T17:21:47.653127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:47.653229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:47.660799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:47.685326Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:47.685357Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:47.685365Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:47.685468Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16251 TClient is connected to server localhost:16251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:21:48.111228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:48.124870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:48.176380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:48.293683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:48.371968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.565931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509468054520598:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.566027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.608966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:50.642379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:50.666692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:50.693027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:50.723124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:50.797059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:50.874962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509468054521116:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.875048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.875256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509468054521121:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.879365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:50.895408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509468054521123:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:50.992381Z node 2 :TX_PROXY ERROR: Actor# [2:7489509468054521178:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:52.202549Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509476644456077:2500], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jr0x5gskchqj9b0wk5mgfnnq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjQ3MmE3OGUtZGQ5ZWU5NzItM2JjNGQ0ZWQtNjAwOTFjMWI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:21:52.202962Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509476644456079:2501], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jr0x5gskchqj9b0wk5mgfnnq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjQ3MmE3OGUtZGQ5ZWU5NzItM2JjNGQ0ZWQtNjAwOTFjMWI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489509476644456074:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:21:52.203398Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjQ3MmE3OGUtZGQ5ZWU5NzItM2JjNGQ0ZWQtNjAwOTFjMWI=, ActorId: [2:7489509472349488729:2488], ActorState: ExecuteState, TraceId: 01jr0x5gskchqj9b0wk5mgfnnq, Create QueryResponse for error on request, msg: 2025-04-04T17:21:52.273698Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjQ3MmE3OGUtZGQ5ZWU5NzItM2JjNGQ0ZWQtNjAwOTFjMWI=, ActorId: [2:7489509472349488729:2488], ActorState: ExecuteState, TraceId: 01jr0x5h2kcwex5anbqn0aevvy, Create QueryResponse for error on request, msg: >> KqpSnapshotIsolation::TSimpleOltp >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSinkLocks::DifferentKeyUpdate >> LocalPartition::WithoutPartitionDeadNode [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 25564, MsgBus: 29132 2025-04-04T17:21:40.849935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509427691540823:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:40.850261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed9/r3tmp/tmpWcAXYX/pdisk_1.dat 2025-04-04T17:21:41.261607Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25564, node 1 2025-04-04T17:21:41.301992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:41.302070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:41.303860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:41.426507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:41.426533Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:41.426550Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:41.426671Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29132 TClient is connected to server localhost:29132 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:42.185881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.206527Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:21:42.230593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.433521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.611213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:21:42.682192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.040369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509444871411595:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.040487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.494718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.522191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.559233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.598335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.634990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.672420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.725992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509444871412106:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.726077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.726383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509444871412111:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.732290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:44.744401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509444871412113:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:21:44.834001Z node 1 :TX_PROXY ERROR: Actor# [1:7489509444871412166:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:45.846610Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509427691540823:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:45.846691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:48.920296Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2FkODIxMjQtNzYyZDJhMWQtNzVkZjQyZWYtOGE3NmVlMTY=, ActorId: [1:7489509453461347025:2490], ActorState: ExecuteState, TraceId: 01jr0x5dkr9bcqtf8g7622sdsh, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 24574, MsgBus: 63631 2025-04-04T17:21:49.684010Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509466444755225:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:49.684106Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed9/r3tmp/tmp2dOmXL/pdisk_1.dat 2025-04-04T17:21:49.794297Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24574, node 2 2025-04-04T17:21:49.828210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:49.829154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:49.840776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:49.872649Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:49.872673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:49.872681Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:49.872803Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63631 TClient is connected to server localhost:63631 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:50.269746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.277174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.346305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.479896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.545851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:52.871573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509479329658870:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:52.871645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:52.911452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.940577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.966318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.992943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.019990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.050636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.091946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509483624626673:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.092036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509483624626678:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.092047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.095286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:53.110233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509483624626680:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:21:53.179740Z node 2 :TX_PROXY ERROR: Actor# [2:7489509483624626734:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:54.592488Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjY2MTg4MWItODEwNjM4MGEtNzhmOTMyOTItOWZmNjc4N2M=, ActorId: [2:7489509483624627026:2496], ActorState: ExecuteState, TraceId: 01jr0x5kbj2c80q1kdyw1hwey9, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T17:21:54.684324Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509466444755225:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:54.684404Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpLocks::DifferentKeyUpdate [GOOD]
>> KqpLocks::EmptyRange
>> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD]
>> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink
>> KqpLocks::TwoPhaseTx
>> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD]
>> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink
>> Secret::Validation [GOOD]
>> KqpSinkTx::ExplicitTcl [GOOD]
>> KqpSinkTx::Interactive
>> TxUsage::Sinks_Olap_WriteToTopicAndTable_3 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD]
Test command err:
2025-04-04T17:19:04.503686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:04.504165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:04.504213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001558/r3tmp/tmplKUeo4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3889, node 1 TClient is connected to server localhost:12278 2025-04-04T17:19:05.473012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:05.521521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:05.526385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:05.526447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:05.526477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:05.527740Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:05.565086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:05.565231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:05.577785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-04T17:19:17.408265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:17.408425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: Unexpected token '-' : syntax error... ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-04T17:19:27.797960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:784:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.798113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.804575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-04T17:19:27.994705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2728], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.994836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:27.995186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:899:2733], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:28.000579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-04T17:19:28.140011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:901:2735], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:19:28.428869Z node 1 :TX_PROXY ERROR: Actor# [1:997:2802] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:28.950381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:29.410309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-04T17:19:30.178418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-04T17:19:30.906279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:31.380392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.444264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:19:32.735069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-04T17:19:34.530688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:34.530770Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:10.668387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-04-04T17:20:11.693903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715723:0, at schemeshard: 72057594046644480 2025-04-04T17:20:13.495819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715732:0, at schemeshard: 72057594046644480 2025-04-04T17:20:14.081887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715735:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:40.447259Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3723:4875], TxId: 281474976715766, task: 1. Ctx: { TraceId : 01jr0x3apf1p9ssc6mp4avs1yz. SessionId : ydb://session/3?node_id=1&id=NDJhNDE0YzktNGFhYTgzMjUtYjg5NzE4MzQtNGU2ZTgzNWE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:20:40.448333Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3724:4876], TxId: 281474976715766, task: 2. Ctx: { TraceId : 01jr0x3apf1p9ssc6mp4avs1yz. SessionId : ydb://session/3?node_id=1&id=NDJhNDE0YzktNGFhYTgzMjUtYjg5NzE4MzQtNGU2ZTgzNWE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:3720:4803], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:20:40.448969Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDJhNDE0YzktNGFhYTgzMjUtYjg5NzE4MzQtNGU2ZTgzNWE=, ActorId: [1:3623:4803], ActorState: ExecuteState, TraceId: 01jr0x3apf1p9ssc6mp4avs1yz, Create QueryResponse for error on request, msg: 2025-04-04T17:20:40.460044Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jr0x3aes0dam0mz9sw8ttk92" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NDJhNDE0YzktNGFhYTgzMjUtYjg5NzE4MzQtNGU2ZTgzNWE=" tx_control { tx_id: "01jr0x3aes0dam0mz9sw8ttk92" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-04T17:21:04.409088Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4251:5268], for# root@builtin, access# DescribeSchema 2025-04-04T17:21:04.409208Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4251:5268], for# root@builtin, access# DescribeSchema 2025-04-04T17:21:04.411601Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4248:5265], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:04.413768Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjhmODI1NTAtZGRkNzBjN2EtMTI5MzUwMWUtZGU5MGQyMmQ=, ActorId: [1:4243:5261], ActorState: ExecuteState, TraceId: 01jr0x42c8fc5y7qta6k46188d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-04T17:21:16.054095Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-04T17:21:56.001821Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715848. Ctx: { TraceId: 01jr0x5mcx0kzedz5gjapfwsxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmM3ZjY3NTQtNzNiMWVlOC03ZTFjODIwNS00YzE1NzJlMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
>> KqpSinkMvcc::OlapNamedStatement [GOOD]
>> KqpSinkMvcc::OlapMultiSinks
>> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD]
>> PersQueueSdkReadSessionTest::StopResumeReadingData
>> KqpSnapshotIsolation::TConflictReadWriteOltp
>> Secret::ValidationQueryService [GOOD]
>> Compression::WriteZSTD [GOOD]
>> Compression::WriteWithMixedCodecs
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3 [GOOD]
Test command err:
2025-04-04T17:18:58.073302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508732567328916:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.075263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:58.266948Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e7d/r3tmp/tmpQgmU1T/pdisk_1.dat 2025-04-04T17:18:58.444983Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11567, node 1 2025-04-04T17:18:58.488868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.489001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.490924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:58.590811Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000e7d/r3tmp/yandex2JSUry.tmp 2025-04-04T17:18:58.590835Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000e7d/r3tmp/yandex2JSUry.tmp 2025-04-04T17:18:58.590980Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000e7d/r3tmp/yandex2JSUry.tmp 2025-04-04T17:18:58.591075Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799136Z INFO: TTestServer started on Port 16215 GrpcPort 11567 TClient is connected to server localhost:16215 PQClient connected to localhost:11567 WaitRootIsUp 'Root'... 
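The "WaitRootIsUp 'Root'..." / TClient::Ls exchange below is the test harness polling the scheme root of the freshly started server until describe requests succeed. A minimal sketch of that poll-until-ready pattern in C++, with hypothetical names (WaitUntilUp and the check callback are not the actual test-library API):

    #include <chrono>
    #include <functional>
    #include <thread>

    // Polls `check` until it returns true or `timeout` elapses.
    // Mirrors the WaitRootIsUp pattern: issue a describe request,
    // inspect the status, sleep briefly, repeat.
    bool WaitUntilUp(const std::function<bool()>& check,
                     std::chrono::milliseconds timeout = std::chrono::seconds(30),
                     std::chrono::milliseconds interval = std::chrono::milliseconds(100)) {
        const auto deadline = std::chrono::steady_clock::now() + timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (check()) {
                return true;  // e.g. Ls("Root") returned SUCCESS with a PathDescription
            }
            std::this_thread::sleep_for(interval);
        }
        return false;  // caller fails the test on timeout
    }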
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:59.053785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.086313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:00.583427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508741157264171:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.583456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508741157264200:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.583526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.586759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.596633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508741157264208:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:00.687984Z node 1 :TX_PROXY ERROR: Actor# [1:7489508741157264272:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.981222Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508741157264287:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:00.984999Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2U4NzUwOS1jYzllOWI5Zi03ZjU5ZjM1ZC03MjkxYWRiMA==, ActorId: [1:7489508741157264167:2335], ActorState: ExecuteState, TraceId: 01jr0x09ey6n20rjvpq7z53yej, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:00.988627Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.023951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.063239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.142000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508745452231877:2631] 2025-04-04T17:19:03.072941Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508732567328916:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.073032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.264211Z :Sinks_Oltp_WriteToTopic_1 INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.282789Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.306523Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508771222035965:2804] connected; active server actors: 1 2025-04-04T17:19:07.309228Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-04T17:19:07.309765Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.312919Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.314388Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.329031Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.334609Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.340275Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.340732Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.341281Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.341304Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.341328Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.341391Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.341443Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.342619Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.342702Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508771222035984:2467], now have 1 active actors on pipe 2025-04-04T17:19:07.351201Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508771222035962:2803], now have 1 active actors on pipe 2025-04-04T17:19:07.367010Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508732567329230 RawX2: 4294969496 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consume ... 
pic_A partitition {0, {4, 281474976715680}, 100000} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:21:58.563106Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] Process pending events. Count 0 2025-04-04T17:21:58.563487Z node 4 :PERSQUEUE INFO: new Cookie test-message_group_id|f1335f32-6cca8827-d2a60787-baadbeef_0 generated for partition {0, {4, 281474976715680}, 100000} topic 'topic_A' owner test-message_group_id 2025-04-04T17:21:58.563577Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {0, {4, 281474976715680}, 100000} 2025-04-04T17:21:58.563617Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:21:58.565280Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-04-04T17:21:58.565314Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-04-04T17:21:58.565594Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:21:58.565786Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-04-04T17:21:58.565809Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-04-04T17:21:58.565896Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Reserve bytes in transaction. Partition: 0, WriteId: { NodeId: 4 KeyId: 281474976715680 }, NeedSupportivePartition: 0 2025-04-04T17:21:58.565959Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-04T17:21:58.566020Z node 4 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:21:58.566159Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-04-04T17:21:58.566177Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-04-04T17:21:58.566223Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Write in transaction. 
Partition: 0, WriteId: { NodeId: 4 KeyId: 281474976715680 }, NeedSupportivePartition: 0 2025-04-04T17:21:58.566284Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 1 partNo : 0 messageNo: 1 size 216 offset: -1 2025-04-04T17:21:58.566480Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {4, 281474976715680}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 1 partNo 0 2025-04-04T17:21:58.567277Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {4, 281474976715680}, 100000} part blob complete sourceId '\0test-message_group_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 305 count 1 nextOffset 1 batches 1 2025-04-04T17:21:58.567845Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {4, 281474976715680}, 100000} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100000_00000000000000000000_00000_0000000001_00000| size 293 WTime 1743787318567 2025-04-04T17:21:58.568006Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:21:58.568066Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 100000 offset 0 partNo 0 count 1 size 293 2025-04-04T17:21:58.568947Z node 4 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 100000 offset 0 count 1 size 293 actorID [4:7489509491797605086:2486] 2025-04-04T17:21:58.569032Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 238 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:21:58.569101Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {4, 281474976715680}, 100000} 2025-04-04T17:21:58.569161Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {4, 281474976715680}, 100000}, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T17:21:58.569210Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T17:21:58.569303Z node 4 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:21:58.569367Z node 4 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037894' partition 100000 offset 0 partno 0 count 1 parts 0 size 293 2025-04-04T17:21:58.569908Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T17:21:58.570073Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1 written_in_tx { } } write_statistics { persisting_time { nanos: 1000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-04T17:21:58.570115Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] OnAck: seqNo=1, txId=01jr0x5px06k1xq8p0gyc5gqts, WriteCount=1, AckCount=1 2025-04-04T17:21:58.570152Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2025-04-04T17:21:58.574428Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976715680 LockNode: 4 Status: STATUS_NOT_FOUND 2025-04-04T17:21:58.574474Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxWriteInfo: WriteId {4, 281474976715680}, TxId (empty maybe), Status STATUS_SUBSCRIBED 2025-04-04T17:21:58.574490Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for WriteId {4, 281474976715680} 2025-04-04T17:21:58.574514Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {4, 281474976715680}, 100000} 2025-04-04T17:21:58.574552Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {4, 281474976715680}, 100000}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2025-04-04T17:21:58.574745Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:21:58.574788Z node 4 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2025-04-04T17:21:58.575370Z node 4 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037935;self_id=[4:7489509491797605837:2533];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037935;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:21:58.575391Z node 4 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037936;self_id=[4:7489509491797605849:2541];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:21:58.575497Z node 4 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037940;self_id=[4:7489509491797605875:2552];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037940;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:21:58.575527Z node 4 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037926;self_id=[4:7489509491797605841:2536];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037926;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:21:58.575647Z node 4 :PERSQUEUE DEBUG: Deleting head blob in L1. 
Partition 100000 offset 0 count 1 actorID [4:7489509491797605086:2486] 2025-04-04T17:21:58.575741Z node 4 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 0 partno 0 count 1 parts 0 2025-04-04T17:21:58.576382Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {4, 281474976715680}, 100000} 2025-04-04T17:21:58.576452Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {4, 281474976715680} 2025-04-04T17:21:58.576516Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:21:58.577034Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:21:59.358814Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [4:7489509508977477665:4575], now have 1 active actors on pipe 2025-04-04T17:21:59.363701Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-04T17:21:59.363743Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T17:21:59.363787Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T17:21:59.364199Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:21:59.364239Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:21:59.365762Z node 4 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0 grpc read done: success: 0 data: 2025-04-04T17:21:59.365784Z node 4 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0 grpc read failed 2025-04-04T17:21:59.365815Z node 4 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0 grpc closed 2025-04-04T17:21:59.365828Z node 4 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|26f7d986-80c1f272-e890de7b-3bb646bc_0 is DEAD 2025-04-04T17:21:59.366550Z node 4 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:21:59.366587Z node 4 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:21:59.367383Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [4:7489509504682509962:2949] destroyed 2025-04-04T17:21:59.367416Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [4:7489509504682509965:2949] destroyed 2025-04-04T17:21:59.367449Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
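The write-session trace above pairs each sent message's seqNo with its acknowledgement ("OnAck: seqNo=1, ..., WriteCount=1, AckCount=1") and reports "gracefully shut down, all writes complete" only once the two counters match. A minimal sketch of that bookkeeping in C++, with hypothetical names (TAckTracker is not the SDK's actual class):

    #include <cstdint>
    #include <queue>
    #include <stdexcept>

    // Tracks in-flight messages of a single-producer write session.
    // Sequence numbers are issued monotonically, acks must arrive in order,
    // and a graceful close is possible only once AckCount == WriteCount.
    class TAckTracker {
    public:
        uint64_t NextSeqNo() {
            Inflight.push(++WriteCount);  // one in-flight entry per send
            return WriteCount;
        }

        void OnAck(uint64_t seqNo) {
            if (Inflight.empty() || Inflight.front() != seqNo) {
                throw std::runtime_error("out-of-order or unknown ack");
            }
            Inflight.pop();
            ++AckCount;
        }

        bool AllWritesComplete() const {
            return Inflight.empty();  // matches "all writes complete" on close
        }

        uint64_t WriteCount = 0;  // last issued seqNo
        uint64_t AckCount = 0;
    private:
        std::queue<uint64_t> Inflight;
    };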
>> BasicUsage::BrokenCredentialsProvider [GOOD]
>> KqpLocks::EmptyRange [GOOD]
>> KqpLocks::EmptyRangeAlreadyBroken
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD]
Test command err:
2025-04-04T17:19:08.205141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:19:08.205454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:19:08.205488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013a0/r3tmp/tmpl8Ev4A/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21844, node 1 TClient is connected to server localhost:25022 2025-04-04T17:19:08.748600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:19:08.784072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:19:08.787548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:19:08.787618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:19:08.787653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:19:08.788119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:19:08.823686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:19:08.823797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:19:08.835098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-04T17:19:20.487307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:20.487430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:757:2632], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:20.487535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:20.493458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:19:20.513542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:761:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:19:20.567036Z node 1 :TX_PROXY ERROR: Actor# [1:812:2667] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:20.577258Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:822:2676], status: GENERIC_ERROR, issues:
:1:20: Error: Unexpected token '-' : syntax error... 2025-04-04T17:19:20.578813Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTMxMGY0MDEtZTI5NjliYTItNGRmZGQ2MjYtNTM4NDcwYzM=, ActorId: [1:745:2625], ActorState: ExecuteState, TraceId: 01jr0x0wwz60c8kfwvwtdk9ga0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: Unexpected token '-' : syntax error... ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-04-04T17:19:30.971624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-04T17:19:31.705045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:19:32.145363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-04T17:19:32.744334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-04-04T17:19:33.611344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.174698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:19:34.773141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:19:35.605851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:19:37.907122Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTJkMzgwYzMtZWU5NDg2MjQtYTAxMzJkNjctNjljNWM5Yjg=, ActorId: [1:841:2687], ActorState: ExecuteState, TraceId: 01jr0x16z34ejps390zm8k8193, Create QueryResponse for error on request, msg: 2025-04-04T17:19:37.909606Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jr0x16z34ejps390zm8k8193, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJkMzgwYzMtZWU5NDg2MjQtYTAxMzJkNjctNjljNWM5Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-04-04T17:19:39.421738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:19:39.421814Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:14.053364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-04-04T17:20:15.198119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-04-04T17:20:16.875086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715733:0, at schemeshard: 72057594046644480 2025-04-04T17:20:17.582790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715738:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2025-04-04T17:20:30.930138Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM5ZTYyYzItMzMyOGJhMjItNzY5MGUwNWQtMmRhZmNlNjA=, ActorId: [1:3312:4582], ActorState: ExecuteState, TraceId: 01jr0x30zz8w8xmkrt5rqkq8zy, Create QueryResponse for error on request, msg: 2025-04-04T17:20:30.932036Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jr0x30zz8w8xmkrt5rqkq8zy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWM5ZTYyYzItMzMyOGJhMjItNzY5MGUwNWQtMmRhZmNlNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-04-04T17:20:43.684809Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3724:4888], TxId: 281474976715768, task: 1. Ctx: { TraceId : 01jr0x3dv9bnmft7d8ph2dj7xr. SessionId : ydb://session/3?node_id=1&id=YzFjNjFiMGMtODUzYWFiMTAtYjc5Yzk1NjktY2ZkNWYyODI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:20:43.685560Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3725:4889], TxId: 281474976715768, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzFjNjFiMGMtODUzYWFiMTAtYjc5Yzk1NjktY2ZkNWYyODI=. TraceId : 01jr0x3dv9bnmft7d8ph2dj7xr. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:3721:4816], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:20:43.686132Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzFjNjFiMGMtODUzYWFiMTAtYjc5Yzk1NjktY2ZkNWYyODI=, ActorId: [1:3624:4816], ActorState: ExecuteState, TraceId: 01jr0x3dv9bnmft7d8ph2dj7xr, Create QueryResponse for error on request, msg: 2025-04-04T17:20:43.694215Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jr0x3dksc9dnz1mapgr10x6x" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YzFjNjFiMGMtODUzYWFiMTAtYjc5Yzk1NjktY2ZkNWYyODI=" tx_control { tx_id: "01jr0x3dksc9dnz1mapgr10x6x" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-04T17:20:56.051690Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGQyZjY5NGQtYmM3ZGE4YWEtY2NkNDY2ZjEtZjljNmRhZTg=, ActorId: [1:3935:5043], ActorState: ExecuteState, TraceId: 01jr0x3sky1ecmcfsfrjv52hma, Create QueryResponse for error on request, msg: 2025-04-04T17:20:56.053321Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715782. Ctx: { TraceId: 01jr0x3sky1ecmcfsfrjv52hma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQyZjY5NGQtYmM3ZGE4YWEtY2NkNDY2ZjEtZjljNmRhZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-04-04T17:21:07.580386Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4251:5281], for# root@builtin, access# DescribeSchema 2025-04-04T17:21:07.580480Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4251:5281], for# root@builtin, access# DescribeSchema 2025-04-04T17:21:07.581943Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4248:5278], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:07.583703Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTU1NzJhZTAtZmJjNGUxNDMtODZmODM2ZjMtMTQxYmFjNzg=, ActorId: [1:4244:5275], ActorState: ExecuteState, TraceId: 01jr0x45fh6m9vt4xpsqdkma30, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-04T17:21:19.184928Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator 2025-04-04T17:21:19.945215Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODExNGE5ZmMtYWEzMGEyOTAtYzJkOTRkMjEtODY0Y2EzOTQ=, ActorId: [1:4510:5475], ActorState: ExecuteState, TraceId: 01jr0x4gtf4pty41j836dcc2vh, Create QueryResponse for error on request, msg: 2025-04-04T17:21:19.946425Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715805. Ctx: { TraceId: 01jr0x4gtf4pty41j836dcc2vh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODExNGE5ZmMtYWEzMGEyOTAtYzJkOTRkMjEtODY0Y2EzOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-04-04T17:21:32.492153Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzFmNGM5NTgtNzQzMDA4YWMtMmIxZmRjNWYtOTk5ZTg4ODk=, ActorId: [1:4909:5775], ActorState: ExecuteState, TraceId: 01jr0x4wx43svkq4gbnhwa7t72, Create QueryResponse for error on request, msg: 2025-04-04T17:21:32.493361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715821. Ctx: { TraceId: 01jr0x4wx43svkq4gbnhwa7t72, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFmNGM5NTgtNzQzMDA4YWMtMmIxZmRjNWYtOTk5ZTg4ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-04-04T17:21:59.189373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715853. Ctx: { TraceId: 01jr0x5qg367wcp0s0kwv7rbgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EzOWYwYzItYWJiMDUyMGYtNGI1YWU1YzAtYzVhZjY4YjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink |93.7%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail-useSink >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-04-04T17:21:19.572655Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1743787279572632 2025-04-04T17:21:19.930676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509337961930188:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:19.930835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:19.979656Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509337291727011:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:19.979724Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:20.140234Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001729/r3tmp/tmpfThTo8/pdisk_1.dat 2025-04-04T17:21:20.151663Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:21:20.388101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.388246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:20.389050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.389117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:20.392275Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:21:20.392389Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:20.393214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:20.401385Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8667, node 1 2025-04-04T17:21:20.450002Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:20.450029Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:20.594713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001729/r3tmp/yandexNeJYhK.tmp 2025-04-04T17:21:20.594758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001729/r3tmp/yandexNeJYhK.tmp 2025-04-04T17:21:20.594957Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001729/r3tmp/yandexNeJYhK.tmp 2025-04-04T17:21:20.595121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:20.827965Z INFO: TTestServer started on Port 9612 GrpcPort 8667 TClient is connected to server localhost:9612 PQClient connected to localhost:8667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:21.176519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:21:23.095346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509354471596522:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.095352Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509354471596513:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.095420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.106431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:21:23.132230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509354471596527:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:21:23.195652Z node 2 :TX_PROXY ERROR: Actor# [2:7489509354471596555:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:23.460815Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509355141800481:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.461464Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489509354471596562:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.461792Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTZiYTczNDktYWI3M2UzZjAtODg3NTJmNTItZTRjNWI4OGY=, ActorId: [2:7489509354471596511:2308], ActorState: ExecuteState, TraceId: 01jr0x4mmnbd96qtb1rkpgdx5x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.464797Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.467923Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjBmYTVmODQtNzM5ZTcxNGUtMjkxYWI0NDUtZWZkMmQ3ZjY=, ActorId: [1:7489509355141800455:2336], ActorState: ExecuteState, TraceId: 01jr0x4mpg53zm1mnmwxtt8vkm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.468267Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.508850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.647194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.776575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:8667", true, true, 1000); 2025-04-04T17:21:24.113724Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0x4nc444t8hp3tdrwg5b33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBmNTMyOTMtMTQ5ZGY2YjMtODFmZTllZTMtYTY0MzE3Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
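[Editorial note] The SCHEME_ERROR issue chain above (code 1030 wrapping code 2003) is YDB's standard report for a query that references a table which does not exist yet or is not accessible to the caller. A minimal sketch that reproduces the same chain, assuming a fresh database where the table has not been created:

SELECT * FROM `/Root/PQ/Config/V2/Cluster`;
-- Expected failure, matching the issues logged above:
--   Error: Type annotation, code: 1030
--     Error: At function: KiReadTable!
--       Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not
--       exist or you do not have access permissions., code: 2003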
Subcribe to ClusterTracker from [1:7489509359436768177:2980] 2025-04-04T17:21:24.930737Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509337961930188:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:24.930846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:24.979876Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509337291727011:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:24.979970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:21:30.146096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:8667 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T17:21:30.224743Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:8667 MetaRequest { CmdCreateTopic { Topic: "rt3.d ... ted, have version: 1 2025-04-04T17:22:00.526696Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-04T17:22:00.528807Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-04T17:22:00.528888Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:20050 2025-04-04T17:22:00.536509Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T17:22:00.536786Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:22:00.536835Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-04T17:22:00.537687Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T17:22:00.537830Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:46970 2025-04-04T17:22:00.537853Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:46970 proto=v1 topic=test-topic durationSec=0 2025-04-04T17:22:00.537862Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:22:00.539530Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-04T17:22:00.539662Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T17:22:00.539672Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:22:00.539682Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T17:22:00.539702Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:22:00.542383Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:22:00.713877Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T17:22:00.714273Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7489509511356284399:2520] connected; active server actors: 1 2025-04-04T17:22:00.714340Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-04T17:22:00.714364Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T17:22:00.714777Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7489509511356284399:2520] disconnected; active server actors: 1 
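[Editorial note] The TTableHelper queries logged above read and upsert `/Root/PQ/SourceIdMeta2`, but the table's schema never appears in this log. The YQL below is a reconstruction inferred from the DECLARE clauses and WHERE conditions of those queries: the column types come from the declarations, and the primary key of (Hash, Topic, SourceId) is an assumption based on the lookup pattern, not taken from the test source.

-- Hypothetical reconstruction of the table assumed by the helper queries above.
CREATE TABLE `/Root/PQ/SourceIdMeta2` (
    Hash Uint32,        -- declared as $Hash AS Uint32
    Topic Utf8,         -- $Topic AS Utf8
    SourceId Utf8,      -- $SourceId AS Utf8
    CreateTime Uint64,  -- $CreateTime AS Uint64
    AccessTime Uint64,  -- $AccessTime AS Uint64
    Partition Uint32,   -- $Partition AS Uint32
    SeqNo Uint64,       -- $SeqNo AS Uint64
    PRIMARY KEY (Hash, Topic, SourceId)  -- assumed from the WHERE clause of SelectQuery
);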
2025-04-04T17:22:00.714808Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7489509511356284399:2520] disconnected no session 2025-04-04T17:22:00.844105Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T17:22:00.844153Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T17:22:00.844171Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7489509511356284360:2520] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-04T17:22:00.844204Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:22:00.846553Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7489509511356284423:2520], now have 1 active actors on pipe 2025-04-04T17:22:00.846765Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-04-04T17:22:00.848123Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:22:00.848173Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:22:00.848276Z node 6 :PERSQUEUE INFO: new Cookie src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T17:22:00.848397Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:22:00.848509Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:22:00.849792Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:22:00.849827Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:22:00.849906Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:22:00.850270Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0 2025-04-04T17:22:00.854831Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743787320854 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:22:00.854975Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:22:00.855197Z :INFO: [] MessageGroupId [src] SessionId [src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:22:00.855256Z :INFO: [] MessageGroupId [src] SessionId [src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0] Write session will now close 2025-04-04T17:22:00.855303Z :DEBUG: [] MessageGroupId [src] SessionId [src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0] Write session: aborting 2025-04-04T17:22:00.855822Z :INFO: [] MessageGroupId [src] SessionId [src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:22:00.855881Z :DEBUG: [] MessageGroupId [src] SessionId [src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0] Write session: destroy 2025-04-04T17:22:00.857494Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0 grpc read done: success: 0 data: 2025-04-04T17:22:00.857528Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0 grpc read failed 2025-04-04T17:22:00.857561Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0 grpc closed 2025-04-04T17:22:00.857578Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2ad9640d-e8c4e099-11ec6eec-f76be2af_0 is DEAD 2025-04-04T17:22:00.858298Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:22:00.861756Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7489509511356284423:2520] destroyed 2025-04-04T17:22:00.861828Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:22:00.877517Z :INFO: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] Starting read session 2025-04-04T17:22:00.877601Z :DEBUG: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] Starting session to cluster null (localhost:20050) 2025-04-04T17:22:00.879547Z :DEBUG: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:00.879595Z :DEBUG: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:00.879653Z :DEBUG: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] Reconnecting session to cluster null in 0.000000s 2025-04-04T17:22:00.882041Z :ERROR: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-04-04T17:22:00.882118Z :DEBUG: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:00.882155Z :DEBUG: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:00.882327Z :INFO: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-04-04T17:22:00.882524Z :NOTICE: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:22:00.882561Z :DEBUG: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-04-04T17:22:00.882652Z :INFO: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] Closing read session. Close timeout: 0.000000s 2025-04-04T17:22:00.882701Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:22:00.882744Z :INFO: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] Counters: { Errors: 1 CurrentSessionLifetimeMs: 5 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:22:00.882847Z :NOTICE: [/Root] [/Root] [ede2f995-58b6c09a-30667d6e-2077ead4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpTx::LocksAbortOnCommit >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 22220, MsgBus: 30001 2025-04-04T17:21:50.589589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509471817493221:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:50.589774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec7/r3tmp/tmpYlv0sl/pdisk_1.dat 2025-04-04T17:21:50.919815Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22220, node 1 2025-04-04T17:21:50.976261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:50.976412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:50.978392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:50.990019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:50.990046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:50.990059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:50.990287Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30001 TClient is connected to server localhost:30001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:51.489986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:51.514901Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
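[Editorial note] Stepping back to the secret-management cases earlier in this section: stripped of the scheduler and schemeshard noise, the REQUEST/RESULT markers trace the following YQL lifecycle. This is a condensed restatement of what the test already ran, with the observed outcomes as comments; nothing here is new behavior.

CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;   -- rejected: unexpected token '-' in the name
ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;    -- fails: secret secret1 not found for alter
CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;    -- succeeds
ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;    -- succeeds
CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);   -- succeeds
CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);   -- fails: secret secret2 not found
CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);  -- succeeds despite the key conflict
DROP OBJECT `secret1` (TYPE SECRET);   -- fails: secret1 is still used in access for test@test1
CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;    -- fails: Secret already exists: secret1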
2025-04-04T17:21:51.525200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:21:51.684217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:51.831587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:51.894747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:53.427303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509484702396884:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.427391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.618480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.647582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.689580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.712341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.735351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.761884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:53.797690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509484702397393:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.797766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509484702397398:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.797782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:53.801003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:53.809488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509484702397400:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:21:53.880189Z node 1 :TX_PROXY ERROR: Actor# [1:7489509484702397454:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:54.777740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.811985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.855144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:21:55.589708Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509471817493221:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:55.589812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4795, MsgBus: 19916 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec7/r3tmp/tmpcjABBM/pdisk_1.dat 2025-04-04T17:21:58.717006Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:58.717996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 4795, node 2 2025-04-04T17:21:58.762475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:58.762572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:58.765881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:58.778996Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:58.779020Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:58.779028Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:58.779142Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19916 TClient is connected to server localhost:19916 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:59.249606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:59.263602Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:21:59.277327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:59.361343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:21:59.547241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:59.635060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:01.986668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509519163582683:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:01.986758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:02.023189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:02.055124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:02.085689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:02.114733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:02.143751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:02.177086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:02.225344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509523458550487:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:02.225421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:02.225471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509523458550492:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:02.228997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:02.238001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509523458550494:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:02.305211Z node 2 :TX_PROXY ERROR: Actor# [2:7489509523458550547:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:03.578391Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715673; 2025-04-04T17:22:03.592150Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509527753518259:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7489509527753518132:2496]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7489509527753518259:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:22:03.607564Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509527753518252:2496], SessionActorId: [2:7489509527753518132:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7489509527753518132:2496]. isRollback=0 2025-04-04T17:22:03.608482Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTY4ZWI3NDYtMzdiYWIxYWYtMWFkM2U5MzUtMmEwZDM3MTM=, ActorId: [2:7489509527753518132:2496], ActorState: ExecuteState, TraceId: 01jr0x5w3m5ay0wtfdeyvhsssx, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7489509527753518253:2496] from: [2:7489509527753518252:2496] 2025-04-04T17:22:03.608624Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489509527753518253:2496] TxId: 281474976715673. Ctx: { TraceId: 01jr0x5w3m5ay0wtfdeyvhsssx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTY4ZWI3NDYtMzdiYWIxYWYtMWFkM2U5MzUtMmEwZDM3MTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:22:03.609716Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTY4ZWI3NDYtMzdiYWIxYWYtMWFkM2U5MzUtMmEwZDM3MTM=, ActorId: [2:7489509527753518132:2496], ActorState: ExecuteState, TraceId: 01jr0x5w3m5ay0wtfdeyvhsssx, Create QueryResponse for error on request, msg: >> KqpSnapshotIsolation::TConflictWriteOltp >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> TxUsage::WriteToTopic_Demo_50 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 20484, MsgBus: 17554 2025-04-04T17:21:41.820388Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509430835415686:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:41.820445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed4/r3tmp/tmpdRBOeX/pdisk_1.dat 2025-04-04T17:21:42.212302Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20484, node 1 2025-04-04T17:21:42.230189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:42.230337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:42.232757Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:42.236141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:42.248126Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:42.286078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:42.286100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:42.286108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:42.286228Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17554 TClient is connected to server localhost:17554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:42.831294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:44.735256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509443720318093:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.735256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509443720318088:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.735350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.741158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:44.757018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509443720318102:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:44.824076Z node 1 :TX_PROXY ERROR: Actor# [1:7489509443720318153:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:45.047095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.175228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:46.163282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:46.919499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509430835415686:2209];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:46.928186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 28231, MsgBus: 5329 2025-04-04T17:21:53.666701Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509481785466459:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:53.666767Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed4/r3tmp/tmprHekQE/pdisk_1.dat 2025-04-04T17:21:53.752634Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28231, node 2 2025-04-04T17:21:53.794982Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:53.795386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:53.798291Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:53.820476Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:53.820500Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:53.820505Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:53.820611Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5329 TClient is connected to server localhost:5329 WaitRootIsUp 'Root'... 
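[Editorial note] The recurring `Resource pool default not found` warnings in these startups are emitted while `/Root/.metadata/workload_manager/pools/default` is being created for the first time; the `Transaction ... completed, doublechecking` retry and the later `path exist, request accepts it` message show the pool appearing and creation being reconciled. For orientation only, a user-defined pool is declared with YQL along these lines. This is a hedged sketch: the setting names below come from YDB's workload-manager documentation, not from this log.

CREATE RESOURCE POOL olap_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting name, not shown in this log
    QUEUE_SIZE = 100              -- assumed setting name, not shown in this log
);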
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:54.240293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:56.435691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509494670369002:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:56.435876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509494670369007:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:56.435929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:56.440906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:56.449575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509494670369016:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:21:56.508721Z node 2 :TX_PROXY ERROR: Actor# [2:7489509494670369067:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:56.547898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:56.585665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:57.697696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:58.688009Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509481785466459:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:58.688224Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:59.289880Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2025-04-04T17:21:59.291853Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509507555279571:2969], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7489509503260311967:2969]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7489509507555279571:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:21:59.308212Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509507555279563:2969], SessionActorId: [2:7489509503260311967:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7489509503260311967:2969]. isRollback=0 2025-04-04T17:21:59.308621Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGRkYWQ5ZTctMzJjZWY3ZWItZTBiZTAzZGMtNDMyMWI1Zjk=, ActorId: [2:7489509503260311967:2969], ActorState: ExecuteState, TraceId: 01jr0x5qxy77gshty9krkpxns0, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7489509507555279564:2969] from: [2:7489509507555279563:2969] 2025-04-04T17:21:59.308704Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489509507555279564:2969] TxId: 281474976715666. Ctx: { TraceId: 01jr0x5qxy77gshty9krkpxns0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGRkYWQ5ZTctMzJjZWY3ZWItZTBiZTAzZGMtNDMyMWI1Zjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:21:59.309589Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGRkYWQ5ZTctMzJjZWY3ZWItZTBiZTAzZGMtNDMyMWI1Zjk=, ActorId: [2:7489509503260311967:2969], ActorState: ExecuteState, TraceId: 01jr0x5qxy77gshty9krkpxns0, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 22527, MsgBus: 11798 2025-04-04T17:16:55.092008Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508204646635228:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:55.092153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001614/r3tmp/tmpAOcOBX/pdisk_1.dat 2025-04-04T17:16:55.419821Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22527, node 1 2025-04-04T17:16:55.481529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:55.481936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:16:55.489243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:55.511254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:55.511275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:55.511286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:55.511529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11798 TClient is connected to server localhost:11798 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:55.985786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:16:55.998965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:16:56.009426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:56.158933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:16:56.328801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:56.410499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:58.134494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508217531538877:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.134600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.361251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.399255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.467996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.506537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.539110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.608765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:58.695112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508217531539399:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.695256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.695559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508217531539404:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:58.698878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:58.710121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508217531539406:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:58.768453Z node 1 :TX_PROXY ERROR: Actor# [1:7489508217531539459:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:59.679503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:17:00.092129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508204646635228:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:00.092210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17734, MsgBus: 9502 2025-04-04T17:17:02.222828Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489508233442498039:2206];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001614/r3tmp/tmpJs19Ir/pdisk_1.dat 2025-04-04T17:17:02.273682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:17:02.369471Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:02.407176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:02.407255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:02.415021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17734, node 2 2025-04-04T17:17:02.537329Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:02.537343Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:02.537348Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:02.537426Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9502 TClient is connected to server localhost:9502 WaitRootIsUp 'Root'... 
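
The repeated "Resource pool default not found or you don't have access permissions" warnings in these start-up phases are a bootstrap race, not a configuration error: the workload service lazily creates `.metadata/workload_manager/pools/default` on first use, the first pool fetches lose the race and log NOT_FOUND, TPoolCreatorActor schedules a retry, and the later "path exist, request accepts it" message shows the creation converging. If a client needs to wait for that convergence explicitly, polling the scheme entry is one option. A sketch with the Python SDK; the endpoint and the assumption that a missing path surfaces as SchemeError are mine, not from the log:

import time
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

# Poll until the lazily created default resource pool appears in the
# scheme; a missing path is expected while bootstrap is still racing.
deadline = time.monotonic() + 30
while True:
    try:
        entry = driver.scheme_client.describe_path(
            "/Root/.metadata/workload_manager/pools/default"
        )
        print("default pool ready:", entry.name)
        break
    except ydb.issues.SchemeError:
        if time.monotonic() > deadline:
            raise
        time.sleep(1)

In practice ordinary queries do not need this: the pool creation retries server-side and the warnings stop once it lands.
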
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:02.988104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.007376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.096235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:03.221991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... ctorState: ExecuteState, TraceId: 01jr0x500c5eq9vwy4mvgz7tg9, Create QueryResponse for error on request, msg: 2025-04-04T17:21:35.678522Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x50g73hax564h2hdmjy6q, Create QueryResponse for error on request, msg: 2025-04-04T17:21:36.132006Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489509410337137084:2490] TxId: 281474976715872. Ctx: { TraceId: 01jr0x50ydd1cjsd4vp716dryy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 436ms } {
: Error: Cancelling after 436ms during execution } ] 2025-04-04T17:21:36.183642Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489509410337137093:4027], TxId: 281474976715872, task: 3. Ctx: { TraceId : 01jr0x50ydd1cjsd4vp716dryy. SessionId : ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489509410337137084:2490], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-04-04T17:21:36.186905Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x50ydd1cjsd4vp716dryy, Create QueryResponse for error on request, msg: 2025-04-04T17:21:36.632576Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x51e3d7sb29ycr9kkj877, Create QueryResponse for error on request, msg: 2025-04-04T17:21:37.444607Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x527cbpaenwj7m9tj3k19, Create QueryResponse for error on request, msg: 2025-04-04T17:21:37.897190Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x52ngbq3387taenxv10ag, Create QueryResponse for error on request, msg: 2025-04-04T17:21:38.602330Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x53bf24py18y9m231zzm7, Create QueryResponse for error on request, msg: 2025-04-04T17:21:39.050197Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x53shbk8xphnfq73jax8v, Create QueryResponse for error on request, msg: 2025-04-04T17:21:39.525277Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x548714vbezx2prdgz89t, Create QueryResponse for error on request, msg: 2025-04-04T17:21:40.326163Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x55164c1x6y04bvandpnf, Create QueryResponse for error on request, msg: 2025-04-04T17:21:40.782990Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x55fd91tymrk0ss1738w6, Create QueryResponse for error on request, msg: 2025-04-04T17:21:41.261599Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x55ya3n0dh7hbc8myyy8v, Create QueryResponse for error on request, msg: 2025-04-04T17:21:42.091124Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x56r4adzf0akft86r50zc, Create QueryResponse for error on request, msg: 2025-04-04T17:21:43.005498Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x57mn0s5dehjn81qgjdsq, Create QueryResponse for error on request, 
msg: 2025-04-04T17:21:43.470497Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5835e873qrxfypehrgrj, Create QueryResponse for error on request, msg: 2025-04-04T17:21:44.042980Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x58jnd0hmwgyrn6t0cskt, Create QueryResponse for error on request, msg: 2025-04-04T17:21:44.894496Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x59fj1sba4z1hemw47y2y, Create QueryResponse for error on request, msg: 2025-04-04T17:21:45.365866Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x59yb1ewfj56ccspq87eb, Create QueryResponse for error on request, msg: 2025-04-04T17:21:45.958782Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5agpcqgmmgnj9czhqnkj, Create QueryResponse for error on request, msg: 2025-04-04T17:21:46.853039Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5bck6wtdrjs1ce2d5zrb, Create QueryResponse for error on request, msg: 2025-04-04T17:21:47.728275Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5c7x47n2gcbd6ac8k9ke, Create QueryResponse for error on request, msg: 2025-04-04T17:21:48.544213Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5d1b8y6qqngv2tpff48c, Create QueryResponse for error on request, msg: 2025-04-04T17:21:49.410604Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5dw7a69mb94wft9w0x41, Create QueryResponse for error on request, msg: 2025-04-04T17:21:50.284987Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5eqmb76fckbm9md97h67, Create QueryResponse for error on request, msg: 2025-04-04T17:21:51.242476Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5fnfdwx2n7v1a6vprndd, Create QueryResponse for error on request, msg: 2025-04-04T17:21:52.218516Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5gkt2hy032jerd5p2g16, Create QueryResponse for error on request, msg: 
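
The KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead block above exercises deadline-driven cancellation: the test arms a cancel timer (436 ms in the excerpt), the executer reports CANCELLED with the paired issues "Request canceled after 436ms" and "Cancelling after 436ms during execution", compute tasks receive "Terminate execution", and the session answers the request with an error response. The long run of "Create QueryResponse for error on request" warnings is that loop repeating with varying deadlines. From a client, the equivalent behaviour comes from per-request cancel/timeout settings; a sketch with the Python SDK, where the method names reflect my understanding of ydb.BaseRequestSettings and the table and endpoint are placeholders:

import ydb

def run_with_deadline(session):
    settings = (
        ydb.BaseRequestSettings()
        .with_cancel_after(0.436)  # ask the server to cancel after 436 ms
        .with_timeout(1.0)         # client-side transport deadline
    )
    session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT COUNT(*) FROM `/Root/KV`;",
        commit_tx=True,
        settings=settings,
    )

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
try:
    pool.retry_operation_sync(run_with_deadline)
except ydb.issues.Cancelled:
    # Mirrors the CANCELLED status with "Cancelling after ... during
    # execution" seen in the log when the deadline fires mid-query.
    print("query cancelled by the cancel_after deadline")
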
2025-04-04T17:21:53.106163Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5hfk5qgd3nebaxdr00jc, Create QueryResponse for error on request, msg: 2025-04-04T17:21:53.936597Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5j9gajmep1p59yebvx3m, Create QueryResponse for error on request, msg: 2025-04-04T17:21:54.837633Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5k5jfvve2jqn26pm3wx0, Create QueryResponse for error on request, msg: 2025-04-04T17:21:55.739871Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5m1qbgs3bv0jjdx1wpwz, Create QueryResponse for error on request, msg: 2025-04-04T17:21:56.626088Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5mxb51fa9vb0a0nphfpk, Create QueryResponse for error on request, msg: 2025-04-04T17:21:57.627189Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5nwj915p6bzc5dw2bwcf, Create QueryResponse for error on request, msg: 2025-04-04T17:21:58.602525Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5ptw77w6bx1pqzzn8k5t, Create QueryResponse for error on request, msg: 2025-04-04T17:21:59.543054Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5qrab7pmp8d9mb7wm2wk, Create QueryResponse for error on request, msg: 2025-04-04T17:22:00.414491Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5rkf56aswg1h9qa1tb2p, Create QueryResponse for error on request, msg: 2025-04-04T17:22:01.262404Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5sdycg5pm4hrr1bpenpc, Create QueryResponse for error on request, msg: 2025-04-04T17:22:02.234055Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5tc79v5endee7mqkacke, Create QueryResponse for error on request, msg: 2025-04-04T17:22:03.122497Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmY5NDk3MjYtZDUzYzZjYzQtMjczMTI3ZDQtOWM1MWE5Y2Y=, ActorId: [4:7489509028085041874:2490], ActorState: ExecuteState, TraceId: 01jr0x5v7w3pjan571fed4vt7e, Create QueryResponse for error on request, msg: ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:222:2060] recipient: [1:217:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:222:2060] recipient: [1:217:2142] Leader for TabletID 72057594046678944 is [1:238:2156] sender: [1:239:2060] recipient: [1:217:2142] 2025-04-04T17:21:22.142412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:21:22.142511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:21:22.142553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:21:22.142591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:21:22.142643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:21:22.142693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:21:22.142750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:21:22.142810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:21:22.143159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:22.219865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:21:22.219938Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:22.234661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:22.234880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:21:22.235009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:21:22.242944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:21:22.243145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:21:22.243767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.244526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:21:22.248803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.256978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:21:22.257068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.257191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:21:22.257239Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:21:22.257286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:21:22.257407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.263798Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:238:2156] sender: [1:345:2060] recipient: [1:17:2064] 2025-04-04T17:21:22.410199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:21:22.410446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.410679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:21:22.410951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:21:22.411007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.413596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.413739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:21:22.413916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.413980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:21:22.414031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:21:22.414061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:21:22.415957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.416011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:21:22.416044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:21:22.417878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.417920Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.417983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.418051Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.421680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:21:22.423493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:21:22.423674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:21:22.424697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.424830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 231 RawX2: 4294969447 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:21:22.424887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.425185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:21:22.425241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.425408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:21:22.425484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:21:22.427468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:21:22.427512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:21:22.427686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.427736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:312:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:21:22.428162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.428225Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:21:22.428318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:21:22.428352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.428387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:21:22.428416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.428451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:21:22.428489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.428521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:21:22.428546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:21:22.428604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:21:22.428667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:21:22.428704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:21:22.430491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:21:22.430589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:21:22.430624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... hemaChanged> complete, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-04T17:22:05.656000Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:22:05.656042Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2025-04-04T17:22:05.656173Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:970:2741] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-04T17:22:05.656291Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:232:2152], Recipient [7:970:2741]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-04T17:22:05.656333Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:22:05.656375Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2025-04-04T17:22:05.656469Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2025-04-04T17:22:05.656748Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:232:2152], Recipient [7:232:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:22:05.656787Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:22:05.656859Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-04T17:22:05.656904Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2025-04-04T17:22:05.657025Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:22:05.657052Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-04-04T17:22:05.657086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-04T17:22:05.657123Z node 7 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-04-04T17:22:05.657156Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-04T17:22:05.657193Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-04-04T17:22:05.657536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:22:05.657582Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:22:05.657620Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2025-04-04T17:22:05.657691Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:978:2747] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-04-04T17:22:05.657782Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:232:2152], Recipient [7:978:2747]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-04-04T17:22:05.657810Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:22:05.657835Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2025-04-04T17:22:05.657871Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2025-04-04T17:22:05.658066Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:232:2152], Recipient [7:232:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:22:05.658107Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:22:05.658159Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:22:05.658203Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T17:22:05.658376Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:22:05.658412Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-04T17:22:05.658435Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T17:22:05.658478Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-04-04T17:22:05.658502Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T17:22:05.658529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-04-04T17:22:05.658602Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:574:2402] message: TxId: 104 2025-04-04T17:22:05.658654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T17:22:05.658721Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:22:05.658755Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:22:05.658872Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-04-04T17:22:05.658915Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 
2025-04-04T17:22:05.658965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-04T17:22:05.658998Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-04-04T17:22:05.659017Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-04T17:22:05.659033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-04T17:22:05.659071Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-04-04T17:22:05.661609Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:22:05.661739Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:22:05.661827Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:574:2402] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-04-04T17:22:05.661979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:22:05.662021Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1029:2784] 2025-04-04T17:22:05.662352Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1031:2786], Recipient [7:232:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:05.662405Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:05.662432Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-04T17:22:05.663413Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:548:2101], Recipient [7:232:2152] 2025-04-04T17:22:05.663462Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:22:05.665191Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:05.665588Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T17:22:05.665654Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T17:22:05.666861Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:22:05.669352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, 
txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:05.669623Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-04-04T17:22:05.669709Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:22:05.670121Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:22:05.670165Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-04-04T17:22:05.670592Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1101:2856], Recipient [7:232:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:05.670651Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:05.670691Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:22:05.670827Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:574:2402], Recipient [7:232:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-04-04T17:22:05.670865Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:22:05.670928Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:22:05.671034Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:22:05.671069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1099:2854] 2025-04-04T17:22:05.671271Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1101:2856], Recipient [7:232:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:05.671319Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:05.671353Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 >> TxUsage::Sinks_Oltp_WriteToTopics_3 [GOOD] >> KqpSinkTx::OlapLocksAbortOnCommit >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_4 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-04-04T17:21:21.572364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509345713790418:2144];send_to=[0:7307199536658146131:7762515]; 
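
The TSchemeshardBackgroundCleaningTest::CreateTableInTemp trace above shows the guard rail for temporary directories: a CREATE TABLE WITH INDEXES proposed under /MyRoot/tmp with AllowCreateInTempDir: false is rejected at propose time with StatusPreconditionFailed and the reason "path is temporary", so no transaction is ever planned (which is why the later NotifyTxCompletion for txId 105 reports "unknown transaction"). A client attempting equivalent DDL against a temporary directory should expect a precondition failure. A sketch with the Python SDK; note that whether a directory is temporary is server-side state set at creation, so this is illustrative only and the path is a stand-in:

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def create_in_tmp(session):
    # `/Root/tmp` stands in for a schemeshard-managed temporary
    # directory; durable tables may not be created inside one.
    session.execute_scheme(
        "CREATE TABLE `/Root/tmp/NotTempTable` ("
        "  key Uint64, value Utf8, PRIMARY KEY (key)"
        ");"
    )

try:
    pool.retry_operation_sync(create_in_tmp)
except ydb.issues.PreconditionFailed as exc:
    # Matches the StatusPreconditionFailed / "path is temporary"
    # rejection recorded in the log.
    print("rejected:", exc)
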
2025-04-04T17:21:21.572819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c05/r3tmp/tmpYQBnyl/pdisk_1.dat 2025-04-04T17:21:21.986735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:21.988068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:21.992835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:22.001653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14040, node 1 2025-04-04T17:21:22.064446Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:22.065128Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:22.191283Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:22.191304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:22.191313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:22.191429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:22.650620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:21761 2025-04-04T17:21:24.352643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509358598693289:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.352755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.723089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:21:24.895997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509358598693466:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.896560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.896642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509358598693471:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.901971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:21:24.917973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509358598693473:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:21:25.015842Z node 1 :TX_PROXY ERROR: Actor# [1:7489509362893660846:2814] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:25.266399Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0x4pcy4hqe3n8y56w9c9rx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.295190Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0x4psbf2jd3x2vx0r8grdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.308458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0x4pst0fv139h11d481zjb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.319790Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0x4pt5b45ddztzvcw6ytvg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.331017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0x4pthbc5ad7jck5t9jg6q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.342656Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0x4ptwds7ywby2epr9cjh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.355334Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0x4pv97b55e2aaf8rarqm3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.368131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jr0x4pvn8qgqyc3bqs45atdb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.380709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0x4pw25h5sjswmys7gcnve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.393681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01jr0x4pwg89f261w5ghbp6m0s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.405171Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jr0x4pwv6z22bs37ppy2cxx0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.418265Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0x4px89cf9t67rw29x4mke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.429905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0x4pxk74xy8w4ssq8bctvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.441553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0x4pxz0ggb08jk5t8hp09v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.457889Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0x4pyfemr8yst1yhn5tqzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.472964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0x4pyzbe01b3ww2bbqzj40, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.483955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0x4pz970xt14ahq5tv6nb2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.496069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr0x4pzpcrny0m1qt9vnt6sh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:21:25.510152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr0x4q048ppf5fn5ftrp3pv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RhN2Y5YzctYjBmNTFjNmItOGQ5Y2RlNzAtNzg0Y2ZmNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T ... t, use /Root 2025-04-04T17:22:03.922808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721499. 
Ctx: { TraceId: 01jr0x5wggddsmmss7z7e3gd43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc2Y2RmMjQtN2U2MGZkYmMtZTYyOTRmYzgtYTQ4MDk3YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.928174Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721500. Ctx: { TraceId: 01jr0x5wgm5r4d6zadc7scd91t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQyYTU3NWEtYmZhYmQ2ZDgtYWJhYmMwYTgtODE3MmFjZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.928585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721501. Ctx: { TraceId: 01jr0x5wgm5ebc76q9z0rfdsf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2Mjk2YWEtZjRmMzBjM2QtZjQyMmNlZjEtZGM3OWM5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.928650Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721502. Ctx: { TraceId: 01jr0x5wgpfv8v6dq25grj9zkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdhYTg0MjUtYThkMzY0YzktYzY5NjQwZmYtZDA0ZDg0YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.929033Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721503. Ctx: { TraceId: 01jr0x5wgpaws0jek3n36ndz9p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJiMWJhMi02YTZlZTdhZS1lYTNlM2RjMC1jODM0OTU3ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.929920Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721504. Ctx: { TraceId: 01jr0x5wgm2fkx6rsnegrqjst0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ5Yzc4ZDQtMjJjMGRiNDItNmI1NzRlNjQtZGNiNjVhY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.934086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721505. Ctx: { TraceId: 01jr0x5wgmfwg2m9hrp0v1dmr6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiNDE2ZjUtZWEyYjYxOTctZTkxMDM3NDMtOWQ3MjkzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.938835Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721506. Ctx: { TraceId: 01jr0x5wgv2rrd6jscyrvkmnw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM3OWE4YzMtODcxOGVmZDItZjZlZDc2ZTItOWJhNmZiMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.939624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721507. Ctx: { TraceId: 01jr0x5wgv6g4k507aq2pdszrq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRkMmZmYzYtYTliNGJjOTktNmZkYWM3MjktNzk4MzU0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.939906Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721509. Ctx: { TraceId: 01jr0x5wgwbtd073djf2nj23vm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc2Y2RmMjQtN2U2MGZkYmMtZTYyOTRmYzgtYTQ4MDk3YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.939998Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721508. 
Ctx: { TraceId: 01jr0x5wgv2kvwyq8nfdgbpab1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkZWI0ZWItZTQ4MDUwZjktZmNkOGRkZGMtN2FlYmNlNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.946349Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721511. Ctx: { TraceId: 01jr0x5wh46nmtgyse592kjrwr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2Mjk2YWEtZjRmMzBjM2QtZjQyMmNlZjEtZGM3OWM5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.946457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721512. Ctx: { TraceId: 01jr0x5wh35s3dfs4qxznm40as, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQyYTU3NWEtYmZhYmQ2ZDgtYWJhYmMwYTgtODE3MmFjZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.946852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721510. Ctx: { TraceId: 01jr0x5wh44drsnc6e1btmj6tn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ5Yzc4ZDQtMjJjMGRiNDItNmI1NzRlNjQtZGNiNjVhY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.947216Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721513. Ctx: { TraceId: 01jr0x5wh218sk0n0wf12jv8yq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdhYTg0MjUtYThkMzY0YzktYzY5NjQwZmYtZDA0ZDg0YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.948301Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721514. Ctx: { TraceId: 01jr0x5wh261cwtcmry95a9gxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJiMWJhMi02YTZlZTdhZS1lYTNlM2RjMC1jODM0OTU3ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.955726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721515. Ctx: { TraceId: 01jr0x5whb626xcz35k449q2x6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiNDE2ZjUtZWEyYjYxOTctZTkxMDM3NDMtOWQ3MjkzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-04-04T17:22:03.963573Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721518. Ctx: { TraceId: 01jr0x5whj5ra5n5yxkn77kqh4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc2Y2RmMjQtN2U2MGZkYmMtZTYyOTRmYzgtYTQ4MDk3YWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.964057Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721516. Ctx: { TraceId: 01jr0x5whj9q5cxs2t8d9pkete, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM3OWE4YzMtODcxOGVmZDItZjZlZDc2ZTItOWJhNmZiMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.964368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721517. Ctx: { TraceId: 01jr0x5whjf4918bmr0vjh5sqs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRkMmZmYzYtYTliNGJjOTktNmZkYWM3MjktNzk4MzU0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.966206Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721519. 
Ctx: { TraceId: 01jr0x5whp5f9b7xe5kd0wed4z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdkZWI0ZWItZTQ4MDUwZjktZmNkOGRkZGMtN2FlYmNlNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.966741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721520. Ctx: { TraceId: 01jr0x5whp2gshzer6ktz953fg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdhYTg0MjUtYThkMzY0YzktYzY5NjQwZmYtZDA0ZDg0YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.967581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721521. Ctx: { TraceId: 01jr0x5whp86h7cn05hzyqwfv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJiMWJhMi02YTZlZTdhZS1lYTNlM2RjMC1jODM0OTU3ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284848 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T17:22:03.983009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721522. Ctx: { TraceId: 01jr0x5whwak8ven6yptj96n5v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ5Yzc4ZDQtMjJjMGRiNDItNmI1NzRlNjQtZGNiNjVhY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.983680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721523. Ctx: { TraceId: 01jr0x5wj22hxeqqd74rnevnh0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiNDE2ZjUtZWEyYjYxOTctZTkxMDM3NDMtOWQ3MjkzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:22:03.986497Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721524. Ctx: { TraceId: 01jr0x5whwf47pdw6fevvjbtd4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI2Mjk2YWEtZjRmMzBjM2QtZjQyMmNlZjEtZGM3OWM5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284848 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T17:22:04.943267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 64460 rowCount 1052 cpuUsage 0 2025-04-04T17:22:04.946083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 58010 rowCount 948 cpuUsage 0 2025-04-04T17:22:05.043301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-04T17:22:05.043491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 1052, DataSize 64460 2025-04-04T17:22:05.043650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 948, DataSize 58010 2025-04-04T17:22:05.044052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> KqpSnapshotIsolation::TConflictWriteOltpNoSink >> KqpTx::TooManyTx >> KqpSinkTx::OlapInvalidateOnError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 27656, MsgBus: 1183 2025-04-04T17:21:40.832929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509426989297135:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:40.833048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000edc/r3tmp/tmpraiQfE/pdisk_1.dat 2025-04-04T17:21:41.257448Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:41.264357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:41.264449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:41.270272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27656, node 1 
2025-04-04T17:21:41.426490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:41.426519Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:41.426533Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:41.426687Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1183 TClient is connected to server localhost:1183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:42.157856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.183593Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:21:43.979998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509439874199552:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.980095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509439874199542:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.980322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.987025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:43.996800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509439874199571:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:44.066378Z node 1 :TX_PROXY ERROR: Actor# [1:7489509444169166918:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:44.505162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.621542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.702888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.836721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509426989297135:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:45.836797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:49.443609Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-04-04T17:21:49.444637Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509465644012437:2970], SessionActorId: [1:7489509452759109945:2970], Got LOCKS BROKEN for table. ShardID=72075186224037989, Sink=[1:7489509465644012437:2970].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:21:49.445229Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509465644012437:2970], SessionActorId: [1:7489509452759109945:2970], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7489509452759109945:2970]. isRollback=0 2025-04-04T17:21:49.445466Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njg3OGU1ZmYtN2Q2ODk4NzAtOTEzMDc5MWUtYjI5ZGRhMGU=, ActorId: [1:7489509452759109945:2970], ActorState: ExecuteState, TraceId: 01jr0x5e3rc72503w8r98vrw9a, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7489509465644012438:2970] from: [1:7489509465644012437:2970] 2025-04-04T17:21:49.445540Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489509465644012438:2970] TxId: 281474976710666. Ctx: { TraceId: 01jr0x5e3rc72503w8r98vrw9a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg3OGU1ZmYtN2Q2ODk4NzAtOTEzMDc5MWUtYjI5ZGRhMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:21:49.445737Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njg3OGU1ZmYtN2Q2ODk4NzAtOTEzMDc5MWUtYjI5ZGRhMGU=, ActorId: [1:7489509452759109945:2970], ActorState: ExecuteState, TraceId: 01jr0x5e3rc72503w8r98vrw9a, Create QueryResponse for error on request, msg: 2025-04-04T17:21:49.448187Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; WAIT_INDEXATION: 0 2025-04-04T17:21:49.448403Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1743787309488 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 7107, MsgBus: 22221 2025-04-04T17:21:55.251124Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509489293051839:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:55.251329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000edc/r3tmp/tmpjNjKrj/pdisk_1.dat 2025-04-04T17:21:55.356778Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7107, node 2 2025-04-04T17:21:55.397001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:55.397109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:55.411875Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:55.442442Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:55.442479Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:55.442486Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:55.442623Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22221 TClient is connected to server localhost:22221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:21:55.849864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:55.858181Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:21:58.574919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509502177954366:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:58.575019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:58.577180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509502177954401:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:58.581572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:58.590665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509502177954403:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:58.688823Z node 2 :TX_PROXY ERROR: Actor# [2:7489509502177954454:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:58.742376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:58.791590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:59.880675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:00.653308Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509489293051839:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:00.745546Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:01.723048Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGJlN2E1My01MzhhNjE1Yy1mZDZiYTg0OC1jMGRjMGY4NQ==, ActorId: [2:7489509515062864610:2969], ActorState: ExecuteState, TraceId: 01jr0x5ta9bm7xvbgqshjw512r, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpLocks::Invalidate >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 10899, MsgBus: 10728 2025-04-04T17:21:51.231066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509474654051610:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:51.231333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec2/r3tmp/tmpGDRack/pdisk_1.dat 2025-04-04T17:21:51.546945Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10899, node 1 2025-04-04T17:21:51.594830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:51.595128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:51.599445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:51.632065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-04-04T17:21:51.632087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:51.632095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:51.632250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10728 TClient is connected to server localhost:10728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:52.137923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:52.166437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:52.302547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:52.452131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:52.513980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:54.154880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509487538955283:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:54.155025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:54.363436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.389883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.438148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.462861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.486872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.514162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:54.552919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509487538955792:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:54.553024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:54.553110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509487538955798:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:54.556081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:54.564377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509487538955800:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:21:54.666294Z node 1 :TX_PROXY ERROR: Actor# [1:7489509487538955854:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19715, MsgBus: 17057 2025-04-04T17:21:56.709154Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509496907116676:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec2/r3tmp/tmprooFRY/pdisk_1.dat 2025-04-04T17:21:56.787667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:56.799089Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19715, node 2 2025-04-04T17:21:56.847664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:56.847749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:56.854972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:56.878814Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:56.878845Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:56.878856Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:56.878992Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17057 TClient is connected to server localhost:17057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:57.290414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:57.300073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:21:57.363778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:21:57.512009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:57.588151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:59.891088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509509792020152:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:59.891199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ... suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:00.010202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:00.040976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:00.073761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:00.108164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:00.162799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509514086987958:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:00.162870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:00.162972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509514086987963:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:00.166545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:00.186057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509514086987965:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:00.253243Z node 2 :TX_PROXY ERROR: Actor# [2:7489509514086988019:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:01.715584Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509496907116676:2219];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:01.715646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:01.925169Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGIxZTVhNTAtZTExNTgwYTQtN2VjOThlZjItODZlOWNkYWU=, ActorId: [2:7489509518381955576:2488], ActorState: ExecuteState, TraceId: 01jr0x5tevdstmhar1qmh818zf, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 23485, MsgBus: 13375 2025-04-04T17:22:02.980773Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489509521761764394:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:02.980840Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec2/r3tmp/tmptkUEZp/pdisk_1.dat 2025-04-04T17:22:03.167854Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:03.203565Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:03.203681Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:03.205229Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23485, node 3 2025-04-04T17:22:03.253399Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:03.253425Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:03.253440Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:03.253672Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13375 TClient is connected to server localhost:13375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:22:03.828213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:03.849000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:03.951091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:04.114061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:04.214658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.444893Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489509538941635363:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:06.444987Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:06.481355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.509837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.535567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.560799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.590997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.657490Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.732788Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489509538941635879:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:06.732852Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:06.732873Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489509538941635884:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:06.735943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:06.744860Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489509538941635886:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:06.819709Z node 3 :TX_PROXY ERROR: Actor# [3:7489509538941635938:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:08.004453Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489509521761764394:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:08.004774Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:08.409024Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MWM0ZTg0ZDYtZWVkOWE5ZDItYmFjN2UxNzktMzkxZTg0YmY=, ActorId: [3:7489509543236603494:2488], ActorState: ExecuteState, TraceId: 01jr0x60n9b5fyw3ae8geb99hq, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::MixEnginesOldNew >> KqpTx::RollbackTx >> KqpSinkTx::Interactive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2025-04-04T17:21:03.142846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509269588344906:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.142912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001aa9/r3tmp/tmpnbodyF/pdisk_1.dat 2025-04-04T17:21:03.642144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.671803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.671899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.677645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13048, node 1 2025-04-04T17:21:03.924161Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.924185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.924197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.924311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.401434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
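[Editor's note] YdbSdkSessionsPool::StressTestAsync1, which passes above after a long simulated run, hammers the SDK's built-in session pool with many concurrent callers. The sketch below shows the rough shape of such a workload, reusing the driver setup from the previous sketch; TSessionPoolSettings/MaxActiveSessions follow the C++ SDK's fluent-settings pattern, and the cap of 100 and the thread/iteration counts are arbitrary illustrative values, not the test's parameters.

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <thread>
#include <vector>

using namespace NYdb;
using namespace NYdb::NTable;

void StressSessionPool(TDriver& driver) {
    // Cap the pool so excess callers queue for a session instead of
    // creating a new one per request.
    TTableClient client(driver,
        TClientSettings().SessionPoolSettings(
            TSessionPoolSettings().MaxActiveSessions(100)));

    std::vector<std::thread> workers;
    for (int w = 0; w < 8; ++w) {
        workers.emplace_back([&client] {
            for (int i = 0; i < 100; ++i) {
                // Each call borrows a pooled session and returns it on exit;
                // under contention this exercises the pool's wait queue.
                client.RetryOperationSync([](TSession session) -> TStatus {
                    return session.ExecuteDataQuery(
                        "SELECT 1;",
                        TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
                    ).GetValueSync();
                });
            }
        });
    }
    for (auto& t : workers) {
        t.join();
    }
}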
2025-04-04T17:21:08.143016Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509269588344906:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:08.143111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:18.639057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:21:18.639107Z node 1 :IMPORT WARN: Table profiles were not loaded >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpLocks::MixedTxFail-useSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-04-04T17:21:38.547376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:38.547812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:38.547873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ffa/r3tmp/tmpzvpfbx/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15482, node 1 TClient is connected to server localhost:6454 2025-04-04T17:21:39.063954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:39.107522Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:39.111061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:39.111116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:39.111160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:39.111525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:39.147365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:39.147545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:39.159067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:39.281674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-04T17:21:39.400520Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:687:2579], Recipient [1:740:2624]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:21:39.402170Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:687:2579], Recipient [1:740:2624]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:21:39.402607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:740:2624];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:21:39.430760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:740:2624];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:21:39.431363Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-04T17:21:39.440919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:21:39.441197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:21:39.441473Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:21:39.441561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:21:39.441643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:21:39.441730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:21:39.441791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:21:39.441867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:21:39.441972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:21:39.442047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:21:39.442112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:21:39.442220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:21:39.457133Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:687:2579], Recipient [1:740:2624]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:21:39.458713Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-04T17:21:39.459047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:21:39.459107Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:21:39.459328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:21:39.459489Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:21:39.459557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:21:39.459589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:21:39.459665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:21:39.459707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:21:39.459737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:21:39.459755Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:21:39.459885Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:21:39.459927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:21:39.459963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:21:39.459990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:21:39.460080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:21:39.460147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:21:39.460197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:21:39.460243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:21:39.460306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:21:39.460335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:21:39.460355Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:21:39.460397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:21:39.460426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:21:39.460448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:21:39.460785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-04-04T17:21:39.460850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-04-04T17:21:39.460916Z node 1 :TX_COLUMNSHARD INFO: tablet_id= ... ;raw_bytes=96858239;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18276048;raw_bytes=616499685;count=6;records=517509} inactive {blob_bytes=26334856;raw_bytes=883196628;count=17;records=748294} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut tId=72075186224037888;c=1;:74/0:size=3447;count=18;;1:size=60872;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445376;count=1;;10:size=1445528;count=1;;11:size=4137168;count=5;;12:size=1445744;count=1;;13:size=1445360;count=1;;14:size=1445608;count=1;;15:size=1445920;count=1;;16:size=1445360;count=1;;17:size=808584;count=1;;18:size=2314192;count=2;;19:size=1402896;count=1;;20:size=1403096;count=1;;21:size=1402840;count=1;;22:size=1445400;count=1;;23:size=1987408;count=3;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72075186224037888;c=0;:74/0:size=3516;count=19;;1:size=60872;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445376;count=1;;10:size=1445528;count=1;;11:size=4137168;count=5;;12:size=1445744;count=1;;13:size=1445360;count=1;;14:size=1445608;count=1;;15:size=1445920;count=1;;16:size=1445360;count=1;;17:size=808584;count=1;;18:size=2314192;count=2;;19:size=1402896;count=1;;20:size=1403096;count=1;;21:size=1402840;count=1;;22:size=1445400;count=1;;23:size=1987408;count=3;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-04T17:22:08.459909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-04T17:22:08.459997Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=with_appended.cpp:65;portions=26,27,28,;task_id=552ef3ca-117911f0-b206f69e-758ef410; 2025-04-04T17:22:08.460312Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:26;path_id:3;records_count:56040;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1974264;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-04T17:22:08.460542Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/352828.000000s;; 2025-04-04T17:22:08.460667Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:27;path_id:3;records_count:56040;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1974144;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-04T17:22:08.460788Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/408868.000000s;; 2025-04-04T17:22:08.460878Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:28;path_id:3;records_count:28016;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:987568;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-04T17:22:08.460965Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/436884.000000s;; 2025-04-04T17:22:08.461042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::552ef3ca-117911f0-b206f69e-758ef410; 2025-04-04T17:22:08.461128Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21193880;portions_count:28;); 2025-04-04T17:22:08.461197Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:22:08.461275Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:22:08.461384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=1; 2025-04-04T17:22:08.461463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-04-04T17:22:08.461517Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:22:08.461584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:22:08.461635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:22:08.461728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.600000s; 2025-04-04T17:22:08.461791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:22:08.462025Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 3 VERIFY failed (2025-04-04T17:22:08.462297Z): tablet_id=72075186224037888;task_id=552ef3ca-117911f0-b206f69e-758ef410;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x17DE1949) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 
(0x17DD290B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x190DCC96) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4340 (0x451E2464) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x2B3DC2FD) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+810 (0x1D8D7A8A) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3491 (0x1D8C8943) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3435 (0x1D73903B) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+1257 (0x1D6DD9A9) NActors::IActor::Receive(TAutoPtr&)+253 (0x190BDA0D) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x3017AE85) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12610 (0x30173702) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x3017DA74) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x30334EA4) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x30333FC3) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x3032C223) NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x3032BDFB) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+3620 (0x179CA414) std::__y1::__function::__func, void ()>::operator()()+280 (0x179DACE8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x18291E16) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x18262B19) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1094 (0x179D9CD6) NUnitTest::TTestFactory::Execute()+2438 (0x182643E6) NUnitTest::RunMain(int, char**)+5213 (0x1828C74D) ??+0 (0x7FD2386C8D90) __libc_start_main+128 (0x7FD2386C8E40) _start+41 (0x15916029) >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 12439, MsgBus: 28459 2025-04-04T17:21:47.958686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509456203950168:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:47.958741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ecf/r3tmp/tmpUTE24B/pdisk_1.dat 2025-04-04T17:21:48.333124Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12439, node 1 2025-04-04T17:21:48.349665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:48.350368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
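[Editor's note] The ColumnShardTiers::TTLUsage trace above (the tiering_info=__DEFAULT/...;$$DELETE/...; lines, followed by the CompactionsLimit.Dec() >= 0 verification failure) exercises TTL-driven eviction on a column shard. As a hedged sketch of what such a test configures, the scheme query below enables row TTL on a table; the table path and column name are placeholders, the interval is illustrative, and the call reuses a TTableClient set up as in the first sketch.

// Assumes: TTableClient client as constructed earlier; names are illustrative.
TStatus status = client.RetryOperationSync([](TSession session) -> TStatus {
    return session.ExecuteSchemeQuery(R"(
        ALTER TABLE `/Root/olapStore/olapTable`
        SET (TTL = Interval("PT1S") ON ts);
    )").GetValueSync();
});
// Rows whose `ts` is older than the interval become eligible for background
// deletion; that eviction/compaction work is what the TX_COLUMNSHARD log
// entries above record.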
2025-04-04T17:21:48.352834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:48.391869Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:48.391896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:48.391903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:48.392036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28459 TClient is connected to server localhost:28459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:48.931160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:48.951004Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:21:50.849448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509469088852723:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.849518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509469088852702:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.849649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:50.853559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:50.861897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509469088852729:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:50.943037Z node 1 :TX_PROXY ERROR: Actor# [1:7489509469088852780:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:51.143410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:51.242315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.080805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.958892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509456203950168:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:52.959033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:53.312049Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTc0YzNhYTEtNjJkNDAxYmItZjkxN2I5Y2ItYTdiNGYzMGU=, ActorId: [1:7489509481973763081:2969], ActorState: ReadyState, TraceId: 01jr0x5j4pev3agrryn2nm818p, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 24031, MsgBus: 32421 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ecf/r3tmp/tmpwkmKVI/pdisk_1.dat 2025-04-04T17:21:59.379325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:59.440420Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:59.455013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:59.455096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:59.456964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24031, node 2 2025-04-04T17:21:59.532499Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:59.532531Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:59.532538Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:59.532648Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32421 TClient is connected to server localhost:32421 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:59.967161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:59.972001Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:02.354969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509523123008337:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:02.355059Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509523123008327:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:02.355229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:02.360032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:02.368489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509523123008341:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:02.437222Z node 2 :TX_PROXY ERROR: Actor# [2:7489509523123008394:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:02.489888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:02.533613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:03.601720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::BeginTransactionBadMode [GOOD] Test command err: Trying to start YDB, gRPC: 24962, MsgBus: 9102 2025-04-04T17:21:48.874860Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509460999259693:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:48.875562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eca/r3tmp/tmpJk9g1c/pdisk_1.dat 2025-04-04T17:21:49.335362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:49.342992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:49.343082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:49.346047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24962, node 1 2025-04-04T17:21:49.434752Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:49.434774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:49.434781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:49.434900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9102 TClient is connected to server localhost:9102 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:49.964755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:49.984480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.137954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.269173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:50.327393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:51.863115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509473884163227:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:51.863332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:52.141842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.172395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.198175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.227751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.274447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.307763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.349121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509478179131036:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:52.349187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:52.349265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509478179131041:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:52.352542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:21:52.360829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509478179131043:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:21:52.441307Z node 1 :TX_PROXY ERROR: Actor# [1:7489509478179131097:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:53.871873Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509460999259693:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:53.871955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:04.319714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:04.319750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:05.734317Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509534013706835:2640], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jr0x5y1d2bhmbd9d2es0qfch. SessionId : ydb://session/3?node_id=1&id=NDg3YWExMGEtMjRjNWJhNzktNzJiYmExZTctOWRkZTUzZDI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743787313226/18446744073709551615 shard 72075186224037888 with lowWatermark v1743787313422/18446744073709551615 (node# 1 state# Ready) } } 2025-04-04T17:22:05.734665Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509534013706835:2640], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jr0x5y1d2bhmbd9d2es0qfch. SessionId : ydb://session/3?node_id=1&id=NDg3YWExMGEtMjRjNWJhNzktNzJiYmExZTctOWRkZTUzZDI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743787313226/18446744073709551615 shard 72075186224037888 with lowWatermark v1743787313422/18446744073709551615 (node# 1 state# Ready) } }. 2025-04-04T17:22:05.734975Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509534013706837:2641], TxId: 281474976710682, task: 2. Ctx: { TraceId : 01jr0x5y1d2bhmbd9d2es0qfch. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NDg3YWExMGEtMjRjNWJhNzktNzJiYmExZTctOWRkZTUzZDI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489509534013706831:2488], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:22:05.735302Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDg3YWExMGEtMjRjNWJhNzktNzJiYmExZTctOWRkZTUzZDI=, ActorId: [1:7489509482474098650:2488], ActorState: ExecuteState, TraceId: 01jr0x5y1d2bhmbd9d2es0qfch, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 13280, MsgBus: 15964 2025-04-04T17:22:06.445746Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509539851537242:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:06.445808Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eca/r3tmp/tmpf9lfaW/pdisk_1.dat 2025-04-04T17:22:06.522893Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13280, node 2 2025-04-04T17:22:06.570835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:06.570916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:06.572382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:06.580506Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:06.580533Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:06.580545Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:06.580677Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15964 TClient is connected to server localhost:15964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:07.005941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:07.022510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
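[Editor's note] The KQP_COMPUTE errors above ("Table id 2 has no snapshot at ... with lowWatermark ...") show a read whose pinned snapshot aged out below the shard's low watermark: the source aborts with ABORTED and the query must restart on a fresh snapshot. Below is a minimal sketch of issuing such a snapshot read with retries, under the same SDK assumptions as the first sketch; the table name is a placeholder.

// Assumes: TTableClient client as constructed earlier.
// SnapshotRO pins a consistent snapshot for the read; if the snapshot expires
// mid-flight the status is ABORTED, and RetryOperationSync replays the read
// on a newly acquired snapshot.
TStatus status = client.RetryOperationSync([](TSession session) -> TStatus {
    return session.ExecuteDataQuery(
        "SELECT COUNT(*) FROM `/Root/KeyValue`;",   // placeholder table
        TTxControl::BeginTx(TTxSettings::SnapshotRO()).CommitTx()
    ).GetValueSync();
});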
2025-04-04T17:22:07.087283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:07.262287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:07.333092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:09.576630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509552736440912:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:09.576739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:09.615230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:09.647788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:09.680035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:09.709511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:09.740740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:09.776971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:09.859080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509552736441428:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:09.859166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:09.859449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509552736441433:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:09.863079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:09.873942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509552736441435:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:09.954595Z node 2 :TX_PROXY ERROR: Actor# [2:7489509552736441490:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink >> KqpSinkTx::SnapshotRO >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 13779, MsgBus: 22524 2025-04-04T17:21:40.832964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509426774074436:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:40.833049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed7/r3tmp/tmpl3Gd9C/pdisk_1.dat 2025-04-04T17:21:41.245992Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:41.263075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:41.263194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:41.271548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13779, node 1 2025-04-04T17:21:41.426850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:41.426886Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:41.426908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:41.427040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22524 TClient is connected to server localhost:22524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:21:42.158390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:43.877641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509439658976854:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.877776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.877998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509439658976866:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.885297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:43.898043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509439658976868:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:43.969797Z node 1 :TX_PROXY ERROR: Actor# [1:7489509439658976919:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:44.505244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.618462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.641187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:46.370936Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509426774074436:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:46.494949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:47.192000Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-04T17:21:47.192217Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-04T17:21:47.192345Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-04T17:21:47.192556Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509456838854656:2969], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7489509452543887129:2969]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7489509456838854656:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:21:47.193227Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509456838854642:2969], SessionActorId: [1:7489509452543887129:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7489509452543887129:2969]. isRollback=0 2025-04-04T17:21:47.193565Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MThlMGQwNWItNjEzYWU4ZTctYjVkNTRlYWUtNmFjODJmNjA=, ActorId: [1:7489509452543887129:2969], ActorState: ExecuteState, TraceId: 01jr0x5c4z7yz1z6r87ddf1946, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7489509456838854643:2969] from: [1:7489509456838854642:2969] 2025-04-04T17:21:47.193683Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489509456838854643:2969] TxId: 281474976710665. Ctx: { TraceId: 01jr0x5c4z7yz1z6r87ddf1946, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThlMGQwNWItNjEzYWU4ZTctYjVkNTRlYWUtNmFjODJmNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:21:47.194007Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MThlMGQwNWItNjEzYWU4ZTctYjVkNTRlYWUtNmFjODJmNjA=, ActorId: [1:7489509452543887129:2969], ActorState: ExecuteState, TraceId: 01jr0x5c4z7yz1z6r87ddf1946, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 19670, MsgBus: 9873 2025-04-04T17:21:53.264099Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509484266102137:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:53.264176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed7/r3tmp/tmpCTik7E/pdisk_1.dat 2025-04-04T17:21:53.342863Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19670, node 2 2025-04-04T17:21:53.384495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:53.384573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:53.390391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:53.409137Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:53.409166Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:53.409178Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:53.409303Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9873 TClient is connected to server localhost:9873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:53.773482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:56.438941Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509497151004667:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 202 ... node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[2:7489509501445973654:2535];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037962;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.349642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:7489509501445973436:2496];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037903;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.349781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[2:7489509501445973436:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037903;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.349966Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[2:7489509501445973606:2517];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037977;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.350107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[2:7489509501445973606:2517];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037977;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.350421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;self_id=[2:7489509501445973433:2493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037997;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.350579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;self_id=[2:7489509501445973433:2493];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037997;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.368186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[2:7489509501445973876:2565];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037933;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.368499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[2:7489509501445973876:2565];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037933;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.412524Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509518625846173:3119];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.412800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509518625846173:3119];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.437635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509518625845935:3089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.437900Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509518625845935:3089];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.439163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7489509518625846023:3101];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.439366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7489509518625846023:3101];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.442225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7489509518625845973:3094];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.442445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7489509518625845973:3094];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.444289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7489509518625846263:3159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.444481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7489509518625846263:3159];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.444821Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[2:7489509518625845982:3096];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037999;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.444989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[2:7489509518625845982:3096];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037999;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.448417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7489509518625846035:3102];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.448685Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7489509518625846035:3102];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.451293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7489509518625846195:3140];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.451560Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7489509518625846195:3140];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.451775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7489509518625846176:3122];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.452025Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7489509518625846176:3122];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.455311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7489509518625846175:3121];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.455558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7489509518625846175:3121];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.462130Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509518625846277:3164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.462412Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509518625846277:3164];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.462876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509518625846165:3114];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.463080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509518625846165:3114];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.463267Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7489509518625846061:3108];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.463427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7489509518625846061:3108];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.463588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7489509518625846164:3113];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.463741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7489509518625846164:3113];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.465899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509518625846045:3103];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.466115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509518625846045:3103];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.466304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509518625846003:3100];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.466525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509518625846003:3100];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.469899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7489509518625846154:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:05.469978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7489509518625846211:3154];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:22:08.338534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:08.338571Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] >> KqpTx::CommitRequired >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive2 >> KqpSnapshotIsolation::TReadOnlyOltp >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> KqpSnapshotRead::TestReadOnly-withSink >> KqpLocks::Invalidate [GOOD] >> KqpLocks::InvalidateOnCommit >> KqpTx::MixEnginesOldNew [GOOD] >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOlap >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 13544, MsgBus: 6711 2025-04-04T17:22:04.937064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509528033898649:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:04.937244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eb5/r3tmp/tmpLzr3Rs/pdisk_1.dat 2025-04-04T17:22:05.249774Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13544, node 1 2025-04-04T17:22:05.320184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:05.320303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:05.322129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:05.330520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:05.330541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:05.330549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:05.330678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6711 TClient is connected to server localhost:6711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:05.787981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:05.809314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:05.944683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:06.094432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:06.165600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:07.751870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509540918802336:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:07.752014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:07.977580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:08.035196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:08.070228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:08.101210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:08.136084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:08.175267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:08.222404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509545213770145:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:08.222485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:08.222825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509545213770150:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:08.226916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:08.241453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509545213770152:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:08.305369Z node 1 :TX_PROXY ERROR: Actor# [1:7489509545213770206:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:09.807600Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDY1YWY1ZjUtNDk0ZjdiMjAtYzBiN2JlMjktZDRkOGQ3MjA=, ActorId: [1:7489509549508737761:2488], ActorState: ExecuteState, TraceId: 01jr0x624f23q3y6d542dv585y, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-04-04T17:22:09.826933Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDY1YWY1ZjUtNDk0ZjdiMjAtYzBiN2JlMjktZDRkOGQ3MjA=, ActorId: [1:7489509549508737761:2488], ActorState: ReadyState, TraceId: 01jr0x629290szw47nqkyxnkxa, Create QueryResponse for error on request, msg: 2025-04-04T17:22:09.938412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509528033898649:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:09.938515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29173, MsgBus: 15139 2025-04-04T17:22:10.618589Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509555995204983:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:10.618663Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eb5/r3tmp/tmpS7Zd7A/pdisk_1.dat 2025-04-04T17:22:10.712153Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29173, node 2 2025-04-04T17:22:10.751781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:10.751879Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:10.774563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:10.827684Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:10.827707Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:10.827713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:10.827816Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15139 TClient is connected to server localhost:15139 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:22:11.241381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:11.286140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:11.367305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:22:11.543881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:11.614793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:13.771256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509568880108634:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:13.771342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:13.819839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.858972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.929786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.963336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.002361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.060087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.153222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509573175076449:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.153313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.153718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509573175076454:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.157598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:14.169077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509573175076456:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:14.258918Z node 2 :TX_PROXY ERROR: Actor# [2:7489509573175076511:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:15.619107Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509555995204983:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:15.619178Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 8198, MsgBus: 6127 2025-04-04T17:21:41.956445Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509431686247626:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:41.956932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed1/r3tmp/tmpxrarCM/pdisk_1.dat 2025-04-04T17:21:42.333408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:42.370984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:42.371113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8198, node 1 2025-04-04T17:21:42.372917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:42.410922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:42.410943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:42.410952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:42.411060Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6127 TClient is connected to server localhost:6127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:42.933252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.959673Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:21:44.840097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509444571150011:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.840113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509444571150036:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.840227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:44.843317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:44.852076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509444571150040:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:44.906207Z node 1 :TX_PROXY ERROR: Actor# [1:7489509444571150091:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:45.148846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.278592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:46.415911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:47.179547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509431686247626:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:47.191427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 20280, MsgBus: 15530 2025-04-04T17:21:54.024147Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509487182101687:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:54.024255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ed1/r3tmp/tmpngLAVF/pdisk_1.dat 2025-04-04T17:21:54.104960Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20280, node 2 2025-04-04T17:21:54.153294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:54.153377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:54.154978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:54.161920Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:54.161944Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:54.161951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:54.162053Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15530 TClient is connected to server localhost:15530 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:54.531912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:56.963668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509495772036931:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:56.963755Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509495772036915:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:56.964168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:56.968670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:56.979236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509495772036950:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:21:57.077143Z node 2 :TX_PROXY ERROR: Actor# [2:7489509500067004299:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:57.141598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:57.312622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509500067004490:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:21:57.312822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509500067004490:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:21:57.313112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509500067004490:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:21:57.313269Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509500067004490:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_r ... .192162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7489509517246879638:3375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.192225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7489509517246879635:3372];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.192366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7489509517246879638:3375];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.192469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7489509517246879635:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.198997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7489509517246879630:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.199000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7489509517246879594:3364];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.199304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7489509517246879643:3379];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.199785Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7489509517246879825:3388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.202624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7489509517246879630:3368];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.202869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7489509517246879594:3364];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.203010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7489509517246879643:3379];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.203131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7489509517246879825:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.206549Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7489509517246879884:3392];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.206813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509517246879576:3353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.206993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7489509517246879705:3385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.207485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7489509517246879884:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.207622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509517246879576:3353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.207722Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7489509517246879705:3385];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.207860Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7489509517246879885:3393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.208028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7489509517246879704:3384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.208460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7489509517246879885:3393];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.208574Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7489509517246879704:3384];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.208710Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509517246879847:3390];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.209026Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509517246879847:3390];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.222470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509517246879826:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.222807Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509517246879915:3395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.222973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7489509517246879451:3330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038096;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223134Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7489509517246879887:3394];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509517246879745:3386];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509517246879826:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223464Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509517246879745:3386];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7489509517246879887:3394];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509517246879915:3395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223736Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7489509517246879451:3330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7489509517246879746:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:12.223928Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7489509517246879746:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x17FBC427 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x17FA567A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F0C5732BD8F 17. ??:0: ?? @ 0x7F0C5732BE3F 18. ??:0: ?? @ 0x15B7B028 >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:222:2060] recipient: [1:217:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:222:2060] recipient: [1:217:2142] Leader for TabletID 72057594046678944 is [1:238:2156] sender: [1:239:2060] recipient: [1:217:2142] 2025-04-04T17:21:22.142708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:21:22.142811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:21:22.142850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:21:22.142902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:21:22.142951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:21:22.142982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:21:22.143057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:21:22.143148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:21:22.143491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:22.227029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:21:22.227092Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:22.239827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:22.240006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:21:22.240148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:21:22.246516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:21:22.246754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:21:22.247592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.248305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:21:22.251352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.256982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:21:22.257049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.257246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:21:22.257312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:21:22.257365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:21:22.257521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.263721Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:238:2156] sender: [1:345:2060] recipient: [1:17:2064] 2025-04-04T17:21:22.389542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:21:22.390926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.391973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:21:22.394173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:21:22.394278Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.397755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.397882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:21:22.398115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.398157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:21:22.398187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:21:22.398213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:21:22.400132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.400185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:21:22.400236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:21:22.401954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.402003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.402052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.402101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.412028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:21:22.414078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:21:22.414326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:21:22.415466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.415603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 231 RawX2: 4294969447 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:21:22.415686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:21:22.415997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:21:22.416069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.416260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:21:22.416345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:21:22.418607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:21:22.418657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:21:22.418928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.418977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:312:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:21:22.419357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.419431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:21:22.419525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:21:22.419564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.419603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:21:22.419636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.419684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:21:22.419729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.419768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:21:22.419801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:21:22.419864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:21:22.419902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:21:22.419936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:21:22.421645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:21:22.421755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:21:22.421816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:22:17.200098Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:22:17.200126Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-04-04T17:22:17.200159Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-04T17:22:17.200238Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-04-04T17:22:17.200276Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:22:17.201757Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:22:17.203484Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T17:22:17.203526Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:22:17.203618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T17:22:17.203640Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:22:17.203902Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-04-04T17:22:17.203967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-04-04T17:22:17.204344Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:679:2507], Recipient [7:231:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:17.204394Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:17.204432Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:22:17.204574Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:576:2404], Recipient [7:231:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-04-04T17:22:17.204605Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:22:17.204674Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-04-04T17:22:17.204777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:22:17.204814Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:677:2505] 2025-04-04T17:22:17.204989Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:679:2507], Recipient [7:231:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:17.205026Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:17.205065Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2025-04-04T17:22:17.205470Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:550:2101], Recipient [7:231:2151] 2025-04-04T17:22:17.205515Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:22:17.207960Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 550 RawX2: 34359740469 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:17.208267Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:17.208395Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-04-04T17:22:17.208583Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:22:17.210777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:17.211036Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-04T17:22:17.211116Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-04T17:22:17.211498Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-04T17:22:17.211540Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-04T17:22:17.212002Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:685:2513], Recipient [7:231:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:17.212055Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:17.212089Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:22:17.212219Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:576:2404], Recipient [7:231:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2025-04-04T17:22:17.212253Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:22:17.212315Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 
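The txId 107 rejection above and the txId 108 rejection below together pin down the schemeshard's temporary-directory preconditions: an ordinary child may not be created under a temporary path unless the request sets AllowCreateInTempDir, and a temporary directory may never be nested inside another temporary directory, flag or no flag. A minimal C++ sketch of that decision, reconstructed only from the two StatusPreconditionFailed messages in this log (hypothetical names; not the actual NKikimr::NSchemeShard TMkDir code):

    enum class EProposeStatus {
        StatusAccepted,
        StatusPreconditionFailed,
    };

    struct TParentPath {
        bool IsTemporary = false;  // true for /MyRoot/test/tmp/a/b in this run
    };

    // Sketch under stated assumptions: a request carrying TempDirOwnerActorId
    // creates a temporary directory, as both proposals in this log do.
    EProposeStatus ProposeMkDir(const TParentPath& parent,
                                bool createsTempDir,
                                bool allowCreateInTempDir) {
        if (createsTempDir && allowCreateInTempDir) {
            // txId 108: "Temporary directory can't be created in another
            // temporary directory."
            return EProposeStatus::StatusPreconditionFailed;
        }
        if (parent.IsTemporary && !allowCreateInTempDir) {
            // txId 107: "error: path is temporary"
            return EProposeStatus::StatusPreconditionFailed;
        }
        return EProposeStatus::StatusAccepted;
    }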
2025-04-04T17:22:17.212419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-04T17:22:17.212462Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:683:2511] 2025-04-04T17:22:17.212630Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:685:2513], Recipient [7:231:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:17.212663Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:17.212698Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-04T17:22:17.213100Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:550:2101], Recipient [7:231:2151] 2025-04-04T17:22:17.213150Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:22:17.215838Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 550 RawX2: 34359740469 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:17.216151Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2025-04-04T17:22:17.216218Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2025-04-04T17:22:17.216410Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:22:17.219039Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:17.219268Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-04-04T17:22:17.219333Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-04-04T17:22:17.219711Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-04-04T17:22:17.219753Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-04-04T17:22:17.220110Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:691:2519], Recipient [7:231:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:17.220161Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:22:17.220200Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:22:17.220327Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:576:2404], Recipient [7:231:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2025-04-04T17:22:17.220363Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T17:22:17.220423Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-04-04T17:22:17.220525Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-04-04T17:22:17.220562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:689:2517] 2025-04-04T17:22:17.220736Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:691:2519], Recipient [7:231:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:17.220766Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:22:17.220805Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-04-04T17:21:19.561044Z :ReadSession INFO: Random seed for debugging is 1743787279561014 2025-04-04T17:21:19.927260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509334813630376:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:19.927564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:19.968953Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509338515750622:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:19.970023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:20.130508Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:21:20.146576Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : 
Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00171d/r3tmp/tmpY1wmzZ/pdisk_1.dat 2025-04-04T17:21:20.387815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.387905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:20.389105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.389185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:20.391875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:20.396490Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:21:20.397491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:20.413883Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63833, node 1 2025-04-04T17:21:20.456632Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:20.456719Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:20.590045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/00171d/r3tmp/yandex8oa0HI.tmp 2025-04-04T17:21:20.590074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/00171d/r3tmp/yandex8oa0HI.tmp 2025-04-04T17:21:20.590354Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/00171d/r3tmp/yandex8oa0HI.tmp 2025-04-04T17:21:20.590556Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:20.818680Z INFO: TTestServer started on Port 14243 GrpcPort 63833 TClient is connected to server localhost:14243 PQClient connected to localhost:63833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:21.118618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:21:23.169936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509355695620126:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.170459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509355695620104:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.170691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.177051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-04-04T17:21:23.200673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509355695620133:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-04-04T17:21:23.460891Z node 2 :TX_PROXY ERROR: Actor# [2:7489509355695620161:2129] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:23.489683Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509351993500654:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.491606Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzEyZmI1MjUtODBhZTZiYS00MWE1MzhkNS1mYjY5OGY1ZQ==, ActorId: [1:7489509351993500584:2336], ActorState: ExecuteState, TraceId: 01jr0x4mq10vyd43fx1pf7npp0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.492496Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489509355695620176:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.493886Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.494074Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTc4NTM3MmUtMjVlNGQ2ZTYtY2E2NWI0NjQtMzA4NDU1NDk=, ActorId: [2:7489509355695620102:2308], ActorState: ExecuteState, TraceId: 01jr0x4mpy0tg5tdh8sbsxc76q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.494477Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.508810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.671791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.792857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:63833", true, true, 1000); 2025-04-04T17:21:24.113686Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0x4ncsbaznnh84dz8wem4q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNiZTg2ZGYtZDc3YzI0ZWMtMjU1ZDExNzgtNDRhNGU0MmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subscribe to ClusterTracker from [1:7489509356288468386:2996] 2025-04-04T17:21:24.927364Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509334813630376:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:24.927470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:24.968824Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509338515750622:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:24.968903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:21:30.150521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:63833 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T17:21:30.225799Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:63833 MetaRequest { CmdCreateTopic { Topic: "rt3.dc ... RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 2 } 2025-04-04T17:22:15.214465Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2025-04-04T17:22:15.214517Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 8cf48497-2cc011e1-300f26c1-fef54565 has messages 1 2025-04-04T17:22:15.214620Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 read done: guid# 8cf48497-2cc011e1-300f26c1-fef54565, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-04-04T17:22:15.214656Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 response to read: guid# 8cf48497-2cc011e1-300f26c1-fef54565 2025-04-04T17:22:15.215616Z :DEBUG: [/Root] [/Root] [1225871d-f2649690-b1af0e9e-4f814450] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:15.214906Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 Process answer.
Aval parts: 0 2025-04-04T17:22:15.216322Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 grpc read done: success# 1, data# { read { } } 2025-04-04T17:22:15.216411Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 got read request: guid# 245bfcf4-e3884b94-9856304e-af42d2b 2025-04-04T17:22:15.217266Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-04-04T17:22:15.217882Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-04-04T17:22:15.218009Z :DEBUG: [/Root] [/Root] [1225871d-f2649690-b1af0e9e-4f814450] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-04-04T17:22:14.089000Z WriteTime: 2025-04-04T17:22:14.093000Z Ip: "ipv6:[::1]:58778" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:58778" } } } } 2025-04-04T17:22:15.218292Z :INFO: [/Root] [/Root] [1225871d-f2649690-b1af0e9e-4f814450] Closing read session. Close timeout: 3.000000s 2025-04-04T17:22:15.218352Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-04T17:22:15.218402Z :INFO: [/Root] [/Root] [1225871d-f2649690-b1af0e9e-4f814450] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1390 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:22:15.219481Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 grpc read done: success# 0, data# { } 2025-04-04T17:22:15.219508Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 grpc read failed 2025-04-04T17:22:15.219536Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 grpc closed 2025-04-04T17:22:15.219577Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_14847024705930789404_v1 is DEAD 2025-04-04T17:22:15.219795Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_14847024705930789404_v1 2025-04-04T17:22:15.219842Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7489509569262877870:2546] destroyed 2025-04-04T17:22:15.219881Z :INFO: [/Root] [/Root] [1225871d-f2649690-b1af0e9e-4f814450] Closing read session. 
Close timeout: 0.000000s 2025-04-04T17:22:15.219886Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_14847024705930789404_v1 2025-04-04T17:22:15.219934Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-04-04T17:22:15.219986Z :INFO: [/Root] [/Root] [1225871d-f2649690-b1af0e9e-4f814450] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1391 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:22:15.220115Z :NOTICE: [/Root] [/Root] [1225871d-f2649690-b1af0e9e-4f814450] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:22:15.220746Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7489509569262877868:2543] disconnected; active server actors: 1 2025-04-04T17:22:15.220798Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7489509569262877868:2543] client user disconnected session shared/user_7_1_14847024705930789404_v1 2025-04-04T17:22:17.199272Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.199346Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.199394Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:22:17.199739Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:22:17.200176Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:22:17.200371Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.200736Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-04-04T17:22:17.202873Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.202924Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.202992Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:22:17.203282Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:22:17.203654Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:22:17.203783Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.204027Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:22:17.205367Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-04-04T17:22:17.205816Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-04-04T17:22:17.205895Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-04-04T17:22:17.207513Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:22:17.207576Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-04-04T17:22:17.207608Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-04-04T17:22:17.207658Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-04-04T17:22:17.209857Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.209915Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.209952Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:22:17.210198Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:22:17.210569Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:22:17.210712Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.210946Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-04-04T17:22:17.211658Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.211827Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:22:17.211943Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:22:17.212007Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-04-04T17:22:17.212103Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2025-04-04T17:22:17.213722Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.213773Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.213809Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:22:17.214045Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-04-04T17:22:17.214416Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-04-04T17:22:17.214512Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.215683Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:22:17.216550Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-04-04T17:22:17.216646Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-04-04T17:22:17.216753Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOlap >> KqpSnapshotRead::TestReadOnly+withSink >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitPrepared >> KqpSinkTx::LocksAbortOnCommit >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> KqpSinkTx::OlapDeferredEffects >> KqpTx::SnapshotRO ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::Write_Random_Sized_Messages_In_Wide_Transactions [GOOD] Test command err: 2025-04-04T17:18:58.073359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508732302067054:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.073875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:58.253216Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d25/r3tmp/tmpydpz6p/pdisk_1.dat 2025-04-04T17:18:58.427603Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21304, node 1 2025-04-04T17:18:58.478105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.478202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.479840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:58.585909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000d25/r3tmp/yandexbl3Md6.tmp 2025-04-04T17:18:58.585941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000d25/r3tmp/yandexbl3Md6.tmp 2025-04-04T17:18:58.586068Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000d25/r3tmp/yandexbl3Md6.tmp 2025-04-04T17:18:58.586178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799163Z INFO: TTestServer started on Port 12268 GrpcPort 21304 TClient is connected to server localhost:12268 PQClient connected to localhost:21304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:59.033317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.070703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.199489Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:00.907924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508740892002328:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.907941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508740892002333:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.908061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.917362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.918907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508740892002372:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.919121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.937057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508740892002342:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:01.129593Z node 1 :TX_PROXY ERROR: Actor# [1:7489508745186969694:2447] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:01.164630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.176478Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508745186969712:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:01.178010Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWZlNzQyM2QtZWU4ODQ1ZTctZGNmNGM4NjUtZWY4ZWI1NGE=, ActorId: [1:7489508740892002325:2336], ActorState: ExecuteState, TraceId: 01jr0x09rxdeyka09zhg1vvpwx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:01.179695Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.236889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.308977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508745186969990:2627] 2025-04-04T17:19:03.073042Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508732302067054:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.073147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.513299Z :WriteToTopic_Invalid_Session INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.524962Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.544706Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508770956774091:2815] connected; active server actors: 1 2025-04-04T17:19:07.544928Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-04T17:19:07.545704Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.545824Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.547679Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.552152Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.553107Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.553295Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.553323Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.553338Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.557257Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.557323Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.557382Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.557428Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.558191Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.558290Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508770956774113:2466], now have 1 active actors on pipe 2025-04-04T17:19:07.586440Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508770956774090:2814], now have 1 active actors on pipe 2025-04-04T17:19:07.596919Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508732302067323 RawX2: 4294969480 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChann ... 
0 } 2025-04-04T17:22:18.127649Z node 12 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:22:18.127727Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976715672 2025-04-04T17:22:18.127746Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState EXECUTED 2025-04-04T17:22:18.127765Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from EXECUTING to EXECUTED 2025-04-04T17:22:18.128108Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976715672] save tx TxId: 281474976715672 State: EXECUTED MinStep: 1743787338041 MaxStep: 18446744073709551615 Step: 1743787338160 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7489509541556783249 RawX2: 51539609684 } Partitions { Partition { PartitionId: 0 } } 2025-04-04T17:22:18.128307Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:22:18.129227Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:22:18.129252Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-04-04T17:22:18.129268Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State EXECUTED 2025-04-04T17:22:18.129289Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State EXECUTED FrontTxId 281474976715672 2025-04-04T17:22:18.129308Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:22:18.129328Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState WAIT_RS_ACKS 2025-04-04T17:22:18.129343Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:22:18.129366Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-04-04T17:22:18.129383Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:22:18.129400Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-04-04T17:22:18.129418Z 
node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715672 to the list for deletion 2025-04-04T17:22:18.129439Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState DELETING 2025-04-04T17:22:18.129465Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715672 2025-04-04T17:22:18.129517Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:22:18.131194Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:22:18.131213Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-04T17:22:18.131224Z node 12 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:22:18.131231Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State DELETING 2025-04-04T17:22:18.131249Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715672 2025-04-04T17:22:18.131346Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:22:18.134900Z :DEBUG: [/Root] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-04T17:22:18.142418Z :INFO: [/Root] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-04T17:22:18.142468Z :INFO: [/Root] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:28371 2025-04-04T17:22:18.158873Z :DEBUG: [/Root] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T17:22:18.160069Z node 12 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:22:18.160107Z node 12 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-04T17:22:18.160799Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-04-04T17:22:18.160914Z node 12 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:50298 2025-04-04T17:22:18.160938Z node 12 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:50298 proto=v1 topic=test-topic durationSec=0 2025-04-04T17:22:18.160948Z node 12 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:22:18.164372Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-04T17:22:18.164563Z node 12 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:22:18.164583Z node 12 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:22:18.164595Z node 12 
:PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:22:18.164633Z node 12 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [12:7489509588801424751:2468] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:22:18.164660Z node 12 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:22:18.165302Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 12, Generation: 1 2025-04-04T17:22:18.165356Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [12:7489509588801424754:2468], now have 1 active actors on pipe 2025-04-04T17:22:18.165392Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-04T17:22:18.165424Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-04T17:22:18.165530Z node 12 :PERSQUEUE INFO: new Cookie src|9309837c-9ef18881-a872cef6-bf17bd2e_0 generated for partition 0 topic 'test-topic' owner src 2025-04-04T17:22:18.165646Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:22:18.165733Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:22:18.165867Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-04T17:22:18.165893Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-04T17:22:18.165960Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:22:18.166037Z node 12 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|9309837c-9ef18881-a872cef6-bf17bd2e_0 2025-04-04T17:22:18.167017Z :INFO: [/Root] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743787338166 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:22:18.167123Z :INFO: [/Root] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|9309837c-9ef18881-a872cef6-bf17bd2e_0" topic: "test-topic" 2025-04-04T17:22:18.170344Z :INFO: [/Root] MessageGroupId [src] SessionId [src|9309837c-9ef18881-a872cef6-bf17bd2e_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:22:18.170394Z :INFO: [/Root] MessageGroupId [src] SessionId [src|9309837c-9ef18881-a872cef6-bf17bd2e_0] Write session will now close 2025-04-04T17:22:18.170437Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|9309837c-9ef18881-a872cef6-bf17bd2e_0] Write session: aborting 2025-04-04T17:22:18.171991Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|9309837c-9ef18881-a872cef6-bf17bd2e_0 grpc read done: success: 0 data: 2025-04-04T17:22:18.172015Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9309837c-9ef18881-a872cef6-bf17bd2e_0 grpc read failed 2025-04-04T17:22:18.172046Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9309837c-9ef18881-a872cef6-bf17bd2e_0 grpc closed 2025-04-04T17:22:18.172060Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|9309837c-9ef18881-a872cef6-bf17bd2e_0 is DEAD 2025-04-04T17:22:18.172984Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:22:18.173123Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [12:7489509588801424754:2468] destroyed 2025-04-04T17:22:18.173170Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:22:18.174326Z :INFO: [/Root] MessageGroupId [src] SessionId [src|9309837c-9ef18881-a872cef6-bf17bd2e_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:22:18.174831Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|9309837c-9ef18881-a872cef6-bf17bd2e_0] Write session is aborting and will not restart 2025-04-04T17:22:18.174939Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|9309837c-9ef18881-a872cef6-bf17bd2e_0] Write session: destroy 2025-04-04T17:22:18.185457Z :Write_Random_Sized_Messages_In_Wide_Transactions INFO: Topic created >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> KqpTx::SnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:21:40.674420Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:21:40.674513Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:40.707134Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:21:40.728637Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 
20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:21:40.735612Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:21:40.741075Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:21:40.743657Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-04T17:21:40.745631Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:40.758166Z node 1 :PERSQUEUE INFO: new Cookie default|74ef002b-adf08b30-a096867-64d6c98a_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:40.770128Z node 1 :PERSQUEUE INFO: new Cookie default|4e1db068-905da7ac-52677b02-66ad75e2_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:40.801224Z node 1 :PERSQUEUE INFO: new Cookie default|8b3cb73c-a464ab1c-714c0e69-35e3056c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:242:2057] recipient: [1:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:245:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:246:2057] recipient: [1:244:2245] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:247:2246] sender: [1:248:2057] recipient: [1:244:2245] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_TREE Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 
2025-04-04T17:21:40.886276Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:21:40.886357Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:21:40.887080Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:296:2287] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:21:40.889507Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:297:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:21:40.907318Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:21:40.907432Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:296:2287] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:21:40.917152Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:21:40.917243Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:297:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:247:2246] sender: [1:329:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:107:2057] recipient: [2:100:2134] 2025-04-04T17:21:41.384541Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:21:41.384629Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:153:2057] recipient: [2:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:178:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:41.404857Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:21:41.405790Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T17:21:41.406482Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2025-04-04T17:21:41.409040Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:21:41.410906Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2025-04-04T17:21:41.412732Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:41.420636Z node 2 :PERSQUEUE INFO: new Cookie default|e4505565-fcd56f90-ed030ef0-816499e3_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:41.428501Z node 2 :PERSQUEUE INFO: new Cookie default|30ee0cfc-a2a9d155-99f3656f-5f29d217_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:41.455532Z node 2 :PERSQUEUE INFO: new Cookie default|b133f3dc-46036011-fe6ab78-5e0505f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [2:106:2138]) on event NKikimr::TEvPersQueue::TEvOffsets ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:243:2057] recipient: [2:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:246:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:247:2057] recipient: [2:245:2246] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:248:2247] sender: [2:249:2057] recipient: [2:245:2246] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_TREE Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:21:41.513685Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:21:41.513762Z node 2 :PERSQUEUE ... 
" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2025-04-04T17:22:19.667147Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:184:2197] 2025-04-04T17:22:19.669433Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:22:19.670788Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:185:2198] 2025-04-04T17:22:19.672649Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:19.679236Z node 54 :PERSQUEUE INFO: new Cookie default|a31b6a4b-f9a4b995-36315368-8389be8b_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:19.685770Z node 54 :PERSQUEUE INFO: new Cookie default|38f76014-1afb503d-5b0fdbaf-88f2cac5_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:19.712984Z node 54 :PERSQUEUE INFO: new Cookie default|7b96e71b-5c717f7e-de9daf2f-6113b559_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:106:2138] sender: [54:242:2057] recipient: [54:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:106:2138] sender: [54:245:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:106:2138] sender: [54:246:2057] recipient: [54:244:2245] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:247:2246] sender: [54:248:2057] recipient: [54:244:2245] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_TREE Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:19.772135Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:22:19.772188Z 
node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:22:19.773175Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:296:2287] 2025-04-04T17:22:19.775557Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:297:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:22:19.799228Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:22:19.799313Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:296:2287] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:22:19.806014Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:22:19.806087Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:297:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:247:2246] sender: [54:329:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:102:2057] recipient: [55:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:102:2057] recipient: [55:100:2134] Leader for TabletID 72057594037927937 is [55:106:2138] sender: [55:107:2057] recipient: [55:100:2134] 2025-04-04T17:22:20.319110Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:22:20.319179Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:148:2057] recipient: [55:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:148:2057] recipient: [55:146:2169] Leader for TabletID 72057594037927938 is [55:152:2173] sender: [55:153:2057] recipient: [55:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:106:2138] sender: [55:178:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:20.339500Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:22:20.340341Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 
Generation: 55 Important: false } 2025-04-04T17:22:20.341033Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:184:2197] 2025-04-04T17:22:20.343909Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:184:2197] 2025-04-04T17:22:20.345539Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:185:2198] 2025-04-04T17:22:20.347722Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:20.356357Z node 55 :PERSQUEUE INFO: new Cookie default|5df7b5ae-483c26a5-fa2088f2-fa1ced75_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:20.362681Z node 55 :PERSQUEUE INFO: new Cookie default|26dad5af-e85d9d0c-abb4eabf-d586534b_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:20.387696Z node 55 :PERSQUEUE INFO: new Cookie default|42a7c3a8-fa099dd5-4227d1a7-566c4be0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:106:2138] sender: [55:244:2057] recipient: [55:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:106:2138] sender: [55:247:2057] recipient: [55:246:2247] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:106:2138] sender: [55:248:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:249:2248] sender: [55:250:2057] recipient: [55:246:2247] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_TREE Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:22:20.447502Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:22:20.447560Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:22:20.448439Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:298:2289] Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:22:20.451103Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:299:2290] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:22:20.473064Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:22:20.473140Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:299:2290] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:22:20.477806Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:22:20.477879Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:298:2289] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:249:2248] sender: [55:329:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::MixedTxFail+useSink >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 23777, MsgBus: 19335 2025-04-04T17:22:08.837085Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509545364279494:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:08.838884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea9/r3tmp/tmpCkW5JD/pdisk_1.dat 2025-04-04T17:22:09.150410Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23777, node 1 2025-04-04T17:22:09.210337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:09.210452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:09.212088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:09.229480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:09.229504Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:09.229512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:09.229654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19335 TClient is connected to server localhost:19335 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:09.711425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:09.728595Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:09.739797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:09.869192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:10.044419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:10.117475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:11.808172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509558249183053:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:11.808344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:12.058115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.136824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.209360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.244321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.283638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.368373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.452101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509562544150877:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:12.452160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:12.452302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509562544150882:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:12.456345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:12.469985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509562544150884:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:12.527438Z node 1 :TX_PROXY ERROR: Actor# [1:7489509562544150936:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:13.570551Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWFlZDIyYjgtOWY4NjVjYzctM2U1YTdiZDQtZWRlNTBmNGY=, ActorId: [1:7489509566839118490:2488], ActorState: ReadyState, TraceId: 01jr0x65x848wf1wa28m0sqz4c, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:855: Too many transactions, current active: 2 MaxTxPerSession: 2 2025-04-04T17:22:13.836231Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509545364279494:2185];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:13.836325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14684, MsgBus: 63927 2025-04-04T17:22:14.491752Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509574246723650:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:14.491784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea9/r3tmp/tmph2nuwN/pdisk_1.dat 2025-04-04T17:22:14.602969Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:14.608371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:14.608451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:14.610015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14684, node 2 2025-04-04T17:22:14.673976Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:14.674000Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:14.674008Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:14.674121Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63927 TClient is connected to server localhost:63927 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:22:15.104242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:15.109987Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:15.124044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:15.210385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:15.361626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:15.449450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.408262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509587131627275:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.408332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.442976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.479336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.504019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.530037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.559509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.594305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.646315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509587131627784:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.646428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.646733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509587131627789:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.650888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:17.662025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509587131627791:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:17.741115Z node 2 :TX_PROXY ERROR: Actor# [2:7489509587131627845:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:19.494469Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509574246723650:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:19.494531Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 29733, MsgBus: 20146 2025-04-04T17:21:40.843123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509426690241305:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:40.843202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ee1/r3tmp/tmper0c4a/pdisk_1.dat 2025-04-04T17:21:41.253404Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:41.255006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:41.255112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:41.271545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29733, node 1 2025-04-04T17:21:41.430657Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:41.430687Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:41.430695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:41.430841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20146 TClient is connected to server localhost:20146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
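
The recurring sequence above — TPoolFetcherActor reports NOT_FOUND for the default pool, ESchemeOpCreateResourcePool is proposed, TPoolCreatorActor schedules a "doublechecking" retry, and TX_PROXY then logs "path exist, request accepts it" — is a create-if-absent bootstrap in which losing the creation race still counts as success, so these ERROR-severity lines are benign during pool setup. A minimal sketch of that control flow, with hypothetical names standing in for the actual actors:

```cpp
#include <cstdio>

enum class EProposeStatus { Done, AlreadyExists, Failed };

// Hypothetical stand-in for proposing ESchemeOpCreateResourcePool.
EProposeStatus ProposeCreateDefaultPool() {
    return EProposeStatus::AlreadyExists; // another actor created it first
}

bool EnsureDefaultPool() {
    switch (ProposeCreateDefaultPool()) {
        case EProposeStatus::Done:
        case EProposeStatus::AlreadyExists:
            return true; // "request accepts it": an existing path is fine
        case EProposeStatus::Failed:
            return false;
    }
    return false;
}

int main() {
    std::printf("default pool ready: %s\n", EnsureDefaultPool() ? "yes" : "no");
    return 0;
}
```
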
2025-04-04T17:21:42.260434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:42.277607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:21:43.899418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509439575143872:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.899568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509439575143864:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.899764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.905530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:43.920966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509439575143878:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:43.984366Z node 1 :TX_PROXY ERROR: Actor# [1:7489509439575143929:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:44.512358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.670445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:21:44.670445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:21:44.670632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:21:44.670873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:21:44.671013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:21:44.671108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:21:44.671183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:21:44.671233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:21:44.671307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:21:44.671430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:21:44.671447Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:21:44.671563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:21:44.671565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:21:44.671672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:21:44.671713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:21:44.671788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:21:44.671795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:21:44.671873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509443870111387:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:21:44.671890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:21:44.671989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:21:44.672168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:21:44.672292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:21:44.672459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:21:44.672594Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7489509443870111390:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:21:44.707700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509443870111388:2347];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:21:44.707775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509443870111388:2347];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abs ... 7554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[2:7489509533565069815:2436];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037984;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.700012Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;self_id=[2:7489509537860037328:2482];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037953;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.700207Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;self_id=[2:7489509537860037328:2482];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037953;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.734055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7489509537860037650:2561];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037929;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.738745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7489509537860037650:2561];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037929;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.909697Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[2:7489509537860037344:2483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037976;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.909929Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[2:7489509537860037344:2483];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037976;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.912748Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7489509537860037510:2511];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.914072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7489509537860037510:2511];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.956580Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7489509533565068634:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.957135Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037905;self_id=[2:7489509537860037494:2502];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037905;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.957328Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7489509537860037494:2502];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037905;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.957490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[2:7489509537860037503:2506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037933;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.957608Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[2:7489509537860037503:2506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037933;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.957722Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7489509537860037532:2525];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037922;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.957841Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7489509537860037532:2525];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037922;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.957912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7489509533565068634:2351];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.967901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7489509537860037642:2558];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.968243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7489509533565069538:2420];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037914;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.968451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7489509533565068643:2354];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.969149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7489509533565068643:2354];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.969376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7489509537860037639:2557];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.969599Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[2:7489509537860037133:2452];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037963;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.969765Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[2:7489509537860037133:2452];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037963;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.969938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7489509533565068635:2352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.970128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7489509533565068616:2347];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.970313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7489509537860037642:2558];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.970458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7489509533565069538:2420];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.970597Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7489509537860037639:2557];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.970722Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7489509533565068635:2352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.970846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7489509533565068616:2347];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.971026Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509533565068642:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.971441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509533565068642:2353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.973127Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7489509533565069536:2418];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037902;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.973341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7489509533565069536:2418];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037902;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.973702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[2:7489509537860037132:2451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037982;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.973917Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7489509533565069535:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037900;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.974105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7489509537860037216:2466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037971;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.974377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[2:7489509537860037132:2451];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037982;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.974525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7489509533565069535:2417];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037900;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.974656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[2:7489509537860037216:2466];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-04T17:22:13.974829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7489509533565069736:2430];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037994;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:13.975210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037994;self_id=[2:7489509533565069736:2430];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037994;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-04T17:22:15.898098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:15.898143Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpTx::RollbackByIdle >> KqpTx::RollbackTx2 [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> KqpSinkLocks::EmptyRange >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 21191, MsgBus: 64302 2025-04-04T17:22:01.886558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509517323578065:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:01.886614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eba/r3tmp/tmpU8qkXi/pdisk_1.dat 2025-04-04T17:22:02.175148Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:02.180317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:02.180425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:02.183890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21191, node 1 2025-04-04T17:22:02.255194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:02.255222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:02.255239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:02.255383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64302 TClient is connected to server localhost:64302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:02.837892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:02.857183Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:04.838588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509530208480622:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:04.838685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509530208480598:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:04.839028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:04.843583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:04.853040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509530208480627:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:04.926704Z node 1 :TX_PROXY ERROR: Actor# [1:7489509530208480680:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:05.185244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:05.283080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.139747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:06.987807Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509517323578065:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:06.991946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:07.238326Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODI0MjBhMmQtMWFmYWFlOGUtYTIwMGNhYzYtZWQ5YWVlYTc=, ActorId: [1:7489509543093391084:2969], ActorState: ExecuteState, TraceId: 01jr0x5zpde5wvy9tagcqfewfx, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x17FA5222 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FF06C81BD8F 17. ??:0: ?? @ 0x7FF06C81BE3F 18. ??:0: ?? @ 0x15B7B028 Trying to start YDB, gRPC: 5677, MsgBus: 62155 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eba/r3tmp/tmpCOVLu8/pdisk_1.dat 2025-04-04T17:22:13.474472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:22:13.490626Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5677, node 2 2025-04-04T17:22:13.514710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:13.514821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:13.524409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:13.598055Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:13.598079Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:13.598086Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:13.598188Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62155 TClient is connected to server localhost:62155 WaitRootIsUp 'Root'... 
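
The TConflictReadWriteOltpNoSink failure traced above asserts at kqp_snapshot_isolation_ut.cpp:131 that the query status equals EStatus::SUCCESS; the server instead returns PRECONDITION_FAILED with "SnapshotRW can only be used with olap tables", and the unittest library (registar.cpp RaiseError in the trace) prints the compact string diff "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)". A self-contained sketch of the failing comparison — not the test source, with the enum trimmed to the two values that matter:

```cpp
#include <cstdio>

enum class EStatus { SUCCESS, PRECONDITION_FAILED };

static const char* ToString(EStatus s) {
    return s == EStatus::SUCCESS ? "SUCCESS" : "PRECONDITION_FAILED";
}

int main() {
    // What the server returned for SnapshotRW on a row (OLTP) table:
    EStatus result = EStatus::PRECONDITION_FAILED;
    // The test's expectation, per kqp_snapshot_isolation_ut.cpp:131 in the trace:
    if (result != EStatus::SUCCESS) {
        std::printf("assertion failed: (%s != %s)\n",
                    ToString(result), ToString(EStatus::SUCCESS));
        return 1; // the suite then reports the test as [FAIL]
    }
    return 0;
}
```
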
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:14.041589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:14.056645Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:16.545636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509580793938893:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:16.545729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:16.545951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509580793938918:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:16.550943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:16.559522Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509580793938920:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:16.640897Z node 2 :TX_PROXY ERROR: Actor# [2:7489509580793938971:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:16.689279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.724288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.726161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.130443Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmFiOWIzNDEtNmFhYWQ1ZTItNjRlNmE4OWQtMjg4ZGEzYzY=, ActorId: [2:7489509593678849197:2968], ActorState: ExecuteState, TraceId: 01jr0x6b9q3ewzewbzyvy1d7q8, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x17FA544A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FF06C81BD8F 17. ??:0: ?? @ 0x7FF06C81BE3F 18. ??:0: ?? @ 0x15B7B028 >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 >> KqpTx::ExplicitTcl >> KqpSinkLocks::EmptyRangeOlap >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 11885, MsgBus: 4585 2025-04-04T17:22:10.957190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509556444578833:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:10.957371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea4/r3tmp/tmp2PtdwL/pdisk_1.dat 2025-04-04T17:22:11.375350Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:11.407200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:11.407300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11885, node 1 2025-04-04T17:22:11.409048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:11.465648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:11.465667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:11.465674Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:11.465790Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:4585 TClient is connected to server localhost:4585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:12.061711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:12.083522Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:12.096404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:12.280578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:12.466022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:12.557964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:14.372826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509573624449579:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.372957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.633584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.660980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.685292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.711827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.740887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.783143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:14.833361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509573624450087:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.833439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.833625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509573624450092:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:14.837406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:14.850579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509573624450094:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:14.909068Z node 1 :TX_PROXY ERROR: Actor# [1:7489509573624450148:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:15.953911Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509556444578833:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:15.953990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:16.129757Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWFlNzAxMWItNmFkY2U1NzEtMTQxYTczZDktODJhOTE2NTA=, ActorId: [1:7489509577919417701:2488], ActorState: ReadyState, TraceId: 01jr0x68dp29jsvgpcnq56v7e4, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29207, MsgBus: 18087 2025-04-04T17:22:17.052021Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509587006272375:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:17.052104Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea4/r3tmp/tmpiIRTs4/pdisk_1.dat 2025-04-04T17:22:17.157092Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29207, node 2 2025-04-04T17:22:17.185623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:17.185727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:17.191813Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:17.222431Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:17.222454Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:17.222462Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:17.222572Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18087 TClient is connected to server localhost:18087 WaitRootIsUp 'Root'... 
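
The NET_CLASSIFIER lines repeated at every node start read as an ordered fallback: the distributable config is tried first ("empty, broken or outdated, will use file"), then a file ("(empty maybe)"), and exhaustion ends in the "got bad distributable configuration" error. A minimal sketch of that fallback chain; the loader names and sources are assumptions, not the real implementation:

```cpp
#include <cstdio>
#include <optional>
#include <string>

// Hypothetical sources, in the order the log suggests they are tried.
std::optional<std::string> LoadDistributableConfig() {
    return std::nullopt; // "distributable config is empty, broken or outdated"
}
std::optional<std::string> LoadConfigFile() {
    return std::nullopt; // "failed to initialize from file: (empty maybe)"
}

int main() {
    if (auto cfg = LoadDistributableConfig()) {
        std::printf("using distributable config: %s\n", cfg->c_str());
    } else if (auto cfg = LoadConfigFile()) {
        std::printf("using file config: %s\n", cfg->c_str());
    } else {
        std::printf("ERROR: got bad distributable configuration\n"); // as logged
        return 1;
    }
    return 0;
}
```
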
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:17.614012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.624132Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:17.636856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.717296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.878016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.968922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:20.012486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509599891176005:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:20.012592Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:20.062116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.091678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.120566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.152343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.184499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.224045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.286474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509599891176519:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:20.286613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:20.286871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509599891176524:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:20.290461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:20.300418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509599891176526:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:20.377673Z node 2 :TX_PROXY ERROR: Actor# [2:7489509599891176579:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:21.830806Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTk5OGM1YTMtYTIzMDliYjYtYWJmMjIxNi1jZDcxNWQxNQ==, ActorId: [2:7489509604186144136:2488], ActorState: ReadyState, TraceId: 01jr0x6e069fvxd5qejc1d1m05, Create QueryResponse for error on request, msg: >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> KqpSinkTx::OlapInteractive ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-04-04T17:21:21.532249Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509344126839972:2175];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:21.532310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bf6/r3tmp/tmpKVqqYy/pdisk_1.dat 2025-04-04T17:21:21.951998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:21.989582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:21.989715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:21.996974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61552, node 1 2025-04-04T17:21:22.189205Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:22.189229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:22.189239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:22.189375Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:21:22.652501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Triggering split by load TClient is connected to server localhost:24308 2025-04-04T17:21:24.289477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011742800:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.289586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.723145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:21:24.891055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011742981:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.891138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.911811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284848 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284848 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-04-04T17:21:24.969345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743084:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.969439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.969811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743105:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.969893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743103:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.969956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743101:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.970024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743100:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.970053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743104:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.970129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743113:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.970227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743115:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.970436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.971490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743135:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.971531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743137:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.971548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.972749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743160:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.972778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509357011743163:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.972821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:24.975026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:24.975263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:21:24.975307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710665:1, at schemeshard: 72057594046644480 2025-04-04T17:21:24.975455Z nod ... athType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284848 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards Fast forward > 10h to trigger the merge TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284848 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-04-04T17:22:19.941946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.4938 2025-04-04T17:22:19.942006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.442 2025-04-04T17:22:20.042118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-04-04T17:22:20.042323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:22:20.042605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.043117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:22:20.043807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:22:20.043877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:22:20.045817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-04T17:22:20.050472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-04T17:22:20.050544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-04-04T17:22:20.054448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.060151Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7489509597530098674:3818] 2025-04-04T17:22:20.078537Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-04T17:22:20.078616Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-04T17:22:20.078786Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-04T17:22:20.083477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2025-04-04T17:22:20.083523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 131 2025-04-04T17:22:20.085181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:22:20.099714Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-04-04T17:22:20.099814Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:22:20.099857Z 
node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-04T17:22:20.099881Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-04-04T17:22:20.100142Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-04-04T17:22:20.102885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037889 2025-04-04T17:22:20.103137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2025-04-04T17:22:20.103379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 131 -> 132 2025-04-04T17:22:20.103647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-04T17:22:20.105075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:22:20.105285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:22:20.105330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:22:20.105728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-04-04T17:22:20.105762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-04-04T17:22:20.105780Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-04T17:22:20.111145Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-04T17:22:20.111147Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-04T17:22:20.112799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-04-04T17:22:20.112980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-04T17:22:20.113029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-04T17:22:20.113046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-04-04T17:22:20.113074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-04-04T17:22:20.116605Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:22:20.116628Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:22:20.116650Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715658:0 2025-04-04T17:22:20.117001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:22:20.117191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:22:20.127855Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-04T17:22:20.127926Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-04T17:22:20.128814Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:22:20.128830Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:22:20.128931Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-04T17:22:20.130163Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:22:20.130346Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T17:22:20.130447Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-04T17:22:20.205576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:22:20.205749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-04-04T17:22:20.208049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787284848 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) >> KqpTx::CommitPrepared [GOOD] >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink >> KqpSnapshotIsolation::TSimpleOltpNoSink >> TxUsage::WriteToTopic_Demo_44 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 8193, MsgBus: 7321 2025-04-04T17:21:58.286523Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509503921207231:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:58.298321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ebd/r3tmp/tmpUdktij/pdisk_1.dat 2025-04-04T17:21:58.661455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:58.677966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:58.678064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:58.681608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8193, node 1 2025-04-04T17:21:58.762634Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:58.762675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:58.762695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:58.762850Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7321 TClient is connected to server localhost:7321 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:59.340982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:21:59.352650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:21:59.368445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:59.521741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:59.702829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:59.777248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:01.422655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509516806110917:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:01.422785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:01.665325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:01.698678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:01.738783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:01.769615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:01.801255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:01.873163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:01.956493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509516806111438:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:01.956594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:01.956824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509516806111443:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:01.961129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:01.976781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509516806111445:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:02.072134Z node 1 :TX_PROXY ERROR: Actor# [1:7489509521101078796:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:03.282491Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509503921207231:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:03.282539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:03.388155Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQ1YTFkYjktNmZhZjYxNDgtYTc4MTI2ZjktY2IyZjM2ODM=, ActorId: [1:7489509521101079050:2488], ActorState: ExecuteState, TraceId: 01jr0x5vzc89402ee8t1vmmfk7, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 12667, MsgBus: 1443 2025-04-04T17:22:04.150536Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509528887897320:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:04.150814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ebd/r3tmp/tmphIe295/pdisk_1.dat 2025-04-04T17:22:04.258306Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:04.267708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:04.267785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:04.269178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12667, node 2 2025-04-04T17:22:04.315797Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:04.315816Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:04.315820Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:04.315893Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1443 TClient is connected to server localhost:1443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:22:04.732177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:07.490775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509541772799848:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:07.490899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509541772799856:2333], DatabaseId: /Root, PoolId: default, Failed to fetch ... 25-04-04T17:22:10.419050Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:22:10.419061Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:22:10.419472Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[2:7489509546067768521:2420];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907,72075186224037947;receive=72075186224037903; 2025-04-04T17:22:10.419556Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[2:7489509546067768521:2420];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907,72075186224037947;receive=72075186224037903; 2025-04-04T17:22:10.419626Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:22:10.419820Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[2:7489509546067768521:2420];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037907; 2025-04-04T17:22:10.419908Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;self_id=[2:7489509546067768521:2420];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037899;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037947;receive=72075186224037907; 2025-04-04T17:22:10.420779Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:22:10.901850Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzczM2VmNzItMWRmMDY4MjMtOTFjZWUxOWYtZWQwODJlNDc=, ActorId: [2:7489509554657704818:2820], ActorState: ExecuteState, TraceId: 01jr0x63864t0t32g38kprax57, Create QueryResponse for error on request, msg: 2025-04-04T17:22:10.904124Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715670;tx_id=281474976715670;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715670; 2025-04-04T17:22:10.904597Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;self_id=[2:7489509546067768056:2388];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715665;problem=finished; 2025-04-04T17:22:10.906619Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; 2025-04-04T17:22:10.906729Z node 2 :TX_COLUMNSHARD_TX WARN: fline=manager.cpp:134;event=abort;tx_id=281474976715665;problem=finished; Trying to start YDB, gRPC: 16185, MsgBus: 12323 2025-04-04T17:22:15.265029Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:99:2145], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:22:15.265370Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:22:15.265452Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ebd/r3tmp/tmp8V7lVj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16185, node 3 2025-04-04T17:22:15.686994Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:15.687810Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:15.687854Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:15.687893Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:15.688260Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:22:15.723787Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:15.723924Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:15.739288Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12323 TClient is connected to server localhost:12323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:16.085482Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.142860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.442520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:16.871229Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.191101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.816868Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1819:3416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.817163Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.842134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:18.037204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:18.318513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:18.630016Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:18.888390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.231286Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.528709Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2393:3853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:19.528813Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:19.529199Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2398:3858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:19.534386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:19.703546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2400:3860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:19.759989Z node 3 :TX_PROXY ERROR: Actor# [3:2465:3906] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:20.792173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.025157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.423816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |93.8%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 |93.8%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink >> KqpSinkMvcc::OltpMultiSinksNoSinks >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> TxUsage::WriteToTopic_Demo_45 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 21357, MsgBus: 4639 2025-04-04T17:22:14.283880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509574174790782:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:14.284010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e6a/r3tmp/tmpn4P3FS/pdisk_1.dat 2025-04-04T17:22:14.610071Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21357, node 1 2025-04-04T17:22:14.648898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:14.649001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:14.658434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:14.734825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:14.734849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:14.734855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:14.734991Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4639 TClient is connected to server 
localhost:4639 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:15.309914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:15.333941Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:15.357627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:15.505731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:15.652085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:15.730329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.488446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509587059694442:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.488566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.744042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.778542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.810319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.844867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.874697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:17.918446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:18.012631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509591354662253:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.012705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.013072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509591354662258:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.016342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:18.027673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509591354662260:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:18.111607Z node 1 :TX_PROXY ERROR: Actor# [1:7489509591354662315:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16498, MsgBus: 5452 2025-04-04T17:22:19.687665Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509594452897011:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:19.687726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e6a/r3tmp/tmpbcMvcJ/pdisk_1.dat 2025-04-04T17:22:19.838795Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:19.847497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:19.847579Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:19.849240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16498, node 2 2025-04-04T17:22:19.906282Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:19.906300Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:19.906307Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:19.906416Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5452 TClient is connected to server localhost:5452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:20.313835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:20.329692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:20.381537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:20.537013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:20.614462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:22.733585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509607337800649:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.733687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.773898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.806316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.837553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.870546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.903971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.936235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.993795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509607337801160:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.993879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.994033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509607337801165:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.997849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:23.006468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509607337801167:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:23.083052Z node 2 :TX_PROXY ERROR: Actor# [2:7489509611632768517:3436] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpTx::CommitRoTx >> TxUsage::Sinks_Oltp_WriteToTopics_4 [GOOD] >> KqpTx::SnapshotRO [GOOD] >> KqpTx::SnapshotROInteractive1 >> KqpTx::DeferredEffects >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 28330, MsgBus: 11308 2025-04-04T17:22:12.493425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509564864838303:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:12.496022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea1/r3tmp/tmpiILAVY/pdisk_1.dat 2025-04-04T17:22:12.954154Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:12.985941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:12.986058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28330, node 1 2025-04-04T17:22:12.988462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:13.072450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:13.072473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:13.072479Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:13.072587Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11308 TClient is connected to server localhost:11308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:22:13.637737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:13.686629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:13.698006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:13.840488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:13.993737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:14.060031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:15.699516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509577749741826:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:15.699643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:15.935940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:15.969290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.000151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.032398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.064403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.137750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.191958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509582044709638:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:16.192097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:16.192415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509582044709643:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:16.196280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:16.205820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509582044709645:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:16.267957Z node 1 :TX_PROXY ERROR: Actor# [1:7489509582044709698:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:17.494815Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509564864838303:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:17.494898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:17.625059Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWZmOThmMjEtMmFhYzJkNGQtZGExNzUxNDUtZTllYTgw, ActorId: [1:7489509586339677254:2488], ActorState: ExecuteState, TraceId: 01jr0x69t843xh81rswneape3c, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 13318, MsgBus: 19187 2025-04-04T17:22:18.387890Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509590004643774:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:18.387937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea1/r3tmp/tmpwXsu2K/pdisk_1.dat 2025-04-04T17:22:18.559671Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:18.570831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:18.570928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:18.573030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13318, node 2 2025-04-04T17:22:18.644871Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:18.644891Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:18.644898Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:18.644993Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19187 TClient is connected to server localhost:19187 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:19.028029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:19.043601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:19.102389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:19.236979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:19.318918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:21.633058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509602889547428:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:21.633147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:21.658633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.691669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.721826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.753296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.787440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.826381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:21.879960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509602889547938:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:21.880056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:21.880333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509602889547943:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:21.884013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:21.897495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509602889547945:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:21.969747Z node 2 :TX_PROXY ERROR: Actor# [2:7489509602889547999:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:23.391836Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509590004643774:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:23.391990Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:25.382776Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2025-04-04T17:22:25.384538Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509620069417716:2496], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [2:7489509607184515587:2496]Got LOCKS BROKEN for table `/Root/EightShard`. ShardID=72075186224037891, Sink=[2:7489509620069417716:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:22:25.385119Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509620069417696:2496], SessionActorId: [2:7489509607184515587:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7489509607184515587:2496]. isRollback=0 2025-04-04T17:22:25.385420Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGIwNTVkNjgtODEyOWE2YzEtZWVjM2ZmNTAtOTgxYTI2NGU=, ActorId: [2:7489509607184515587:2496], ActorState: ExecuteState, TraceId: 01jr0x6h721zng3j7enpmq568r, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7489509620069417697:2496] from: [2:7489509620069417696:2496] 2025-04-04T17:22:25.385542Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489509620069417697:2496] TxId: 281474976710673. Ctx: { TraceId: 01jr0x6h721zng3j7enpmq568r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGIwNTVkNjgtODEyOWE2YzEtZWVjM2ZmNTAtOTgxYTI2NGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:22:25.385766Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGIwNTVkNjgtODEyOWE2YzEtZWVjM2ZmNTAtOTgxYTI2NGU=, ActorId: [2:7489509607184515587:2496], ActorState: ExecuteState, TraceId: 01jr0x6h721zng3j7enpmq568r, Create QueryResponse for error on request, msg: 2025-04-04T17:22:25.387450Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2025-04-04T17:22:25.387601Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1743787345426 : 281474976710673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } >> TxUsage::The_Configuration_Is_Changing_As_We_Write_To_The_Topic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 23494, MsgBus: 26561 2025-04-04T17:21:55.761397Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509490811032371:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:55.761458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ebf/r3tmp/tmpbkcz6q/pdisk_1.dat 2025-04-04T17:21:56.125675Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23494, node 1 2025-04-04T17:21:56.168455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:56.168572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:56.171006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:56.183033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:56.183056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:56.183065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:56.183212Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26561 TClient is connected to server localhost:26561 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:56.684954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:58.844913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509503695934930:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:58.845052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:58.845489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509503695934942:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:58.850654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:58.863082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509503695934944:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:58.919861Z node 1 :TX_PROXY ERROR: Actor# [1:7489509503695934995:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:59.205027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:59.351026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:00.354301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:01.068654Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509490811032371:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:01.099666Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 31778, MsgBus: 8254 2025-04-04T17:22:07.669772Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509543573489793:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:07.670003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ebf/r3tmp/tmp0NhSd9/pdisk_1.dat 2025-04-04T17:22:07.782282Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:07.806042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:07.806127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 31778, node 2 2025-04-04T17:22:07.807792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:07.863374Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:07.863394Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:07.863403Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:07.863522Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8254 TClient is connected to server localhost:8254 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:08.289782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:10.733995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509556458392326:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:10.734076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:10.738561Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509556458392353:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:10.746173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:10.761568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509556458392355:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:10.860413Z node 2 :TX_PROXY ERROR: Actor# [2:7489509556458392406:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:10.907955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:11.075709Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489509556458392583:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:11.075906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489509556458392583:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:11.076246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489509556458392583:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:11.076371Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489509556458392583:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:11.076473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489509556458392 ... 
;tablet_id=72075186224038035;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.286907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7489509577933235034:3364];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.288414Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509577933235009:3348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.288749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509577933235009:3348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.292017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509577933235041:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.292418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509577933235041:3368];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.292756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7489509577933235028:3360];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.293138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7489509577933235028:3360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.293531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509577933235054:3375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.293889Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509577933235054:3375];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.295973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7489509577933235020:3355];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.296376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7489509577933235020:3355];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.296588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7489509577933235031:3362];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.296975Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038048;self_id=[2:7489509577933235031:3362];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.299579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7489509577933235053:3374];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.299966Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7489509577933235053:3374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.303531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7489509577933235139:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.303968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7489509577933235139:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.305701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7489509577933235158:3390];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.306057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7489509577933235158:3390];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.309771Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7489509577933235140:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.310188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7489509577933235140:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.312861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7489509577933235015:3352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.313270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7489509577933235015:3352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.316429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7489509577933235102:3377];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.316880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7489509577933235102:3377];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.320029Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038038;self_id=[2:7489509577933235018:3354];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.320471Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7489509577933235018:3354];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.323071Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509577933235143:3384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.323493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509577933235143:3384];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.323753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7489509577933235157:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.324099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7489509577933235157:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.328795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509577933235008:3347];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.329496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509577933235008:3347];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.329991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7489509577933235161:3391];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.332711Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7489509577933235161:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.338004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509577933235248:3393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.338521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509577933235248:3393];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.338790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7489509577933235044:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.339259Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038003;self_id=[2:7489509577933235155:3388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.339623Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7489509577933235155:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.339760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7489509577933235044:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:19.738381Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-04-04T17:22:19.739372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:22:22.766626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:22.766669Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpTx::RollbackByIdle [GOOD] >> KqpTx::RollbackInvalidated >> KqpSinkLocks::TInvalidateOlap >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> KqpLocks::MixedTxFail+useSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink >> KqpSinkTx::OlapSnapshotROInteractive1 >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::EmptyTxOnCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] Test command err: 2025-04-04T17:18:58.074906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508732634245422:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.075958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:58.254420Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000cdd/r3tmp/tmpGPYsHv/pdisk_1.dat 2025-04-04T17:18:58.441284Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11997, node 1 2025-04-04T17:18:58.489606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.489697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.502263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:58.584072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000cdd/r3tmp/yandexYvQ87D.tmp 2025-04-04T17:18:58.584096Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/i5tq/000cdd/r3tmp/yandexYvQ87D.tmp 2025-04-04T17:18:58.584264Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000cdd/r3tmp/yandexYvQ87D.tmp 2025-04-04T17:18:58.584389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799273Z INFO: TTestServer started on Port 26309 GrpcPort 11997 TClient is connected to server localhost:26309 PQClient connected to localhost:11997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:59.033598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.070693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:19:00.727997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508741224180822:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.727997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508741224180810:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.728084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.731930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.740513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508741224180824:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:00.899965Z node 1 :TX_PROXY ERROR: Actor# [1:7489508741224180888:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.996188Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508741224180903:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:00.997745Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWUwYmNjMWEtY2IwY2JmYzgtYjQ0MzU0YWYtYmI1ZTU1NzE=, ActorId: [1:7489508741224180807:2336], ActorState: ExecuteState, TraceId: 01jr0x09kb05z4rh25s2vp1fh9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:01.000330Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.022858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.053139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.140053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508745519148480:2632] 2025-04-04T17:19:03.074523Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508732634245422:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.074630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.322714Z :Statistics INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.335647Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.354879Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.355211Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.355775Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.355969Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.355992Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.356006Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.356033Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.356063Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.356098Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.393050Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508771288952555:2798], now have 1 active actors on pipe 2025-04-04T17:19:07.393089Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508771288952557:2799] connected; active server actors: 1 2025-04-04T17:19:07.393338Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-04T17:19:07.393923Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.394049Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.394232Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.394281Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.404967Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508732634245825 RawX2: 4294969486 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } 2025-04-04T17:19:07.405090Z node 1 :PERSQUEUE DEBUG: [P ... 
PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:22:27.353948Z node 14 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [14:7489509629557184999:2487] (SourceId=test-message_group_id, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:22:27.353979Z node 14 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-04-04T17:22:27.355015Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 14, Generation: 1 2025-04-04T17:22:27.355088Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [14:7489509629557185002:2487], now have 1 active actors on pipe 2025-04-04T17:22:27.355130Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-04T17:22:27.355173Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-04T17:22:27.355289Z node 14 :PERSQUEUE INFO: new Cookie test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 generated for partition 0 topic 'test-topic' owner test-message_group_id 2025-04-04T17:22:27.355407Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:22:27.355471Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:22:27.355890Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-04T17:22:27.355917Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-04T17:22:27.356027Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:22:27.356119Z node 14 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 2025-04-04T17:22:27.357221Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|2b034c68-3007e20a-af56a1fb-8314e712_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T17:22:27.357375Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|2b034c68-3007e20a-af56a1fb-8314e712_0] PartitionId [0] Generation [1] Write session established. 
Init response: session_id: "test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0" 2025-04-04T17:22:27.357432Z :TRACE: [/Root] TRACE_EVENT InitResponse partition_id=0 session_id=test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 2025-04-04T17:22:27.357508Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: set DirectWriteToPartitionId 0 2025-04-04T17:22:27.357719Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write 1 messages with Id from 1 to 1 2025-04-04T17:22:27.357848Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: close. Timeout 18446744073709.551615s 2025-04-04T17:22:27.358366Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: try to update token 2025-04-04T17:22:27.358421Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 1 2025-04-04T17:22:27.358988Z node 14 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:22:27.359364Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T17:22:27.362408Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-04T17:22:27.362468Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-04T17:22:27.362597Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-04T17:22:27.362705Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:22:27.362836Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-04-04T17:22:27.362851Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-04-04T17:22:27.362927Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: test-topic partition: 0 SourceId: '\0test-message_group_id' SeqNo: 1 partNo : 0 messageNo: 1 size 98 offset: -1 2025-04-04T17:22:27.363172Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 1 partNo 0 2025-04-04T17:22:27.364144Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 187 count 1 nextOffset 1 batches 1 2025-04-04T17:22:27.365139Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 175 WTime 1743787347364 2025-04-04T17:22:27.365372Z node 14 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-04-04T17:22:27.365489Z node 14 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 175 2025-04-04T17:22:27.366664Z node 14 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 175 actorID [14:7489509629557184868:2465] 2025-04-04T17:22:27.366794Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 120 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:22:27.366811Z node 14 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 size 175 2025-04-04T17:22:27.366864Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:22:27.366940Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T17:22:27.367191Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:22:27.367247Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-04T17:22:27.367349Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T17:22:27.367504Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:22:27.368193Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T17:22:27.367962Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'test-topic' partition 0 user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-04T17:22:27.368022Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-04T17:22:27.368083Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-04T17:22:27.368110Z node 14 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:22:27.368198Z node 14 :PERSQUEUE DEBUG: Topic 'test-topic' partition 0 user test-consumer readTimeStamp done, result 1743787347362 queuesize 0 startOffset 0 2025-04-04T17:22:27.368440Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-04T17:22:27.368505Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] OnAck: seqNo=1, txId=? 
2025-04-04T17:22:27.368566Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2025-04-04T17:22:27.458022Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T17:22:27.458137Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T17:22:27.458906Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:22:27.459240Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:22:27.461422Z node 14 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 grpc read done: success: 0 data: 2025-04-04T17:22:27.461481Z node 14 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 grpc read failed 2025-04-04T17:22:27.461734Z node 14 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 3 sessionId: test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 2025-04-04T17:22:27.461780Z node 14 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|8e039354-298d7560-55f5a295-52abb23b_0 is DEAD 2025-04-04T17:22:27.462354Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:22:27.465027Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [14:7489509629557185002:2487] destroyed 2025-04-04T17:22:27.465117Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
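
[editor's note] The trace above records one full topic write-session lifecycle: init response with a session id, one message written, a server ack with write_statistics, then a graceful close. Below is a minimal client-side sketch that drives the same sequence, assuming the ydb-python-sdk topic writer API; the endpoint and database are placeholder assumptions, while the topic path and producer id are taken from the log.

# Minimal sketch of the write-session flow seen in the log above:
# init -> write one message -> wait for ack -> graceful close.
# Assumes the ydb-python-sdk topic API; endpoint/database are placeholders.
import ydb

driver = ydb.Driver(
    endpoint="grpc://localhost:2135",  # assumed local endpoint
    database="/Root",
)
driver.wait(timeout=5)

with driver.topic_client.writer(
    "/Root/test-topic",
    producer_id="test-message_group_id",  # shows up as SourceId in the tablet log
) as writer:
    writer.write(ydb.TopicWriterMessage("payload"))
    writer.flush()  # returns once the server acks (seq_no assigned)
# Leaving the `with` block closes the session, mirroring
# "Write session: gracefully shut down, all writes complete" above.
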
>> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapSnapshotRO ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 64840, MsgBus: 2847 2025-04-04T17:22:09.826428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509551461111096:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:09.826776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea6/r3tmp/tmpSYMWrU/pdisk_1.dat 2025-04-04T17:22:10.175649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:10.194314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:10.194426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:10.196292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64840, node 1 2025-04-04T17:22:10.273698Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:10.273732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:10.273740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:10.273888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2847 TClient is connected to server localhost:2847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:10.758201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:10.774212Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:10.787952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:10.924389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:11.079590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:11.150185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:13.004604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509564346014748:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:13.004783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:13.320783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.352347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.380078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.408079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.439676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.478820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.570222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509568640982559:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:13.570326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:13.570702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509568640982564:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:13.574764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:13.587328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509568640982567:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:13.647302Z node 1 :TX_PROXY ERROR: Actor# [1:7489509568640982621:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:14.826202Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509551461111096:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:14.826289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:14.956553Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDAwYWU3ODAtY2JiN2Q1YTktYmRmMGI2NmYtOGEyY2ZkMTU=, ActorId: [1:7489509572935950173:2488], ActorState: ExecuteState, TraceId: 01jr0x676r0wq6aksg4tda0d13, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 27411, MsgBus: 17063 2025-04-04T17:22:15.944821Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509578630374026:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:15.944881Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ea6/r3tmp/tmpgJ4XbW/pdisk_1.dat 2025-04-04T17:22:16.064293Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:16.074356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:16.074441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:16.076403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27411, node 2 2025-04-04T17:22:16.138687Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:16.138716Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:16.138726Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:16.138856Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17063 TClient is connected to server localhost:17063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:16.596099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.607071Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:16.612611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.665836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propos ... 
1;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.290061Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.291051Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.298200Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.299205Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.312245Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.312968Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.318623Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.318753Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.324928Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.330923Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.337072Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.343406Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.349226Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.355069Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.362307Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.363690Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.368379Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.369806Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.374926Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.375209Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.380543Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.384878Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.385486Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.390377Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.390401Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.396659Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.396986Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.400657Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.406147Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.415056Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:22:27.630287Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 
2025-04-04T17:22:27.630296Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489509618594524478:2396];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907,72075186224037947;receive=72075186224037899; 2025-04-04T17:22:27.630368Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489509618594524478:2396];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907,72075186224037947;receive=72075186224037899; 2025-04-04T17:22:27.630753Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:22:27.630835Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:22:27.631133Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489509618594524478:2396];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037947; 2025-04-04T17:22:27.631213Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;self_id=[3:7489509618594524478:2396];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037903;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037907;receive=72075186224037947; 2025-04-04T17:22:27.631633Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-04-04T17:22:27.951502Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2025-04-04T17:22:27.952702Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489509627184461224:2818], SessionActorId: [3:7489509627184461169:2818], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[3:7489509627184461224:2818].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:22:27.953355Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489509627184461224:2818], SessionActorId: [3:7489509627184461169:2818], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7489509627184461169:2818]. isRollback=0 2025-04-04T17:22:27.953562Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTM1YjY5YTUtYjg1OThjNmUtYTY2NjllOTUtNmM1MzQ1MjM=, ActorId: [3:7489509627184461169:2818], ActorState: ExecuteState, TraceId: 01jr0x6kx7afjgfk11cwt26fxc, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7489509627184461253:2818] from: [3:7489509627184461224:2818] 2025-04-04T17:22:27.953650Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489509627184461253:2818] TxId: 281474976715668. Ctx: { TraceId: 01jr0x6kx7afjgfk11cwt26fxc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTM1YjY5YTUtYjg1OThjNmUtYTY2NjllOTUtNmM1MzQ1MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:22:27.953840Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTM1YjY5YTUtYjg1OThjNmUtYTY2NjllOTUtNmM1MzQ1MjM=, ActorId: [3:7489509627184461169:2818], ActorState: ExecuteState, TraceId: 01jr0x6kx7afjgfk11cwt26fxc, Create QueryResponse for error on request, msg: 2025-04-04T17:22:27.954038Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037936;self_id=[3:7489509618594524432:2381];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:27.954640Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715668; 2025-04-04T17:22:27.956921Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715668; >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> KqpSinkMvcc::SnapshotExpiration >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitRoTx_TLI >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] >> KqpLocksTricky::TestNoLocksIssue-withSink >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSinkTx::InvalidateOnError >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] >> KqpTx::SnapshotROInteractive1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 4136, MsgBus: 3530 2025-04-04T17:22:19.459412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509594873802249:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:19.459466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e3a/r3tmp/tmpJiVRvg/pdisk_1.dat 2025-04-04T17:22:19.799546Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:19.846503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:19.846639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:19.847911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4136, node 1 2025-04-04T17:22:19.910759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:19.910781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:19.910787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:19.910886Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:3530 TClient is connected to server localhost:3530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:20.460381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:20.479171Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:20.496102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:20.649983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:20.785850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:20.864944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:22.705121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509607758705898:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.705254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.017044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:23.046157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:23.077924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:23.108606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:23.140455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:23.212983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:23.300770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509612053673711:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.300826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.301120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509612053673716:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.304567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:23.316788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:22:23.316978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509612053673719:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:23.386544Z node 1 :TX_PROXY ERROR: Actor# [1:7489509612053673773:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:24.461100Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509594873802249:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:24.461210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9016, MsgBus: 21599 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e3a/r3tmp/tmpHtzDf0/pdisk_1.dat 2025-04-04T17:22:25.826952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:22:25.887426Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9016, node 2 2025-04-04T17:22:25.915236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:25.915348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:25.932302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:25.960124Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:25.960144Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:25.960151Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:25.960261Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21599 TClient is connected to server localhost:21599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:26.399672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
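
[editor's note] The repeated "Resource pool default not found or you don't have access permissions" NOT_FOUND warnings above are a bootstrap race: the workload service is still creating `/Root/.metadata/workload_manager/pools/default`, then double-checks and finds the path already exists ("path exist, request accepts it"). A sketch of creating a resource pool explicitly is below; it assumes the ydb-python-sdk query-service session pool and that this YDB build supports the CREATE RESOURCE POOL DDL. The limit values are illustrative only, not taken from the log.

# Hedged sketch: pre-creating a resource pool so callers do not race the
# workload service's own bootstrap. Assumes ydb-python-sdk QuerySessionPool
# and CREATE RESOURCE POOL support; parameter values are illustrative.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
driver.wait(timeout=5)

pool = ydb.QuerySessionPool(driver)
pool.execute_with_retries(
    """
    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = 50,  -- illustrative value
        QUEUE_SIZE = 100              -- illustrative value
    );
    """
)
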
2025-04-04T17:22:26.405806Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:26.415233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:26.493497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:26.628291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:26.694722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:28.956490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509634196717980:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.956582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.996572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.036275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.072606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.107413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.143092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.182897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.236758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509638491685785:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:29.236861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:29.237136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509638491685790:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:29.240996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:29.254470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509638491685792:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:29.325536Z node 2 :TX_PROXY ERROR: Actor# [2:7489509638491685845:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:31.029029Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjZjODUyMzQtMTZkNzIxY2EtYTIxN2E0OGQtMzdhMTExZjY=, ActorId: [2:7489509642786653398:2488], ActorState: ExecuteState, TraceId: 01jr0x6pxkdxv3kv1vzgrah05p, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpTx::DeferredEffects [GOOD] >> KqpTx::CommitStats >> KqpTx::RollbackInvalidated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 23182, MsgBus: 4419 2025-04-04T17:22:20.703533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509600560419633:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:20.703575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d4c/r3tmp/tmp9jwfwx/pdisk_1.dat 2025-04-04T17:22:21.063752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:21.064014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:21.066190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23182, node 1 2025-04-04T17:22:21.093340Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:21.097038Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:21.097713Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:21.146590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:21.146635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:21.146653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:21.146763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4419 TClient is connected to server localhost:4419 WaitRootIsUp 'Root'... 
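
[editor's note] The ABORTED outcomes recorded above ("Transaction locks invalidated ... code: 2001", "tx has deferred effects, but locks are broken") are the expected result of YDB's optimistic locking when a concurrent write invalidates a transaction's read set; the standard client response is to re-run the whole transaction. A minimal sketch follows, assuming the ydb-python-sdk table API; the table and column names are hypothetical, chosen only to echo the `/Root/Test` path from the log.

# Hedged sketch: retrying a transaction whose locks were invalidated.
# Assumes the ydb-python-sdk table API; table/columns are hypothetical.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def update_row(session):
    # The full read-modify-write lives inside one callee so a retry
    # replays it from the top with fresh locks.
    tx = session.transaction(ydb.SerializableReadWrite()).begin()
    tx.execute("UPDATE `/Root/Test` SET Comment = 'retried' WHERE Group = 1;")
    tx.commit()

# retry_operation_sync treats ABORTED (broken locks) as retryable and
# re-invokes the callee with a new transaction.
pool.retry_operation_sync(update_row)
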
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:21.661955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:21.678993Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:21.689413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:21.827294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:21.975899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:22.059853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:23.883755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509613445323314:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.883899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:24.157708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.188609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.216435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.247587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.273672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.307668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.397258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509617740291123:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:24.397333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:24.397419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509617740291128:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:24.400932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:24.411679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509617740291130:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:24.501289Z node 1 :TX_PROXY ERROR: Actor# [1:7489509617740291185:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:25.706347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509600560419633:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:25.706437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:26.116826Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTVmNDRiNWYtMjM0NjMzZDYtYmI4YzFjNDktZDIxMTgyOTY=, ActorId: [1:7489509622035258738:2488], ActorState: ExecuteState, TraceId: 01jr0x6j3kes23rgxnhmbhac89, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 11632, MsgBus: 18494 2025-04-04T17:22:26.978260Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509625428244056:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:26.978308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d4c/r3tmp/tmpygpNYK/pdisk_1.dat 2025-04-04T17:22:27.065568Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11632, node 2 2025-04-04T17:22:27.123768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:27.123863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:27.171844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:27.198686Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:27.198706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:27.198713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:27.198823Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18494 TClient is connected to server localhost:18494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:27.639720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:27.648342Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:27.661220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:27.736552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
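The ":3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008" line above is KqpTx::SnapshotROInteractive1 deliberately issuing a write inside a snapshot read-only transaction. A sketch of that client pattern, assuming the C++ SDK's TTxSettings::SnapshotRO mode and the same illustrative `/Root/KV` table:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <cassert>

    using namespace NYdb;
    using namespace NYdb::NTable;

    void SnapshotRoRejectsWrites(TTableClient& client) {
        auto session = client.GetSession().GetValueSync().GetSession();

        // Reads are fine: the whole interactive transaction observes one
        // consistent snapshot of the database.
        auto read = session.ExecuteDataQuery(
            "SELECT COUNT(*) FROM `/Root/KV`;",
            TTxControl::BeginTx(TTxSettings::SnapshotRO())).GetValueSync();
        assert(read.IsSuccess());
        auto tx = *read.GetTransaction();

        // Any mutation in the same transaction is rejected at query compile
        // time with code 2008, the error quoted in the log above.
        auto write = session.ExecuteDataQuery(
            "UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, 'x');",
            TTxControl::Tx(tx).CommitTx()).GetValueSync();
        assert(!write.IsSuccess());
    }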
2025-04-04T17:22:27.872252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:27.976681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:30.066166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509642608115017:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:30.066269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:30.112863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.150907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.222136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.297534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.366533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.436831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.486364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509642608115542:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:30.486446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:30.486771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509642608115547:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:30.490694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:30.504530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509642608115549:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:30.578709Z node 2 :TX_PROXY ERROR: Actor# [2:7489509642608115602:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:31.978394Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509625428244056:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:31.978478Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 29943, MsgBus: 17986 2025-04-04T17:22:22.595043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509606861083741:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:22.595158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d3f/r3tmp/tmpZ3bwUO/pdisk_1.dat 2025-04-04T17:22:22.969491Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29943, node 1 2025-04-04T17:22:22.996647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:22.996733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:22.998534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:23.050309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:23.050340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:23.050360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:23.050515Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17986 TClient is connected to server localhost:17986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
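The recurring "Scheduled retry for error: { : Error: Transaction ... completed, doublechecking }" followed by a TX_PROXY ERROR carrying "path exist, request accepts it" (severity 1) is the workload service bootstrapping `/Root/.metadata/workload_manager/pools/default`: the pool creator re-checks after its transaction completes, and the duplicate create is logged but accepted because the internal request tolerates an existing path. A plain client-side CREATE does not get that tolerance; for contrast, a sketch with an illustrative table:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <cassert>

    using namespace NYdb;
    using namespace NYdb::NTable;

    void IdempotentCreateNeedsCare(TTableClient& client) {
        auto session = client.GetSession().GetValueSync().GetSession();
        const char* ddl =
            "CREATE TABLE `/Root/Example` (Key Uint64, Value Utf8, PRIMARY KEY (Key));";

        // First create: the path does not exist yet, plain success.
        assert(session.ExecuteSchemeQuery(ddl).GetValueSync().IsSuccess());

        // Creating the same path again fails for an ordinary client request;
        // create-if-missing semantics require inspecting the issue text, which
        // is what the pool creator's "doublechecking" retry effectively does
        // after racing with another node.
        auto again = session.ExecuteSchemeQuery(ddl).GetValueSync();
        assert(!again.IsSuccess());
    }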
2025-04-04T17:22:23.696688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:23.738636Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:23.760673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:23.948997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:24.107586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:24.173945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:25.799062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509619745987396:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:25.799192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:26.079728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:26.110481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:26.145207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:26.175325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:26.208550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:26.248635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:26.311744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509624040955204:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:26.311845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:26.312155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509624040955209:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:26.316387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:26.325743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509624040955211:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:26.417491Z node 1 :TX_PROXY ERROR: Actor# [1:7489509624040955266:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:27.308633Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODNkNDg4ZmItMTYyMDY2Y2ItODJlY2Y1NzEtMzU1M2RkZDY=, ActorId: [1:7489509628335922817:2488], ActorState: ReadyState, TraceId: 01jr0x6kb14vf6rp8xzh4ypk0y, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 9703, MsgBus: 8221 2025-04-04T17:22:28.118182Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509631375846383:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d3f/r3tmp/tmpnK7gX7/pdisk_1.dat 2025-04-04T17:22:28.210072Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:28.254650Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:28.277705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:28.277783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:28.279302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9703, node 2 2025-04-04T17:22:28.329597Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:28.329617Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:28.329624Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:28.329722Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8221 TClient is connected to server localhost:8221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:28.835156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
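KqpTx::RollbackInvalidated exercises the path behind the "Create QueryResponse for error on request" warning in ReadyState seen above: a compile error inside an interactive transaction makes the session drop the transaction ("remove tx with tx_id" appears further down for node 2), so the subsequent rollback is answered with an error rather than silently succeeding. A sketch, assuming the C++ SDK and a deliberately missing `/Root/BadTable` as in the test:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <cassert>

    using namespace NYdb;
    using namespace NYdb::NTable;

    void RollbackAfterInvalidation(TTableClient& client) {
        auto session = client.GetSession().GetValueSync().GetSession();

        auto begin = session.ExecuteDataQuery(
            "SELECT 1;", TTxControl::BeginTx()).GetValueSync();
        assert(begin.IsSuccess());
        auto tx = *begin.GetTransaction();

        // A query that fails to compile (the table does not exist)
        // invalidates the server-side transaction state.
        auto bad = session.ExecuteDataQuery(
            "SELECT * FROM `/Root/BadTable`;", TTxControl::Tx(tx)).GetValueSync();
        assert(bad.GetStatus() == EStatus::SCHEME_ERROR);

        // Rolling back the now-invalidated transaction is itself reported as
        // an error on the request, which is the KQP_SESSION warning above.
        auto rollback = tx.Rollback().GetValueSync();
        assert(!rollback.IsSuccess());
    }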
2025-04-04T17:22:28.843169Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:28.860222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:28.940946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:29.093957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:29.171480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:31.237220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509644260749871:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.237327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.275307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.305789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.337020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.365740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.393854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.421514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.464889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509644260750379:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.464962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509644260750384:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.464970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.468268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:31.477828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509644260750386:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:31.550033Z node 2 :TX_PROXY ERROR: Actor# [2:7489509644260750440:3435] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:32.468832Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489509648555718020:2499], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:22:32.469044Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTg5MjI4ZmEtNWYwYmVhMDctODg0OTA1OTctMzA4NWE1OWQ=, ActorId: [2:7489509648555717993:2488], ActorState: ExecuteState, TraceId: 01jr0x6rc98vdtdxcbm32tmbfh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01jr0x6rbtbwrv2qng2zwmc7st 2025-04-04T17:22:32.477534Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTg5MjI4ZmEtNWYwYmVhMDctODg0OTA1OTctMzA4NWE1OWQ=, ActorId: [2:7489509648555717993:2488], ActorState: ReadyState, TraceId: 01jr0x6rcx5sfdzn6zb77sd662, Create QueryResponse for error on request, msg: >> KqpTx::RollbackManyTx >> KqpTx::EmptyTxOnCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 3902, MsgBus: 17048 2025-04-04T17:22:14.595166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509571311264270:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:14.601355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e5f/r3tmp/tmpnJ2mw7/pdisk_1.dat 2025-04-04T17:22:14.957870Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:14.999633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:14.999730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:15.002207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3902, node 1 2025-04-04T17:22:15.053208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:15.053233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:15.053244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:15.053371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17048 TClient is connected to server localhost:17048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:15.743350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:17.626289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509584196166804:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.626435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509584196166772:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.626981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:17.630317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:17.640327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509584196166809:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:17.698260Z node 1 :TX_PROXY ERROR: Actor# [1:7489509584196166862:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:17.942137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:18.063782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.048061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.717705Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509571311264270:2082];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:19.737555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:20.264845Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTczNjZhYTctNmEzYWNmN2YtNmE5ZGM0MTctYWUxZWU2ZTA=, ActorId: [1:7489509597081077202:2969], ActorState: ExecuteState, TraceId: 01jr0x6cazefxm4wgbz57vnxb5, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x17FA58A2 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F42CBF9DD8F 17. ??:0: ?? @ 0x7F42CBF9DE3F 18. ??:0: ?? @ 0x15B7B028 Trying to start YDB, gRPC: 16976, MsgBus: 1892 2025-04-04T17:22:25.190869Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509621395649590:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:25.190901Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e5f/r3tmp/tmpEqUFzJ/pdisk_1.dat 2025-04-04T17:22:25.369254Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:25.395392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:25.395493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:25.396726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16976, node 2 2025-04-04T17:22:25.474895Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:25.474917Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:25.474923Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:25.475050Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1892 TClient is connected to server localhost:1892 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:25.894527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:25.900333Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:28.287854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509634280552112:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.287966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.289364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509634280552148:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.292959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:28.300679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509634280552150:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:28.372230Z node 2 :TX_PROXY ERROR: Actor# [2:7489509634280552201:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:28.416946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:28.488407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.568845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.496182Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509621395649590:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:30.529042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:31.206547Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTQ5NTcxOWEtZTMwOWJmNmEtOTY0OGM2NGUtN2Q4MmYzNDM=, ActorId: [2:7489509647165462456:2968], ActorState: ExecuteState, TraceId: 01jr0x6q090fm3z2jtqgjp8mnx, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x17FA5ACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F42CBF9DD8F 17. ??:0: ?? @ 0x7F42CBF9DE3F 18. ??:0: ?? @ 0x15B7B028 >> KqpSinkLocks::EmptyRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOlap [GOOD] Test command err: Trying to start YDB, gRPC: 19365, MsgBus: 30683 2025-04-04T17:21:54.705149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509487707213494:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:54.705295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec1/r3tmp/tmpInK0dV/pdisk_1.dat 2025-04-04T17:21:55.011833Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19365, node 1 2025-04-04T17:21:55.041419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:55.041870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:55.045392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:55.065610Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:55.065635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:55.065645Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:55.065772Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30683 TClient is connected to server localhost:30683 WaitRootIsUp 'Root'... 
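The failure here is the assertion expecting SUCCESS while the server answers PRECONDITION_FAILED: snapshot read-write isolation is only implemented for column (OLAP) tables, so KqpSnapshotIsolation::TReadOnlyOltpNoSink hits "SnapshotRW can only be used with olap tables" on its row tables. A sketch of the rejected call, assuming an SDK build that exposes a SnapshotRW mode in the query client (the mode name mirrors the server error and is an assumption here) and an illustrative `/Root/RowTable`:

    #include <ydb/public/sdk/cpp/client/ydb_query/client.h>
    #include <cassert>

    using namespace NYdb;
    using namespace NYdb::NQuery;

    void SnapshotRwNeedsColumnTables(TQueryClient& client) {
        auto session = client.GetSession().GetValueSync().GetSession();

        // Against a row-oriented (OLTP) table the server refuses the mode up
        // front with PRECONDITION_FAILED and "SnapshotRW can only be used
        // with olap tables" - exactly the diff printed by the assertion above.
        auto result = session.ExecuteQuery(
            "UPSERT INTO `/Root/RowTable` (Key, Value) VALUES (1u, 'x');",
            TTxControl::BeginTx(TTxSettings::SnapshotRW()).CommitTx()).GetValueSync();
        assert(result.GetStatus() == EStatus::PRECONDITION_FAILED);
    }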
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:55.543023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:57.547629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509500592116057:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:57.547707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509500592116049:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:57.547973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:57.551899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:21:57.563336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509500592116063:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:21:57.642085Z node 1 :TX_PROXY ERROR: Actor# [1:7489509500592116114:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:57.859910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:21:57.976892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:58.972375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:59.808499Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509487707213494:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:59.819840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:00.370693Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDYwYjc4ODQtMmJhMzZkMDktZmIxYWM3M2MtMzAzNGFhZDY=, ActorId: [1:7489509513477026398:2969], ActorState: ExecuteState, TraceId: 01jr0x5rx7e3gv31fpzxyqagas, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-04T17:22:10.011326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:10.011367Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x17FA4522 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F597D984D8F 17. ??:0: ?? @ 0x7F597D984E3F 18. ??:0: ?? @ 0x15B7B028 Trying to start YDB, gRPC: 8811, MsgBus: 22399 2025-04-04T17:22:13.491933Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509567612039726:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:13.503432Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ec1/r3tmp/tmpdlgoiq/pdisk_1.dat 2025-04-04T17:22:13.621613Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:13.659136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:13.659239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:13.663602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8811, node 2 2025-04-04T17:22:13.722787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:13.722816Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:13.722823Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:13.722970Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22399 TClient is connected to server localhost:22399 WaitRootIsUp 'Root'... 
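By contrast, the node 2 run that follows is the variant reported [GOOD]: its tables are column shards (see the TX_COLUMNSHARD messages further down), where the snapshot isolation mode is accepted. Illustrative DDL for such a table, using the documented STORE = COLUMN clause (the table name is made up):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <cassert>

    using namespace NYdb;
    using namespace NYdb::NTable;

    void CreateColumnTable(TTableClient& client) {
        auto session = client.GetSession().GetValueSync().GetSession();
        // STORE = COLUMN makes this an OLAP (column-shard) table, the only
        // kind the SnapshotRW transaction mode above is accepted for. Column
        // tables require NOT NULL primary key columns.
        auto status = session.ExecuteSchemeQuery(R"(
            CREATE TABLE `/Root/ColumnTable` (
                Key Uint64 NOT NULL,
                Value Utf8,
                PRIMARY KEY (Key)
            ) WITH (STORE = COLUMN);
        )").GetValueSync();
        assert(status.IsSuccess());
    }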
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:22:14.206468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.704219Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509580496942106:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:16.704349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509580496942098:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04 ... ;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.200671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7489509601971785003:3361];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.208060Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7489509601971784968:3346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.208382Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[2:7489509584791910814:2482];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037992;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.208694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7489509601971784722:3303];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.209119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7489509589086878459:2543];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037922;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.209207Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7489509589086878540:2587];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037947;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.209296Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7489509584791911100:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037980;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.209467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;self_id=[2:7489509589086878507:2566];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037917;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.209596Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[2:7489509589086878714:2616];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037921;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.209763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7489509589086878525:2580];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037944;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.209927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;self_id=[2:7489509589086878639:2604];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037955;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.210000Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7489509589086878655:2609];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037923;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.210093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[2:7489509589086878451:2540];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037963;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.210186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7489509589086878712:2615];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.210718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7489509589086878751:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037918;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.210879Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[2:7489509589086878531:2584];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037933;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.211041Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;self_id=[2:7489509589086878552:2592];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037948;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.214525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7489509601971785003:3361];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.216915Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7489509601971784722:3303];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.218162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[2:7489509584791910814:2482];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037992;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.220670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037917;self_id=[2:7489509589086878507:2566];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037917;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.220780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;self_id=[2:7489509589086878655:2609];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037923;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.220832Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[2:7489509589086878714:2616];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037921;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.220958Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7489509589086878525:2580];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037944;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.220972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;self_id=[2:7489509589086878451:2540];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037963;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.221080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;self_id=[2:7489509589086878639:2604];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037955;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.221099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7489509589086878712:2615];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037908;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.221255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7489509589086878751:2626];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.221394Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;self_id=[2:7489509589086878552:2592];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037948;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.221837Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;self_id=[2:7489509589086878531:2584];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037933;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.223434Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7489509589086878459:2543];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037922;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.223606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[2:7489509589086878540:2587];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037947;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.223726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7489509584791911100:2508];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037980;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.358113Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667;
2025-04-04T17:22:25.359535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.374725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:7489509584791911106:2513];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037966;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.375003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7489509589086878777:2627];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037925;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.375724Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:7489509584791911106:2513];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037966;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.375973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[2:7489509589086878777:2627];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037925;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.386532Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7489509589086878498:2558];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037940;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.394401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7489509589086878498:2558];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037940;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.394677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7489509589086878657:2610];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037910;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
2025-04-04T17:22:25.398508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7489509589086878657:2610];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
2025-04-04T17:22:28.588925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T17:22:28.588961Z node 2 :IMPORT WARN: Table profiles were not loaded
WAIT_INDEXATION: 0
>> KqpSinkLocks::EmptyRangeAlreadyBroken
>> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD]
>> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink
>> DataShardVolatile::DistributedWriteThenDropTable [GOOD]
>> DataShardVolatile::DistributedWriteThenCopyTable
>> KqpSinkLocks::TInvalidate
>> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL]
>> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL]
Test command err: Trying to start YDB, gRPC: 9635, MsgBus: 26378
2025-04-04T17:22:06.046198Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509540530890049:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:06.046274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eb2/r3tmp/tmpO08sYg/pdisk_1.dat
2025-04-04T17:22:06.370163Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 9635, node 1
2025-04-04T17:22:06.435864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:06.436005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:06.437759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:22:06.439109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:06.439129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:06.439135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:06.439239Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26378
TClient is connected to server localhost:26378
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:06.849439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:09.067243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509553415792586:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:09.067508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509553415792573:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:09.067670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:09.072008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-04T17:22:09.080812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509553415792612:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-04T17:22:09.173645Z node 1 :TX_PROXY ERROR: Actor# [1:7489509553415792664:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:22:09.407208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-04T17:22:09.521525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T17:22:10.474984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:22:11.195833Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509540530890049:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:11.205354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:22:11.686579Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjM0MDZmNWMtNjdlOGY3YmYtNDY4NDQxY2YtNGU4MDJmMg==, ActorId: [1:7489509562005735734:2969], ActorState: ExecuteState, TraceId: 01jr0x640z024582mkprzd7y3w, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF
1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0
2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x17FA4BA2
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5
12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8
13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5
15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC
16. ??:0: ?? @ 0x7FB70BDE0D8F
17. ??:0: ?? @ 0x7FB70BDE0E3F
18. ??:0: ?? @ 0x15B7B028
Trying to start YDB, gRPC: 21740, MsgBus: 1331
2025-04-04T17:22:16.718108Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509583553328550:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:16.718154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eb2/r3tmp/tmp0LoUdk/pdisk_1.dat
2025-04-04T17:22:16.829525Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21740, node 2
2025-04-04T17:22:16.860666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:16.860746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:16.864597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:22:16.946919Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:16.946976Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:16.946983Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:16.947120Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1331
TClient is connected to server localhost:1331
WaitRootIsUp 'Root'...
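The ", with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" fragment in the failure above is the unit-test framework printing both status names plus a character-level diff between them. A minimal sketch of the kind of status check that produces this output, assuming the suite compares an SDK result status against EStatus::SUCCESS (header paths, function name, and result type here are illustrative, not the actual test source):

#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/ydb_query/client.h>

// Hypothetical helper; the real check sits in
// ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76.
void AssertQuerySucceeded(const NYdb::NQuery::TExecuteQueryResult& result) {
    // On mismatch the macro raises the "assertion failed ..." error seen in
    // the log, printing both enum names, e.g. "(PRECONDITION_FAILED != SUCCESS)",
    // and appends the issues text passed as the third argument, which is how
    // "SnapshotRW can only be used with olap tables." ends up in the message.
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                               result.GetIssues().ToString());
}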
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:17.351958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:19.827746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509596438231062:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:19.827910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:19.829875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509596438231097:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
@ 0x17FAAA97
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5
12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8
13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5
15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC
16. ??:0: ?? @ 0x7FB70BDE0D8F
17. ??:0: ?? @ 0x7FB70BDE0E3F
18. ??:0: ?? @ 0x15B7B028
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD]
Test command err: Trying to start YDB, gRPC: 25267, MsgBus: 10862
2025-04-04T17:22:23.865850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509612124173206:2064];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:23.866351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d24/r3tmp/tmp9daD3C/pdisk_1.dat
2025-04-04T17:22:24.270370Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:22:24.273785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:24.273847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:24.275987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25267, node 1
2025-04-04T17:22:24.358877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:24.358905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:24.358912Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:24.359062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10862
TClient is connected to server localhost:10862
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:24.905704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:24.925958Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:22:24.936895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:25.061499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:25.265154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:25.347683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:27.171619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509629304044161:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:27.171769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:27.390868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:22:27.421282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:22:27.448705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:22:27.474525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:22:27.506657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:22:27.576764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:22:27.626894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509629304044676:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:27.627002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:27.627210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509629304044681:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:27.631924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:22:27.646419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509629304044683:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:22:27.729635Z node 1 :TX_PROXY ERROR: Actor# [1:7489509629304044736:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:22:28.866464Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509612124173206:2064];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:28.866531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:22:29.104984Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjE4ZGM3ODUtN2U4ZGI3NjMtMmU5ODdjNTEtNzA2ZDNkOWQ=, ActorId: [1:7489509633599012288:2488], ActorState: ReadyState, TraceId: 01jr0x6n346hejfvvcrmmmeemy, Create QueryResponse for error on request, msg:
Trying to start YDB, gRPC: 1193, MsgBus: 29893
2025-04-04T17:22:30.055098Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509640724207403:2224];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d24/r3tmp/tmpWYKT66/pdisk_1.dat
2025-04-04T17:22:30.115498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-04-04T17:22:30.183997Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:22:30.207263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:30.207353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:30.208670Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1193, node 2
2025-04-04T17:22:30.293972Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:30.293996Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:30.294002Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:30.294110Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29893
TClient is connected to server localhost:29893
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:30.722172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:30.742028Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:30.752196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-04-04T17:22:30.837208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:30.989622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:31.057705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:32.942580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509649314143569:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:32.942693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:32.969889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:22:33.003140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:22:33.028576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:22:33.059784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:22:33.092558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:22:33.165932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:22:33.207703Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509653609111384:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:33.207778Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:33.207834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509653609111389:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:33.228428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:22:33.243332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509653609111391:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:22:33.316439Z node 2 :TX_PROXY ERROR: Actor# [2:7489509653609111444:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD]
>> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3
>> KqpSinkMvcc::OltpNamedStatementNoSink
>> TxUsage::WriteToTopic_Demo_45 [GOOD]
>> KqpLimits::TooBigKey+useSink [GOOD]
>> KqpLimits::TooBigKey-useSink
>> KqpTx::CommitRoTx_TLI [GOOD]
>> KqpSinkTx::SnapshotROInteractive1 [GOOD]
>> KqpSnapshotIsolation::TReadOnlyOlap [GOOD]
>> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD]
>> KqpSinkMvcc::OltpMultiSinks
>> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD]
>> KqpTx::CommitStats [GOOD]
>> KqpSinkTx::OlapDeferredEffects [GOOD]
>> KqpSinkTx::OlapExplicitTcl
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD]
Test command err: Trying to start YDB, gRPC: 61094, MsgBus: 28249
2025-04-04T17:22:26.650035Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509622470033140:2222];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:26.650502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ca0/r3tmp/tmpjdDA0F/pdisk_1.dat
2025-04-04T17:22:27.027058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:27.027202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:27.029140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:22:27.049010Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 61094, node 1
2025-04-04T17:22:27.109717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:27.109739Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:27.109781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:27.109896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28249
TClient is connected to server localhost:28249
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:27.602508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:27.615529Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:22:27.622019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:27.744570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:27.907238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:27.985627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:29.600466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509635354936647:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:29.600587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:29.781165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:22:29.867881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:22:29.903479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:22:29.937849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:22:29.976002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:22:30.025635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:22:30.091684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509639649904457:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:30.091764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:30.092056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509639649904462:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:30.095508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:22:30.108574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509639649904464:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:22:30.193523Z node 1 :TX_PROXY ERROR: Actor# [1:7489509639649904519:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 17675, MsgBus: 30397
2025-04-04T17:22:31.871528Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509645398047030:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:31.871634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ca0/r3tmp/tmpcA6Rf5/pdisk_1.dat
2025-04-04T17:22:31.966994Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17675, node 2
2025-04-04T17:22:32.000814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:32.000901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:32.008747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:22:32.033221Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:32.033245Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:32.033251Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:32.033360Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:30397
TClient is connected to server localhost:30397
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-04-04T17:22:32.424093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T17:22:32.431113Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:22:32.437056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:32.496102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:32.641319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:22:32.707226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-04-04T17:22:34.980308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509658282950683:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:34.980410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:35.008975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:22:35.073929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:22:35.104954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:22:35.140697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:22:35.209143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:22:35.249155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:22:35.293701Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509662577918494:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:35.293780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:35.294121Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509662577918499:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:35.297656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:22:35.309708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509662577918501:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:22:35.372580Z node 2 :TX_PROXY ERROR: Actor# [2:7489509662577918553:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD]
>> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3
>> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD]
>> DataShardVolatile::VolatileTxAbortedOnSplit
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive1 [GOOD]
Test command err: Trying to start YDB, gRPC: 2640, MsgBus: 12487
2025-04-04T17:22:13.451514Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509568185455678:2270];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:13.451640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e84/r3tmp/tmpPhQkGh/pdisk_1.dat
2025-04-04T17:22:13.902550Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:22:13.924700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:13.924847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:13.926604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2640, node 1
2025-04-04T17:22:14.025593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:14.025615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:14.025621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:14.025723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12487
TClient is connected to server localhost:12487
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:14.577345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:16.478921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509581070357995:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:16.479076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:16.479204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509581070358003:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:22:16.483154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-04-04T17:22:16.493837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509581070358022:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-04T17:22:16.577865Z node 1 :TX_PROXY ERROR: Actor# [1:7489509581070358073:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:22:16.793307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-04T17:22:16.899197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T17:22:17.840375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:22:18.571439Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509568185455678:2270];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:18.589502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:22:19.657736Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTNiMzZhNDEtMTkxMTk2NmQtZGEwYjVjZTAtOGQ0OTcxNDQ=, ActorId: [1:7489509589660301009:2969], ActorState: ExecuteState, TraceId: 01jr0x6bssa7qhb7731bcabdzj, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
Trying to start YDB, gRPC: 6190, MsgBus: 10526
2025-04-04T17:22:25.600365Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509618865374890:2059];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:25.600420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e84/r3tmp/tmpADAYAB/pdisk_1.dat
2025-04-04T17:22:25.687845Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6190, node 2
2025-04-04T17:22:25.734516Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:25.734603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:25.735713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:22:25.822289Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:25.822312Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:25.822320Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:25.822450Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10526
TClient is connected to server localhost:10526
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:26.228796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:26.237771Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-04-04T17:22:28.882110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509631750277437:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.882168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509631750277421:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.882214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.885683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:28.894415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509631750277450:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:28.969989Z node 2 :TX_PROXY ERROR: Actor# [2:7489509631750277501:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:29.011694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.072100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.157204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.917370Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509618865374890:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:30.976593Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 8823, MsgBus: 12931 2025-04-04T17:22:15.568607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509575637236545:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:15.569114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e46/r3tmp/tmpI3xvKo/pdisk_1.dat 2025-04-04T17:22:16.004251Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:16.017937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:16.018028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8823, node 1 2025-04-04T17:22:16.019613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:16.066702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:16.066721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:16.066728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:16.066830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12931 TClient is connected to server localhost:12931 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:16.524222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.541418Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.552990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:16.685428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.901416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:16.990778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:18.721361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509588522140071:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.721605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.994464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.027006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.055802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.122593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.157200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.229955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:19.275522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509592817107883:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:19.275618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:19.275886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509592817107888:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:19.279450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:19.294673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509592817107890:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:19.371967Z node 1 :TX_PROXY ERROR: Actor# [1:7489509592817107944:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:20.562183Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509575637236545:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:20.562281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3664, MsgBus: 26065 2025-04-04T17:22:21.734793Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509603893891889:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:21.734838Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e46/r3tmp/tmpay2f4p/pdisk_1.dat 2025-04-04T17:22:21.831774Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3664, node 2 2025-04-04T17:22:21.866686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:21.866774Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:21.869861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:21.918893Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:21.918937Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:21.918946Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:21.919098Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26065 TClient is connected to server localhost:26065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:22:22.353394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:22.370493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.425530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:22.557623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:22.653432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:24.864338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509616778795539:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:24.864435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:24.924647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.961494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.989274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:25.021785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:25.054801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:25.092706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:25.176940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509621073763347:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:25.177048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:25.177398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509621073763352:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:25.180773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:25.190837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509621073763354:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:25.280629Z node 2 :TX_PROXY ERROR: Actor# [2:7489509621073763409:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:26.734873Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509603893891889:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:26.734937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:36.830642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:36.830675Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:37.510799Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509672613371801:2632], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWQ3MjkxZjUtZTUwY2Y4MDEtMjVkMjllMmYtNjk5MjlhMzE=. TraceId : 01jr0x6x4ffdvkfg3xkmkz6znw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743787346084/18446744073709551615 shard 72075186224037888 with lowWatermark v1743787346420/18446744073709551615 (node# 2 state# Ready) } } 2025-04-04T17:22:37.511254Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509672613371801:2632], TxId: 281474976715681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWQ3MjkxZjUtZTUwY2Y4MDEtMjVkMjllMmYtNjk5MjlhMzE=. TraceId : 01jr0x6x4ffdvkfg3xkmkz6znw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743787346084/18446744073709551615 shard 72075186224037888 with lowWatermark v1743787346420/18446744073709551615 (node# 2 state# Ready) } }. 2025-04-04T17:22:37.511576Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509672613371803:2633], TxId: 281474976715681, task: 2. Ctx: { TraceId : 01jr0x6x4ffdvkfg3xkmkz6znw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWQ3MjkxZjUtZTUwY2Y4MDEtMjVkMjllMmYtNjk5MjlhMzE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489509672613371797:2496], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:22:37.511920Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQ3MjkxZjUtZTUwY2Y4MDEtMjVkMjllMmYtNjk5MjlhMzE=, ActorId: [2:7489509625368730993:2496], ActorState: ExecuteState, TraceId: 01jr0x6x4ffdvkfg3xkmkz6znw, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 11047, MsgBus: 2231 2025-04-04T17:22:27.491349Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509630470962392:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:27.492140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c8b/r3tmp/tmp2n8SNp/pdisk_1.dat 2025-04-04T17:22:27.898084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:27.898169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11047, node 1 2025-04-04T17:22:27.901501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:27.902189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:27.905825Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:27.910237Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:27.990857Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:27.990883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:27.990890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:27.991025Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2231 TClient is connected to server localhost:2231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:28.562052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:28.586995Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:28.601230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:28.724905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:28.905131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:28.982697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:30.627477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509643355866035:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:30.627612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:30.873387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.906672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.975878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.008112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.035134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.073270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.170145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509647650833848:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.170326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.170906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509647650833854:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.174630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:31.188297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509647650833856:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:31.272786Z node 1 :TX_PROXY ERROR: Actor# [1:7489509647650833911:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:32.485089Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509630470962392:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:32.485192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7270, MsgBus: 15465 2025-04-04T17:22:33.097010Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509653608468936:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:33.097091Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c8b/r3tmp/tmpOAIz8j/pdisk_1.dat 2025-04-04T17:22:33.204612Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7270, node 2 2025-04-04T17:22:33.232506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:33.232597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:33.238072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:33.258957Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:33.258976Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:33.258981Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:33.259090Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15465 TClient is connected to server localhost:15465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:22:33.692710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:33.699217Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:33.707532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:33.755916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:22:33.925661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:33.991560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.125868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509666493372564:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:36.126032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:36.161341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.193136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.260138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.290414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.322012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.393185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.434339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509666493373082:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:36.434412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:36.434669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509666493373087:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:36.437841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:36.446624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509666493373089:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:36.545123Z node 2 :TX_PROXY ERROR: Actor# [2:7489509666493373143:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::WriteToTopic_Demo_45 [GOOD] Test command err: 2025-04-04T17:18:58.078834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508729597342288:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.078890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:18:58.255153Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e87/r3tmp/tmpa9qUxP/pdisk_1.dat 2025-04-04T17:18:58.432761Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28533, node 1 2025-04-04T17:18:58.483055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.483171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.484934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:18:58.584133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000e87/r3tmp/yandexiqBYx6.tmp 2025-04-04T17:18:58.584163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000e87/r3tmp/yandexiqBYx6.tmp 2025-04-04T17:18:58.584317Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000e87/r3tmp/yandexiqBYx6.tmp 2025-04-04T17:18:58.584455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799325Z INFO: TTestServer started on Port 25103 GrpcPort 28533 TClient is connected to server localhost:25103 PQClient connected to localhost:28533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:18:59.034619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.070695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.198892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:19:00.546744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508738187277610:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.546882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.547009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508738187277633:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.555936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.568984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508738187277639:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:00.652680Z node 1 :TX_PROXY ERROR: Actor# [1:7489508738187277703:2449] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.989961Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508738187277711:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:00.991730Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTliZWUwM2YtNDk4ZjRiMmEtMmRhZDljNjktMjQwNTljYzY=, ActorId: [1:7489508738187277607:2336], ActorState: ExecuteState, TraceId: 01jr0x09dkccn6ybazgq6pnfw0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:00.993927Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.023328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.095518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.167347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508742482245290:2631] 2025-04-04T17:19:03.075277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508729597342288:2132];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.075349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.376969Z :WriteToTopic_Demo_4 INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.388365Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.402137Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508768252049380:2806] connected; active server actors: 1 2025-04-04T17:19:07.402509Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-04T17:19:07.402935Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.403051Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.403427Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.411568Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.411795Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.412287Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.412467Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.412488Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.412503Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.412548Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.412577Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.412618Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.414437Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.414480Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508768252049397:2465], now have 1 active actors on pipe 2025-04-04T17:19:07.447382Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508768252049379:2805], now have 1 active actors on pipe 2025-04-04T17:19:07.470821Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508729597342619 RawX2: 4294969480 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" ... 
12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state EXECUTED 2025-04-04T17:22:37.150546Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675, State EXECUTED 2025-04-04T17:22:37.150567Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675 State EXECUTED FrontTxId 281474976710675 2025-04-04T17:22:37.150589Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:22:37.150622Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 1743787357130 txid# 281474976710675 TabletSource# 72075186224037895 TabletDest# 72075186224037894 SetTabletConsumer# 72075186224037894 Flags# 0 Seqno# 0} 2025-04-04T17:22:37.150649Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675, NewState WAIT_RS_ACKS 2025-04-04T17:22:37.150665Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:22:37.150687Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-04T17:22:37.150711Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] WriteId {12, 281474976710674} Partitions.size=0 2025-04-04T17:22:37.150724Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:22:37.150737Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-04T17:22:37.150753Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] WriteId {12, 281474976710674} Partitions.size=0 2025-04-04T17:22:37.150771Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] add an TxId 281474976710675 to the list for deletion 2025-04-04T17:22:37.150789Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675, NewState DELETING 2025-04-04T17:22:37.150805Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvTxProcessing::TEvReadSetAck Step: 1743787357130 TxId: 281474976710675 TabletSource: 72075186224037895 TabletDest: 72075186224037894 TabletConsumer: 72075186224037894 Flags: 0 Seqno: 0 2025-04-04T17:22:37.150815Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete key for TxId 281474976710675 2025-04-04T17:22:37.150829Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976710675] Handle TEvReadSetAck 2025-04-04T17:22:37.150848Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976710675] Predicate acks 1/1 2025-04-04T17:22:37.150881Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:22:37.150899Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Try execute txs with state WAIT_RS_ACKS 2025-04-04T17:22:37.150919Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] TxId 281474976710675, State WAIT_RS_ACKS 2025-04-04T17:22:37.150947Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-04T17:22:37.151018Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] WriteId {12, 281474976710674} Partitions.size=0 2025-04-04T17:22:37.151034Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:22:37.151050Z node 12 :PERSQUEUE DEBUG: [TxId: 281474976710675] PredicateAcks: 1/1 2025-04-04T17:22:37.151082Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] WriteId {12, 281474976710674} Partitions.size=0 2025-04-04T17:22:37.151108Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] add an TxId 281474976710675 to the list for deletion 2025-04-04T17:22:37.151142Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] TxId 281474976710675, NewState DELETING 2025-04-04T17:22:37.151169Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] delete key for TxId 281474976710675 2025-04-04T17:22:37.151241Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:22:37.151762Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:22:37.151783Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state DELETING 2025-04-04T17:22:37.151796Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710675, State DELETING 2025-04-04T17:22:37.151816Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete WriteId {12, 281474976710674} 2025-04-04T17:22:37.151837Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete TxId 281474976710675 2025-04-04T17:22:37.152043Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:22:37.152062Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Try execute txs with state DELETING 2025-04-04T17:22:37.152077Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] TxId 281474976710675, State DELETING 2025-04-04T17:22:37.152095Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] delete WriteId {12, 281474976710674} 2025-04-04T17:22:37.152112Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] delete TxId 281474976710675 2025-04-04T17:22:37.158045Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server connected, pipe [12:7489509673118553079:3017], now have 1 active actors on pipe 2025-04-04T17:22:37.158377Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-04T17:22:37.159649Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [12:7489509673118553084:3021], now have 1 active actors on pipe 2025-04-04T17:22:37.159918Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 2 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-04-04T17:22:37.164869Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0] PartitionId [1] Generation [1] Write session: close. Timeout 0.000000s 2025-04-04T17:22:37.164905Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0] PartitionId [1] Generation [1] Write session will now close 2025-04-04T17:22:37.164944Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0] PartitionId [1] Generation [1] Write session: aborting 2025-04-04T17:22:37.165269Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0] PartitionId [1] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:22:37.165315Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0] PartitionId [1] Generation [1] Write session: destroy 2025-04-04T17:22:37.166093Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0 grpc read done: success: 0 data: 2025-04-04T17:22:37.166127Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0 grpc read failed 2025-04-04T17:22:37.166164Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0 grpc closed 2025-04-04T17:22:37.166182Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id_2|4763e464-a355f6be-4888d20a-760f5393_0 is DEAD 2025-04-04T17:22:37.167234Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:22:37.167282Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:22:37.167415Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489509673118552979:2522] destroyed 2025-04-04T17:22:37.167447Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [12:7489509673118552982:2522] destroyed 2025-04-04T17:22:37.167482Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:22:37.168683Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0] PartitionId [0] Generation [1] Write session: close.
Timeout 0.000000s 2025-04-04T17:22:37.168717Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T17:22:37.168739Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T17:22:37.168957Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:22:37.168974Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:22:37.169591Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0 grpc read done: success: 0 data: 2025-04-04T17:22:37.169616Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0 grpc read failed 2025-04-04T17:22:37.169647Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0 grpc closed 2025-04-04T17:22:37.169663Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id_1|a30924a6-7e0c4178-469b813d-6151a140_0 is DEAD 2025-04-04T17:22:37.170433Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:22:37.170495Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037895 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:22:37.170853Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [12:7489509668823585654:2515] destroyed 2025-04-04T17:22:37.170888Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [12:7489509668823585657:2515] destroyed 2025-04-04T17:22:37.170925Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] TPartition::DropOwner. 
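The write-session records above ("close. Timeout 0.000000s", "gracefully shut down, all writes complete", "destroy", then the server-side "server disconnected" and TPartition::DropOwner) trace a producer shutting down. A minimal producer that would leave this kind of trace is sketched below, assuming the NYdb::NTopic C++ SDK; the topic path and message group id are taken from the log, everything else is illustrative.

    #include <ydb/public/sdk/cpp/client/ydb_topic/topic.h> // in-repo SDK path; an assumption

    using namespace NYdb::NTopic;

    void WriteOneMessage(NYdb::TDriver& driver) {
        TTopicClient client(driver);

        auto session = client.CreateSimpleBlockingWriteSession(
            TWriteSessionSettings()
                .Path("test-topic")
                .MessageGroupId("test-message_group_id_1"));

        session->Write(TWriteMessage("payload"));

        // Close(TDuration::Zero()) matches "close. Timeout 0.000000s":
        // no grace period is given, yet the log still reports a graceful
        // shutdown because all writes had already completed.
        session->Close(TDuration::Zero());
    }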
>> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> ScriptExecutionsTest::RunCheckLeaseStatus >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> TableCreation::MultipleTablesCreation >> TableCreation::ConcurrentTableCreation >> KqpProxy::PingNotExistedSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] Test command err: Trying to start YDB, gRPC: 12920, MsgBus: 9533 2025-04-04T17:22:08.648724Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509547475880332:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:08.648972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eae/r3tmp/tmpwKLJlx/pdisk_1.dat 2025-04-04T17:22:08.974783Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12920, node 1 2025-04-04T17:22:09.039036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:09.039061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:09.039078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:09.039297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:22:09.047242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:09.047395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:09.049152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9533 TClient is connected to server localhost:9533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:09.529639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:11.636607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509560360782884:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:11.636712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:11.636948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509560360782896:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:11.641502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:11.655613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509560360782898:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:11.758186Z node 1 :TX_PROXY ERROR: Actor# [1:7489509560360782951:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:12.017999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.161857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.196680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:13.977348Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509547475880332:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:14.013757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:14.510073Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzFhYmUxNzctZWVkNTNjZGMtZDFiMmU4NjEtMTJhZmMzOWU=, ActorId: [1:7489509573245693211:2968], ActorState: ExecuteState, TraceId: 01jr0x66s800p0fttg3t71e1ha, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x17FA4DCA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FD152838D8F 17. ??:0: ?? @ 0x7FD152838E3F 18. ??:0: ?? @ 0x15B7B028 Trying to start YDB, gRPC: 25591, MsgBus: 12540 2025-04-04T17:22:19.223549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509593963763421:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:19.223590Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eae/r3tmp/tmpMF8Ysx/pdisk_1.dat 2025-04-04T17:22:19.353468Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:19.368195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:19.368315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:19.371587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25591, node 2 2025-04-04T17:22:19.429680Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:19.429703Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:19.429710Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:19.429822Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12540 TClient is connected to server localhost:12540 WaitRootIsUp 'Root'... 
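The "(PRECONDITION_FAILED != SUCCESS)" diff and the RaiseError frame in the stack above come from a status-equality assertion in library/cpp/testing/unittest. The failing check at kqp_snapshot_isolation_ut.cpp is, in shape, the pattern below (a sketch of the assertion style, not a reproduction of the actual DoExecute body):

    #include <library/cpp/testing/unittest/registar.h>

    // Sketch only: the real test executes a query first and then asserts
    // on the returned status. When the server answers PRECONDITION_FAILED
    // ("SnapshotRW can only be used with olap tables"), the macro raises
    // the "(PRECONDITION_FAILED != SUCCESS)" diff seen in this log.
    void CheckResult(const NYdb::NTable::TDataQueryResult& result) {
        UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                                   result.GetIssues().ToString());
    }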
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:22:19.882470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:22.362721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509606848665942:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.362937Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:22.363220Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509606848665977:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { ... st::TEvNotifyPlanStep;tablet_id=72075186224037939;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.308661Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7489509611143634524:2523];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.308676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7489509606848666190:2347];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.308789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7489509611143634524:2523];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.308820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7489509606848666190:2347];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.308948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7489509611143634501:2518];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037905;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309072Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7489509611143634471:2505];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309280Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7489509611143635475:2544];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037958;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309448Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7489509611143634426:2496];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037938;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7489509611143634426:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037938;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[2:7489509611143635625:2618];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037979;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[2:7489509611143635625:2618];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037979;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309879Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037970;self_id=[2:7489509611143635513:2571];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037970;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.309956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:7489509611143635506:2565];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037968;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.310022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[2:7489509611143635497:2558];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037966;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.310081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;self_id=[2:7489509611143635506:2565];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037968;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.311077Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[2:7489509611143634234:2481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037943;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.311308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[2:7489509611143634539:2527];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037904;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.311450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037904;self_id=[2:7489509611143634539:2527];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037904;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.312003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7489509606848666201:2350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.312137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7489509606848666201:2350];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.312505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;self_id=[2:7489509611143634234:2481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037943;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.312721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7489509606848666189:2346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.312850Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7489509606848666189:2346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.313047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7489509606848666187:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.313178Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[2:7489509606848666187:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.313354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489509606848666191:2348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.313488Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7489509606848666191:2348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.313628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7489509611143634452:2503];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037929;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.313751Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;self_id=[2:7489509611143634452:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037929;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.313901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:7489509611143635583:2597];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037972;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.314022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;self_id=[2:7489509611143635583:2597];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037972;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.320325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509606848666211:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.320566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7489509606848666211:2353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.320747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7489509611143635512:2570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037981;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.320933Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7489509611143635512:2570];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037981;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.321075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7489509611143635633:2619];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037969;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.321222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7489509611143635633:2619];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037969;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.327948Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037988;self_id=[2:7489509611143635496:2557];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037988;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.328174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[2:7489509611143635496:2557];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037988;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.333587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=63e35cb2-117911f0-aca97289-156fe356;tablet_id=72075186224037897;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.333911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.335107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;self_id=[2:7489509611143635505:2564];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037997;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:31.335310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;self_id=[2:7489509611143635505:2564];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037997;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:22:34.334580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:34.334612Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpProxy::NoLocalSessionExecution >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> KqpProxy::InvalidSessionID >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap >> TxUsage::The_Configuration_Is_Changing_As_We_Write_To_The_Topic [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted >> IndexBuildTest::RejectsCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 8556, MsgBus: 23290 2025-04-04T17:22:29.400501Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509637128766760:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:29.414090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c65/r3tmp/tmpOuPRvN/pdisk_1.dat 2025-04-04T17:22:29.866099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:29.868226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-04T17:22:29.872805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8556, node 1 2025-04-04T17:22:29.923129Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:29.923156Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:29.941794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:29.984829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:29.984852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:29.985317Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:29.985471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23290 TClient is connected to server localhost:23290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:30.563999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:30.578624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:30.592562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.744391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:30.925921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:31.011217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:32.668857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509650013670309:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:32.669000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:32.884505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:32.913587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:32.946416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:32.975035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:33.003851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:33.032604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:33.072098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509654308638113:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:33.072167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:33.072241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509654308638118:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:33.075565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:33.084375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509654308638120:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:33.169018Z node 1 :TX_PROXY ERROR: Actor# [1:7489509654308638174:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:34.421021Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509637128766760:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:34.421413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4099, MsgBus: 31010 2025-04-04T17:22:35.560759Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509661989935679:2091];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:35.561850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c65/r3tmp/tmp3qVtTx/pdisk_1.dat 2025-04-04T17:22:35.660775Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4099, node 2 2025-04-04T17:22:35.699077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:35.699159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:35.703921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:35.738062Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:35.738081Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:35.738088Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:35.738198Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31010 TClient is connected to server localhost:31010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
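"WaitRootIsUp 'Root'..." followed by the TClient::Ls request/response is the harness polling the scheme root until the schemeshard responds. The harness uses the internal TClient, but an equivalent readiness probe with the public SDK would look roughly like this sketch:

    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h> // in-repo SDK path; an assumption
    #include <util/datetime/base.h>

    bool WaitRootIsUp(NYdb::TDriver& driver, const TString& root = "/Root") {
        NYdb::NScheme::TSchemeClient client(driver);
        for (int attempt = 0; attempt < 100; ++attempt) {
            // Equivalent of the TClient::Ls probe seen in the log.
            auto result = client.DescribePath(root).GetValueSync();
            if (result.IsSuccess()) {
                return true; // the harness prints "WaitRootIsUp 'Root' success." here
            }
            Sleep(TDuration::MilliSeconds(100));
        }
        return false;
    }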
2025-04-04T17:22:36.160990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.170792Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:36.186751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.236837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.373639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.448984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:38.322438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509674874839280:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.322514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.362698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.392848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.420880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.446497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.474151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.539475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.579461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509674874839792:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.579541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509674874839797:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.579550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.582308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:38.591260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509674874839799:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:38.647307Z node 2 :TX_PROXY ERROR: Actor# [2:7489509674874839855:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:39.663802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.742713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.816429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:22:40.559708Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509661989935679:2091];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.569223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TableCreation::ConcurrentUpdateTable >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::CreateOldTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 13340, MsgBus: 64040 2025-04-04T17:22:25.386591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509620278955494:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:25.390506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000cb8/r3tmp/tmpKMgbiH/pdisk_1.dat 2025-04-04T17:22:25.739627Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13340, node 1 2025-04-04T17:22:25.794444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:25.794584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:25.796601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:25.815181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:25.815204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:25.815212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:25.815328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64040 TClient is connected to server localhost:64040 WaitRootIsUp 'Root'... 
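Each "suboperation type: ESchemeOpCreateTable" warning in the fixtures above corresponds to one table the test creates before its queries run (the warning itself only notes that the propose step is not idempotent-safe; it is not a failure). A minimal client-side equivalent of one such creation, assuming the NYdb::NTable API rather than the tests' internal helpers:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h> // in-repo SDK path; an assumption

    using namespace NYdb::NTable;

    void CreateKvTable(TTableClient& client, const TString& path) {
        client.RetryOperationSync([&](TSession session) {
            auto desc = TTableBuilder()
                .AddNullableColumn("Key", NYdb::EPrimitiveType::Uint64)
                .AddNullableColumn("Value", NYdb::EPrimitiveType::String)
                .SetPrimaryKeyColumn("Key")
                .Build();
            // Triggers the ESchemeOpCreateTable scheme operation logged above.
            return session.CreateTable(path, std::move(desc)).GetValueSync();
        });
    }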
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:26.357915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:26.372358Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:28.293826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509633163857948:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.293834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509633163857953:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.293972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.298453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:28.307725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509633163857962:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:28.386491Z node 1 :TX_PROXY ERROR: Actor# [1:7489509633163858013:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:28.658571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:28.783027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.836639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.619354Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509620278955494:2142];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:30.638793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:31.258445Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTc1MDg2MTktZmRiZGRlZmYtOGM5MmU2YzAtNDkzNmNkNWQ=, ActorId: [1:7489509646048768351:2968], ActorState: ExecuteState, TraceId: 01jr0x6q268n7g9bqr1gmd3syx, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x17FA474A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F498BFE3D8F 17. ??:0: ?? @ 0x7F498BFE3E3F 18. ??:0: ?? @ 0x15B7B028 Trying to start YDB, gRPC: 9907, MsgBus: 13280 2025-04-04T17:22:35.932092Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509661962585529:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:35.933565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000cb8/r3tmp/tmpmaHK5Y/pdisk_1.dat 2025-04-04T17:22:36.086961Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:36.103127Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:36.103213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:36.104783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9907, node 2 2025-04-04T17:22:36.167160Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:36.167193Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:36.167201Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:36.167322Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13280 TClient is connected to server localhost:13280 WaitRootIsUp 'Root'... 
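The message "SnapshotRW can only be used with olap tables" refers to the snapshot read-write isolation level the test requests. In SDK terms the request is roughly the sketch below; TTxSettings::SnapshotRW() is assumed to be available (it is a comparatively recent addition), and against row-oriented (non-olap) tables the server rejects it with PRECONDITION_FAILED, which is the status the assertion above trips on.

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h> // in-repo SDK path; an assumption

    using namespace NYdb::NTable;

    NYdb::TStatus RunSnapshotRw(TSession session) {
        // Requests SNAPSHOT_RW isolation; valid for column (olap) tables.
        // On row tables the server answers PRECONDITION_FAILED with
        // "SnapshotRW can only be used with olap tables".
        return session.ExecuteDataQuery(
            "UPDATE `/Root/Test` SET Comment = 'x' WHERE Group = 1;", // hypothetical table and query
            TTxControl::BeginTx(TTxSettings::SnapshotRW()).CommitTx()
        ).GetValueSync();
    }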
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:36.571999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.593914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.677114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.848739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:36.919243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.200610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509679142456417:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.200679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.243207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.267860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.295190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.325628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.355303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.383299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.427556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509679142456925:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.427639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.427647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509679142456930:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.430551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:39.437604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509679142456932:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:39.537082Z node 2 :TX_PROXY ERROR: Actor# [2:7489509679142456987:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:40.932554Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509661962585529:2095];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.932617Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 17317, MsgBus: 23604 2025-04-04T17:22:04.357418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509530900815875:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:04.357565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eb7/r3tmp/tmpR4ioTn/pdisk_1.dat 2025-04-04T17:22:04.716646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:04.763135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:04.763258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17317, node 1 2025-04-04T17:22:04.765215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:04.802011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:04.802034Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:04.802045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:04.802175Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23604 TClient is connected to server localhost:23604 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:05.292208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:07.356372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509543785718414:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:07.356453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509543785718438:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:07.356511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:07.361088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:07.376556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509543785718441:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:07.442220Z node 1 :TX_PROXY ERROR: Actor# [1:7489509543785718494:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:07.681669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:07.833717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:07.833719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:07.833900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:07.834284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:07.834342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:07.834473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:07.834648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:07.834653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:07.834777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:07.834807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:07.834895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-04T17:22:07.834924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:07.835026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:07.835072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:07.835113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:07.835198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:07.835251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:07.835341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:07.835361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:07.835481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:07.835509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:07.835633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:07.835635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509543785718681:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:07.835738Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7489509543785718679:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:07.875089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509543785718680:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:07.875161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509543785718680:2345];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:07.875365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;sel ... 7;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.181810Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037952;self_id=[2:7489509631905822053:2536];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037952;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.181837Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037968;self_id=[2:7489509631905821802:2494];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.181863Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7489509631905821614:2459];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.181894Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7489509631905821782:2483];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.181919Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7489509631905821775:2479];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.181948Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7489509631905821658:2474];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.181979Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7489509631905821780:2481];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182051Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037987;self_id=[2:7489509631905821657:2473];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182070Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7489509631905821786:2486];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182132Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7489509631905821801:2493];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182133Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7489509631905821655:2471];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182187Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037974;self_id=[2:7489509631905821927:2495];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182197Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7489509631905821748:2476];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182258Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7489509631905821790:2488];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182277Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7489509631905821649:2467];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182312Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037976;self_id=[2:7489509631905821785:2485];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182333Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7489509631905821650:2468];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182364Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7489509631905821749:2477];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-04-04T17:22:37.182394Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7489509631905821656:2472];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182457Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037962;self_id=[2:7489509631905822056:2537];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037962;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182479Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7489509631905821652:2470];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182532Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037963;self_id=[2:7489509631905822086:2550];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182547Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7489509631905821787:2487];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182588Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037964;self_id=[2:7489509631905822051:2534];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037964;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182634Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7489509631905821793:2491];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182645Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037965;self_id=[2:7489509631905822017:2526];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037965;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182688Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7489509631905821792:2490];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182701Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7489509631905821778:2480];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182744Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037981;self_id=[2:7489509631905821800:2492];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182767Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7489509631905821774:2478];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.182837Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7489509631905821784:2484];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.184223Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7489509631905821648:2466];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.184313Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7489509631905821659:2475];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.184327Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7489509631905821647:2465];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:37.184381Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7489509631905821615:2460];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 2025-04-04T17:22:38.824859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:38.824894Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 19651, MsgBus: 63727 2025-04-04T17:22:19.909480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509595001268723:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:19.909601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e2c/r3tmp/tmpDfWzgq/pdisk_1.dat 2025-04-04T17:22:20.277228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:20.277344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:20.282559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:20.309356Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19651, node 1 2025-04-04T17:22:20.364380Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:20.364407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:20.364414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:20.364530Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63727 TClient is connected to server localhost:63727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:20.905452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:23.135238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509612181138368:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.135327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509612181138351:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.135538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.140517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:23.152473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509612181138380:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:23.210596Z node 1 :TX_PROXY ERROR: Actor# [1:7489509612181138431:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:23.490879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:23.603179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.658726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:25.326456Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509595001268723:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:25.393715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:26.354795Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710667; 2025-04-04T17:22:26.375528Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509625066049481:2969], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7489509620771081435:2969]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[1:7489509625066049481:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:22:26.376220Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509625066049469:2969], SessionActorId: [1:7489509620771081435:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7489509620771081435:2969]. isRollback=0 2025-04-04T17:22:26.377563Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDUwZmM3NjUtMmNjMjJiYzAtYTFlMTkzYjAtMmE2M2U1ZGY=, ActorId: [1:7489509620771081435:2969], ActorState: ExecuteState, TraceId: 01jr0x6jcn3rheegytd767ytn0, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7489509625066049470:2969] from: [1:7489509625066049469:2969] 2025-04-04T17:22:26.377647Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489509625066049470:2969] TxId: 281474976710667. Ctx: { TraceId: 01jr0x6jcn3rheegytd767ytn0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDUwZmM3NjUtMmNjMjJiYzAtYTFlMTkzYjAtMmE2M2U1ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:22:26.377865Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDUwZmM3NjUtMmNjMjJiYzAtYTFlMTkzYjAtMmE2M2U1ZGY=, ActorId: [1:7489509620771081435:2969], ActorState: ExecuteState, TraceId: 01jr0x6jcn3rheegytd767ytn0, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 22356, MsgBus: 9233 2025-04-04T17:22:32.389853Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509648308264683:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:32.389931Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e2c/r3tmp/tmp4XahZk/pdisk_1.dat 2025-04-04T17:22:32.484168Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22356, node 2 2025-04-04T17:22:32.528790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:32.528889Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:32.531683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:32.558713Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:32.558735Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:32.558742Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:32.558854Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9233 TClient is connected to server localhost:9233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:32.977567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:35.524411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509661193167233:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:35.524485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509661193167214:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:35.524662Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:35.527978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:35.538599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509661193167243:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:35.621022Z node 2 :TX_PROXY ERROR: Actor# [2:7489509661193167294:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:35.674282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:35.721656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.745634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:37.568948Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509648308264683:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:37.582093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:38.016009Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-04T17:22:38.016235Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:22:38.016385Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:22:38.016672Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509674078077670:2969], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7489509669783110319:2969]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7489509674078077670:2969].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:22:38.016767Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489509674078077663:2969], SessionActorId: [2:7489509669783110319:2969], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7489509669783110319:2969]. isRollback=0 2025-04-04T17:22:38.016948Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjFiZDg1YTUtNjIzOGJiNTEtOTlmMDIzNjgtYTgzMjhlYjE=, ActorId: [2:7489509669783110319:2969], ActorState: ExecuteState, TraceId: 01jr0x6xr6fq45j60z0ndkzytf, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7489509674078077664:2969] from: [2:7489509674078077663:2969] 2025-04-04T17:22:38.017029Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489509674078077664:2969] TxId: 281474976715664. Ctx: { TraceId: 01jr0x6xr6fq45j60z0ndkzytf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjFiZDg1YTUtNjIzOGJiNTEtOTlmMDIzNjgtYTgzMjhlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:22:38.017972Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjFiZDg1YTUtNjIzOGJiNTEtOTlmMDIzNjgtYTgzMjhlYjE=, ActorId: [2:7489509669783110319:2969], ActorState: ExecuteState, TraceId: 01jr0x6xr6fq45j60z0ndkzytf, Create QueryResponse for error on request, msg:
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 2025-04-04T17:22:38.062350Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjFiZDg1YTUtNjIzOGJiNTEtOTlmMDIzNjgtYTgzMjhlYjE=, ActorId: [2:7489509669783110319:2969], ActorState: ExecuteState, TraceId: 01jr0x6xt70py5abv7hnmfe6f1, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jr0x6xqz69n714a7av69ff3g, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout >> KqpLimits::TooBigKey-useSink [GOOD] >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead >> IndexBuildTest::WithFollowers >> KqpSinkTx::OlapInteractive [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> IndexBuildTest::CancellationNotEnoughRetries >> IndexBuildTest::BaseCase >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::OlapUncommittedRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 30339, MsgBus: 24182 2025-04-04T17:17:08.473432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508257583893182:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:08.473478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fd/r3tmp/tmpUF6vTG/pdisk_1.dat 2025-04-04T17:17:08.902184Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30339, node 1 2025-04-04T17:17:08.914512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:17:08.914745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:17:08.926847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:17:09.018711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:17:09.018734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:17:09.018749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:17:09.018889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24182 TClient is connected to server localhost:24182 WaitRootIsUp 'Root'... 
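The PRECONDITION_FAILED block above (issue code 2012, "Duplicate keys have been found.") comes from KqpSinkTx::InvalidateOnError re-inserting an existing key. A hedged SDK-level sketch of that pattern — an illustration under assumed names, not the test source — is:

    // INSERT enforces key uniqueness, so re-inserting Key=1u fails with
    // PRECONDITION_FAILED / code 2012, where UPSERT or REPLACE would
    // overwrite instead. `session` is an NYdb::NTable::TSession (assumed
    // to be already created against /Root).
    auto result = session.ExecuteDataQuery(
        "INSERT INTO `/Root/KV` (Key, Value) VALUES (1u, \"again\");",
        NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();
    UNIT_ASSERT_VALUES_EQUAL(result.GetStatus(),
                             NYdb::EStatus::PRECONDITION_FAILED);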
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:17:09.540028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.556152Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:17:09.561620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.700559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:09.880955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:17:09.956976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.636710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508270468796842:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:11.636811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:11.861148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.892983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.926045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:17:11.967231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.054343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.124375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:17:12.164498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508274763764654:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.164596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.164781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508274763764659:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:17:12.168293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:17:12.176951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508274763764661:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:17:12.237007Z node 1 :TX_PROXY ERROR: Actor# [1:7489508274763764714:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:17:12.873334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:13.475692Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508257583893182:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:17:13.475810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:17:23.858358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:23.858402Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 32544, MsgBus: 3195 2025-04-04T17:20:20.842952Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509083579775122:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:20:20.843012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fd/r3tmp/tmplEhkwk/pdisk_1.dat 2025-04-04T17:20:21.000812Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:21.034926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:21.035054Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:21.037019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32544, node 2 2025-04-04T17:20:21.142963Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:21.142988Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:21.143000Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:21.143189Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3195 TClient is connected to server localhost:3195 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:20:21.743001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:20:21.761185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:20:21.875552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 w ... 121Z node 3 :TX_DATASHARD ERROR: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 2025-04-04T17:22:35.813565Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489509662485034412:2501], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [3:7489509662485034371:2501]Got BAD REQUEST for table `/Root/Test`. ShardID=72075186224037914, Sink=[3:7489509662485034412:2501].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } 2025-04-04T17:22:35.814404Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489509662485034405:2501], SessionActorId: [3:7489509662485034371:2501], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 . sessionActorId=[3:7489509662485034371:2501]. isRollback=0 2025-04-04T17:22:35.815447Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDQ4NDE2ZmYtMzc5NzA1MzctZWRhMTEyM2EtMzQ3NzViYw==, ActorId: [3:7489509662485034371:2501], ActorState: ExecuteState, TraceId: 01jr0x6vgy5snx0z1gd23sk81w, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [3:7489509662485034406:2501] from: [3:7489509662485034405:2501] 2025-04-04T17:22:35.815562Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489509662485034406:2501] TxId: 281474976710671. Ctx: { TraceId: 01jr0x6vgy5snx0z1gd23sk81w, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDQ4NDE2ZmYtMzc5NzA1MzctZWRhMTEyM2EtMzQ3NzViYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/Test`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } } 2025-04-04T17:22:35.816779Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDQ4NDE2ZmYtMzc5NzA1MzctZWRhMTEyM2EtMzQ3NzViYw==, ActorId: [3:7489509662485034371:2501], ActorState: ExecuteState, TraceId: 01jr0x6vgy5snx0z1gd23sk81w, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 Trying to start YDB, gRPC: 28498, MsgBus: 10131 2025-04-04T17:22:37.320831Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509669997495935:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:37.320921Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015fd/r3tmp/tmpSphbRi/pdisk_1.dat 2025-04-04T17:22:37.509697Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:37.512880Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:37.512987Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:37.514417Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28498, node 4 2025-04-04T17:22:37.582881Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:37.582911Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:37.582921Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:37.583097Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10131 TClient is connected to server localhost:10131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:38.136577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:38.154618Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:38.232548Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
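The failure above trips a fixed datashard limit: a 2097156-byte row key against an allowed threshold of 1049600 bytes, roughly double the limit. A client-side pre-check of the encoded key size would catch this before the transaction is proposed; the sketch below assumes a simplified key encoding (raw concatenated column bytes) and is not the datashard's own validation code:

    #include <cstddef>
    #include <numeric>
    #include <string>
    #include <vector>

    // 1049600 is the threshold quoted in the log above; the key encoding
    // here (summed raw column bytes) is a simplifying assumption.
    constexpr std::size_t kMaxRowKeyBytes = 1049600;

    bool rowKeyWithinLimit(const std::vector<std::string>& keyColumns) {
        const std::size_t total = std::accumulate(
            keyColumns.begin(), keyColumns.end(), std::size_t{0},
            [](std::size_t acc, const std::string& col) {
                return acc + col.size();
            });
        // A 2097156-byte key, as in the failing test, exceeds the limit.
        return total <= kMaxRowKeyBytes;
    }
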
2025-04-04T17:22:38.422355Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.499193Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:42.321208Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489509669997495935:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:42.321287Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:42.438645Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509691472334193:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:42.438749Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:42.465801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.518860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.555592Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.628816Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.668971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.747203Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.809538Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509691472334713:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:42.809640Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509691472334718:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:42.809679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:42.813909Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:42.825305Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489509691472334720:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:42.927162Z node 4 :TX_PROXY ERROR: Actor# [4:7489509691472334776:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:44.589005Z node 4 :TX_DATASHARD ERROR: Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 2025-04-04T17:22:44.589182Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710671 at tablet 72075186224037914 status: BAD_REQUEST errors: BAD_ARGUMENT (Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914) | 2025-04-04T17:22:44.589444Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489509700062269691:2491] TxId: 281474976710671. Ctx: { TraceId: 01jr0x74002378dhg3rp9pnfdz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTNmM2M3ZGQtZjBmYzQ5YjMtZTI4YWI0ZTctOTUzYWFhNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: [BAD_ARGUMENT] Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914; 2025-04-04T17:22:44.589785Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTNmM2M3ZGQtZjBmYzQ5YjMtZTI4YWI0ZTctOTUzYWFhNDU=, ActorId: [4:7489509700062269651:2491], ActorState: ExecuteState, TraceId: 01jr0x74002378dhg3rp9pnfdz, Create QueryResponse for error on request, msg:
: Error: Bad request., code: 2017
: Error: [BAD_ARGUMENT] Operation [0:281474976710671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::DatabasesCacheForServerless >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsDropIndex >> IndexBuildTest::WithFollowers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 19119, MsgBus: 12501 2025-04-04T17:22:23.330744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509611420849062:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:23.338075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d2e/r3tmp/tmpIvycr7/pdisk_1.dat 2025-04-04T17:22:23.790688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:23.807734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:23.807854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:23.809820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19119, node 1 2025-04-04T17:22:23.937298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:23.937326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:23.937334Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:23.937445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12501 TClient is connected to server localhost:12501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:24.501231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
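The BAD_REQUEST responses above carry a nested issue tree: a top-level "Bad request" issue with code 2017 wrapping the datashard's BAD_ARGUMENT message as a subissue. A rough mirror of that shape; the field names are assumptions for illustration, not the real NYql::TIssue layout:

    #include <iostream>
    #include <string>
    #include <vector>

    // Hypothetical mirror of the nested records printed in the log
    // ("Error: ... code: 2017 subissue: { ... }").
    struct Issue {
        std::string message;
        int code = 0;
        std::vector<Issue> subissues;
    };

    void print(const Issue& i, int depth = 0) {
        std::cout << std::string(depth * 2, ' ')
                  << "Error: " << i.message << ", code: " << i.code << "\n";
        for (const auto& sub : i.subissues) print(sub, depth + 1);
    }

    int main() {
        Issue top{"Bad request. Table: `/Root/Test`.", 2017,
                  {{"Cannot parse tx 3. BAD_ARGUMENT: Row key size of "
                    "2097156 bytes is larger than the allowed threshold "
                    "1049600", 2017, {}}}};
        print(top);
    }
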
2025-04-04T17:22:24.515970Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:26.482859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509624305751550:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:26.482953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509624305751558:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:26.483040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:26.487750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:26.498566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509624305751564:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:26.562164Z node 1 :TX_PROXY ERROR: Actor# [1:7489509624305751615:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:26.807885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:26.936551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:27.915805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:28.653044Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509611420849062:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:28.703138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:29.435065Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-04-04T17:22:29.446754Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-04T17:22:29.446903Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-04T17:22:29.447083Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509637190662185:2969], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7489509637190661905:2969]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7489509637190662185:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:22:29.447726Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509637190662177:2969], SessionActorId: [1:7489509637190661905:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7489509637190661905:2969]. isRollback=0 2025-04-04T17:22:29.447976Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDZlNjdhZjUtZjIzZGE4OTItMWIxMmQ0NDQtYjBiNzcwYWQ=, ActorId: [1:7489509637190661905:2969], ActorState: ExecuteState, TraceId: 01jr0x6nc59yspjjjxxka14nr7, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7489509637190662178:2969] from: [1:7489509637190662177:2969] 2025-04-04T17:22:29.448062Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489509637190662178:2969] TxId: 281474976710667. Ctx: { TraceId: 01jr0x6nc59yspjjjxxka14nr7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDZlNjdhZjUtZjIzZGE4OTItMWIxMmQ0NDQtYjBiNzcwYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:22:29.448942Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDZlNjdhZjUtZjIzZGE4OTItMWIxMmQ0NDQtYjBiNzcwYWQ=, ActorId: [1:7489509637190661905:2969], ActorState: ExecuteState, TraceId: 01jr0x6nc59yspjjjxxka14nr7, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 9725, MsgBus: 2247 2025-04-04T17:22:35.418601Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509663174828196:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:35.420207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d2e/r3tmp/tmplS8cxF/pdisk_1.dat 2025-04-04T17:22:35.529266Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9725, node 2 2025-04-04T17:22:35.582143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:35.582594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:35.604133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:35.642778Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:35.642799Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:35.642806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:35.642912Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2247 TClient is connected to server localhost:2247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:36.100420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:38.497458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509676059730574:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.497523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509676059730569:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.497622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.500440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:38.509174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509676059730583:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:38.596837Z node 2 :TX_PROXY ERROR: Actor# [2:7489509676059730634:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:38.637161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.684104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.777980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:40.601287Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509663174828196:2221];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.634820Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:41.410717Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzA2NjEzLWU5YTk4ZjVhLTg3YWE5ODhmLTkzNTNjYTM0, ActorId: [2:7489509688944640953:2969], ActorState: ExecuteState, TraceId: 01jr0x71067300680jwqdvq17x, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:47.153243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:47.153367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:47.153434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:47.153475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:47.153543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:47.153573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:47.153632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:47.153710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:47.154037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:47.223729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:47.223771Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:47.233194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:47.233371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:47.233497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:47.242030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:47.242330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:47.242927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.243607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:47.248687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.249570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:47.249616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-04T17:22:47.249759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:47.249792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.249816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:47.249901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.261352Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:47.390306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:47.390542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.390769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:47.391054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:47.391105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.399397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.399537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:47.399785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.399836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:47.399875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:47.399923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:47.402338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.402389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:47.402425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:47.404292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.404340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.404380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.404449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.407631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:47.408868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:47.409021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:47.409901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.410010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:47.410062Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.410347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:47.410395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.410554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:47.410629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:47.412430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:47.412476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.412651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.412690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:47.412976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.413020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:47.413099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.413143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.413186Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.413222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.413252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:47.413296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.413334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:47.413377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:47.413429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:47.413465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:47.413494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:47.415183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.415270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.415300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... operation is done id#104:1 progress is 2/3 2025-04-04T17:22:48.046271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-04T17:22:48.046298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2025-04-04T17:22:48.046323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-04-04T17:22:48.046354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2025-04-04T17:22:48.046983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.047098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.047131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:22:48.047163Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:22:48.047191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:22:48.048275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.048369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.048401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:22:48.048424Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:22:48.048448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:48.049580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.049647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.049668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:22:48.049703Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-04T17:22:48.049738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:22:48.050352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.050441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.050458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:22:48.051256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-04-04T17:22:48.051334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:48.051589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:22:48.051693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-04T17:22:48.051736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T17:22:48.051769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-04-04T17:22:48.051791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T17:22:48.051816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: false 2025-04-04T17:22:48.051843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-04-04T17:22:48.051884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 
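The TEvUpdateAck/AckPublish exchange above counts down the outstanding scheme-board publications for txId 104 and, once the count reaches zero, notifies the registered subscriber and drops the record ("Publication complete, notify & remove"). A minimal sketch of that ack-counting idea, with illustrative types rather than the schemeshard's real structures:

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <vector>

    struct TxPublication {
        int pendingPublications = 0;
        std::vector<std::function<void()>> subscribers;
    };

    class PublicationTracker {
        std::map<uint64_t, TxPublication> txs_;
    public:
        void expect(uint64_t txId, int publications) {
            txs_[txId].pendingPublications = publications;
        }
        void subscribe(uint64_t txId, std::function<void()> cb) {
            txs_[txId].subscribers.push_back(std::move(cb));
        }
        // Mirrors "Handle TEvUpdateAck": on the last ack, notify & remove.
        void ack(uint64_t txId) {
            auto it = txs_.find(txId);
            if (it == txs_.end()) return;
            if (--it->second.pendingPublications <= 0) {
                for (auto& cb : it->second.subscribers) cb();
                txs_.erase(it);
            }
        }
    };
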
2025-04-04T17:22:48.051913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:22:48.051985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:22:48.052031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-04-04T17:22:48.052051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-04-04T17:22:48.052080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:22:48.052128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-04-04T17:22:48.052157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-04-04T17:22:48.052198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:22:48.052232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 1 2025-04-04T17:22:48.052264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-04-04T17:22:48.052729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.052790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:22:48.052815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:22:48.052878Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:22:48.052917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:22:48.053014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-04-04T17:22:48.053047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:335:2314] 2025-04-04T17:22:48.054082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:22:48.055381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:22:48.055464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:22:48.055508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:22:48.057048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:22:48.057126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:22:48.057161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:696:2654] TestWaitNotification: OK eventTxId 104 2025-04-04T17:22:48.057796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:22:48.058013Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 232us result status StatusSuccess 2025-04-04T17:22:48.058424Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "valueFloat" Type: "Float" TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex >> TableCreation::CreateOldTable [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> KqpSinkMvcc::OltpNamedStatement >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> TableCreation::ConcurrentUpdateTable [GOOD] >> KqpSinkMvcc::OltpMultiSinks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:45.518230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:45.518397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:45.518457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:45.518507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:45.519299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:45.519362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:45.519461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:45.519553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:45.521272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:45.608757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:45.608826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:45.624298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:45.624579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:45.624759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:45.637200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:45.637529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:45.638267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:45.639218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:45.651853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:45.653262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:45.653323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:45.653563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:45.653650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:45.653706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:45.653831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.668542Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:45.817126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:45.817442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.817724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:45.818025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:45.818089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.823367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:45.823551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:45.823793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.823855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:45.823895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:45.823953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:45.826521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.826608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:45.826654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:45.829049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.829107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.829185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:45.829275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
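The "Change state for txid" lines in these schemeshard tests walk a fixed per-part pipeline, 2 -> 3 -> 128 -> 240, which the surrounding messages name TCreateParts, TConfigureParts, TPropose, and TDone. A sketch of that progression; the numeric values come straight from the log, but mapping them onto the named phases is an inference, not a quote from the schemeshard sources:

    #include <cstdint>
    #include <stdexcept>

    enum class OpState : uint8_t {
        CreateParts    = 2,   // TCreateParts ProgressState
        ConfigureParts = 3,   // TConfigureParts ProgressState
        Propose        = 128, // TPropose, waits for the coordinator plan
        Done           = 240, // TDone, "Part operation is done"
    };

    OpState next(OpState s) {
        switch (s) {
            case OpState::CreateParts:    return OpState::ConfigureParts;
            case OpState::ConfigureParts: return OpState::Propose;
            case OpState::Propose:        return OpState::Done;
            case OpState::Done:
                throw std::logic_error("part already done");
        }
        throw std::logic_error("unknown state");
    }
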
2025-04-04T17:22:45.833549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:45.835888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:45.836074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:45.837197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:45.837371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:45.837492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:45.837854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:45.837917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:45.838125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:45.838220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:45.840782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:45.840838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:45.841039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:45.841088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:45.841452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.841509Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:45.841623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:45.841663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:45.841716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:45.841768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:45.841812Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:45.841856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:45.841902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:45.841951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:45.842022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:45.842065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:45.842102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:45.844144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:45.844254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:45.844295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004 2025-04-04T17:22:48.778789Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:48.778938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:48.778996Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId# 107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-04-04T17:22:48.779091Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136 2025-04-04T17:22:48.784156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:48.784238Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-04T17:22:48.784307Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation 2025-04-04T17:22:48.784349Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 107 2025-04-04T17:22:48.785942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 648 } } 
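The TxDropTable operation interleaved here as txId 107 follows the same pattern with different states: 128 -> 136 (TWaitRenamedPathPublication) and 136 -> 137 (TDeleteTableBarrier) above, with the remaining transitions (137 -> 129, 129 -> 240) and the final publication below; the dropped table's path is last published with Version: 18446744073709551615, i.e. 2^64 - 1, the maximum uint64. A variant of the sketch above that tallies every state transition in the log, again assuming the hypothetical ya_test.log:

  grep -o 'Change state for txid [0-9]*:[0-9]* [0-9]* -> [0-9]*' ya_test.log | sort | uniq -c  # counts each observed transition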
2025-04-04T17:22:48.785989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-04-04T17:22:48.786105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 648 } } 2025-04-04T17:22:48.786206Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 648 } } 2025-04-04T17:22:48.787903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 8589936902 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-04T17:22:48.787961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-04-04T17:22:48.788097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 328 RawX2: 8589936902 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-04-04T17:22:48.788153Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-04-04T17:22:48.788624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:48.788685Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-04-04T17:22:48.788738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-04-04T17:22:48.788783Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-04-04T17:22:48.788894Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-04-04T17:22:48.789020Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-04-04T17:22:48.789144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:48.789214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:22:48.794914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:48.795525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:48.795803Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:48.795854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:48.796016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:22:48.796162Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:48.796202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-04-04T17:22:48.796244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-04T17:22:48.796698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:48.796755Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:22:48.796847Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:48.796886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-04-04T17:22:48.796929Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-04-04T17:22:48.797726Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:22:48.797824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:22:48.797862Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-04T17:22:48.797902Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T17:22:48.797946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:48.798722Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:22:48.798795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:22:48.798820Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-04T17:22:48.798847Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:22:48.798874Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:22:48.798937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-04T17:22:48.805055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:22:48.805123Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:48.805399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:22:48.805536Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-04T17:22:48.805575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:22:48.805622Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-04T17:22:48.805656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:22:48.805694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-04T17:22:48.805783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:377:2345] message: TxId: 107 2025-04-04T17:22:48.805827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:22:48.805869Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-04T17:22:48.805901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-04T17:22:48.805990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:22:48.807257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:22:48.808524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:22:48.809379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-04T17:22:48.809434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:581:2541]
TestWaitNotification: OK eventTxId 107
>> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD]
Test command err: Trying to start YDB, gRPC: 22344, MsgBus: 30500 2025-04-04T17:22:08.904631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509546652445084:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:08.905319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eac/r3tmp/tmpHo7HP6/pdisk_1.dat 2025-04-04T17:22:09.196785Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22344, node 1 2025-04-04T17:22:09.275985Z node 1 :HIVE WARN:
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:09.276100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:09.277597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:09.281285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:09.281320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:09.281335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:09.281479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30500 TClient is connected to server localhost:30500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:09.774066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:09.789839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:11.839411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509559537347610:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:11.839497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509559537347639:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:11.839550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:11.844573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:11.859420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509559537347647:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:11.927714Z node 1 :TX_PROXY ERROR: Actor# [1:7489509559537347698:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:12.207039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:12.411162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:12.411164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:12.411396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:12.411453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:12.411769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:12.411886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:12.411959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:12.412042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:12.412069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:12.412213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:12.412242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-04-04T17:22:12.412318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:12.412358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:12.412436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:12.412500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:12.412577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:12.412661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:12.412688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:12.413636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:12.413708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:12.413764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:12.413859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:12.413914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509563832315162:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:12.413974Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7489509563832315151:2344];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:12.451372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509563832315163:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:12.451428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509563832315163:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abs ... 7:22:39.798918Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7489509652366601361:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.799198Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7489509652366601372:3316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.799480Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509652366601375:3319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.799732Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7489509652366601340:3291];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.799916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7489509652366601366:3312];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.800088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7489509652366601346:3296];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.800216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7489509652366601288:3264];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.800319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7489509652366601432:3329];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.800498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509652366601363:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.800931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7489509652366601348:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801115Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038025;self_id=[2:7489509652366601444:3333];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801816Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7489509652366601372:3316];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509652366601375:3319];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.802046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7489509652366601340:3291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.802193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509652366601363:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.802431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7489509652366601348:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.802595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7489509652366601444:3333];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.810014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7489509652366601358:3304];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.810378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7489509652366601358:3304];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.810634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509652366601373:3317];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.810820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509652366601373:3317];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.811011Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7489509652366601356:3303];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.811178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7489509652366601356:3303];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.811428Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038054;self_id=[2:7489509652366601364:3310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.811628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7489509652366601364:3310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.811811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7489509652366601387:3326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.812002Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7489509652366601387:3326];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.817957Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7489509652366601351:3301];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.818322Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7489509652366601453:3337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.818542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7489509652366601456:3338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.818752Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7489509652366601456:3338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.818916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7489509652366601351:3301];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.819013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7489509652366601453:3337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.822912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509652366601386:3325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.823190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509652366601386:3325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.827558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7489509652366601452:3336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.827819Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038021;self_id=[2:7489509652366601452:3336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.828017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509652366601388:3327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.828930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509652366601388:3327];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.834714Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509652366601371:3315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.835017Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509652366601371:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.835243Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7489509652366601446:3334];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.836096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7489509652366601446:3334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.847402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:39.847440Z node 2 :IMPORT WARN: Table profiles were not loaded
WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0
>> KqpTx::RollbackRoTx [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD]
Test command err: 2025-04-04T17:22:40.228195Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509684972382289:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.229295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001625/r3tmp/tmpfNQCsn/pdisk_1.dat 2025-04-04T17:22:40.661402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:40.674837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:40.674939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:40.678756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12245 TServer::EnableGrpc on GrpcPort 2624, node 1 2025-04-04T17:22:40.940763Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:40.940797Z
node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:40.940805Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:40.940946Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:22:41.248030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:43.152298Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:43.154314Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:43.155409Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:43.155454Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:43.155474Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:43.155517Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:43.155572Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.155602Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.156001Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.156027Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.158403Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.158412Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T17:22:43.158495Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-04T17:22:43.158578Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.158583Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-04T17:22:43.158600Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-04T17:22:43.158620Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.158623Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. 
Creating table 2025-04-04T17:22:43.158633Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-04T17:22:43.162945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:22:43.164612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.166836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.173418Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-04T17:22:43.173416Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-04T17:22:43.173467Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-04T17:22:43.173467Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-04-04T17:22:43.175399Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-04T17:22:43.175449Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-04-04T17:22:43.283048Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-04T17:22:43.352278Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-04T17:22:43.355433Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-04T17:22:43.379339Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-04T17:22:43.413761Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-04T17:22:43.452940Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-04T17:22:43.453334Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 812243da-e742816e-34739029-94507cea, Bootstrap. 
Database: /dc-1 2025-04-04T17:22:43.461113Z node 1 :KQP_PROXY DEBUG: Request has 18445000286346.090529s seconds to be completed 2025-04-04T17:22:43.464164Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=Mjc2ZWJlYTAtYzc5Mzc3YTgtM2ZmNmExYmQtMWQ0M2NlNWI=, workerId: [1:7489509697857284934:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T17:22:43.464303Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:43.465295Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 812243da-e742816e-34739029-94507cea, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-04T17:22:43.465874Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=Mjc2ZWJlYTAtYzc5Mzc3YTgtM2ZmNmExYmQtMWQ0M2NlNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7489509697857284934:2333] 2025-04-04T17:22:43.465914Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7489509697857284936:2465] 2025-04-04T17:22:43.468039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509697857284945:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.468128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509697857284937:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.468280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.472063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-04T17:22:43.481276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509697857284951:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:22:43.542593Z node 1 :TX_PROXY ERROR: Actor# [1:7489509697857284993:2497] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges) ... UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-04T17:22:48.277744Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTQ4MjQ0YmEtZTI0NzdiOGMtZjViOWYzYmQtNTRkN2I1ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7489509717089049530:2333] 2025-04-04T17:22:48.277772Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7489509717089049533:2459] 2025-04-04T17:22:48.278838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509717089049541:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.278857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509717089049532:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.278918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.281188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-04-04T17:22:48.287976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509717089049547:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-04-04T17:22:48.356656Z node 2 :TX_PROXY ERROR: Actor# [2:7489509717089049588:2490] txid# 281474976715662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:48.510927Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7489509717089049531:2334], selfId: [2:7489509704204146969:2267], source: [2:7489509717089049530:2333] 2025-04-04T17:22:48.511123Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTQ4MjQ0YmEtZTI0NzdiOGMtZjViOWYzYmQtNTRkN2I1ODY=, TxId: 2025-04-04T17:22:48.511151Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTQ4MjQ0YmEtZTI0NzdiOGMtZjViOWYzYmQtNTRkN2I1ODY=, TxId: 2025-04-04T17:22:48.511160Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9. Result: SUCCESS. Issues: 2025-04-04T17:22:48.513223Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YTI3YmYzMC1mZTFlYzJhMC0zNzhiMTBkYy05ZGM4ZmQ3YQ==, workerId: [2:7489509717089049639:2349], database: dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:22:48.513333Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:48.513402Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTQ4MjQ0YmEtZTI0NzdiOGMtZjViOWYzYmQtNTRkN2I1ODY=, workerId: [2:7489509717089049530:2333], local sessions count: 1 2025-04-04T17:22:48.513491Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YTI3YmYzMC1mZTFlYzJhMC0zNzhiMTBkYy05ZGM4ZmQ3YQ==, CurrentExecutionId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7489509717089049639:2349] 2025-04-04T17:22:48.513525Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7489509717089049641:2523] 2025-04-04T17:22:48.528953Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr0x782gfefbg5jnvt120zrx", Request has 18445000286341.022688s seconds to be completed 2025-04-04T17:22:48.530533Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr0x782gfefbg5jnvt120zrx", Created new session, sessionId: ydb://session/3?node_id=2&id=YjMyNzcyMDItZTY0YjMyMWYtNzNmMWFkYmYtNjNiMWZlNTk=, workerId: [2:7489509717089049656:2359], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:22:48.530643Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr0x782gfefbg5jnvt120zrx 2025-04-04T17:22:48.532934Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-04-04T17:22:48.532978Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-04-04T17:22:48.533035Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Full table path:/dc-1/.test/test_table 2025-04-04T17:22:48.535780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-04-04T17:22:48.537571Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-04-04T17:22:48.537608Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-04-04T17:22:48.555694Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, Bootstrap. Database: /dc-1 2025-04-04T17:22:48.556065Z node 2 :KQP_PROXY DEBUG: Request has 18445000286340.995565s seconds to be completed 2025-04-04T17:22:48.557633Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=N2E2OTBkMWUtZmQxZmNiYzQtN2ZhNmRhODQtZGU1NjQxMWM=, workerId: [2:7489509717089049727:2363], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-04T17:22:48.557739Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:48.557812Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7489509717089049527:2457], selfId: [2:7489509704204146969:2267], source: [2:7489509717089049639:2349] 2025-04-04T17:22:48.557933Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-04T17:22:48.558210Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=N2E2OTBkMWUtZmQxZmNiYzQtN2ZhNmRhODQtZGU1NjQxMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7489509717089049727:2363] 2025-04-04T17:22:48.558256Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7489509717089049732:2573] 2025-04-04T17:22:48.562741Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-04-04T17:22:48.618505Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.622593Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-04T17:22:48.651054Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YjMyNzcyMDItZTY0YjMyMWYtNzNmMWFkYmYtNjNiMWZlNTk=, workerId: [2:7489509717089049656:2359], local sessions count: 2 2025-04-04T17:22:48.683140Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7489509717089049729:2364], selfId: [2:7489509704204146969:2267], source: [2:7489509717089049727:2363] 2025-04-04T17:22:48.683297Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2E2OTBkMWUtZmQxZmNiYzQtN2ZhNmRhODQtZGU1NjQxMWM=, TxId: 2025-04-04T17:22:48.683323Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2E2OTBkMWUtZmQxZmNiYzQtN2ZhNmRhODQtZGU1NjQxMWM=, TxId: 2025-04-04T17:22:48.683421Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, start saving rows range [0; 1) 2025-04-04T17:22:48.683489Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, Bootstrap. Database: /dc-1 2025-04-04T17:22:48.683842Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=N2E2OTBkMWUtZmQxZmNiYzQtN2ZhNmRhODQtZGU1NjQxMWM=, workerId: [2:7489509717089049727:2363], local sessions count: 1 2025-04-04T17:22:48.683905Z node 2 :KQP_PROXY DEBUG: Request has 18445000286340.867721s seconds to be completed 2025-04-04T17:22:48.685516Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZDFhZWE3ZTktNDMyZjVmZmItZDllNmZhMWYtZGU1M2IyYjc=, workerId: [2:7489509717089049786:2375], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:22:48.685619Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:48.685879Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7b8743fb-78fff9ea-6f2da5df-4bd827d9, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-04T17:22:48.686213Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZDFhZWE3ZTktNDMyZjVmZmItZDllNmZhMWYtZGU1M2IyYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 10, targetId: [2:7489509717089049786:2375] 2025-04-04T17:22:48.686271Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7489509717089049788:2607] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:221:2060] recipient: [1:215:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:221:2060] recipient: [1:215:2141] Leader for TabletID 72057594046678944 is [1:232:2152] sender: [1:233:2060] recipient: [1:215:2141] 2025-04-04T17:21:22.132415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:21:22.132545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:21:22.132573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:21:22.132608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:21:22.133545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:21:22.133592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:21:22.133659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:21:22.133725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:21:22.135377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:22.214351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:21:22.214408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:22.224557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:22.224991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:21:22.225209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:21:22.238381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:21:22.238742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:21:22.241768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.242736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:21:22.248927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.256854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-04-04T17:21:22.256937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.257116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:21:22.257188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:21:22.257344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:21:22.257517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.264130Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:232:2152] sender: [1:345:2060] recipient: [1:17:2064] 2025-04-04T17:21:22.394843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:21:22.395061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.395251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:21:22.395459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:21:22.395511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.397734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.397890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:21:22.398052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.398108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:21:22.398197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:21:22.398232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:21:22.400101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.400148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:21:22.400188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:21:22.401890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.401931Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.401968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.402020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.406819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:21:22.408862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:21:22.409070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:21:22.410052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:21:22.410188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 240 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:21:22.410272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.411458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:21:22.411521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:21:22.411675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:21:22.411742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:21:22.413754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:21:22.413793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:21:22.413940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:21:22.413986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:312:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:21:22.414352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:21:22.414405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:21:22.414487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
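Annotation: the trace above walks one schemeshard suboperation through its lifecycle: TCreateParts (state 2 -> 3), NSubDomainState::TConfigureParts (3 -> 128), NSubDomainState::TPropose waiting on a coordinator plan step (128 -> 240), then TDone with publication to the scheme board. Below is a minimal, framework-free C++ sketch of that progression. The numeric states and phase names are copied from the log lines; packing them into an enum like this is an illustrative assumption, not YDB's actual definition.

    #include <cstdint>
    #include <cstdio>
    #include <stdexcept>

    // Hypothetical reconstruction of the state progression printed in the
    // trace: "Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240".
    enum class ETxState : uint8_t {
        CreateParts    = 2,   // TCreateParts ProgressState (no shards to create)
        ConfigureParts = 3,   // NSubDomainState::TConfigureParts
        Propose        = 128, // NSubDomainState::TPropose, waits for coordinator plan
        Done           = 240, // TDone: parts 1/1 done, publish to scheme board
    };

    // Advance exactly along the transitions observed in the log.
    ETxState Next(ETxState s) {
        switch (s) {
            case ETxState::CreateParts:    return ETxState::ConfigureParts;
            case ETxState::ConfigureParts: return ETxState::Propose;
            case ETxState::Propose:        return ETxState::Done; // after TEvOperationPlan
            case ETxState::Done:           throw std::logic_error("operation already done");
        }
        throw std::logic_error("unknown state");
    }

    int main() {
        ETxState s = ETxState::CreateParts;
        while (s != ETxState::Done) {
            ETxState n = Next(s);
            std::printf("Change state for txid 1:0 %u -> %u\n",
                        unsigned(s), unsigned(n));
            s = n;
        }
    }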
2025-04-04T17:21:22.414522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.414558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:21:22.414586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.414617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:21:22.414676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:21:22.414710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:21:22.414739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:21:22.414790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:21:22.414823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:21:22.414850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:21:22.416684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:21:22.416776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:21:22.416815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
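Annotation: even after "Operation and all the parts is done", the trace keeps the transaction alive ("Publication still in progress, tx: 1, publications: 1, subscribers: 0") until TEvUpdateAck arrives with a matching path version. A sketch of that bookkeeping, assuming a simple map keyed by txId and pathId (the containers are illustrative, not YDB's):

    #include <cstdint>
    #include <cstdio>
    #include <map>

    // A finished operation still waits for the scheme board to acknowledge
    // each (pathId, version) it published; an ack at or past the published
    // version retires the entry, and an empty set notifies subscribers.
    struct PublicationTracker {
        std::map<uint64_t, std::map<uint64_t, uint64_t>> inflight; // tx -> path -> version

        void Publish(uint64_t txId, uint64_t pathId, uint64_t version) {
            inflight[txId][pathId] = version;
            std::printf("Publication details: tx: %llu, pathId %llu, version %llu\n",
                        (unsigned long long)txId, (unsigned long long)pathId,
                        (unsigned long long)version);
        }

        // Returns true once the transaction has no publications left in flight.
        bool Ack(uint64_t txId, uint64_t pathId, uint64_t version) {
            auto it = inflight.find(txId);
            if (it == inflight.end()) return true;
            auto pit = it->second.find(pathId);
            if (pit != it->second.end() && version >= pit->second)
                it->second.erase(pit); // acked at (or past) the published version
            if (!it->second.empty()) {
                std::printf("Publication in-flight, tx: %llu\n", (unsigned long long)txId);
                return false;
            }
            inflight.erase(it);
            std::printf("Publication complete, tx: %llu, notify subscribers\n",
                        (unsigned long long)txId);
            return true;
        }
    };

    int main() {
        PublicationTracker t;
        t.Publish(1, 1, 3); // tx 1 publishes path 1 at version 3, as in the trace
        t.Ack(1, 1, 3);     // TEvUpdateAck Version: 3 retires it
    }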
ient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:46.702633Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:46.702715Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:46.702745Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:46.974502Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:46.974574Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:46.974653Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:46.974684Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.214727Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.214801Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.214863Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.214892Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.481752Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.481838Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.481919Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.481950Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.733587Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.733653Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.733720Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.733748Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.977277Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.977353Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:47.977469Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:47.977500Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.251726Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.251818Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.251904Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.251938Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.480897Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.480963Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.481033Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.481061Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.722365Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.722446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.722527Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.722558Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.996657Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.996719Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:22:48.996797Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:231:2151], Recipient [7:231:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:48.996838Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:22:49.039315Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1074:2834], Recipient [7:231:2151]: NKikimrSchemeOp.TDescribePath 
Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-04T17:22:49.039398Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:22:49.039516Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:22:49.039744Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 222us result status StatusPathDoesNotExist 2025-04-04T17:22:49.039908Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:22:49.040370Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1075:2835], Recipient [7:231:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-04-04T17:22:49.040438Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:22:49.040524Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:22:49.040690Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 172us result status StatusPathDoesNotExist 2025-04-04T17:22:49.040807Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:22:49.041244Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1076:2836], Recipient [7:231:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 
2025-04-04T17:22:49.041296Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:22:49.041375Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:22:49.041525Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 157us result status StatusPathDoesNotExist 2025-04-04T17:22:49.041650Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] Test command err: 2025-04-04T17:22:40.327492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509682776857664:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.327740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161b/r3tmp/tmpDBySma/pdisk_1.dat 2025-04-04T17:22:40.691529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:40.700867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:40.700999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:40.703561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3952 TServer::EnableGrpc on GrpcPort 23755, node 1 2025-04-04T17:22:40.941978Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:40.942012Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:40.942023Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:40.942185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
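Annotation: all three describe requests above ("/MyRoot/tmp/TempTable", "/MyRoot/tmp", "/MyRoot/tmp/TempTable/ValueIndex") return StatusPathDoesNotExist but still report LastExistedPrefixPath "/MyRoot", i.e. the deepest prefix that resolved, which is what the background-cleaning test asserts after the temp subtree is removed. A small sketch of computing that prefix, with an illustrative set of existing paths standing in for schemeshard's real path table:

    #include <cstdio>
    #include <set>
    #include <string>

    // Walk the path component by component; the deepest component that
    // exists becomes the "nearest resolved path" reported in the error.
    std::string LastExistedPrefix(const std::set<std::string>& existing,
                                  const std::string& path) {
        std::string best = "/";
        std::string cur;
        size_t i = 1; // skip leading '/'
        while (i <= path.size()) {
            size_t j = path.find('/', i);
            if (j == std::string::npos) j = path.size();
            cur += "/" + path.substr(i, j - i);
            if (!existing.count(cur)) break; // first missing component stops the walk
            best = cur;
            i = j + 1;
        }
        return best;
    }

    int main() {
        std::set<std::string> existing = {"/MyRoot"}; // tmp subtree already cleaned
        const char* probes[] = {"/MyRoot/tmp/TempTable", "/MyRoot/tmp",
                                "/MyRoot/tmp/TempTable/ValueIndex"};
        for (const char* p : probes)
            std::printf("path '%s' -> nearest resolved path: '%s'\n",
                        p, LastExistedPrefix(existing, p).c_str());
    }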
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:22:41.251837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:41.276144Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:43.048500Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:43.049944Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:43.050829Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:43.050865Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:43.050880Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:43.050910Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:43.051021Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.051058Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.051944Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.051978Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.052872Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.052893Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-04T17:22:43.052949Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-04T17:22:43.053441Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.053446Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-04T17:22:43.053463Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-04T17:22:43.053533Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.053535Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T17:22:43.053550Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-04-04T17:22:43.058682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-04-04T17:22:43.060895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.062890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.076319Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-04T17:22:43.076380Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-04-04T17:22:43.077268Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-04T17:22:43.077295Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-04-04T17:22:43.078266Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-04T17:22:43.078294Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2025-04-04T17:22:43.207036Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-04T17:22:43.251283Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-04T17:22:43.270811Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-04T17:22:43.295632Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-04T17:22:43.319381Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-04T17:22:43.356521Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-04T17:22:43.356865Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 64977fc9-6c8ba04b-70ed5e98-1b9233f2, Bootstrap. 
Database: /dc-1 2025-04-04T17:22:43.370306Z node 1 :KQP_PROXY DEBUG: Request has 18445000286346.181336s seconds to be completed 2025-04-04T17:22:43.372855Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YTdkMzk1YTQtZTU4N2RmMTQtN2NmMGY4ODYtMTg1NDNlY2U=, workerId: [1:7489509695661760304:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T17:22:43.372981Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:43.373565Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 64977fc9-6c8ba04b-70ed5e98-1b9233f2, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-04T17:22:43.374126Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YTdkMzk1YTQtZTU4N2RmMTQtN2NmMGY4ODYtMTg1NDNlY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7489509695661760304:2333] 2025-04-04T17:22:43.374165Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7489509695661760307:2466] 2025-04-04T17:22:43.375968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509695661760312:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.375968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509695661760306:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.376052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.379499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-04T17:22:43.390009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509695661760321:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:22:43.484239Z node 1 :TX_PROXY ERROR: Actor# [1:7489509695661760363:2498] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/po ... completed: 281474976715675. Doublechecking... 2025-04-04T17:22:48.660799Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Request: create. Transaction completed: 281474976715675. Doublechecking... 2025-04-04T17:22:48.660807Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Request: create. Transaction completed: 281474976715675. Doublechecking... 2025-04-04T17:22:48.660827Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Request: create. Transaction completed: 281474976715675. Doublechecking... 2025-04-04T17:22:48.660848Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Request: create. Transaction completed: 281474976715675. Doublechecking... 2025-04-04T17:22:48.660861Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Request: create. Transaction completed: 281474976715675. Doublechecking... 2025-04-04T17:22:48.660875Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Request: create. Transaction completed: 281474976715675. Doublechecking... 2025-04-04T17:22:48.683709Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683744Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683761Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683773Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683782Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683795Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683809Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683814Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683825Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683853Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683874Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683875Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683894Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683908Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.683942Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. 
Doublechecking... 2025-04-04T17:22:48.683968Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.684010Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.684027Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.684044Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.684061Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976715692. Doublechecking... 2025-04-04T17:22:48.714162Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.714162Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.718201Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.720764Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.731028Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.731870Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.732436Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.732451Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.734504Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.734557Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.736042Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.736589Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.739148Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.741126Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.741612Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.742108Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.744193Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.744681Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.744691Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.747868Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.748295Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.749873Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.750350Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.753442Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. 
Column diff is empty, finishing 2025-04-04T17:22:48.753887Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.758094Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.758694Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.758733Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.758901Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.759103Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.759517Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.760051Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-04-04T17:22:48.763179Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.763228Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.765729Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.767274Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.767729Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.771983Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.771984Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.776069Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-04-04T17:22:48.802795Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTIzZTNlNWQtM2FmZGZhN2QtZTcxNjYxZTEtNDZjNjYyY2I=, workerId: [2:7489509720641952671:2359], local sessions count: 2 2025-04-04T17:22:48.817655Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7489509720641953175:2363], selfId: [2:7489509707757049776:2067], source: [2:7489509720641953173:2362] 2025-04-04T17:22:48.817860Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: dcbf75b4-f2b2db21-8b690342-21519cc5, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGVjNTM3YjQtYTlmYWNlODgtMjhmMmQxNDMtMmU4Njk5ZmQ=, TxId: 2025-04-04T17:22:48.817883Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: dcbf75b4-f2b2db21-8b690342-21519cc5, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGVjNTM3YjQtYTlmYWNlODgtMjhmMmQxNDMtMmU4Njk5ZmQ=, TxId: 2025-04-04T17:22:48.818040Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: dcbf75b4-f2b2db21-8b690342-21519cc5, start saving rows range [0; 1) 2025-04-04T17:22:48.818105Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: dcbf75b4-f2b2db21-8b690342-21519cc5, Bootstrap. 
Database: /dc-1 2025-04-04T17:22:48.818168Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZGVjNTM3YjQtYTlmYWNlODgtMjhmMmQxNDMtMmU4Njk5ZmQ=, workerId: [2:7489509720641953173:2362], local sessions count: 1 2025-04-04T17:22:48.818256Z node 2 :KQP_PROXY DEBUG: Request has 18445000286340.733389s seconds to be completed 2025-04-04T17:22:48.820080Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjVmMjRjYzMtM2MwODUwNS01MDY1MjAyYy1iYTZlNjU0Mg==, workerId: [2:7489509720641953330:2379], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:22:48.820205Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:48.820546Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: dcbf75b4-f2b2db21-8b690342-21519cc5, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-04T17:22:48.820938Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjVmMjRjYzMtM2MwODUwNS01MDY1MjAyYy1iYTZlNjU0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7489509720641953330:2379] 2025-04-04T17:22:48.820967Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7489509720641953332:3081] >> IndexBuildTest::Lock >> KqpSinkTx::OlapSnapshotRO [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:45.517702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:45.517838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:45.517920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:45.517969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:45.519039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:45.519090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:45.519172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:45.519321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:45.520523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:45.597875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:45.597947Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:45.611211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:45.611463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:45.611628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:45.627045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:45.629449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:45.632229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:45.634274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:45.643084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:45.650644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:45.650738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:45.650983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:45.651068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:45.651182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:45.651329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.656932Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:45.784904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:45.786492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.788378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:45.790613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:45.790702Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.793868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:45.794055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:45.794285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.794339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:45.794426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:45.794480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:45.796689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.796741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:45.796775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:45.798633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.798680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.798715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:45.798783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:45.807899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:45.809509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:45.809665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:45.811276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:45.811398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:45.811450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:22:45.812490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:45.812543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:45.812672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:45.812737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:45.814946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:45.814988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:45.815123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:45.815149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:45.815382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:45.815424Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:45.815487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:45.815524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:45.815559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:45.815588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:45.815613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:45.815641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:45.815682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:45.815705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:45.815753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:45.815782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:45.815804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:45.817172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:45.817240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:45.817263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athId: 3] was 2 2025-04-04T17:22:49.402372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:49.402632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.402684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.402950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.403083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.403204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.403387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.403478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.403777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.404128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:49.404314Z node 1 :BUILD_INDEX DEBUG: AddShardStatus id# 102 shard 72057594046678944:11 range { From: -inf, To: inf } 2025-04-04T17:22:49.404397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.404443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.404501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.409907Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:22:49.409985Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: by_embedding, IndexColumn: embedding, DataColumns: covered, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976720765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976720766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:22:49.410009Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-04-04T17:22:49.413697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:49.413787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-04-04T17:22:49.414390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:49.414453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:49.414493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:49.415893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:4187:5647] sender: [1:4246:2058] recipient: [1:15:2062] 2025-04-04T17:22:49.459263Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:22:49.459527Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 315us result status StatusSuccess 2025-04-04T17:22:49.460501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 
72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2025-04-04T17:22:40.277077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509684408186450:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.277220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00162b/r3tmp/tmp9pXGeE/pdisk_1.dat 2025-04-04T17:22:40.590632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:40.658764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:40.658859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:40.662210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27692 TServer::EnableGrpc on GrpcPort 3328, node 1 2025-04-04T17:22:40.940577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:40.940598Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:40.940607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:40.940722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
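The persisted description above amounts to: a global vector_kmeans_tree index `by_embedding` on column `embedding`, covering data column `covered`, cosine distance over float vectors of dimension 1024, built with 4 clusters and 5 levels, with both implementation tables pinned to exactly 3 partitions (MinPartitionsCount = MaxPartitionsCount = 3). The unit test drives this through schemeshard directly, so the DDL below is only a sketch of what an equivalent request would look like in YQL, not the test's actual mechanism:

    -- Hypothetical YQL matching the persisted index description (assumed syntax/options)
    ALTER TABLE `/MyRoot/vectors`
        ADD INDEX `by_embedding`
        GLOBAL USING vector_kmeans_tree
        ON (embedding) COVER (covered)
        WITH (distance = cosine,          -- metric: DISTANCE_COSINE
              vector_type = "float",      -- vector_type: VECTOR_TYPE_FLOAT
              vector_dimension = 1024,
              clusters = 4,
              levels = 5);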
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:22:41.248270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:43.024061Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:43.025398Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:43.027517Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:43.027561Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:43.027585Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:43.027619Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:43.027710Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.027749Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.032287Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.032309Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T17:22:43.032349Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-04T17:22:43.032426Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.032429Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-04T17:22:43.032445Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-04T17:22:43.032560Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.032567Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-04T17:22:43.032579Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
Full table path:/dc-1/.metadata/script_execution_leases 2025-04-04T17:22:43.037386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-04T17:22:43.042346Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.042415Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.043993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.051851Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-04T17:22:43.051934Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710660 2025-04-04T17:22:43.052007Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-04T17:22:43.052026Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-04-04T17:22:43.053746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.056367Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-04T17:22:43.056405Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-04-04T17:22:43.189115Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-04T17:22:43.237547Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-04T17:22:43.239665Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-04T17:22:43.246026Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-04T17:22:43.302627Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-04T17:22:43.306503Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-04T17:22:43.311646Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 245b966c-a64416e7-90440603-3b79abf, Bootstrap. 
Database: /dc-1 2025-04-04T17:22:43.324406Z node 1 :KQP_PROXY DEBUG: Request has 18445000286346.227238s seconds to be completed 2025-04-04T17:22:43.348216Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZjE4NmE0ZjItZjBmOTAwOS02MzhiNWY0Ny1jNmJhZWY4MA==, workerId: [1:7489509697293089226:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T17:22:43.348319Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:43.351249Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 245b966c-a64416e7-90440603-3b79abf, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-04T17:22:43.354505Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZjE4NmE0ZjItZjBmOTAwOS02MzhiNWY0Ny1jNmJhZWY4MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7489509697293089226:2333] 2025-04-04T17:22:43.354553Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7489509697293089228:2466] 2025-04-04T17:22:43.356344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509697293089229:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.356466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.357004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509697293089241:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.362622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-04T17:22:43.370664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509697293089243:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:22:43.433514Z node 1 :TX_PROXY ERROR: Actor# [1:7489509697293089285:2498] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" ... :22:48.715892Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715675 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-04T17:22:48.715911Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-04T17:22:48.715949Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715673 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-04T17:22:48.715956Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-04T17:22:48.715987Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715667 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-04T17:22:48.715993Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-04T17:22:48.716008Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715674 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-04T17:22:48.716012Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-04T17:22:48.716051Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976715668 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-04-04T17:22:48.716057Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-04-04T17:22:48.724910Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-04-04T17:22:48.770068Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.780793Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.786575Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.788460Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.791807Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.793306Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7489509718676269617:2364], selfId: [2:7489509705791366862:2265], source: [2:7489509718676269615:2362] 2025-04-04T17:22:48.793502Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDk5ZmVjNzgtZmRkMzNjMWItNzY1MWE0NWYtNDRmNjUwMWI=, TxId: 2025-04-04T17:22:48.793534Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDk5ZmVjNzgtZmRkMzNjMWItNzY1MWE0NWYtNDRmNjUwMWI=, TxId: 2025-04-04T17:22:48.793652Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 81530dbc-40a49de0-a66ec62d-249d2a8d, start saving rows range [0; 1) 2025-04-04T17:22:48.793739Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, Bootstrap. Database: /dc-1 2025-04-04T17:22:48.793993Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NDk5ZmVjNzgtZmRkMzNjMWItNzY1MWE0NWYtNDRmNjUwMWI=, workerId: [2:7489509718676269615:2362], local sessions count: 2 2025-04-04T17:22:48.794027Z node 2 :KQP_PROXY DEBUG: Request has 18445000286340.757599s seconds to be completed 2025-04-04T17:22:48.795933Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NGU5M2QwMDgtNDllYmQ4ZGQtMTgyZTIzYmYtOTQxMjE2YjQ=, workerId: [2:7489509718676269777:2374], database: /dc-1, longSession: 1, local sessions count: 3 2025-04-04T17:22:48.796032Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.796047Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:48.796076Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-04-04T17:22:48.796296Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-04-04T17:22:48.796638Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NGU5M2QwMDgtNDllYmQ4ZGQtMTgyZTIzYmYtOTQxMjE2YjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7489509718676269777:2374] 2025-04-04T17:22:48.796689Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7489509718676269781:2698] 2025-04-04T17:22:48.806561Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.813989Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.813990Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-04-04T17:22:48.832161Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZWQ4Y2MwZmMtMWFjNDc5OTAtMWU2NDk3NGYtYTExNDQ1NzQ=, workerId: [2:7489509718676269549:2359], local sessions count: 2 2025-04-04T17:22:48.942278Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7489509718676269780:2375], selfId: [2:7489509705791366862:2265], source: [2:7489509718676269777:2374] 2025-04-04T17:22:48.942566Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGU5M2QwMDgtNDllYmQ4ZGQtMTgyZTIzYmYtOTQxMjE2YjQ=, TxId: 2025-04-04T17:22:48.942596Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGU5M2QwMDgtNDllYmQ4ZGQtMTgyZTIzYmYtOTQxMjE2YjQ=, TxId: 2025-04-04T17:22:48.942685Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 81530dbc-40a49de0-a66ec62d-249d2a8d, result part successfully saved 2025-04-04T17:22:48.942711Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 81530dbc-40a49de0-a66ec62d-249d2a8d, reply SUCCESS, issues: 2025-04-04T17:22:48.942869Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NGU5M2QwMDgtNDllYmQ4ZGQtMTgyZTIzYmYtOTQxMjE2YjQ=, workerId: [2:7489509718676269777:2374], local sessions count: 1 2025-04-04T17:22:48.943036Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, Bootstrap. 
Database: /dc-1 2025-04-04T17:22:48.943145Z node 2 :KQP_PROXY DEBUG: Request has 18445000286340.608482s seconds to be completed 2025-04-04T17:22:48.944932Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZDc4ZDBlYjYtZWUzZjc5NGQtNjhiNGI1MWEtYWM0ZTI4NzY=, workerId: [2:7489509718676269814:2386], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:22:48.945051Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:48.945260Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 81530dbc-40a49de0-a66ec62d-249d2a8d, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-04T17:22:48.945591Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZDc4ZDBlYjYtZWUzZjc5NGQtNjhiNGI1MWEtYWM0ZTI4NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7489509718676269814:2386] 2025-04-04T17:22:48.945656Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7489509718676269816:2717] |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 22223, MsgBus: 14821 2025-04-04T17:22:26.028031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509623473708485:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:26.028810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ca7/r3tmp/tmpOhqRR6/pdisk_1.dat 2025-04-04T17:22:26.407245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:26.443809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:26.443918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22223, node 1 2025-04-04T17:22:26.448704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:26.525459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:26.525485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:26.525491Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:26.525945Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14821 TClient is connected to server localhost:14821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:27.029054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:27.043721Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:28.919712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509632063643557:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.922330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509632063643539:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.922470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:28.924655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:28.934277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509632063643577:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:28.995936Z node 1 :TX_PROXY ERROR: Actor# [1:7489509632063643628:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:29.259948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:29.390381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:30.404752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.215064Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509623473708485:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:31.242264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 12693, MsgBus: 28257 2025-04-04T17:22:37.898486Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509669835905626:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:37.898614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ca7/r3tmp/tmp7YVoN2/pdisk_1.dat 2025-04-04T17:22:37.996339Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12693, node 2 2025-04-04T17:22:38.030805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:38.030924Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:38.032555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:38.066774Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:38.066802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:38.066811Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:38.066933Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28257 TClient is connected to server localhost:28257 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:38.427288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:41.043509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509687015775478:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:41.043648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509687015775460:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:41.043820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:41.047690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:41.056705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509687015775481:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:41.118894Z node 2 :TX_PROXY ERROR: Actor# [2:7489509687015775532:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:41.160891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:41.235938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.396349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.137440Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509669835905626:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:43.178189Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::DropIndex >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 24814, MsgBus: 24597 2025-04-04T17:22:34.636416Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509660027509439:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:34.636546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c12/r3tmp/tmptceJjP/pdisk_1.dat 2025-04-04T17:22:34.973803Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24814, node 1 2025-04-04T17:22:34.987780Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:35.016833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:35.016960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:35.018553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:35.026514Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:35.026546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:35.026558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:35.026671Z node 1 :NET_CLASSIFIER ERROR: 
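The TSaveScriptExecutionResultQuery text earlier in this section appears with its angle-bracketed type arguments stripped by log extraction (`DECLARE $expire_at AS Optional;`, `DECLARE $items AS List>;`). Reflowed, with the elided type parameters filled in as assumptions (Timestamp for the TTL column; struct members inferred from the SELECT list), the query reads roughly:

    -- TSaveScriptExecutionResultQuery::OnRunQuery (reconstruction; type arguments are assumed)
    DECLARE $database AS Text;
    DECLARE $execution_id AS Text;
    DECLARE $result_set_id AS Int32;
    DECLARE $expire_at AS Optional<Timestamp>;   -- assumed: brackets stripped in the log
    DECLARE $items AS List<Struct<               -- assumed member names/types
        row_id: Int64,
        result_set: String,
        accumulated_size: Int64>>;
    UPSERT INTO `.metadata/result_sets`
    SELECT $database as database, $execution_id as execution_id,
        $result_set_id as result_set_id, T.row_id as row_id,
        $expire_at as expire_at, T.result_set as result_set,
        T.accumulated_size as accumulated_size
    FROM AS_TABLE($items) AS T;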
got bad distributable configuration TClient is connected to server localhost:24597 TClient is connected to server localhost:24597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:35.524454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:35.558651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:35.704856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:35.899482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:22:35.979492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:37.736211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509672912413113:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:37.736328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:37.977638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.004509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.032312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.059431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.084118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.149673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.230374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509677207380930:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.230460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.230534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509677207380935:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.234600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:38.247646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509677207380937:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:22:38.303409Z node 1 :TX_PROXY ERROR: Actor# [1:7489509677207380989:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:39.636548Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509660027509439:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:39.636645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12656, MsgBus: 13745 2025-04-04T17:22:44.781780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509701647110615:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:44.781871Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c12/r3tmp/tmpCMENIj/pdisk_1.dat 2025-04-04T17:22:44.894663Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12656, node 2 2025-04-04T17:22:44.920678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:44.920788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:44.922588Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:44.941861Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:44.941883Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:44.941890Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:44.942003Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13745 TClient is connected to server localhost:13745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
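The recurring pattern above — a KQP_WORKLOAD_SERVICE warning that the default resource pool is not found, a TPoolCreatorActor retry, and then a TX_PROXY "path exist, request accepts it" message — is the first-query bootstrap of the workload manager: the pool create is issued idempotently and an already-existing path is treated as success. A minimal client-side sketch of the same accept-if-exists pattern, assuming the YDB C++ SDK (the endpoint, database, and pool name below are placeholders, not values from this run, and the CREATE RESOURCE POOL statement is assumed from the workload-manager feature):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("localhost:2136")   // placeholder endpoint
            .SetDatabase("/Root"));          // placeholder database
        TTableClient client(driver);

        // Error handling for session creation elided in this sketch.
        auto session = client.CreateSession().GetValueSync().GetSession();
        auto status = session.ExecuteSchemeQuery(
            "CREATE RESOURCE POOL demo_pool WITH (CONCURRENT_QUERY_LIMIT = 10);"
        ).GetValueSync();
        // Accept "already exists" the same way TX_PROXY does in the log:
        // a pre-existing path is not an error for an idempotent create.
        if (!status.IsSuccess() && status.GetStatus() != EStatus::ALREADY_EXISTS) {
            Cerr << status.GetIssues().ToString() << Endl;
            driver.Stop(true);
            return 1;
        }
        driver.Stop(true);
        return 0;
    }
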
2025-04-04T17:22:45.371978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:45.381812Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:45.392236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:45.466226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:45.642013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:45.707785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:48.132444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509718826981564:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.132535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.172258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.203165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.236145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.265128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.296091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.328201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.370631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509718826982072:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.370725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.370925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509718826982077:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.374306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:48.382954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509718826982079:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:48.450648Z node 2 :TX_PROXY ERROR: Actor# [2:7489509718826982132:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:49.539359Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTU1YzAzZmUtZDIxODBjYTEtZTFlYjgxYmUtMjY2ZDk4MmI=, ActorId: [2:7489509723121949685:2488], ActorState: ReadyState, TraceId: 01jr0x791q53dj705jxpx509cm, Create QueryResponse for error on request, msg: >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 63887, MsgBus: 22847 2025-04-04T17:22:07.591157Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509544403098890:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:07.591250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000eaf/r3tmp/tmpHia5lL/pdisk_1.dat 2025-04-04T17:22:07.896724Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63887, node 1 2025-04-04T17:22:07.959087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:07.959200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:07.961586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:07.970030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:07.970060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:07.970069Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:07.970215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22847 TClient is connected to server localhost:22847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
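The KQP_SESSION warning that closes the KqpTx::RollbackRoTx output above ("Create QueryResponse for error on request") is the server-side trace of the scenario under test: a client opens a read-only transaction and then asks for a rollback. A hedged sketch of the client side, assuming the YDB C++ SDK and a session created as in the previous sketch (the table name is illustrative):

    // Begin an online read-only transaction, read under it, then roll back.
    auto begun = session.BeginTransaction(TTxSettings::OnlineRO()).GetValueSync();
    if (begun.IsSuccess()) {
        TTransaction tx = begun.GetTransaction();
        auto read = session.ExecuteDataQuery(
            "SELECT COUNT(*) FROM `/Root/KeyValue`;",  // illustrative table
            TTxControl::Tx(tx)
        ).GetValueSync();
        // The explicit Rollback of the read-only transaction is the request
        // that produces the QueryResponse recorded in the warning above.
        auto rolledBack = tx.Rollback().GetValueSync();
    }
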
2025-04-04T17:22:08.485969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:10.526435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509557288001444:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:10.526556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509557288001419:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:10.526915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:10.532059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:10.545895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509557288001448:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:10.631394Z node 1 :TX_PROXY ERROR: Actor# [1:7489509557288001501:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:10.911794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:11.107392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:11.107676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:11.107952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:11.108126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:11.108234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:11.108365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:11.108505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:11.108611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:11.108756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:11.108879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:11.109021Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:11.109156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509561582968994:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:11.111707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:11.111766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:11.111946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:11.112108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:11.112227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:11.112526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:11.112641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:11.112750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:11.112845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:11.112960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:11.113073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:11.113216Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7489509561582968987:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:11.156679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509561582969001:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:11.156730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509561582969001:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:11.156880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;sel ... Shard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.762809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7489509679535645010:3310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.765866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509679535644980:3306];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.766753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7489509675240677661:3286];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.767543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7489509675240677659:3284];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.768819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509679535644980:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.769050Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7489509675240677661:3286];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.769266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7489509675240677659:3284];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.771172Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7489509679535645047:3331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.771582Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7489509679535645047:3331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.772292Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038037;self_id=[2:7489509679535644975:3302];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.772638Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509679535644975:3302];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.773525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509679535645016:3316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.773804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7489509675240677356:3247];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.774061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7489509675240677356:3247];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.774398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509679535645016:3316];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.774683Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7489509679535645074:3343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.778491Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7489509679535645074:3343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.778793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7489509679535645068:3339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.779573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7489509679535644971:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.779854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509679535645069:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.780567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509679535645015:3315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.780801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509679535645015:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.781549Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7489509679535645027:3323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.781772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7489509679535645027:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.784396Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7489509679535645068:3339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.785119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7489509679535644971:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.785804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509679535645069:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.786553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7489509679535645065:3336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.786797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7489509679535645065:3336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.787790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509679535645172:3345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.788036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509679535645172:3345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.788256Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7489509679535645070:3341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.789110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7489509679535644968:3295];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.789358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509679535645073:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.789591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509679535645014:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.789822Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7489509679535645013:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.797654Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7489509679535645070:3341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.797949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7489509679535644968:3295];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.798133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509679535645073:3342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.798362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509679535645014:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:43.798546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7489509679535645013:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:44.141920Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzgxZWEzZmEtOGU5MGU4YS1hM2E0N2JmMS1jMDJlZTYyNQ==, ActorId: [2:7489509692420550115:3885], ActorState: ExecuteState, TraceId: 01jr0x73pq95d5cnsqh4kbmm4r, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:22:45.464086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:45.464115Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T17:20:34.916970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:20:34.917064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:20:34.917160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:20:34.917195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:20:34.917236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:20:34.917270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:20:34.917327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:20:34.917390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:20:34.917666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:20:34.997371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:20:34.997425Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T17:20:35.014758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:20:35.015335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
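The ":3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008" line at the top of this block is the outcome KqpSinkTx::OlapSnapshotRO asserts: a snapshot read-only transaction serves consistent reads but must refuse DML. A client-side sketch of the same check, assuming the YDB C++ SDK and the session from the sketches above (table and column names are illustrative):

    // An UPSERT issued under SnapshotRO isolation must fail; the same
    // statement under TTxSettings::SerializableRW() would be accepted.
    auto result = session.ExecuteDataQuery(
        R"(UPSERT INTO `/Root/olap_table` (id, value) VALUES (1u, "x");)",
        TTxControl::BeginTx(TTxSettings::SnapshotRO()).CommitTx()
    ).GetValueSync();
    if (result.IsSuccess()) {
        Cerr << "unexpected: write accepted in a read-only transaction" << Endl;
    }
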
2025-04-04T17:20:35.015545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:20:35.025121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:20:35.025297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:20:35.025839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:35.026425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:20:35.029971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:35.030987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:20:35.031044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:35.031181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:20:35.031234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:20:35.031292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:20:35.031415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T17:20:35.037566Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T17:20:35.168495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:20:35.168703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:35.168949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:20:35.169192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:20:35.169250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:35.171935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:35.172052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2025-04-04T17:20:35.172202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:35.172244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:20:35.172267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:20:35.172301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:20:35.173978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:35.174026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:20:35.174055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:20:35.175340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:35.175376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:35.175403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:35.175451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:20:35.183419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:20:35.184864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:20:35.185011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:20:35.185761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:20:35.185862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:20:35.185894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:35.186089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:20:35.186128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:20:35.186278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:20:35.186355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:20:35.188334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:20:35.188369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:20:35.188502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:20:35.188530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:20:35.188970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:20:35.189005Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:20:35.189086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:20:35.189159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:35.189193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:20:35.189224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:35.189249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:20:35.189274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:20:35.189297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... 
aCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } 
TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:50.780485Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:22:50.780772Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 312us result status StatusSuccess 2025-04-04T17:22:50.781570Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 
UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:50.792685Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1118:2883] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T17:22:50.792810Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1065:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-04T17:22:50.792980Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1118:2883] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743787370758940 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 
72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743787370758940 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743787370758940 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:22:50.799734Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1118:2883] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-04T17:22:50.799864Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1065:2883] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-04-04T17:22:41.351242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509689373239019:2147];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:41.352235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:41.380079Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509687440599180:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:41.380126Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001615/r3tmp/tmpzFFHzb/pdisk_1.dat 2025-04-04T17:22:41.846981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:41.855471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.855567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.856802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.856849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.863222Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:22:41.863401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:41.863980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28069 2025-04-04T17:22:44.476946Z node 1 
:KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.478595Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.483998Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:44.484036Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:44.484054Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.484125Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.484178Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.484216Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.487496Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.487538Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.529812Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.531528Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.538363Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZjM2OGY2ZjItY2MyZjdjMTAtZDQwZGMyOTUtODEzYzU4ZmE=, workerId: [2:7489509700325501354:2308], database: , longSession: 1, local sessions count: 1 2025-04-04T17:22:44.538407Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.538553Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:44.538610Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:44.538649Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:44.538665Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.538690Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.538744Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.538813Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.548020Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.548127Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.548178Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.548994Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjM2OGY2ZjItY2MyZjdjMTAtZDQwZGMyOTUtODEzYzU4ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [2:7489509700325501354:2308] 2025-04-04T17:22:44.549089Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7489509700325501369:2117] 2025-04-04T17:22:44.546910Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZjM2OGY2ZjItY2MyZjdjMTAtZDQwZGMyOTUtODEzYzU4ZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-04-04T17:22:44.546975Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7489509702258141618:2471] 2025-04-04T17:22:44.551536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509702258141619:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.551648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.552947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509700325501370:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.553040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.793927Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr0x746554ywb03fsee3d8fj", Created new session, sessionId: ydb://session/3?node_id=2&id=Y2RmMmE3MGYtYTEzNzM2Y2ItYjI5N2EwYjAtN2VjZTE3MzA=, workerId: [2:7489509700325501382:2311], database: , longSession: 0, local sessions count: 2 2025-04-04T17:22:44.794130Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr0x746554ywb03fsee3d8fj, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=Y2RmMmE3MGYtYTEzNzM2Y2ItYjI5N2EwYjAtN2VjZTE3MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7489509700325501382:2311] 2025-04-04T17:22:44.794155Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7489509700325501384:2121] 2025-04-04T17:22:44.794402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509700325501383:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.794496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.794584Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509700325501389:2315], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.808984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:22:44.833478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509700325501391:2316], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:22:44.977818Z node 2 :TX_PROXY ERROR: Actor# [2:7489509700325501419:2131] txid# 281474976715658, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:45.032660Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr0x746554ywb03fsee3d8fj", Forwarded response to sender actor, requestId: 4, sender: [2:7489509700325501381:2310], selfId: [2:7489509687440599383:2278], source: [2:7489509700325501382:2311] 2025-04-04T17:22:45.039691Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Y2RmMmE3MGYtYTEzNzM2Y2ItYjI5N2EwYjAtN2VjZTE3MzA=, workerId: [2:7489509700325501382:2311], local sessions count: 1 2025-04-04T17:22:45.043706Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7489509689373239162:2281], selfId: [2:7489509687440599383:2278], source: [2:7489509700325501354:2308] 2025-04-04T17:22:45.044050Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7489509693668206939:2442], selfId: [1:7489509689373239162:2281], source: [2:7489509687440599383:2278] 2025-04-04T17:22:46.470377Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489509711812270323:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:46.470473Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001615/r3tmp/tmpSHYjiJ/pdisk_1.dat 2025-04-04T17:22:46.600616Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:46.616164Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:46.616251Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:46.619780Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16484, node 3 2025-04-04T17:22:46.696636Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:46.696658Z node 3 :NET_CLASSIFIER WARN: will try to initialize f ... 2025-04-04T17:22:49.199284Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715659 2025-04-04T17:22:49.321073Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715661. Doublechecking... 2025-04-04T17:22:49.372630Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-04-04T17:22:49.374124Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-04-04T17:22:49.388134Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-04T17:22:49.462282Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-04T17:22:49.462635Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. 
Column diff is empty, finishing 2025-04-04T17:22:49.462962Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 37493edc-cec02851-a88a0607-617eaec1, Bootstrap. Database: /Root 2025-04-04T17:22:49.463105Z node 3 :KQP_PROXY DEBUG: Request has 18445000286340.088529s seconds to be completed 2025-04-04T17:22:49.464999Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YjhlODI4YjEtYTQyMGU4YjEtNzRiMDFjMDQtMjhkNzdhMDg=, workerId: [3:7489509724697173675:2357], database: /Root, longSession: 1, local sessions count: 1 2025-04-04T17:22:49.465147Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:49.465991Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 37493edc-cec02851-a88a0607-617eaec1, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-04T17:22:49.466504Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YjhlODI4YjEtYTQyMGU4YjEtNzRiMDFjMDQtMjhkNzdhMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [3:7489509724697173675:2357] 2025-04-04T17:22:49.466549Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 300.000000s actor id: [3:7489509724697173678:2962] 2025-04-04T17:22:49.466772Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489509724697173677:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:49.466859Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:49.467153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489509724697173683:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:49.470148Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:2, at schemeshard: 72057594046644480 2025-04-04T17:22:49.492291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489509724697173685:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:22:49.581334Z node 3 :TX_PROXY ERROR: Actor# [3:7489509724697173751:3016] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:49.787794Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [3:7489509724697173676:2358], selfId: [3:7489509711812270547:2280], source: [3:7489509724697173675:2357] 2025-04-04T17:22:49.788154Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 37493edc-cec02851-a88a0607-617eaec1, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YjhlODI4YjEtYTQyMGU4YjEtNzRiMDFjMDQtMjhkNzdhMDg=, TxId: 2025-04-04T17:22:49.788188Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 37493edc-cec02851-a88a0607-617eaec1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YjhlODI4YjEtYTQyMGU4YjEtNzRiMDFjMDQtMjhkNzdhMDg=, TxId: 2025-04-04T17:22:49.788205Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: 37493edc-cec02851-a88a0607-617eaec1. Result: SUCCESS. Issues: 2025-04-04T17:22:49.788444Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=YjhlODI4YjEtYTQyMGU4YjEtNzRiMDFjMDQtMjhkNzdhMDg=, workerId: [3:7489509724697173675:2357], local sessions count: 0 2025-04-04T17:22:49.790327Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YzRhYTlmYzYtYWYyYTBjYjAtNTVjYjdmOGMtNDc5MGEzYw==, workerId: [3:7489509724697173830:2374], database: /Root, longSession: 1, local sessions count: 1 2025-04-04T17:22:49.790444Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:49.790785Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr0x78pw00xsxxrtemqy5tz6, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzRhYTlmYzYtYWYyYTBjYjAtNTVjYjdmOGMtNDc5MGEzYw==, CurrentExecutionId: 37493edc-cec02851-a88a0607-617eaec1, CustomerSuppliedId: 01jr0x78pw00xsxxrtemqy5tz6, PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [3:7489509724697173830:2374] 2025-04-04T17:22:49.790822Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [3:7489509724697173831:3062] 2025-04-04T17:22:49.822335Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr0x79ax1yysd9985t5nxmyh", Request has 18445000286339.729307s seconds to be completed 2025-04-04T17:22:49.824258Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr0x79ax1yysd9985t5nxmyh", Created new session, sessionId: ydb://session/3?node_id=3&id=ODZlZDk4YzgtOGEwMzhmNmYtODFkMzhhMjQtOGE5MzhlMGI=, workerId: [3:7489509724697173841:2380], database: /Root, longSession: 1, local sessions count: 2 2025-04-04T17:22:49.824345Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jr0x79ax1yysd9985t5nxmyh 2025-04-04T17:22:49.834050Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr0x79b9eqhxapmk787atwpc, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ODZlZDk4YzgtOGEwMzhmNmYtODFkMzhhMjQtOGE5MzhlMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 9, targetId: [3:7489509724697173841:2380] 2025-04-04T17:22:49.834085Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [3:7489509724697173844:3066] 2025-04-04T17:22:49.842483Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489509724697173848:3068], for# user@builtin, access# DescribeSchema 2025-04-04T17:22:49.842528Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7489509724697173848:3068], for# user@builtin, access# DescribeSchema 2025-04-04T17:22:49.853500Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 37493edc-cec02851-a88a0607-617eaec1, Bootstrap. Database: /Root 2025-04-04T17:22:49.853568Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489509724697173845:2382], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:22:49.854981Z node 3 :KQP_PROXY DEBUG: Request has 18445000286339.696648s seconds to be completed 2025-04-04T17:22:49.855245Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODZlZDk4YzgtOGEwMzhmNmYtODFkMzhhMjQtOGE5MzhlMGI=, ActorId: [3:7489509724697173841:2380], ActorState: ExecuteState, TraceId: 01jr0x79b9eqhxapmk787atwpc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:22:49.856372Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MzYzN2I2YzUtOTZiMzM4MjgtZTNiZGIzM2EtZmQ5ODQ1YzA=, workerId: [3:7489509724697173857:2385], database: /Root, longSession: 1, local sessions count: 3 2025-04-04T17:22:49.856450Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:49.856566Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr0x79b9eqhxapmk787atwpc", Forwarded response to sender actor, requestId: 9, sender: [3:7489509724697173843:2381], selfId: [3:7489509711812270547:2280], source: [3:7489509724697173841:2380] 2025-04-04T17:22:49.856636Z node 3 :KQP_PROXY DEBUG: TraceId: "01jr0x78pw00xsxxrtemqy5tz6", Forwarded response to sender actor, requestId: 7, sender: [3:7489509724697173672:2960], selfId: [3:7489509711812270547:2280], source: [3:7489509724697173830:2374] 2025-04-04T17:22:49.856783Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 37493edc-cec02851-a88a0607-617eaec1, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-04-04T17:22:49.857105Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MzYzN2I2YzUtOTZiMzM4MjgtZTNiZGIzM2EtZmQ5ODQ1YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 11, targetId: [3:7489509724697173857:2385] 2025-04-04T17:22:49.857235Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [3:7489509724697173859:3072] >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> IndexBuildTest::DropIndex [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:49.597737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:49.597842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:49.597904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:49.597937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:49.597975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:49.598002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:49.598055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:49.598123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:49.598462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:49.676473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:49.676531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:49.695880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:49.696126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:49.696272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:49.714021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:49.714363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:49.714970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:49.715859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:49.721012Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:49.722281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:49.722346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:49.722538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:49.722587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:49.722625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:49.722747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.729590Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:49.859234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:49.859491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.859705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:49.859922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:49.859984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.862261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:49.862418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:49.862637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.862705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:49.862741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:49.862788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:49.864557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.864609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:49.864640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:49.866145Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.866185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.866259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:49.866311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:49.869898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:49.871660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:49.871850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:49.872738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:49.872891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:49.872948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:49.873190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:49.873235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:49.873404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:49.873473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:49.875799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:49.875842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:49.876011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:49.876049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:49.876361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:49.876400Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:49.876507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:49.876538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:49.876578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:49.876614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:49.876644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:49.876682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:49.876725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:49.876756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:49.876820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:49.876850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:49.876877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:49.878706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:49.878806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:49.878840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
09550, at schemeshard: 72057594046678944 2025-04-04T17:22:52.477775Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-04-04T17:22:52.478001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-04-04T17:22:52.478030Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-04-04T17:22:52.478117Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-04-04T17:22:52.478142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-04-04T17:22:52.478171Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-04-04T17:22:52.478196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-04-04T17:22:52.478225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2025-04-04T17:22:52.478674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-04T17:22:52.478705Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2025-04-04T17:22:52.478745Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-04T17:22:52.478765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:22:52.478784Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2025-04-04T17:22:52.479110Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.479189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.479215Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:22:52.479248Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-04-04T17:22:52.479297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-04-04T17:22:52.479723Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.479772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.479789Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:22:52.479810Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 
72057594046678944, LocalPathId: 8], version: 18446744073709551615 2025-04-04T17:22:52.479834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-04-04T17:22:52.480380Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.480429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.480446Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:22:52.480465Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-04-04T17:22:52.480483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:52.481826Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.481881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.481897Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:22:52.482309Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.482374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.482391Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:22:52.482554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:22:52.482602Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:52.482800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-04-04T17:22:52.482918Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-04-04T17:22:52.482946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-04-04T17:22:52.482978Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-04-04T17:22:52.483006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-04-04T17:22:52.483053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is 
published: false 2025-04-04T17:22:52.484119Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.484184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-04-04T17:22:52.484202Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-04-04T17:22:52.484224Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-04-04T17:22:52.484245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-04-04T17:22:52.484305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-04-04T17:22:52.485659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-04-04T17:22:52.485703Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:52.485854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-04T17:22:52.485945Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-04-04T17:22:52.485968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-04T17:22:52.485992Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-04-04T17:22:52.486011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-04T17:22:52.486033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-04-04T17:22:52.486078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:417:2374] message: TxId: 105 2025-04-04T17:22:52.486111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-04-04T17:22:52.486151Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:22:52.486181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T17:22:52.486276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:22:52.486315Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-04-04T17:22:52.486331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-04-04T17:22:52.486350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-04-04T17:22:52.486366Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2025-04-04T17:22:52.486378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2025-04-04T17:22:52.486402Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-04T17:22:52.486614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:22:52.486685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:22:52.486754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:22:52.486779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:22:52.486818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:22:52.488922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:22:52.489261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:22:52.489307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:946:2870] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-04-04T17:22:41.015744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509688146640384:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:41.015835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001619/r3tmp/tmpEyum8m/pdisk_1.dat 2025-04-04T17:22:41.462775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.462883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.467528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:41.469694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18057, node 1 2025-04-04T17:22:41.618815Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:41.618842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:41.618850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:41.618956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18132 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:41.854344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:44.078552Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.080208Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-04T17:22:44.082092Z node 1 :KQP_PROXY DEBUG: Received ping session request, request_id: 2, sender: [1:7489509688146641317:2317], trace_id: 01jr0x71kw7n60n95hg44em4mm 2025-04-04T17:22:44.082597Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 5.000000s actor id: [0:0:0] 2025-04-04T17:22:44.082654Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:44.082690Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-04-04T17:22:44.082708Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.082752Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-04T17:22:44.082844Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.082897Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.083212Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.083246Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.083288Z node 1 :KQP_PROXY DEBUG: Session not found, targetId: [2:8678280833929343339:121] requestId: 2 2025-04-04T17:22:44.085803Z node 1 :KQP_PROXY DEBUG: TraceId: "01jr0x71kw7n60n95hg44em4mm", Forwarded response to sender actor, requestId: 2, sender: [1:7489509688146641317:2317], selfId: [1:7489509688146640628:2281], source: [1:7489509688146640628:2281] 2025-04-04T17:22:45.424318Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509704046959683:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:45.424643Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001619/r3tmp/tmp8ZZ3yB/pdisk_1.dat 2025-04-04T17:22:45.593505Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:45.607109Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:45.607208Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:45.609497Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18974 TServer::EnableGrpc on GrpcPort 8615, node 4 2025-04-04T17:22:45.831953Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:45.831981Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:45.831990Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:45.832116Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:22:45.899586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:22:45.908387Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:48.611874Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:48.613313Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:48.614700Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:48.614731Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:48.614751Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:48.614782Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:48.614830Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:48.614876Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:48.615006Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:48.615230Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:48.615548Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T17:22:48.615559Z node 4 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T17:22:48.615590Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-04T17:22:48.615753Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-04T17:22:48.615758Z node 4 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-04T17:22:48.615774Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-04T17:22:48.616278Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-04T17:22:48.616294Z node 4 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-04T17:22:48.616339Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
Full table path:/dc-1/.metadata/script_execution_leases 2025-04-04T17:22:48.619827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-04-04T17:22:48.622598Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.623981Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.629142Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-04T17:22:48.629161Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-04T17:22:48.629193Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715660 2025-04-04T17:22:48.629193Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976715659 2025-04-04T17:22:48.629284Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-04T17:22:48.629300Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715658 2025-04-04T17:22:48.788771Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-04-04T17:22:48.821384Z node 4 :KQP_PROXY DEBUG: ... XY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:51.460403Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-04T17:22:51.460875Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZDNjZmE2M2EtNWU5ODBjZjctYmE0ZjE4YTItOTA5YTY0NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 20, targetId: [4:7489509729816764579:2441] 2025-04-04T17:22:51.460910Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [4:7489509729816764581:2661] 2025-04-04T17:22:51.467604Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [4:7489509729816764580:2442], selfId: [4:7489509704046959756:2277], source: [4:7489509729816764579:2441] 2025-04-04T17:22:51.467871Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZDNjZmE2M2EtNWU5ODBjZjctYmE0ZjE4YTItOTA5YTY0NWM=, TxId: 01jr0x7ay5e03xvkyxwv4zc5s5 2025-04-04T17:22:51.468406Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-04T17:22:51.468832Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZDNjZmE2M2EtNWU5ODBjZjctYmE0ZjE4YTItOTA5YTY0NWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 21, targetId: [4:7489509729816764579:2441] 2025-04-04T17:22:51.468868Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 21 timeout: 300.000000s actor id: [4:7489509729816764602:2666] 2025-04-04T17:22:51.480893Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 21, sender: [4:7489509729816764601:2448], selfId: [4:7489509704046959756:2277], source: [4:7489509729816764579:2441] 2025-04-04T17:22:51.481251Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZDNjZmE2M2EtNWU5ODBjZjctYmE0ZjE4YTItOTA5YTY0NWM=, TxId: 2025-04-04T17:22:51.481379Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZDNjZmE2M2EtNWU5ODBjZjctYmE0ZjE4YTItOTA5YTY0NWM=, TxId: 2025-04-04T17:22:51.481432Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: c38bb201-ef65f9c3-51fd0621-4b21984e. UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-04-04T17:22:51.481854Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZDNjZmE2M2EtNWU5ODBjZjctYmE0ZjE4YTItOTA5YTY0NWM=, workerId: [4:7489509729816764579:2441], local sessions count: 1 2025-04-04T17:22:51.481994Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: c38bb201-ef65f9c3-51fd0621-4b21984e, successfully finalized script execution operation 2025-04-04T17:22:51.482011Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: c38bb201-ef65f9c3-51fd0621-4b21984e, reply success 2025-04-04T17:22:51.489964Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr0x7az1ftfbgvm1khxym66d, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=OGI5ZGEyZWUtZDhjZTY0OWUtNzViMjdiMjktNzkzZmRiOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [4:7489509721226829721:2360] 2025-04-04T17:22:51.490012Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [4:7489509729816764625:2674] 2025-04-04T17:22:51.589193Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:51.936932Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr0x7az1ftfbgvm1khxym66d", Forwarded response to sender actor, requestId: 22, sender: [4:7489509729816764624:2453], selfId: [4:7489509704046959756:2277], source: [4:7489509721226829721:2360] 2025-04-04T17:22:51.938697Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, Bootstrap. Database: /dc-1 2025-04-04T17:22:51.938891Z node 4 :KQP_PROXY DEBUG: Request has 18445000286337.612745s seconds to be completed 2025-04-04T17:22:51.940784Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=ZjMzOTM0MGMtNTZiY2Y5YzQtMjAyYTY3MTUtNzA4YWUwOGU=, workerId: [4:7489509729816764675:2466], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:22:51.940917Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:51.941157Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-04T17:22:51.941487Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZjMzOTM0MGMtNTZiY2Y5YzQtMjAyYTY3MTUtNzA4YWUwOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 24, targetId: [4:7489509729816764675:2466] 2025-04-04T17:22:51.941525Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [4:7489509729816764677:2699] 2025-04-04T17:22:52.151645Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 24, sender: [4:7489509729816764676:2467], selfId: [4:7489509704046959756:2277], source: [4:7489509729816764675:2466] 2025-04-04T17:22:52.151821Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZjMzOTM0MGMtNTZiY2Y5YzQtMjAyYTY3MTUtNzA4YWUwOGU=, TxId: 01jr0x7bkk52489bgews8gzyvf 2025-04-04T17:22:52.151965Z node 4 :KQP_PROXY WARN: [TQueryBase] [TScriptLeaseUpdater] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Get lease info, Finish with BAD_REQUEST, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=ZjMzOTM0MGMtNTZiY2Y5YzQtMjAyYTY3MTUtNzA4YWUwOGU=, TxId: 01jr0x7bkk52489bgews8gzyvf 2025-04-04T17:22:52.152008Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Get lease info, Rollback transaction: 01jr0x7bkk52489bgews8gzyvf 2025-04-04T17:22:52.152249Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZjMzOTM0MGMtNTZiY2Y5YzQtMjAyYTY3MTUtNzA4YWUwOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 25, targetId: [4:7489509729816764675:2466] 2025-04-04T17:22:52.152300Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 25 timeout: 600.000000s actor id: [4:7489509734111731999:2711] 2025-04-04T17:22:52.153772Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 25, sender: [4:7489509734111731998:2474], selfId: [4:7489509704046959756:2277], source: [4:7489509729816764675:2466] 2025-04-04T17:22:52.154142Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: c38bb201-ef65f9c3-51fd0621-4b21984e, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-04-04T17:22:52.154516Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZjMzOTM0MGMtNTZiY2Y5YzQtMjAyYTY3MTUtNzA4YWUwOGU=, workerId: [4:7489509729816764675:2466], local sessions count: 1 2025-04-04T17:22:52.159269Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=OGI5ZGEyZWUtZDhjZTY0OWUtNzViMjdiMjktNzkzZmRiOTg=, workerId: [4:7489509721226829721:2360], local sessions count: 0 >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> IndexBuildTest::ShadowDataEdgeCases [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] Test command err: 2025-04-04T17:22:40.227945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509684624499081:2198];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.228006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001627/r3tmp/tmpdWEOng/pdisk_1.dat 2025-04-04T17:22:40.632557Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:40.647341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:40.648344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:40.654794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64251 TServer::EnableGrpc on GrpcPort 21987, node 1 2025-04-04T17:22:40.942112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:40.942141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:40.942158Z node 1 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2025-04-04T17:22:40.942308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:22:41.248126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:42.991864Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:42.994701Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:42.997511Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:42.997569Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:42.997591Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:42.997638Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:42.997721Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:42.997767Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:42.998809Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:42.998864Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:43.006385Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.006415Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-04T17:22:43.014515Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.014545Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T17:22:43.014797Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-04T17:22:43.014824Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-04T17:22:43.015220Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-04T17:22:43.015234Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-04T17:22:43.019575Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2025-04-04T17:22:43.025929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-04-04T17:22:43.027484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.029095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:43.043772Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-04T17:22:43.043773Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-04T17:22:43.043849Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-04-04T17:22:43.043919Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-04T17:22:43.043944Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-04-04T17:22:43.044017Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-04-04T17:22:43.162308Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-04-04T17:22:43.224651Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-04-04T17:22:43.231135Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-04-04T17:22:43.240043Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-04T17:22:43.322130Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-04-04T17:22:43.330742Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-04-04T17:22:43.331314Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: f60c5e7e-84d23e24-eecd1758-72286979, Bootstrap. 
Database: /dc-1 2025-04-04T17:22:43.343021Z node 1 :KQP_PROXY DEBUG: Request has 18445000286346.208658s seconds to be completed 2025-04-04T17:22:43.347808Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NWZiYzg2MjAtYTdiYTE1ZjctZDE4M2E4OTAtZjIwMGM1ZGM=, workerId: [1:7489509697509401727:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T17:22:43.347937Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:43.351758Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: f60c5e7e-84d23e24-eecd1758-72286979, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-04-04T17:22:43.356252Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NWZiYzg2MjAtYTdiYTE1ZjctZDE4M2E4OTAtZjIwMGM1ZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7489509697509401727:2333] 2025-04-04T17:22:43.356289Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7489509697509401729:2467] 2025-04-04T17:22:43.358465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509697509401730:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.358539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509697509401741:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.358581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:43.364253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-04T17:22:43.373560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509697509401744:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:22:43.437589Z node 1 :TX_PROXY ERROR: Actor# [1:7489509697509401786:2499] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges ... supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-04-04T17:22:51.184017Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTI0Mjc2ODctN2ViYWFiZWYtMjNkZTJhODItNDgxZGJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 16, targetId: [2:7489509727633648225:2390] 2025-04-04T17:22:51.184078Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 16 timeout: 300.000000s actor id: [2:7489509731928615591:2605] 2025-04-04T17:22:51.404423Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, Bootstrap. Database: /dc-1 2025-04-04T17:22:51.412657Z node 2 :KQP_PROXY DEBUG: Request has 18445000286338.138993s seconds to be completed 2025-04-04T17:22:51.414618Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YzlhNzU4ODAtMzIyY2FjYjgtZWM0N2YxZTMtYTI4OGE3OWQ=, workerId: [2:7489509731928615612:2416], database: /dc-1, longSession: 1, local sessions count: 4 2025-04-04T17:22:51.414791Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:51.415054Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-04T17:22:51.415842Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YzlhNzU4ODAtMzIyY2FjYjgtZWM0N2YxZTMtYTI4OGE3OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 18, targetId: [2:7489509731928615612:2416] 2025-04-04T17:22:51.415907Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 18 timeout: 300.000000s actor id: [2:7489509731928615614:2620] 2025-04-04T17:22:51.580845Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 16, sender: [2:7489509731928615590:2409], selfId: [2:7489509710453778151:2272], source: [2:7489509727633648225:2390] 2025-04-04T17:22:51.581425Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 8f8ae728-8e7b7cfc-10012463-5108a357, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTI0Mjc2ODctN2ViYWFiZWYtMjNkZTJhODItNDgxZGJjNDk=, TxId: 2025-04-04T17:22:51.581481Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 8f8ae728-8e7b7cfc-10012463-5108a357, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTI0Mjc2ODctN2ViYWFiZWYtMjNkZTJhODItNDgxZGJjNDk=, TxId: 2025-04-04T17:22:51.581492Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 8f8ae728-8e7b7cfc-10012463-5108a357. SUCCESS. Issues: 2025-04-04T17:22:51.582170Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTQzYzdjMjYtMzJkOWQyNS03MjEyYmQwYS1mYTkxOGE2NA==, workerId: [2:7489509727633648115:2349], local sessions count: 3 2025-04-04T17:22:51.582368Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MTI0Mjc2ODctN2ViYWFiZWYtMjNkZTJhODItNDgxZGJjNDk=, workerId: [2:7489509727633648225:2390], local sessions count: 2 2025-04-04T17:22:51.631277Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 18, sender: [2:7489509731928615613:2417], selfId: [2:7489509710453778151:2272], source: [2:7489509731928615612:2416] 2025-04-04T17:22:51.633136Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzlhNzU4ODAtMzIyY2FjYjgtZWM0N2YxZTMtYTI4OGE3OWQ=, TxId: 01jr0x7b3a9ahw3g2mctg2qz93 2025-04-04T17:22:51.633300Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, State: Get lease info, RunDataQuery: -- TScriptLeaseUpdater::OnGetLeaseInfo DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $lease_duration AS Interval; UPDATE `.metadata/script_execution_leases` SET lease_deadline=(CurrentUtcTimestamp() + $lease_duration) WHERE database = $database AND execution_id = $execution_id; 2025-04-04T17:22:51.633589Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YzlhNzU4ODAtMzIyY2FjYjgtZWM0N2YxZTMtYTI4OGE3OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 19, targetId: [2:7489509731928615612:2416] 2025-04-04T17:22:51.633620Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [2:7489509731928615662:2639] 2025-04-04T17:22:51.766634Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 19, sender: [2:7489509731928615661:2428], selfId: [2:7489509710453778151:2272], source: [2:7489509731928615612:2416] 2025-04-04T17:22:51.766830Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzlhNzU4ODAtMzIyY2FjYjgtZWM0N2YxZTMtYTI4OGE3OWQ=, TxId: 2025-04-04T17:22:51.766898Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, State: Update lease, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzlhNzU4ODAtMzIyY2FjYjgtZWM0N2YxZTMtYTI4OGE3OWQ=, TxId: 2025-04-04T17:22:51.767487Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YzlhNzU4ODAtMzIyY2FjYjgtZWM0N2YxZTMtYTI4OGE3OWQ=, workerId: [2:7489509731928615612:2416], local sessions count: 1 2025-04-04T17:22:51.776433Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr0x7b7z02p0qbzdfreqhcq1, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=N2E3ZWIxNTEtNjBhNjk3ODctYWFhZTllMDMtYzNlYTYwNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 20, targetId: [2:7489509727633648132:2359] 2025-04-04T17:22:51.776479Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [2:7489509731928615689:2650] 2025-04-04T17:22:51.873738Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509710453777938:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:51.873817Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:52.216107Z node 2 :KQP_PROXY DEBUG: TraceId: "01jr0x7b7z02p0qbzdfreqhcq1", Forwarded response to sender actor, requestId: 20, sender: [2:7489509731928615688:2435], selfId: [2:7489509710453778151:2272], source: [2:7489509727633648132:2359] 2025-04-04T17:22:52.217383Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, Bootstrap. Start TCheckLeaseStatusQueryActor 2025-04-04T17:22:52.217452Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, Bootstrap. 
Database: /dc-1 2025-04-04T17:22:52.217642Z node 2 :KQP_PROXY DEBUG: Request has 18445000286337.333989s seconds to be completed 2025-04-04T17:22:52.219290Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZGNkNDVmYTUtMzI0NGQ0YTktMWMxNWM4ZTMtYzBiOGFlNDg=, workerId: [2:7489509736223583037:2449], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:22:52.219398Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:22:52.219540Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, RunDataQuery: -- TCheckLeaseStatusQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, execution_status, finalization_status, issues, run_script_actor_id FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-04-04T17:22:52.219757Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZGNkNDVmYTUtMzI0NGQ0YTktMWMxNWM4ZTMtYzBiOGFlNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [2:7489509736223583037:2449] 2025-04-04T17:22:52.219781Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [2:7489509736223583039:2676] 2025-04-04T17:22:52.415206Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:52.568400Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 22, sender: [2:7489509736223583038:2450], selfId: [2:7489509710453778151:2272], source: [2:7489509736223583037:2449] 2025-04-04T17:22:52.568588Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGNkNDVmYTUtMzI0NGQ0YTktMWMxNWM4ZTMtYzBiOGFlNDg=, TxId: 2025-04-04T17:22:52.568669Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGNkNDVmYTUtMzI0NGQ0YTktMWMxNWM4ZTMtYzBiOGFlNDg=, TxId: 2025-04-04T17:22:52.568730Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: e7cf2bf5-56854c5b-1371ab0e-8985f1e9, reply success 2025-04-04T17:22:52.568878Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZGNkNDVmYTUtMzI0NGQ0YTktMWMxNWM4ZTMtYzBiOGFlNDg=, workerId: [2:7489509736223583037:2449], local sessions count: 1 2025-04-04T17:22:52.574643Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=N2E3ZWIxNTEtNjBhNjk3ODctYWFhZTllMDMtYzNlYTYwNjI=, workerId: [2:7489509727633648132:2359], local sessions count: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:50.912480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:50.912604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:50.912677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:50.912724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:50.912772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:50.912802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:50.912892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:50.912977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:50.913327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:51.008938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:51.009004Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:51.022791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:51.023116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:51.023290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:51.034817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:51.035178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:51.035955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:51.036889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:51.044294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:51.045721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:51.045793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:51.045981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:51.046048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:51.046095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-04-04T17:22:51.046298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.055098Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:51.230139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:51.230473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.230706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:51.230895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:51.230954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.233373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:51.233590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:51.233839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.233891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:51.233919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:51.233976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:51.236582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.236644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:51.236682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:51.238339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.238386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.238446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:51.238508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:51.242473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:51.244611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:51.244809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:51.245852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:51.246018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:51.246077Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:51.246409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:51.246466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:51.246673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:51.246753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:51.249146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:51.249201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:51.249378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:51.249425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:51.249928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.249994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:51.250148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:51.250212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:51.250291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:51.250336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:51.250374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:51.250419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-04T17:22:51.250500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:51.250540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:51.250627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:51.250678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:51.250719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:51.252994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:51.253128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:51.253170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:53.353812Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:22:53.354027Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 244us result status StatusSuccess 2025-04-04T17:22:53.354665Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 
83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:53.355260Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:22:53.355536Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 312us result status StatusSuccess 2025-04-04T17:22:53.356434Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { 
Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:51.753052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:51.753153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:51.753216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:51.753250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:51.753289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:51.753314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:51.753374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:51.753448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:51.753750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:51.826522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:51.826574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:51.837405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:51.837614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:51.837768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:51.847273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:51.847543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:51.848167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:51.848970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:51.853718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:51.855111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:51.855183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:51.855363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:51.855420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:51.855470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:51.855594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.861823Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:51.994747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: 
"pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:51.994975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.995223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:51.995434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:51.995484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.997573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:51.997698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:51.997889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.997934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:51.997965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:51.998016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:51.999748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:51.999800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:51.999837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:52.001410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:52.001451Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:52.001528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:52.001588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:52.005090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:52.007032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:52.007267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 
State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:52.008321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:52.008479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:52.008537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:52.008819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:52.008873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:52.009049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:52.009129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:52.011394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:52.011448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:52.011599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:52.011636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:52.012008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:52.012072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:52.012188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:52.012222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:52.012263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:52.012331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:52.012378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:52.012424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:52.012465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:52.012494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:52.012554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:52.012595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:52.012629Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:52.014465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:52.014569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:52.014607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-04T17:22:53.597272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 160500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 172 } } 2025-04-04T17:22:53.597355Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 160500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 172 } } 2025-04-04T17:22:53.597393Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-04T17:22:53.597473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944 2025-04-04T17:22:53.597511Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128 2025-04-04T17:22:53.599278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:22:53.599439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:22:53.599489Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:53.599573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2025-04-04T17:22:53.599736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:53.601462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2025-04-04T17:22:53.601593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 
State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2025-04-04T17:22:53.602618Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:53.602749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:53.602825Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2025-04-04T17:22:53.603076Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129 2025-04-04T17:22:53.603234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2025-04-04T17:22:53.608330Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:53.608380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:22:53.608592Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:53.608655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 109, path id: 4 2025-04-04T17:22:53.609614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:22:53.609679Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:22:53.610149Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-04-04T17:22:53.610259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-04-04T17:22:53.610301Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2025-04-04T17:22:53.610337Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-04-04T17:22:53.610385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:22:53.610495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 109 2025-04-04T17:22:53.618511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-04-04T17:22:53.619331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1217 } } 2025-04-04T17:22:53.619374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-04T17:22:53.619490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1217 } } 2025-04-04T17:22:53.619569Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1217 } } 2025-04-04T17:22:53.620732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 675 RawX2: 8589937221 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-04T17:22:53.620780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-04-04T17:22:53.620896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 675 RawX2: 8589937221 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-04T17:22:53.620945Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:22:53.621024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 675 RawX2: 8589937221 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-04-04T17:22:53.621085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:53.621120Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:22:53.621155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-04-04T17:22:53.621192Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240 2025-04-04T17:22:53.623061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:22:53.623924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-04-04T17:22:53.624192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at 
schemeshard: 72057594046678944 2025-04-04T17:22:53.624232Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState 2025-04-04T17:22:53.624534Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-04T17:22:53.624573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-04T17:22:53.624617Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-04-04T17:22:53.624648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-04T17:22:53.624683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2025-04-04T17:22:53.624748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:335:2314] message: TxId: 109 2025-04-04T17:22:53.624793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-04-04T17:22:53.624835Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-04-04T17:22:53.624867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0 2025-04-04T17:22:53.624991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:22:53.626760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-04-04T17:22:53.626821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:783:2728] TestWaitNotification: OK eventTxId 109 >> DataStreams::TestUpdateStorage >> DataStreams::TestDeleteStream >> DataStreams::TestUpdateStream >> DataStreams::TestGetRecordsStreamWithSingleShard >> DataStreams::TestNonChargeableUser >> DataStreams::TestControlPlaneAndMeteringData >> DataStreams::TestStreamStorageRetention >> DataStreams::TestGetShardIterator >> DataStreams::TestReservedResourcesMetering >> DataStreams::TestPutRecordsOfAnauthorizedUser >> BasicStatistics::ServerlessGlobalIndex [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage >> Sharding::XXUsage [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 6715080793285450045 17502165570158299706 18147328781880615017 1080450805689924384 10776897597011460424 1657523940696234894 188219473987201551 9031448816716092598 4306435429060538296 7690984107267498292 2744517546871237796 11200944239709289729 10279748244213772646 10588861053947768267 8420130885063275166 6027331847613428307 10284374326543654418 14181081072950333537 15529286812943673201 13588940981210400147 164852255672563262 13235268945499914268 16679050633109588779 1385971610604326572 13418661558476599230 8349232624687679518 12402949729473115207 9351368106458696534 17866676729979190007 16626919547463990913 9742656992869668786 9605415601825555410 2919678017731726163 14725296249616177913 15263885194029381884 4128211355714304359 4246270097244529870 2335758620046346671 18216442524553049683 18111699263754341418 7814970665086387503 18034165295799066242 9570695104535670376 11430058203546077139 8274278695147404116 2515634051035436290 6632642964665838508 15329467136816780393 5768119730541480535 4059731182101581254 
10437317632130198052 12077040380683853782 11818610620773241248 5521670163082097260 15641129799327160362 12893505581906642839 3337513773270124807 6826189563989471468 5473423200758862971 13616982537937932929 18007518035307363304 8869535242468797651 9398443868387160368 14818437463290546246 10305605522930456894 6878928846888004663 18359830943291610274 14570208907836461705 4708267219011078326 13395439780495535765 8238827249329541468 1254523394397648285 11727600438545797405 14756570080485325032 12421055187831233908 8674489303679495286 12872334300096922426 9042191425026010263 3050447591227719133 8417507705008535395 11613487667212403472 7127449404751332468 6100201075907074546 8703632729195462795 1340592731212779638 5068016675824482406 4352609912632492160 1534775648701425578 13766359337330076494 5840302035784090871 >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] |94.0%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: 2025-04-04T17:20:03.234161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:20:03.234502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:20:03.234626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000875/r3tmp/tmpoR85aq/pdisk_1.dat 2025-04-04T17:20:03.614028Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24432, node 1 2025-04-04T17:20:03.861540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:20:03.861596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:20:03.861626Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:20:03.862101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:20:03.864502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:20:03.953822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:03.953993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:03.968967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26315 2025-04-04T17:20:04.520090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:20:07.584851Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:20:07.624117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:07.624229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:07.657692Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:20:07.661064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:07.903951Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.904536Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.905001Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.905152Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.905385Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.905473Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.905552Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.905657Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:07.905740Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:20:08.096539Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:20:08.096658Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:20:08.112234Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:20:08.253383Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:08.293711Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:20:08.293804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:20:08.323601Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:20:08.324015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:20:08.324232Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:20:08.324294Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:20:08.324355Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:20:08.324396Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:20:08.324436Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:20:08.324484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:20:08.325133Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:20:08.355976Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:20:08.356097Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:20:08.363448Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2603] 2025-04-04T17:20:08.366073Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1898:2613] 2025-04-04T17:20:08.366213Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1898:2613], schemeshard id = 72075186224037897 2025-04-04T17:20:08.373799Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:20:08.394302Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:20:08.394370Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:20:08.394432Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:20:08.409149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:20:08.417140Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:20:08.417311Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:20:08.630687Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:20:08.817839Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:20:08.898622Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:20:09.643962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:20:10.328004Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:20:10.510794Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:20:10.510861Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:20:10.510951Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2595:2946], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:20:10.512407Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2598:2949] 2025-04-04T17:20:10.512586Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2598:2949], schemeshard id = 72075186224037899 2025-04-04T17:20:11.641758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2730:3250], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.641919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.986214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2961:3295], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.986615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:20:11.988335Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2966:3299]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:20:11.988571Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:20:11.988828Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-04T17:20:11.988904Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2969:3302] 2025-04-04T17:20:11.988992Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2969:3302] 2025-04-04T17:20:11.989733Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2970:3137] 2025-04-04T17:20:11.990090Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2969:3302], server id = [2:2970:3137], tablet id = 72075186224037894, status = OK 2025-04-04T17:20:11.990345Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2970:3137], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:20:11.990425Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:20:11.990709Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:20:11.990803Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2966:3299], StatRequests.size() = 1 2025-04-04T17:20:12.000100Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3003:3311]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:20:12.000328Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] R ... .000000s, at schemeshard: 72075186224037897 2025-04-04T17:22:47.142712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-04T17:22:47.159581Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:22:47.216375Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7851:5556]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:22:47.216568Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-04T17:22:47.216604Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7851:5556], StatRequests.size() = 1 2025-04-04T17:22:48.445473Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:22:48.445550Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:22:48.445597Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:22:48.445636Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:22:48.445951Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-04T17:22:48.460320Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:22:48.464128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7887:5582], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.464238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7898:5587], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.464471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.479382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:22:48.544900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7901:5590], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:22:48.648564Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7996:5638]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:22:48.649017Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:22:48.649061Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7996:5638], StatRequests.size() = 1 2025-04-04T17:22:48.728153Z node 2 :TX_PROXY ERROR: Actor# [2:8001:5640] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:48.752439Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:8030:5655]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:22:48.752638Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:22:48.752838Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-04T17:22:48.752886Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:22:48.753017Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:22:48.753092Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:8030:5655], StatRequests.size() = 1 2025-04-04T17:22:48.864307Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTRjZWM5My0xNDU0M2IxMC1mMmU3MmM3My0yMTI5NzJhNg==, TxId: 2025-04-04T17:22:48.864411Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTRjZWM5My0xNDU0M2IxMC1mMmU3MmM3My0yMTI5NzJhNg==, TxId: 2025-04-04T17:22:48.865073Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:22:48.880104Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:22:48.880185Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:22:48.934777Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:22:48.934847Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:22:49.000263Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3236:3179], schemeshard count = 1 2025-04-04T17:22:49.239449Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037899 2025-04-04T17:22:49.239520Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 225.000000s, at schemeshard: 72075186224037899 2025-04-04T17:22:49.239778Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 50 2025-04-04T17:22:49.254503Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:22:50.240598Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:8098:5699]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:22:50.240880Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:22:50.240923Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:8098:5699], StatRequests.size() = 1 2025-04-04T17:22:51.530408Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:22:51.541431Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:22:51.541501Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:22:51.541542Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 4] is data table. 2025-04-04T17:22:51.541576Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-04T17:22:51.541899Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:22:51.544779Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:22:51.560498Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWQyZjVmZDktMmJlMDc0My1lM2U4MGUzOC0xOWZhNDc2NA==, TxId: 2025-04-04T17:22:51.560567Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWQyZjVmZDktMmJlMDc0My1lM2U4MGUzOC0xOWZhNDc2NA==, TxId: 2025-04-04T17:22:51.561098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:22:51.576503Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 4] 2025-04-04T17:22:51.576563Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:22:51.645567Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:8169:5743]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:22:51.645902Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:22:51.645945Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:8169:5743], StatRequests.size() = 1 2025-04-04T17:22:53.025776Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8221:5774]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:22:53.026046Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:22:53.026078Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8221:5774], StatRequests.size() = 1 2025-04-04T17:22:54.206067Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-04T17:22:54.206582Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:22:54.206993Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:22:54.218605Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:22:54.218667Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:22:54.218707Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-04T17:22:54.218738Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:22:54.219044Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:22:54.221735Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:22:54.236495Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGQ5MThiM2ItOWQ5MTc3YzktYmY0ZGE2MzgtZGE0N2FmMmY=, TxId: 2025-04-04T17:22:54.236580Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGQ5MThiM2ItOWQ5MTc3YzktYmY0ZGE2MzgtZGE0N2FmMmY=, TxId: 2025-04-04T17:22:54.237186Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:22:54.252198Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:22:54.252256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:22:54.348003Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8285:5812]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:22:54.348290Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:22:54.348336Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8285:5812], StatRequests.size() = 1 >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkTx::DeferredEffects >> KqpSinkLocks::UncommittedRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 2929, MsgBus: 1271 2025-04-04T17:22:15.120713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509578448389142:2253];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e55/r3tmp/tmpCkayHe/pdisk_1.dat 2025-04-04T17:22:15.344752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:15.521991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:15.522112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:15.525792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:15.550103Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2929, node 1 2025-04-04T17:22:15.658818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:15.658843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:15.658850Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:15.658996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1271 TClient is connected to server localhost:1271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:16.217902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:18.310634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509591333291488:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.310785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509591333291470:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.310922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:18.314902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:18.326954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509591333291492:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:18.407200Z node 1 :TX_PROXY ERROR: Actor# [1:7489509591333291545:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:18.698437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:18.859883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:18.859885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:18.860066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:18.860358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:18.860491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:18.860597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:18.860698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:18.860788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:18.860880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:18.861011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:18.861130Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:18.861243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:18.861376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489509591333291733:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:18.862521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:18.862763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:18.862887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:18.863072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:18.863174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:18.863277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:18.863401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:18.863569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:18.863693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:18.863808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:18.863943Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7489509591333291750:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:18.899989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509591333291743:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:18.900062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509591333291743:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:18.900282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id= ... 6;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101485Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7489509690300485885:2544];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101492Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037955;self_id=[2:7489509690300485593:2493];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037955;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101561Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037968;self_id=[2:7489509690300485934:2574];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101594Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037956;self_id=[2:7489509690300485909:2558];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037956;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101640Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7489509690300485931:2572];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101655Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037957;self_id=[2:7489509690300485965:2583];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037957;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101718Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037974;self_id=[2:7489509690300485875:2538];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101729Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037970;self_id=[2:7489509690300485963:2582];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101794Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7489509690300485954:2575];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101796Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7489509690300485715:2513];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101849Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7489509690300485973:2589];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101854Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037976;self_id=[2:7489509690300485956:2576];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037976;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101918Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7489509690300485924:2567];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101922Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7489509690300485895:2551];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101981Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7489509690300485712:2510];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.101982Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037964;self_id=[2:7489509690300486006:2607];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037964;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102038Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7489509690300485917:2561];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102041Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7489509690300485664:2507];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-04-04T17:22:50.102105Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7489509690300485655:2501];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102152Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7489509690300486001:2605];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102200Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7489509690300485727:2523];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102224Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7489509690300485788:2535];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102284Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7489509690300485894:2550];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102327Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7489509690300485667:2508];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102375Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7489509690300485845:2536];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102420Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7489509690300486005:2606];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102448Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7489509690300485731:2525];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102484Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7489509690300485873:2537];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102512Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037997;self_id=[2:7489509690300485714:2512];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102557Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7489509690300485728:2524];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102626Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7489509690300485661:2506];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102692Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7489509690300485669:2509];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102753Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7489509690300485649:2496];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102814Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7489509690300485775:2526];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:50.102876Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7489509690300485726:2522];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 2025-04-04T17:22:51.646398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:51.646423Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpSinkTx::OlapExplicitTcl [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing >> ObjectStorageListingTest::ListingNoFilter >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::CancelBuild >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataStreams::TestUpdateStorage 
[GOOD] >> DataStreams::TestStreamTimeRetention >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead |94.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> DataStreams::TestPutEmptyMessage |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] >> DataStreams::TestControlPlaneAndMeteringData [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] >> DataStreams::ChangeBetweenRetentionModes |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 15625, MsgBus: 64758 2025-04-04T17:22:28.225878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509632636873336:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:28.225925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c7c/r3tmp/tmpU5e04g/pdisk_1.dat 2025-04-04T17:22:28.613284Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:28.618788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:28.618878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:28.621767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15625, node 1 2025-04-04T17:22:28.705545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:28.705567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:28.705581Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:28.705685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64758 TClient is connected to server localhost:64758 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:29.229264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:31.181039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509645521775883:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.181119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.181272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509645521775895:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:31.185527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:31.195175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509645521775897:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:31.271878Z node 1 :TX_PROXY ERROR: Actor# [1:7489509645521775951:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:31.501282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:31.627846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:31.628091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:31.628330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:31.628476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:31.628635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:31.628735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:31.628834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:31.628964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:31.629088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:31.629208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:31.629311Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:31.629416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509645521776139:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:31.632285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:31.632373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:31.632660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:31.632857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:31.633013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:31.633167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:31.633292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:31.633438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:31.633567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:31.633686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:31.633839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:31.633965Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7489509645521776138:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:31.660229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509645521776133:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:31.660273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509645521776133:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:31.660467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;sel ... otifyPlanStep;tablet_id=72075186224037939;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[1:7489509649816744695:2494];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037971;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[1:7489509649816744999:2551];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[1:7489509649816744825:2503];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037967;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037962;self_id=[1:7489509649816744516:2475];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037962;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[1:7489509649816744860:2507];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037952;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[1:7489509649816744698:2496];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037979;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.801725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;self_id=[1:7489509649816744869:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037955;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.869789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[1:7489509662701650315:3135];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038084;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.870049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[1:7489509662701650315:3135];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.880877Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038093;self_id=[1:7489509662701650442:3199];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.881119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7489509662701650442:3199];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.882074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[1:7489509662701650375:3158];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037999;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.882335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[1:7489509662701650375:3158];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037999;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.889696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[1:7489509662701650363:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.889931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[1:7489509662701650363:3146];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.890091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7489509662701650412:3195];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.890232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7489509662701650412:3195];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.890410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7489509662701650385:3168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.890573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7489509662701650382:3165];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.890590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7489509662701650385:3168];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:39.890828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7489509662701650382:3165];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:22:43.601502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:43.601532Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 27703, MsgBus: 19881 
2025-04-04T17:22:46.335405Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509710383606823:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c7c/r3tmp/tmp9bNyHv/pdisk_1.dat 2025-04-04T17:22:46.378407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:46.437498Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:46.469734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:46.469842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:46.471716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27703, node 2 2025-04-04T17:22:46.516391Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:46.516422Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:46.516432Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:46.516558Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19881 TClient is connected to server localhost:19881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:46.927262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:49.830693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509723268509208:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:49.830765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509723268509234:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:49.830823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:49.835045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:49.848602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509723268509246:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:49.913126Z node 2 :TX_PROXY ERROR: Actor# [2:7489509723268509297:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:49.947976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:50.021609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:50.954986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:51.626638Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509710383606823:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:51.692215Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 8399, MsgBus: 12505 2025-04-04T17:22:20.634170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509597888965170:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:20.641370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000dbd/r3tmp/tmpbMq6nK/pdisk_1.dat 2025-04-04T17:22:20.979489Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8399, node 1 2025-04-04T17:22:21.032983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:21.033112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:21.039092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:21.051925Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:21.051946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:21.051965Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:21.052053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12505 TClient is connected to server localhost:12505 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:21.567106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:21.584262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:22:23.466566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509610773867550:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.466694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.466994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509610773867587:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:23.471165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:23.508328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509610773867589:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:23.572857Z node 1 :TX_PROXY ERROR: Actor# [1:7489509610773867640:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:23.831184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:24.022422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:24.022433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:24.022667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:24.022998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:24.023149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:24.023260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:24.023385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:24.023485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:24.023600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:24.023704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:24.023823Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:24.023958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:24.024065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509610773867800:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:24.026521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:24.026790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:24.026915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:24.027113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:24.027252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:24.027354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:24.027488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:24.027633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:24.027764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:24.027867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:24.027967Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7489509610773867803:2347];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:24.062466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509610773867802:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:24.062531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509610773867802:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstr ... COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7489509712784789804:3289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.106708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509712784790151:3324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.106798Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7489509712784790152:3325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.106843Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7489509712784790151:3324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.106947Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7489509712784790138:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.106979Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7489509712784789781:3287];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.107079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7489509712784790138:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.107164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7489509712784789781:3287];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.107199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509712784790185:3332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.107315Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7489509712784790185:3332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.107443Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038004;self_id=[2:7489509712784790195:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.107606Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7489509712784790195:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.107897Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7489509712784789837:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.108113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7489509712784789837:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.112192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509712784789829:3294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.112451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509712784789829:3294];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.112637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7489509712784790143:3318];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.112811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7489509712784790143:3318];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.113692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7489509712784790128:3310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.114024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7489509712784790128:3310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.114682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7489509712784790146:3321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.114891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7489509712784790146:3321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.115218Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7489509712784790154:3327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.115426Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038022;self_id=[2:7489509712784790154:3327];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.118316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7489509712784790197:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.118626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7489509712784790197:3342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.118863Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7489509712784789856:3299];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.119073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7489509712784789856:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.119830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509712784790133:3312];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.119989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7489509712784790133:3312];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.122940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509712784790093:3308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.123248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509712784790093:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.123925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509712784790174:3328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.124147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7489509712784790174:3328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.126537Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7489509712784790187:3334];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.126769Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7489509712784790187:3334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.127511Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038019;self_id=[2:7489509712784790191:3337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.127733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7489509712784790191:3337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.128519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7489509712784790137:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.128753Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7489509712784790137:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.130623Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmM5YWQwMTUtMzNlNzk5MTctZDU4ZDQ5NWEtMmZmZjE5NmY=, ActorId: [2:7489509725669695046:3887], ActorState: ReadyState, TraceId: 01jr0x7bjk7v73qnezjpjjt2wn, Create QueryResponse for error on request, msg: 2025-04-04T17:22:52.131085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7489509712784790193:3339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:52.131326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7489509712784790193:3339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:22:53.703915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:53.703960Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> IndexBuildTest::CancelBuild [GOOD] |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] Test command err: 2025-04-04T17:18:58.086772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508732556286965:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:18:58.086870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e93/r3tmp/tmpqRqnWu/pdisk_1.dat 2025-04-04T17:18:58.261966Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:18:58.403804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:18:58.409118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:18:58.409653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:18:58.420514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9641, node 1 2025-04-04T17:18:58.584130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000e93/r3tmp/yandexuxdI8x.tmp 2025-04-04T17:18:58.584155Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000e93/r3tmp/yandexuxdI8x.tmp 2025-04-04T17:18:58.584379Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000e93/r3tmp/yandexuxdI8x.tmp 2025-04-04T17:18:58.584524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:18:58.799229Z INFO: TTestServer started on Port 11369 GrpcPort 9641 TClient is connected to server localhost:11369 PQClient connected to localhost:9641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:18:59.061256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:18:59.088941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
waiting... waiting... 2025-04-04T17:19:00.695150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508741146222384:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.695150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508741146222376:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.695263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:19:00.700343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:19:00.711397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508741146222390:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:19:00.899935Z node 1 :TX_PROXY ERROR: Actor# [1:7489508741146222454:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:19:00.978373Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489508741146222462:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:19:00.984950Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjZmN2MxNGUtZjMzYmE0NmMtYmU4NmQ5NS05ZWUzOWE2, ActorId: [1:7489508741146222358:2336], ActorState: ExecuteState, TraceId: 01jr0x09hr1a3yawk1fg0s30p8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:19:00.987307Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:19:01.028082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.059669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:19:01.130160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489508745441190045:2631] 2025-04-04T17:19:03.087250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508732556286965:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:19:03.087310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:19:07.323618Z :TwoSessionOneConsumer INFO: TTopicSdkTestSetup started 2025-04-04T17:19:07.337456Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:19:07.367800Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:19:07.368538Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T17:19:07.368762Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:19:07.368950Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T17:19:07.368975Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:19:07.368992Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T17:19:07.369023Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:19:07.369069Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:19:07.369120Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T17:19:07.399179Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489508771210994129:2804], now have 1 active actors on pipe 2025-04-04T17:19:07.399469Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489508771210994130:2805] connected; active server actors: 1 2025-04-04T17:19:07.399689Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-04-04T17:19:07.400200Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T17:19:07.400307Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T17:19:07.400801Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T17:19:07.400826Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T17:19:07.413585Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489508732556287385 RawX2: 4294969486 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } 2025-04-04T17:19:07.413745Z node 1 :PERSQUEUE DEBUG ... 
T topic topic_A partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:22:56.347568Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:22:56.347629Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2025-04-04T17:22:56.347657Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:22:56.348086Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T17:22:56.349117Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server connected, pipe [12:7489509754247733522:2483], now have 1 active actors on pipe 2025-04-04T17:22:56.349361Z node 12 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] TEvClientConnected TabletId 72075186224037895, NodeId 12, Generation 2 2025-04-04T17:22:56.349428Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T17:22:56.349921Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:TInitDataRangeStep] Got data offset 0 count 1 size 250 so 0 eo 1 D0000100000_00000000000000000000_00000_0000000001_00000| 2025-04-04T17:22:56.350042Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:TInitDataRangeStep] Got data offset 1 count 1 size 250 so 0 eo 2 D0000100000_00000000000000000001_00000_0000000001_00000| 2025-04-04T17:22:56.350179Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitDataStep 2025-04-04T17:22:56.350809Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:TInitDataStep] read res partition offset 0 endOffset 2 key 0,1 valuesize 250 expected 250 2025-04-04T17:22:56.350914Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:TInitDataStep] read res partition offset 1 endOffset 2 key 1,1 valuesize 250 expected 250 2025-04-04T17:22:56.350937Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T17:22:56.350957Z node 12 :PERSQUEUE INFO: [topic_A:{0, {1, 281474976715674}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:22:56.350975Z node 12 :PERSQUEUE DEBUG: [topic_A:{0, {1, 281474976715674}, 100000}:Initializer] Initializing completed. 
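[Editor's note] The entries above trace a PersQueue partition booting through a fixed chain of initializer steps (TInitConfigStep, TInitInternalFieldsStep, TInitDiskStatusStep, TInitMetaStep, TInitInfoRangeStep, TInitDataRangeStep, TInitDataStep, TInitEndWriteTimestampStep), where an individual step may skip work a previous generation already did ("Initializing EndWriteTimestamp skipped because already initialized.") before the partition reports "Initializing completed." and leaves StateInit. A minimal C++ sketch of such a sequential, skippable step chain follows; every type and variable name here is an illustrative stand-in, not the actual YDB implementation.

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Sketch of a sequential initializer pipeline, modeled on the TInit*Step
// sequence visible in the log above. Schematic only, not YDB code.
struct TPartitionState {
    bool EndWriteTimestampSet = false;  // may have been set by a previous generation
};

struct TInitStep {
    std::string Name;
    // Returns true when the step did work, false when it was skipped.
    std::function<bool(TPartitionState&)> Run;
};

int main() {
    TPartitionState state;
    state.EndWriteTimestampSet = true;  // mimics "skipped because already initialized"

    std::vector<TInitStep> pipeline = {
        {"TInitConfigStep",            [](TPartitionState&) { return true; }},
        {"TInitInternalFieldsStep",    [](TPartitionState&) { return true; }},
        {"TInitDiskStatusStep",        [](TPartitionState&) { return true; }},
        {"TInitMetaStep",              [](TPartitionState&) { return true; }},
        {"TInitInfoRangeStep",         [](TPartitionState&) { return true; }},
        {"TInitDataRangeStep",         [](TPartitionState&) { return true; }},
        {"TInitDataStep",              [](TPartitionState&) { return true; }},
        {"TInitEndWriteTimestampStep", [](TPartitionState& s) {
            if (s.EndWriteTimestampSet) return false;  // already initialized, skip
            s.EndWriteTimestampSet = true;
            return true;
        }},
    };

    for (const auto& step : pipeline) {
        std::cout << "Start initializing step " << step.Name << "\n";
        if (!step.Run(state)) {
            std::cout << step.Name << " skipped because already initialized.\n";
        }
    }
    // The real partition then transitions StateInit -> StateIdle.
    std::cout << "Initializing completed.\n";
}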
2025-04-04T17:22:56.351004Z node 12 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: {0, {1, 281474976715674}, 100000}, State: StateInit] init complete for topic 'topic_A' partition {0, {1, 281474976715674}, 100000} generation 2 [12:7489509754247733534:2511] 2025-04-04T17:22:56.351069Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: {0, {1, 281474976715674}, 100000}, State: StateInit] SYNC INIT topic topic_A partitition {0, {1, 281474976715674}, 100000} so 0 endOffset 2 Head Offset 0 PartNo 0 PackedSize 500 count 2 nextOffset 2 batches 2 SYNC INIT sourceId test-message_group_id_3 seqNo 1 offset 1 SYNC INIT sourceId test-message_group_id_1 seqNo 1 offset 0 SYNC INIT HEAD KEY: D0000100000_00000000000000000000_00000_0000000001_00000| size 250 SYNC INIT HEAD KEY: D0000100000_00000000000000000001_00000_0000000001_00000| size 250 2025-04-04T17:22:56.351105Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: {0, {1, 281474976715674}, 100000}, State: StateIdle] Process pending events. Count 0 2025-04-04T17:22:56.351165Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: {0, {1, 281474976715674}, 100000}, State: StateIdle] Init complete for topic 'topic_A' Partition: {0, {1, 281474976715674}, 100000} SourceId: test-message_group_id_3 SeqNo: 1 offset: 1 MaxOffset: 2 2025-04-04T17:22:56.351186Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037895, Partition: {0, {1, 281474976715674}, 100000}, State: StateIdle] Init complete for topic 'topic_A' Partition: {0, {1, 281474976715674}, 100000} SourceId: test-message_group_id_1 SeqNo: 1 offset: 0 MaxOffset: 2 2025-04-04T17:22:58.327589Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [12:7489509762837668220:3025], now have 1 active actors on pipe 2025-04-04T17:22:58.327869Z node 12 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] TEvClientDestroyed 72075186224037894 2025-04-04T17:22:58.334790Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:22:58.334977Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-04-04T17:22:58.335351Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:22:58.335714Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] has a tx info 2025-04-04T17:22:58.335752Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PlanStep 1742296505901, PlanTxId 281474976715673, ExecStep 1742296505901, ExecTxId 281474976715673 2025-04-04T17:22:58.335976Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:22:58.336515Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:22:58.336561Z node 12 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:22:58.336578Z node 12 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-04-04T17:22:58.337372Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:22:58.337399Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvSubscribeLock for WriteId {1, 281474976715674} 2025-04-04T17:22:58.337738Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:22:58.338033Z node 12 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: {1, {1, 281474976715674}, 100000}, State: StateInit] bootstrapping {1, {1, 281474976715674}, 100000} [12:7489509762837668247:2536] 2025-04-04T17:22:58.338597Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:22:58.338911Z node 12 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 1, State: StateInit] bootstrapping 1 [12:7489509762837668246:2535] 2025-04-04T17:22:58.339086Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T17:22:58.339415Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitMetaStep 2025-04-04T17:22:58.339721Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T17:22:58.339902Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitDiskStatusStep 2025-04-04T17:22:58.340302Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T17:22:58.340403Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitMetaStep 2025-04-04T17:22:58.340645Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:TInitDataRangeStep] Got data offset 0 count 1 size 250 so 0 eo 1 D0000100000_00000000000000000000_00000_0000000001_00000| 2025-04-04T17:22:58.340663Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T17:22:58.340750Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitDataStep 2025-04-04T17:22:58.341258Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:TInitDataStep] read res partition offset 0 endOffset 1 key 0,1 valuesize 250 expected 250 2025-04-04T17:22:58.341299Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T17:22:58.341317Z node 12 :PERSQUEUE INFO: [topic_A:{1, {1, 281474976715674}, 100000}:TInitEndWriteTimestampStep] 
Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:22:58.341337Z node 12 :PERSQUEUE DEBUG: [topic_A:{1, {1, 281474976715674}, 100000}:Initializer] Initializing completed. 2025-04-04T17:22:58.341365Z node 12 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: {1, {1, 281474976715674}, 100000}, State: StateInit] init complete for topic 'topic_A' partition {1, {1, 281474976715674}, 100000} generation 2 [12:7489509762837668247:2536] 2025-04-04T17:22:58.341411Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {1, {1, 281474976715674}, 100000}, State: StateInit] SYNC INIT topic topic_A partitition {1, {1, 281474976715674}, 100000} so 0 endOffset 1 Head Offset 0 PartNo 0 PackedSize 250 count 1 nextOffset 1 batches 1 SYNC INIT sourceId test-message_group_id_2 seqNo 1 offset 0 SYNC INIT HEAD KEY: D0000100000_00000000000000000000_00000_0000000001_00000| size 250 2025-04-04T17:22:58.341439Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {1, {1, 281474976715674}, 100000}, State: StateIdle] Process pending events. Count 0 2025-04-04T17:22:58.341505Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {1, {1, 281474976715674}, 100000}, State: StateIdle] Init complete for topic 'topic_A' Partition: {1, {1, 281474976715674}, 100000} SourceId: test-message_group_id_2 SeqNo: 1 offset: 0 MaxOffset: 1 2025-04-04T17:22:58.341835Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T17:22:58.342049Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitDataStep 2025-04-04T17:22:58.342080Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T17:22:58.342099Z node 12 :PERSQUEUE INFO: [topic_A:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:22:58.342113Z node 12 :PERSQUEUE DEBUG: [topic_A:1:Initializer] Initializing completed. 2025-04-04T17:22:58.342224Z node 12 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 1, State: StateInit] init complete for topic 'topic_A' partition 1 generation 2 [12:7489509762837668246:2535] 2025-04-04T17:22:58.342273Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateInit] SYNC INIT topic topic_A partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:22:58.342309Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-04-04T17:22:58.342367Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] Topic 'topic_A' partition 1 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2025-04-04T17:22:58.342389Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 1, State: StateIdle] Topic 'topic_A' partition 1 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:22:58.342941Z node 12 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037896][topic_A] TEvClientConnected TabletId 72075186224037894, NodeId 12, Generation 2 2025-04-04T17:22:58.342987Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [12:7489509762837668222:2483], now have 1 active actors on pipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] Test command err: 2025-04-04T17:22:42.167583Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509692608317724:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:42.167810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160d/r3tmp/tmpx7Ku1U/pdisk_1.dat 2025-04-04T17:22:42.473636Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:42.534970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:42.535096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:42.536999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10986 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:22:42.773048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:44.741941Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.743732Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.744348Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-04T17:22:44.746417Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:44.746457Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:44.746476Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.746551Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.746627Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.746731Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.746918Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7489509692608318295:2281], selfId: [1:7489509692608317978:2278], source: [1:7489509692608317978:2278] 2025-04-04T17:22:44.746940Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.746960Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.747869Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-04-04T17:22:44.748026Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7489509692608318295:2281], selfId: [1:7489509692608317978:2278], source: [1:7489509692608317978:2278] 2025-04-04T17:22:44.748313Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2025-04-04T17:22:44.748390Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 4, sender: [1:7489509692608318295:2281], selfId: [1:7489509692608317978:2278], source: [1:7489509692608317978:2278] 2025-04-04T17:22:44.750033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509701198252938:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:44.750149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:48.169618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:22:48.169742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:22:48.169930Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160d/r3tmp/tmpevCBHR/pdisk_1.dat 2025-04-04T17:22:48.457558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:22:48.482767Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:48.482844Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:48.485035Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:48.507638Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:313:2356], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:22:48.509652Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:313:2356], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-04-04T17:22:48.509810Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:313:2356], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:609:2531] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:22:48.510020Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:313:2356], cacheItem# { Subscriber: { Subscriber: [2:609:2531] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:22:48.510148Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:313:2356], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 } 
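[Editor's note] The sequence just above ("Failed to fetch pool info, NOT_FOUND", a scheduled retry on "Transaction ... completed, doublechecking", then a TX_PROXY error "path exist, request accepts it") recurs throughout this log, including at the top of this section. It reads as a create-if-absent race during startup: several actors notice that /Root/.metadata/workload_manager/pools/default is missing, all attempt to create it, and whichever loses the race receives "path exist", which the request is written to tolerate, so the ERROR line is benign. A minimal sketch of that idempotent pattern, with hypothetical names rather than the real YDB actors:

#include <iostream>
#include <mutex>
#include <set>
#include <string>
#include <thread>
#include <vector>

// Sketch of the create-if-absent race seen in the log. Schematic only.
class TCatalog {
public:
    // Returns true if this caller created the path, false if it already existed.
    bool CreateIfAbsent(const std::string& path) {
        std::lock_guard<std::mutex> lock(Mutex_);
        return Paths_.insert(path).second;
    }
private:
    std::mutex Mutex_;
    std::set<std::string> Paths_;
};

int main() {
    TCatalog catalog;
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";

    std::vector<std::string> results(3);
    std::vector<std::thread> sessions;
    for (int i = 0; i < 3; ++i) {
        sessions.emplace_back([&catalog, &pool, &results, i] {
            // The loser of the race sees the path already there; the
            // request accepts that outcome instead of failing.
            results[i] = catalog.CreateIfAbsent(pool)
                ? "created default pool"
                : "path exist, request accepts it";
        });
    }
    for (auto& t : sessions) t.join();
    for (int i = 0; i < 3; ++i) {
        std::cout << "session " << i << ": " << results[i] << "\n";
    }
}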
2025-04-04T17:22:48.510232Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:313:2356], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:22:48.510372Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:313:2356], cacheItem# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:22:48.510568Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:623:2533], recipient# [2:322:2364], result# { ErrorCount: 2 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:22:48.523499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:48.523664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:48.535508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:48.616919Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:313:2356], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /Root PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 500 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" E ... 
s: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:22:55.482193Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:1140:2943], recipient# [2:1138:2941], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } Captured Event Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured 
TEvents::TSystem::Wakeup to BSC_STAT_PROCESSOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NIcNodeCache::TIcNodeCacheServiceActor Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to NKikimr::NBsController::TBlobStorageController::TSelfHealActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to TICKET_PARSER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to KQP_COMPILE_COMPUTATION_PATTERN_SERVICE Captured TEvents::TSystem::Wakeup to KQP_NODE_SERVICE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to PROXY_SCHEME_CACHE Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to TABLET_RESPONSIVENESS_PINGER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NKqp::TSchedulerActor Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-04-04T17:22:56.171277Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(20) 2025-04-04T17:22:56.171352Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 20 sessionId: ydb://session/3?node_id=2&id=ZmM3NWFhZDEtZWQ2ZjJiYmUtMmFjMzIxMDYtMmE2ZDAzOTI= status: TIMEOUT round: 0 2025-04-04T17:22:56.171538Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM3NWFhZDEtZWQ2ZjJiYmUtMmFjMzIxMDYtMmE2ZDAzOTI=, ActorId: [2:1131:2934], ActorState: ExecuteState, TraceId: 
01jr0x7epnb5mebv3wf45kcpbs, Create QueryResponse for error on request, msg: 2025-04-04T17:22:56.171747Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [2:592:2517], selfId: [2:57:2104], source: [2:1131:2934] Send scheduled evet back 2025-04-04T17:22:56.171870Z node 2 :KQP_COMPILE_ACTOR NOTICE: Compilation timeout, self: [2:1133:2936], cluster: db, database: , text: "SELECT * FROM `/Root/Table`;", startTime: 2025-04-04T17:22:55.318384Z 2025-04-04T17:22:56.171947Z node 2 :KQP_COMPILE_ACTOR DEBUG: Send response, self: [2:1133:2936], owner: [2:307:2350], status: TIMEOUT, issues:
: Error: Query compilation timed out. , uid: c96bcc4e-c767274f-24f2a50f-8a87e5c Send captured event back Send captured event back Send captured event back Send captured event back Send captured event back 2025-04-04T17:22:57.164708Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489509758802421791:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:57.164776Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00160d/r3tmp/tmpQ2CNIt/pdisk_1.dat 2025-04-04T17:22:57.352670Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:57.378695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:57.378806Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:57.382342Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63059, node 3 2025-04-04T17:22:57.448073Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:57.448103Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:57.448113Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:57.448239Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:57.702983Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.084799Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:23:00.085711Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-04T17:23:00.085963Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:23:00.086309Z node 3 :KQP_PROXY DEBUG: Updated table service config. 
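[Editor's note] Earlier in this block the proxy handles TEvPrivate::TEvOnRequestTimeout(20), replies TIMEOUT for requestId 20 on its own, and only afterwards does the compile actor notice the deadline and report "Query compilation timed out" for the still-running compilation. A schematic sketch of that proxy-side deadline bookkeeping, assuming a simple in-flight request table keyed by request id (all names are illustrative, not the KQP proxy's real structures):

#include <chrono>
#include <iostream>
#include <map>
#include <string>

// Sketch of request-timeout handling modeled on the log above: the proxy
// registers each request with a deadline and, when a wakeup fires past it,
// answers TIMEOUT itself rather than waiting for compilation to finish.
using Clock = std::chrono::steady_clock;

struct TInflightRequest {
    std::string SessionId;
    Clock::time_point Deadline;
};

int main() {
    std::map<int, TInflightRequest> inflight;
    // Illustrative entry; the deadline is already expired for the demo.
    inflight[20] = {"ydb://session/3?node_id=2&id=...", Clock::now()};

    const int requestId = 20;  // corresponds to TEvOnRequestTimeout(20)
    auto it = inflight.find(requestId);
    if (it != inflight.end() && Clock::now() >= it->second.Deadline) {
        std::cout << "Reply timeout: requestId " << requestId
                  << " sessionId: " << it->second.SessionId
                  << " status: TIMEOUT\n";
        // The compile actor independently hits the same deadline later and
        // reports "Query compilation timed out" with status TIMEOUT.
        inflight.erase(it);
    }
}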
2025-04-04T17:23:00.086336Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:23:00.086388Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-04-04T17:23:00.086744Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:23:00.086793Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:23:00.086879Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:23:00.090611Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 6891, MsgBus: 2809 2025-04-04T17:22:37.297324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509670469515414:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:37.299304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000be5/r3tmp/tmpA26Ki7/pdisk_1.dat 2025-04-04T17:22:37.593844Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6891, node 1 2025-04-04T17:22:37.676130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:37.676277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:37.677570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:37.688783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:37.688814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:37.688821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:37.688962Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2809 TClient is connected to server localhost:2809 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:22:38.181450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:40.154865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509683354417957:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:40.154987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509683354417928:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:40.155420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:40.159919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:40.171472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509683354417966:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:40.258869Z node 1 :TX_PROXY ERROR: Actor# [1:7489509683354418017:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:40.505920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:40.637097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:41.585736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:42.344216Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509670469515414:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:42.349501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 62583, MsgBus: 14239 2025-04-04T17:22:49.710301Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509722428997650:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:49.711042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000be5/r3tmp/tmpu89vpF/pdisk_1.dat 2025-04-04T17:22:49.825193Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:49.841992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:49.842090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:49.843619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62583, node 2 2025-04-04T17:22:49.899595Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:49.899614Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:49.899620Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:49.899720Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14239 TClient is connected to server localhost:14239 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:50.318087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:52.877672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509735313900188:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:52.877768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509735313900180:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:52.878049Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:52.881891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:52.890435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509735313900194:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:22:52.952149Z node 2 :TX_PROXY ERROR: Actor# [2:7489509735313900245:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:52.992570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:53.056803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:54.004203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:54.816560Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509722428997650:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:54.822164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:47.428377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:47.428479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:47.428534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:47.428568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:47.428615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:47.428647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:47.428711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:47.428777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:47.429067Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:47.509445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:47.509510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:47.519118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:47.519361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:47.519520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:47.527566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:47.527831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:47.528382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.529137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:47.535626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.536622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:47.536666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.536810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:47.536849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.536877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:47.536966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.544209Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:47.670135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:47.670381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.670659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:47.670896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:47.670956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.672833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2025-04-04T17:22:47.672972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:47.673169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.673223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:47.673263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:47.673316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:47.675784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.675840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:47.675877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:47.677397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.677438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.677503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.677564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.687336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:47.689019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:47.689204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:47.690234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.690400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:47.690457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.690733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:47.690805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.691000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:47.691102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:47.693034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:47.693082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.693244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.693289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:47.693600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.693657Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:47.693779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.693817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.693859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.693901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.693935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:47.693972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.694009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:47.694037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:47.694108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:47.694143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:47.694172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:47.695988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.696103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.696136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-04-04T17:23:02.594127Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:02.594233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:02.594316Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-04-04T17:23:02.594362Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-04-04T17:23:02.596487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-04-04T17:23:02.596541Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-04-04T17:23:02.596646Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:23:02.596681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:23:02.596716Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-04-04T17:23:02.596745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:23:02.596788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-04-04T17:23:02.596856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:131:2154] message: TxId: 281474976710760 2025-04-04T17:23:02.596898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-04-04T17:23:02.596948Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-04-04T17:23:02.596981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-04-04T17:23:02.597046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-04-04T17:23:02.598953Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-04-04T17:23:02.599022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-04-04T17:23:02.599097Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-04-04T17:23:02.599174Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1170:3024], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:23:02.601052Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:23:02.601136Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1170:3024], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:23:02.601202Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-04-04T17:23:02.603201Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:23:02.603291Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1170:3024], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:23:02.603330Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-04T17:23:02.603512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:23:02.603575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 
102: satisfy waiter [2:1266:3109] TestWaitNotification: OK eventTxId 102 2025-04-04T17:23:02.606463Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-04T17:23:02.606745Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } 2025-04-04T17:23:02.610125Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:02.610402Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 320us result status StatusSuccess 2025-04-04T17:23:02.610871Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-04-04T17:23:02.613684Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:23:02.613936Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 286us result status StatusPathDoesNotExist 2025-04-04T17:23:02.614155Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> KqpProxy::DatabasesCacheForServerless [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> ObjectStorageListingTest::FilterListing [GOOD] >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled |94.2%| [TA] $(B)/ydb/public/sdk/cpp/src/client/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 4600, MsgBus: 2192 2025-04-04T17:22:23.978948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509611848016056:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:23.979102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d1c/r3tmp/tmpfiFlLe/pdisk_1.dat 2025-04-04T17:22:24.341073Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4600, node 1 2025-04-04T17:22:24.392151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:24.392335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:24.394485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:24.471661Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:24.471685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:24.471693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:24.471849Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2192 TClient is connected to server localhost:2192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:25.025089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:25.046935Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:27.006049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509629027885751:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:27.006232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509629027885741:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:27.006461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:27.012871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:27.024991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509629027885763:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:27.123594Z node 1 :TX_PROXY ERROR: Actor# [1:7489509629027885816:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:27.363324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:27.525391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:27.525394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:27.525582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:27.525849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:27.526001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:27.526097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:27.526186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:27.526301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:27.526401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:27.526432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:27.526585Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:27.526603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:27.526675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:27.526726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:27.526736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:27.526828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:27.526908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:27.526924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:27.527077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:27.527183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:27.527292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:27.527401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:27.527499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7489509629027885993:2344];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:27.529418Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7489509629027885994:2345];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:27.561724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509629027885996:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:27.561795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7489509629027885996:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract ... .TEvCancelTransactionProposal TxId: 281474976715671 2025-04-04T17:22:57.027914Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7489509705260334557:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:57.028081Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-04-04T17:22:57.028089Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-04-04T17:22:57.028133Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7489509705260334530:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:57.028142Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[2:7489509705260334546:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:57.028220Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037890 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-04-04T17:22:57.028267Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7489509705260334624:2353];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:57.028279Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037892 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-04-04T17:22:57.028363Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[2:7489509705260334591:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:57.028453Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037893 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal 
event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-04-04T17:22:57.028504Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[2:7489509705260334550:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:57.028527Z node 2 :TX_COLUMNSHARD WARN: TColumnShard.StateWork at 72075186224037895 unhandled event type: NKikimr::TEvDataShard::TEvCancelTransactionProposal event: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715671 2025-04-04T17:22:57.028575Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7489509705260334540:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 2025-04-04T17:22:57.146964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;self_id=[2:7489509709555303255:2482];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037942;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.150111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037942;self_id=[2:7489509709555303255:2482];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037942;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.161179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7489509709555302784:2422];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037907;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.161616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7489509709555302201:2409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037899;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.162178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;self_id=[2:7489509709555302784:2422];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037907;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.162317Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7489509709555302201:2409];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037899;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.297028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[2:7489509713850270873:2555];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037906;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.297323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[2:7489509713850270873:2555];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037906;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.334950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7489509713850270810:2527];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037920;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.335316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[2:7489509713850270810:2527];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037920;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.415736Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7489509709555302785:2423];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037901;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.423520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[2:7489509709555302785:2423];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037901;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.423860Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037938;self_id=[2:7489509713850270764:2524];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037938;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.428832Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7489509709555302786:2424];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037900;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.430460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7489509713850270764:2524];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037938;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.431838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7489509709555302786:2424];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037900;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.456870Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7489509713850270842:2544];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037905;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.457151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:7489509713850270842:2544];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037905;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.457335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7489509709555302839:2427];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037996;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.457481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7489509709555302839:2427];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037996;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.458791Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[2:7489509709555302787:2425];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037931;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.458972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[2:7489509709555302787:2425];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037931;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.459142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509726735175560:3040];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.459284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7489509726735175560:3040];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.462818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7489509726735175587:3051];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.464702Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038014;self_id=[2:7489509726735175587:3051];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-04T17:22:57.466802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7489509726735175562:3042];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.467811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7489509726735175562:3042];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.475158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:57.475186Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2025-04-04T17:23:02.308438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:23:02.309065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:23:02.309149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183d/r3tmp/tmprXxkQT/pdisk_1.dat 2025-04-04T17:23:02.846185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:23:02.896402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:02.941008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:02.941524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:02.955423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:03.053808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.118304Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:23:03.118586Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:03.164618Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:03.164756Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:23:03.167346Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:23:03.167440Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:23:03.167498Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:23:03.169732Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:23:03.169909Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:23:03.169989Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:23:03.180744Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:23:03.214936Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:23:03.215127Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:23:03.215233Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:23:03.215276Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:23:03.215309Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:23:03.215344Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.215741Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:23:03.215858Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:23:03.215976Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:23:03.216034Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:23:03.216095Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:23:03.216139Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:23:03.216205Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:23:03.216622Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:23:03.216833Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:23:03.216919Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:23:03.218572Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.229700Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:23:03.229912Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:23:03.369811Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:23:03.379970Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:23:03.380049Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.380920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:23:03.380971Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:23:03.381031Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:23:03.381278Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:23:03.381448Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:23:03.382049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:23:03.382127Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:23:03.384167Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:23:03.384662Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:23:03.386263Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:23:03.386328Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.387405Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:23:03.387503Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:23:03.388574Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.388920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:23:03.388955Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:23:03.388996Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:23:03.389061Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:23:03.389107Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:23:03.389191Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.393745Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:23:03.393940Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:23:03.394007Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:23:03.418870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:03.419004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:03.419103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:03.426943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:23:03.432672Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.584192Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.587060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:23:03.667493Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:04.405887Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0x7pkr8e7qp8kt8rypj863, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjllYTYyNjktODE1NzU5ZTMtNDdkMjQwZjUtODMyYmZiYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:23:04.431864Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:23:04.432170Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:23:04.445119Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:23:04.445263Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:04.449411Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:858:2694], serverId# [1:859:2695], sessionId# [0:0:0] 2025-04-04T17:23:04.449754Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:23:04.449977Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-04-04T17:23:04.450188Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:858:2694], serverId# [1:859:2695], sessionId# [0:0:0] |94.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2025-04-04T17:23:02.308444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:23:02.308991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:23:02.309044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001844/r3tmp/tmpopScmg/pdisk_1.dat 2025-04-04T17:23:02.846056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:23:02.895704Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:02.941026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:02.941534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:02.954316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:03.053809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.123209Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:23:03.123442Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:03.170095Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:03.170210Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:23:03.171902Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:23:03.171973Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:23:03.172028Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:23:03.172352Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:23:03.172475Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:23:03.172539Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:23:03.183181Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:23:03.206636Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:23:03.208168Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:23:03.208365Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:23:03.208434Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:23:03.208473Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:23:03.208508Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.210128Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:23:03.210354Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:23:03.210561Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:23:03.210632Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:23:03.210728Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:23:03.210777Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:23:03.210866Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:23:03.211369Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:23:03.212559Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:23:03.212705Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:23:03.214801Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.225532Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:23:03.227660Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:23:03.369755Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:23:03.373641Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:23:03.373732Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.374541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:23:03.374589Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:23:03.374648Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:23:03.374890Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:23:03.375078Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:23:03.375517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:23:03.375569Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:23:03.378373Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:23:03.378938Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:23:03.381797Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:23:03.381853Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.383716Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:23:03.383810Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:23:03.384938Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.385284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:23:03.385328Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:23:03.385415Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:23:03.385472Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:23:03.385511Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:23:03.385577Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:03.390904Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:23:03.391121Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:23:03.391185Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:23:03.418870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:03.419015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:03.419127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:03.426969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:23:03.433009Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.584279Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:23:03.587132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:23:03.659536Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:04.405842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0x7pkr4g492kgdk5k0b8z5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTU1ZjJlMmItNWQ5ODIzOGEtZjkxMzFhNjYtN2QwODExMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:23:04.417633Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:23:04.419168Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:23:04.436429Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:23:04.436564Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:23:04.444207Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:858:2694], serverId# [1:859:2695], sessionId# [0:0:0] 2025-04-04T17:23:04.444576Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:23:04.444795Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-04-04T17:23:04.445016Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:858:2694], serverId# [1:859:2695], sessionId# [0:0:0] 2025-04-04T17:23:04.447153Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:864:2700], serverId# [1:865:2701], sessionId# [0:0:0] 2025-04-04T17:23:04.447398Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:23:04.447609Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-04-04T17:23:04.447817Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:864:2700], serverId# [1:865:2701], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2025-04-04T17:22:40.555453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509683435104943:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.562597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:40.607130Z node 3 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489509683387690892:2138];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.607174Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:40.640360Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509686164537743:2267];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.640420Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:40.659620Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489509684281239644:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.659697Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00161f/r3tmp/tmphVmBtC/pdisk_1.dat 2025-04-04T17:22:41.072757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:22:41.371543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.371681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.372543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.372595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.372858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.372922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.374905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.374961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.375098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:41.375146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:41.379994Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-04-04T17:22:41.380068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T17:22:41.380126Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:22:41.380158Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:22:41.380960Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:41.381419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected 2025-04-04T17:22:41.390855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:41.391197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:41.391353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:41.391498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27586 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:22:41.990313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:44.411367Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.413249Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.420419Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:44.420483Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.420511Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:44.420564Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.420599Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.420687Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.425493Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.425641Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.426049Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-04-04T17:22:44.426066Z node 4 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-04-04T17:22:44.426117Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-04-04T17:22:44.426124Z node 4 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-04T17:22:44.426156Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2025-04-04T17:22:44.426159Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-04-04T17:22:44.426347Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T17:22:44.426359Z node 4 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T17:22:44.426391Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-04T17:22:44.432688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72057594046644480 2025-04-04T17:22:44.434511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 2025-04-04T17:22:44.436428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 2025-04-04T17:22:44.445578Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-04-04T17:22:44.445625Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976720659 2025-04-04T17:22:44.445733Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-04-04T17:22:44.445759Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-04-04T17:22:44.445785Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:22:44.445799Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976720658 2025-04-04T17:22:44.574650Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.575745Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.576970Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:22:44.577012Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:22:44.577026Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:22:44.577065Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:22:44.577429Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.577474Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.582160Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.582532Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:22:44.584437Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
Describe result: PathErrorUnknown 2025-04-04T17:22:44.584456Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-04-04T17:22:44.584487Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-04-04T17:22:44.584593Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-04-04T17:22:44.586225Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-04-04T17:22:44.586258Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-04-04T17:22:44.586282Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-04-04T17:22:44.627251Z node 5 :KQP_PR ... essor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:51.832501Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:51.832606Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:51.839173Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-04-04T17:22:51.840121Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:51.896836Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897016Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897100Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897167Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897230Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897317Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897407Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897471Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.897536Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:51.952161Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:51.952270Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:51.956337Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:52.054518Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:52.056382Z node 8 :STATISTICS WARN: [72075186224037894] TTxInit::Complete. 
EnableColumnStatistics=false 2025-04-04T17:22:52.138444Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:52.154679Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509736517748304:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:52.154755Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:22:52.165722Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:52.165801Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:52.168703Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-04-04T17:22:52.169490Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:52.411536Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:52.411663Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:52.412010Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412154Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412380Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412478Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412559Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412649Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412737Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412851Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.412986Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:22:52.415471Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:52.507400Z node 7 :STATISTICS WARN: [72075186224038895] TTxInit::Complete. 
EnableColumnStatistics=false 2025-04-04T17:22:52.507827Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:52.567890Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:52.662567Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:52.782857Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7489509736517749256:2535], Database: /Root/test-serverless, Start database fetching 2025-04-04T17:22:52.783073Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7489509736517749256:2535], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2025-04-04T17:22:53.297469Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489509720382254813:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:53.297574Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:55.416473Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:22:55.416748Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:22:55.416768Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:22:55.417146Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7489509747593782422:2345], Start check tables existence, number paths: 2 2025-04-04T17:22:55.417232Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-04T17:22:55.419379Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7489509747593782422:2345], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-04-04T17:22:55.419449Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7489509747593782422:2345], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:22:55.419483Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7489509747593782422:2345], Successfully finished 2025-04-04T17:22:55.419525Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:22:55.986753Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-04-04T17:22:55.987408Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7489509749402651259:2369], Start check tables existence, number paths: 2 2025-04-04T17:22:55.987564Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-04-04T17:22:55.987577Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-04-04T17:22:55.991342Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7489509749402651259:2369], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 
2025-04-04T17:22:55.991413Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7489509749402651259:2369], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-04-04T17:22:55.991446Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7489509749402651259:2369], Successfully finished 2025-04-04T17:22:55.991557Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2025-04-04T17:22:55.991575Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-04-04T17:22:56.822576Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7489509730413912461:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:56.822704Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:57.154878Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489509736517748304:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:57.154967Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:23:02.786634Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-04-04T17:23:02.787375Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:23:02.787696Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-04-04T17:23:02.787928Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:23:02.794671Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MjgxZGIwYWItNThmMjRhYzItNjRlMDRmZmQtMjdjYmY3NmI=, ActorId: [6:7489509733267157528:2334], ActorState: ReadyState, Session closed due to explicit close event 2025-04-04T17:23:02.798543Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MjgxZGIwYWItNThmMjRhYzItNjRlMDRmZmQtMjdjYmY3NmI=, ActorId: [6:7489509733267157528:2334], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-04-04T17:23:02.798595Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjgxZGIwYWItNThmMjRhYzItNjRlMDRmZmQtMjdjYmY3NmI=, ActorId: [6:7489509733267157528:2334], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-04-04T17:23:02.798635Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjgxZGIwYWItNThmMjRhYzItNjRlMDRmZmQtMjdjYmY3NmI=, ActorId: [6:7489509733267157528:2334], ActorState: unknown state, Cleanup temp tables: 0 2025-04-04T17:23:02.798885Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjgxZGIwYWItNThmMjRhYzItNjRlMDRmZmQtMjdjYmY3NmI=, ActorId: [6:7489509733267157528:2334], ActorState: unknown state, Session actor destroyed |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] Test command err: Trying to start YDB, 
gRPC: 15986, MsgBus: 15852 2025-04-04T17:22:35.768156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509662756666674:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:35.768324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bfb/r3tmp/tmpkRcLVQ/pdisk_1.dat 2025-04-04T17:22:36.110472Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:36.115500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:36.115583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:36.118304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15986, node 1 2025-04-04T17:22:36.182999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:36.183022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:36.183040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:36.183212Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15852 TClient is connected to server localhost:15852 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:36.729555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.747065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:38.466674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509675641569219:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.466739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509675641569205:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.466856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:38.471404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:38.481248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509675641569222:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:38.563970Z node 1 :TX_PROXY ERROR: Actor# [1:7489509675641569273:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:38.748091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.856014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.653925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:40.764286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509662756666674:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:40.764342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:41.143834Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-04-04T17:22:41.144025Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-04T17:22:41.144134Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-04-04T17:22:41.144301Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509688526479871:2968], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7489509684231512367:2968]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7489509688526479871:2968].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T17:22:41.144851Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509688526479856:2968], SessionActorId: [1:7489509684231512367:2968], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7489509684231512367:2968]. isRollback=0 2025-04-04T17:22:41.145131Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGFhMTk2NDMtZDRlNWM5YWEtNDVjMzM2ZWMtZDc1NmJjN2M=, ActorId: [1:7489509684231512367:2968], ActorState: ExecuteState, TraceId: 01jr0x70sy3bnmxk90eenkpgvg, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7489509688526479857:2968] from: [1:7489509688526479856:2968] 2025-04-04T17:22:41.145218Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489509688526479857:2968] TxId: 281474976710665. Ctx: { TraceId: 01jr0x70sy3bnmxk90eenkpgvg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFhMTk2NDMtZDRlNWM5YWEtNDVjMzM2ZWMtZDc1NmJjN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T17:22:41.146786Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGFhMTk2NDMtZDRlNWM5YWEtNDVjMzM2ZWMtZDc1NmJjN2M=, ActorId: [1:7489509684231512367:2968], ActorState: ExecuteState, TraceId: 01jr0x70sy3bnmxk90eenkpgvg, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 11529, MsgBus: 8938 2025-04-04T17:22:47.221537Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509713787055055:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:47.221589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bfb/r3tmp/tmpIozWBD/pdisk_1.dat 2025-04-04T17:22:47.367729Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:47.384653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:47.384753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:47.387986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11529, node 2 2025-04-04T17:22:47.438780Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:47.438802Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:47.438812Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:47.438931Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8938 TClient is connected to server localhost:8938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:47.801075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:50.179975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509726671957606:2329], DatabaseId: / ... 
let_id=72075186224038066;self_id=[2:7489509743851833044:3258];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.802788Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7489509743851833047:3259];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.803361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7489509743851833047:3259];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.806385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7489509743851833048:3260];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.806866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7489509743851833048:3260];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.845857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7489509743851833035:3253];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.846441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7489509743851833035:3253];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.846688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7489509743851832981:3234];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.847083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7489509743851832981:3234];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.847368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7489509743851833024:3250];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.847716Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7489509743851833024:3250];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.847936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7489509743851833074:3267];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.848293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7489509743851833074:3267];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.848486Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038094;self_id=[2:7489509743851832795:3189];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.848841Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7489509743851832795:3189];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.849056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7489509743851832979:3233];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.849364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7489509743851832979:3233];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.849592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509743851833031:3252];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.849929Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509743851833031:3252];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.850132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7489509743851832898:3216];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038080;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.850568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7489509743851832898:3216];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.850802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7489509743851832919:3218];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.851184Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7489509743851832919:3218];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.851551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7489509743851832998:3242];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.851872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7489509743851832998:3242];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.852247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7489509743851832837:3200];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.852572Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038091;self_id=[2:7489509743851832837:3200];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.852751Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7489509743851832969:3227];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.853054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7489509743851832969:3227];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.874617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7489509743851832993:3241];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:22:57.875007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7489509743851832993:3241];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-04-04T17:22:58.266275Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[2:7489509726671957870:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.266403Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[2:7489509726671957868:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.266463Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[2:7489509726671957860:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.266692Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7489509726671957984:2353];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.266784Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7489509726671957872:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.266845Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7489509726671957883:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.266907Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037892;self_id=[2:7489509726671957871:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.266961Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7489509726671957873:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.267398Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7489509726671957859:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-04-04T17:22:58.267487Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7489509726671957864:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:23:02.354340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:23:02.354373Z node 2 :IMPORT WARN: Table profiles were not loaded |94.2%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-04-04T17:19:07.864517Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:07.953632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:07.970097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:07.970358Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:07.977804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:07.978028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:07.978271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:07.978396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:07.978516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:07.978634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:07.978725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:07.978831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:07.978961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:07.979097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:07.979220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:07.979312Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:08.005299Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:08.005554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:08.005619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:08.005793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:08.005958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:08.006032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:08.006089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:08.006199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:08.006296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:08.006352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:08.006396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:08.006596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:08.006665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:08.006706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:08.006739Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:08.006820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:08.006891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:08.006955Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:08.006987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:08.007061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:08.007098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:08.007148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:08.007202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:08.007242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:08.007277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:08.007700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-04T17:19:08.007822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=61; 2025-04-04T17:19:08.007921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-04T17:19:08.007996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-04T17:19:08.008166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:08.008219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:08.008251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:08.008428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:08.008480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:08.008519Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:08.008675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:08.008725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:08.008763Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:08.008930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:08.008967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:08.008994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:08.009107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:08.009146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:08.009215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BL
OB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-04T17:23:04.833295Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11161:12788];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:23:04.835596Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11161:12788];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 18506, MsgBus: 64543 2025-04-04T17:22:35.240026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:22:35.240532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:22:35.240593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1e/r3tmp/tmpIVt10U/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18506, node 1 2025-04-04T17:22:35.710523Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:35.714436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:35.714483Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:35.714522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:35.714798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:22:35.757610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:35.757818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:35.769518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64543 TClient is connected to server localhost:64543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:36.161607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.202540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:36.556150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:36.951269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:37.241965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:37.937502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1807:3404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:37.937681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:37.960894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.187433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.429612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.699651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:38.931448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.247691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:39.517067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2393:3855], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.517206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.517608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2398:3860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:39.523342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:39.686385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2400:3862], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:39.734778Z node 1 :TX_PROXY ERROR: Actor# [1:2465:3908] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:40.731306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:22:40.958432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:22:41.354939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18568, MsgBus: 25157 2025-04-04T17:22:46.249988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:22:46.250114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:22:46.250319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1e/r3tmp/tmpcJWKcD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18568, node 2 2025-04-04T17:22:46.625405Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:46.626631Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:46.626720Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:46.626759Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:46.627207Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:22:46.661847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:46.661975Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:46.673327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25157 TClient is connected to server localhost:25157 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:47.020127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:47.086337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 20 ... 
72057594046644480 2025-04-04T17:22:48.881563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:49.130335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:49.392247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:49.633874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:22:49.958690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:22:50.262508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2396:3855], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:50.262627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:50.263042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2401:3860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:50.269264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:22:50.433267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2403:3862], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:22:50.480853Z node 2 :TX_PROXY ERROR: Actor# [2:2466:3906] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:51.420928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:22:51.662470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:22:52.021334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9493, MsgBus: 4988 2025-04-04T17:22:58.016128Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:99:2145], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:22:58.016515Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:22:58.016616Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c1e/r3tmp/tmpDPOQl2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9493, node 3 2025-04-04T17:22:58.437714Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:58.438795Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:58.438864Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:58.438912Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:58.439424Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:22:58.476447Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:58.476602Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:58.488443Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4988 TClient is connected to server localhost:4988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:58.794866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:58.874140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:59.180657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
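The sequence that repeats for each node above — the default pool fetch fails with NOT_FOUND, an ESchemeOpCreateResourcePool suboperation follows, the TX_PROXY "path exist, request accepts it" issue is tolerated, and a "Transaction ... completed, doublechecking" retry re-reads the result — is the workload service lazily provisioning /Root/.metadata/workload_manager/pools/default on first use; these warnings are expected on a fresh test database. Below is a minimal sketch of that ensure-exists-with-retry pattern. All names here (FetchPool, CreatePool, EnsurePool, EStatus) are hypothetical illustrations, not the actual KQP workload service API:

#include <chrono>
#include <iostream>
#include <string>
#include <thread>

// Hypothetical status codes mirroring the log: NOT_FOUND on the first fetch,
// a lost creation race surfacing as AlreadyExists ("path exist, request
// accepts it"), and Retryable for "Transaction ... completed, doublechecking".
enum class EStatus { Ok, NotFound, AlreadyExists, Retryable };

// Stand-in stubs; in YDB these would be scheme-cache / scheme-shard requests.
static bool g_poolExists = false;
EStatus FetchPool(const std::string&) { return g_poolExists ? EStatus::Ok : EStatus::NotFound; }
EStatus CreatePool(const std::string&) {
    if (g_poolExists) return EStatus::AlreadyExists;
    g_poolExists = true;
    return EStatus::Retryable;  // creation committed, but re-check before trusting it
}

// Ensure-exists with bounded exponential backoff: fetch, create on NOT_FOUND,
// treat a lost creation race as success, re-check after retryable outcomes.
bool EnsurePool(const std::string& path) {
    for (int attempt = 0; attempt < 5; ++attempt) {
        if (FetchPool(path) == EStatus::Ok) return true;
        EStatus st = CreatePool(path);
        if (st == EStatus::Ok || st == EStatus::AlreadyExists) return true;
        // "Scheduled retry for error", as in the log lines above.
        std::this_thread::sleep_for(std::chrono::milliseconds(100 << attempt));
    }
    return false;
}

int main() {
    std::cout << EnsurePool("/Root/.metadata/workload_manager/pools/default") << "\n";
}

The design point the log illustrates is that losing the creation race counts as success, which is why the severity-1 "path exist" issue is logged and then ignored.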
2025-04-04T17:22:59.572347Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:59.932054Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.496325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1806:3402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:00.496732Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:00.519172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:00.741938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:23:00.993601Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:23:01.242450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:23:01.484144Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:23:01.818804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:23:02.091413Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2393:3853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:02.091592Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:02.092133Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2398:3858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:02.098571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:23:02.266078Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2400:3860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:23:02.312191Z node 3 :TX_PROXY ERROR: Actor# [3:2465:3906] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:03.292645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.596943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.979353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::DropByDisconnect >> BsControllerConfig::OverlayMap >> TBoardSubscriberTest::NotAvailableByShutdown >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> BsControllerConfig::MergeIntersectingBoxes >> BsControllerConfig::ManyPDisksRestarts |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> BsControllerConfig::SelectAllGroups >> TBoardSubscriberTest::ReconnectReplica >> TBoardSubscriberTest::ManySubscribersManyPublisher >> BsControllerConfig::ExtendByCreatingSeparateBox >> TSequence::CreateSequenceParallel >> BsControllerConfig::OverlayMapCrossReferences >> TSequence::CreateSequence >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 63435, MsgBus: 12265 2025-04-04T17:22:29.844570Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509636509189034:2196];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:29.850321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c48/r3tmp/tmpjRnLNp/pdisk_1.dat 2025-04-04T17:22:30.242860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:30.250579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:30.250676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:30.253975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63435, node 1 2025-04-04T17:22:30.338459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-04-04T17:22:30.338480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:30.338485Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:30.338606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12265 TClient is connected to server localhost:12265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:30.881742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:32.711620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509649394091433:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:32.711674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509649394091451:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:32.711718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:32.715055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:32.724558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509649394091454:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:32.781543Z node 1 :TX_PROXY ERROR: Actor# [1:7489509649394091505:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:32.997948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:33.147117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:33.147291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:33.147545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:33.147675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:33.147804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:33.147937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:33.148083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:33.148207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:33.148326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:33.148452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:33.148565Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:33.148589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:33.148685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:33.148721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489509653689059000:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:33.148916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:33.149042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:33.149171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:33.149295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:33.149435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:22:33.149581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:22:33.149738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:22:33.149892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:22:33.150018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:22:33.150145Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7489509653689059002:2352];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:22:33.178323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509653689058993:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:33.178376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489509653689058993:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:33.178571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;sel ... 4385Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7489509750335412129:3405];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.106355Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7489509750335412072:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.107401Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7489509750335412072:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.107616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7489509750335411915:3345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.109122Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7489509750335411915:3345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.110158Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7489509750335411838:3318];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.110390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7489509750335411838:3318];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.124931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7489509750335412137:3406];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.125253Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7489509750335412137:3406];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.125463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[2:7489509750335412020:3377];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038083;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.125615Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[2:7489509750335412020:3377];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038083;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.147325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7489509750335412152:3412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.224851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7489509750335412085:3393];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.225203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7489509750335412085:3393];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.230790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7489509750335412125:3403];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.231137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7489509750335412125:3403];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.233220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7489509750335412245:3423];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.233461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7489509750335412245:3423];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.233695Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509750335411878:3334];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.233907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7489509750335411999:3367];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.234185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7489509750335411786:3269];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038080;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.234397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7489509750335411786:3269];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.234597Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7489509750335412145:3407];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 
2025-04-04T17:23:00.234761Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7489509750335412145:3407];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.234932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7489509750335412122:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.235094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7489509750335412122:3402];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.235313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7489509750335412178:3416];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.235466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7489509750335412178:3416];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.239257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7489509750335411852:3327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.239493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7489509750335411852:3327];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.239708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7489509750335411854:3328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.239857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7489509750335411854:3328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.240674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7489509750335411934:3359];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.240852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7489509750335411934:3359];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.241245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509750335412208:3422];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.241451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7489509750335412208:3422];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 
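The dense run of skip_indexation warnings above comes from each column shard re-evaluating the same gate on every TEvPeriodicWakeup / TEvNotifyPlanStep while background indexation is disabled for the test, so the line repeats once per tablet per wakeup event. A reduced sketch of that gate — TColumnShardSketch and its fields are hypothetical, not the real TColumnShard:

#include <cstdint>
#include <cstdio>

// Hypothetical reduction of the column shard's indexation gate: every wakeup
// event re-checks the same precondition, so a disabled flag produces the
// repeated "event=skip_indexation;reason=disabled" lines seen above.
struct TColumnShardSketch {
    uint64_t TabletId;
    bool IndexationEnabled = false;  // disabled in these tests

    void OnWakeup(const char* event) {
        if (!IndexationEnabled) {
            std::printf("tablet_id=%lu;ev=%s;event=skip_indexation;reason=disabled;\n",
                        (unsigned long)TabletId, event);
            return;
        }
        // ... otherwise schedule the actual background indexation task ...
    }
};

int main() {
    TColumnShardSketch shard{72075186224038061};
    shard.OnWakeup("TEvPeriodicWakeup");
    shard.OnWakeup("TEvNotifyPlanStep");
}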
2025-04-04T17:23:00.243129Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7489509750335412128:3404];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.243338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7489509750335412128:3404];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.245723Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7489509750335412152:3412];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.245978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7489509750335412007:3372];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.246200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7489509750335412007:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.269183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7489509750335411878:3334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T17:23:00.269460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7489509750335411999:3367];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-04-04T17:23:02.374450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:23:02.374493Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> BsControllerConfig::OverlayMap [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::RejectsCancel |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> KqpSinkTx::DeferredEffects [GOOD] >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> DataStreams::ListStreamsValidation |94.3%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |94.3%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> VDiskBalancing::TestRandom_Mirror3dc >> VDiskBalancing::TestRandom_Block42 >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> VDiskBalancing::TestStopOneNode_Block42 >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> DataStreams::TestShardPagination [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> BsControllerConfig::SelectAllGroups [GOOD] >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> VDiskBalancing::TestStopOneNode_Mirror3dc >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2025-04-04T17:22:55.109955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509748769839073:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.110010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183f/r3tmp/tmpHJ4fmI/pdisk_1.dat 2025-04-04T17:22:55.645205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:55.658651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.658721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.665894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31264, node 1 2025-04-04T17:22:55.861835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.861873Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.861880Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.861996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21226 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.282635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.397684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:21226 2025-04-04T17:22:56.611156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.622903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-04T17:22:57.010788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:57.333290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:00.097024Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509771861852702:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:00.097165Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183f/r3tmp/tmpvxBuya/pdisk_1.dat 2025-04-04T17:23:00.256318Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:00.288824Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:00.288904Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:00.292321Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14286, node 4 2025-04-04T17:23:00.408830Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:00.408854Z node 4 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-04-04T17:23:00.408865Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:00.409050Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:00.668246Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.721881Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20395 2025-04-04T17:23:00.922027Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
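The HIVE warnings during each node start trace the node's volatile state through Unknown -> Disconnected -> Connecting -> Connected before the client can connect. A toy state machine reproducing that progression, logging transitions in the same "VolatileState: X -> Y" shape; the types are illustrative only, not Hive's implementation:

#include <iostream>

enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* Name(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// Advance one step toward Connected, logging the transition.
EVolatileState Step(EVolatileState s) {
    EVolatileState next = s;
    switch (s) {
        case EVolatileState::Unknown:      next = EVolatileState::Disconnected; break;
        case EVolatileState::Disconnected: next = EVolatileState::Connecting;   break;
        case EVolatileState::Connecting:   next = EVolatileState::Connected;    break;
        case EVolatileState::Connected:    break;
    }
    std::cout << "VolatileState: " << Name(s) << " -> " << Name(next) << "\n";
    return next;
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    while (s != EVolatileState::Connected) s = Step(s);
}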
encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2025-04-04T17:23:02.142037Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.350544Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.448627Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.592697Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:23:03.805567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:23:05.240969Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509793463943673:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:05.241036Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183f/r3tmp/tmp1RYE7a/pdisk_1.dat 2025-04-04T17:23:05.423252Z 
node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:05.467050Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:05.467153Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:05.471845Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12655, node 7 2025-04-04T17:23:05.560751Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:05.560773Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:05.560783Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:05.560950Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:05.823900Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:05.884554Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:28097 2025-04-04T17:23:06.102106Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
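In the DataStreams output above, each put is routed to one of shard-000000..shard-000002 by its partition key, and the ALTER_SCHEME request splits partition 1 at SplitBoundary "a". The sketch below shows boundary-based routing under the assumption that each shard owns a half-open key range and a split inserts a new lower bound; the TShard/Route names and concrete boundaries are illustrative, and the real routing function (e.g. hashing the partition key into a range) is an assumption, not confirmed by this log:

#include <iostream>
#include <string>
#include <vector>

// Each shard owns keys in [Begin, next shard's Begin); "" means minus infinity.
struct TShard {
    std::string Id;
    std::string Begin;
};

// Shards are ordered by Begin; the owner is the last shard whose range
// starts at or before the key.
std::string Route(const std::vector<TShard>& shards, const std::string& key) {
    const TShard* owner = &shards.front();
    for (const auto& s : shards)
        if (s.Begin <= key) owner = &s;
    return owner->Id;
}

int main() {
    std::vector<TShard> shards = {
        {"shard-000000", ""}, {"shard-000001", "f"}, {"shard-000002", "s"}};
    std::cout << Route(shards, "cat") << "\n";  // shard-000000

    // Split the first shard's range at boundary "a": keys >= "a" that it
    // used to own move to the newly inserted shard.
    shards.insert(shards.begin() + 1, {"shard-000003", "a"});
    std::cout << Route(shards, "cat") << "\n";  // shard-000003
}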
>> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2025-04-04T17:23:08.327718Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:08.345366Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:08.350447Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T17:23:08.353484Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:08.355314Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T17:23:08.355895Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:08.356677Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:08.357174Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T17:23:08.370192Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T17:23:08.370454Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T17:23:08.371673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T17:23:08.371838Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:08.371998Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:08.372149Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T17:23:08.551736Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.122512s 2025-04-04T17:23:08.551863Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.122665s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] Test command err: Trying to start YDB, gRPC: 65090, MsgBus: 5696 2025-04-04T17:22:31.960858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509647748133872:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:31.964048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c38/r3tmp/tmp9vtz8J/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65090, node 1 2025-04-04T17:22:32.311696Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
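The BS_CONTROLLER errors at the end of the SelectAllGroups output above ("event processing took too much time ... Duration# 0.122512s") are a watchdog report rather than a test failure: the controller times each event-handler run and complains when one exceeds a budget, which asan builds routinely trip. A minimal sketch of such a guard, with an assumed ~100 ms threshold and hypothetical names:

#include <chrono>
#include <cstdio>
#include <functional>

// Time a single event-handler invocation and warn when it exceeds a budget,
// in the spirit of the "took too much time Type# ... Duration#" lines above.
// The threshold and function names are illustrative, not BS_CONTROLLER's.
void ProcessTimed(unsigned eventType, const std::function<void()>& handler) {
    using Clock = std::chrono::steady_clock;
    const auto budget = std::chrono::milliseconds(100);
    const auto start = Clock::now();
    handler();
    const auto spent = Clock::now() - start;
    if (spent > budget) {
        std::printf("event processing took too much time Type# %u Duration# %.6fs\n",
                    eventType, std::chrono::duration<double>(spent).count());
    }
}

int main() {
    ProcessTimed(268637706, [] {
        // Simulate a slow handler (e.g. a full scan running under sanitizers).
        volatile unsigned long long x = 0;
        for (unsigned long long i = 0; i < 300000000ULL; ++i) x += i;
    });
}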
2025-04-04T17:22:32.311736Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:22:32.338008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:32.361216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:32.361246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:32.361281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:32.361427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:22:32.361775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:32.361900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:32.367012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5696 TClient is connected to server localhost:5696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:32.898855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:35.001174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509660633036335:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:35.001267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509660633036349:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:35.001342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:22:35.005345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:22:35.016647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509664928003655:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:22:35.118479Z node 1 :TX_PROXY ERROR: Actor# [1:7489509664928003706:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:35.352179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:22:35.467052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:36.446263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:37.119835Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509647748133872:2139];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:37.129709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:22:47.338542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:47.338568Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:51.149251Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509733647490246:3225], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01jr0x7am7b9fp4ch84c9nj9bx. SessionId : ydb://session/3?node_id=1&id=NWYyNmQ2NmMtMzJlYjVhMDItOTdlNGY5ZjItYTI3YjQ3ZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1743787357515/18446744073709551615 shard 72075186224037889 with lowWatermark v1743787357767/18446744073709551615 (node# 1 state# Ready) } } 2025-04-04T17:22:51.149744Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509733647490246:3225], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01jr0x7am7b9fp4ch84c9nj9bx. SessionId : ydb://session/3?node_id=1&id=NWYyNmQ2NmMtMzJlYjVhMDItOTdlNGY5ZjItYTI3YjQ3ZmQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1743787357515/18446744073709551615 shard 72075186224037889 with lowWatermark v1743787357767/18446744073709551615 (node# 1 state# Ready) } }. 2025-04-04T17:22:51.150465Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489509733647490242:2969] TxId: 281474976710678. Ctx: { TraceId: 01jr0x7am7b9fp4ch84c9nj9bx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYyNmQ2NmMtMzJlYjVhMDItOTdlNGY5ZjItYTI3YjQ3ZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-04-04T17:22:51.150572Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509733647490247:3226], TxId: 281474976710678, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWYyNmQ2NmMtMzJlYjVhMDItOTdlNGY5ZjItYTI3YjQ3ZmQ=. TraceId : 01jr0x7am7b9fp4ch84c9nj9bx. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489509733647490242:2969], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:22:51.150837Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489509733647490248:3227], TxId: 281474976710678, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWYyNmQ2NmMtMzJlYjVhMDItOTdlNGY5ZjItYTI3YjQ3ZmQ=. TraceId : 01jr0x7am7b9fp4ch84c9nj9bx. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489509733647490242:2969], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:22:51.152326Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWYyNmQ2NmMtMzJlYjVhMDItOTdlNGY5ZjItYTI3YjQ3ZmQ=, ActorId: [1:7489509673517946711:2969], ActorState: ExecuteState, TraceId: 01jr0x7am7b9fp4ch84c9nj9bx, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 13235, MsgBus: 13694 2025-04-04T17:22:57.213214Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509756674129675:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:57.213329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c38/r3tmp/tmpNsOvUV/pdisk_1.dat 2025-04-04T17:22:57.324473Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13235, node 2 2025-04-04T17:22:57.365574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:57.365995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:57.379296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:57.418843Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:57.418870Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:57.418894Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:57.419043Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13694 TClient is connected to server localhost:13694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:57.832742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.888464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509769559032222:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:00.888535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509769559032217:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:00.888725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:00.893393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:23:00.905664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509769559032232:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:23:00.993812Z node 2 :TX_PROXY ERROR: Actor# [2:7489509769559032283:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:01.033376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:23:01.070703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:23:02.160025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:02.216800Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509756674129675:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:02.216884Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-04-04T17:22:55.116607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509746814162797:2161];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.120250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001855/r3tmp/tmpEJeEJS/pdisk_1.dat 2025-04-04T17:22:55.560209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.566388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.582949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:55.598150Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13380, node 1 2025-04-04T17:22:55.814757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.814794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.814802Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.814921Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.288753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.400058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:19752 2025-04-04T17:22:56.583588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.602989Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-04-04T17:22:57.066621Z node 1 :TX_PROXY ERROR: Actor# [1:7489509755404099449:3450] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:22:59.791307Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509764848580978:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:59.791703Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001855/r3tmp/tmpG2ZIV8/pdisk_1.dat 2025-04-04T17:23:00.018755Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:00.064397Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:00.064509Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:00.071403Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28324, node 4 2025-04-04T17:23:00.214180Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:00.214212Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:00.214219Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:00.214368Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:00.492634Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:00.548111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11572 2025-04-04T17:23:00.767802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:05.814555Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509791850463460:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:05.814653Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001855/r3tmp/tmpgtXC8O/pdisk_1.dat 2025-04-04T17:23:05.939193Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:05.980676Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:05.980778Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:05.982974Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9384, node 7 2025-04-04T17:23:06.047135Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:06.047183Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:06.047194Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:06.047356Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:06.353036Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:06.408842Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2465 2025-04-04T17:23:06.570566Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:06.592367Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 >> RemoteTopicReader::ReadTopic [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> DataStreams::TestUnsupported [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2025-04-04T17:21:40.376612Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509427539879180:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:40.376681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:40.566080Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001bee/r3tmp/tmpQqOmuR/pdisk_1.dat 2025-04-04T17:21:40.779560Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17528, node 1 2025-04-04T17:21:40.794333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:40.795267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:40.802401Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:40.802458Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:40.803613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:40.956520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001bee/r3tmp/yandexbX9tUD.tmp 2025-04-04T17:21:40.956571Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001bee/r3tmp/yandexbX9tUD.tmp 2025-04-04T17:21:40.956874Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001bee/r3tmp/yandexbX9tUD.tmp 2025-04-04T17:21:40.957028Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:41.275814Z INFO: TTestServer started on Port 14796 GrpcPort 17528 TClient is connected to server localhost:14796 PQClient connected to localhost:17528 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:41.646579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:41.663216Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:41.674596Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:21:41.682794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:21:41.861896Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:21:43.597797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509440424781884:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.597828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509440424781872:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.597940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.607124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:21:43.610264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509440424781918:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.610702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:43.619254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509440424781887:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:21:43.701947Z node 1 :TX_PROXY ERROR: Actor# [1:7489509440424781943:2450] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:44.062872Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509440424781959:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:44.076487Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmM4MGZiMDItMTg1NmI0YTYtNzgyYWExNi04OWVjZWVhYw==, ActorId: [1:7489509440424781870:2337], ActorState: ExecuteState, TraceId: 01jr0x58mwdcyf34yv9s8p2xp3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:44.079348Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:44.116720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.150369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:21:44.266971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489509444719749546:2640] 2025-04-04T17:21:45.382520Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509427539879180:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:45.382624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:21:50.842226Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:21:50.857808Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7489509427539879585:2193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:21:50.857864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:21:50.857917Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7489509427539879585:2193], Recipient [1:7489509427539879585:2193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:21:50.857933Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:21:50.907939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489509470489553636:2804], Recipient [1:7489509427539879585:2193]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:21:50.907974Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:21:50.907999Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:21:50.908042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489509470489553632:2801], Recipient [1:7489509427539879585:2193]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:21:50.908061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:21:51.020998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "origin" Columns { Name: "id" Type: "Uint64" NotNull: false } Columns { Name: "order" Type: "Uint64" NotNull: false } Columns { Name: "value" Type: "Utf8" NotNull: false } KeyColumnNames: "id" KeyColumnNames: "order" UniformPartitionsCount: 64 PartitionConfig { PartitioningPolicy { MinPartitionsCount: 64 MaxPartitionsCount: 64 } } Temporary: false } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:21:51.021380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/origin, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:21:51.021516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/origin, opId: 281474976710673:0, schema: Name: "origin" Columns { ... 72075186224037960, Partition: 7, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.930954Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 5, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.930996Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 5, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.931002Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037960, Partition: 7, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.931013Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 5, State: StateIdle] TPartition::ProcessReserveRequests. 
2025-04-04T17:23:00.931019Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037960, Partition: 7, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:23:00.931064Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 5, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:23:00.931068Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037960, Partition: 7, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:23:00.946747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7489509427539879585:2193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:23:00.946793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:23:00.946834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7489509427539879585:2193], Recipient [1:7489509427539879585:2193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:23:00.946846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:23:00.947206Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7489509702417794646:3227], Partition 3, Sender [0:0:0], Recipient [1:7489509702417794703:3235], Cookie: 0 2025-04-04T17:23:00.947278Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7489509702417794703:3235]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.947305Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.947347Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.947424Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.947450Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:23:00.947474Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:23:00.948223Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7489509749662435886:3353], Partition 4, Sender [0:0:0], Recipient [1:7489509749662436066:3366], Cookie: 0 2025-04-04T17:23:00.948278Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7489509749662436066:3366]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.948295Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.948323Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.948366Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.948392Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:23:00.948412Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:23:00.967748Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7489509758252370833:3397], Partition 6, Sender [0:0:0], Recipient [1:7489509762547338689:3428], Cookie: 0 2025-04-04T17:23:00.967830Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7489509762547338689:3428]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.967852Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.967897Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037960, Partition: 6, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.967988Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037960, Partition: 6, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.968020Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037960, Partition: 6, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:23:00.968047Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037960, Partition: 6, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:23:00.970763Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7489509474784523566:2681], Partition 1, Sender [0:0:0], Recipient [1:7489509474784523853:2750], Cookie: 0 2025-04-04T17:23:00.970823Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7489509474784523853:2750]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.970842Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.970886Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.970947Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.970974Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:23:00.970998Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:23:00.971208Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7489509474784523566:2681], Partition 0, Sender [0:0:0], Recipient [1:7489509474784523854:2751], Cookie: 0 2025-04-04T17:23:00.971250Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7489509474784523854:2751]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.971270Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.971271Z node 1 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [1:7489509702417794646:3227], Partition 2, Sender [0:0:0], Recipient [1:7489509702417794702:3234], Cookie: 0 2025-04-04T17:23:00.971296Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.971339Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.971358Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:23:00.971367Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [1:7489509702417794702:3234]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.971376Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:23:00.971388Z node 1 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:23:00.971421Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:23:00.971471Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:23:00.971508Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:23:00.971540Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037958, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:23:00.973998Z node 1 :PERSQUEUE TRACE: StateIdle event# 65538 (NActors::TEvents::TEvWakeup), Tablet [1:7489509749662435886:3353], Partition 5, Sender [0:0:0], Recipient [1:7489509749662436065:3365], Cookie: 0 2025-04-04T17:23:00.974071Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [1:7489509749662436065:3365]: NActors::TEvents::TEvWakeup 2025-04-04T17:23:00.974315Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [1:7489509749662436065:3365], Recipient [1:7489509749662435886:3353]: NKikimr::TEvPQ::TEvPartitionCounters 2025-04-04T17:23:00.974349Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2025-04-04T17:23:00.974371Z node 1 :PERSQUEUE TRACE: [PQ: 72075186224037959] Handle TEvPQ::TEvPartitionCounters PartitionId 5 2025-04-04T17:23:00.974587Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [1:7489509749662436065:3365], Recipient [1:7489509749662435886:3353]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-04T17:23:00.974610Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-04T17:23:00.974664Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [1:7489509749662436065:3365], Recipient [1:7489509749662435886:3353]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-04T17:23:00.974677Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-04T17:23:00.978993Z node 1 :PERSQUEUE TRACE: StateIdle event# 65538 (NActors::TEvents::TEvWakeup), Tablet [1:7489509749662435886:3353], Partition 4, Sender [0:0:0], Recipient [1:7489509749662436066:3366], Cookie: 0 2025-04-04T17:23:00.979073Z node 1 :PERSQUEUE TRACE: StateIdle, received event# 65538, Sender [0:0:0], Recipient [1:7489509749662436066:3366]: NActors::TEvents::TEvWakeup 2025-04-04T17:23:00.979319Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 271188501, Sender [1:7489509749662436066:3366], Recipient [1:7489509749662435886:3353]: NKikimr::TEvPQ::TEvPartitionCounters 2025-04-04T17:23:00.979347Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionCounters 2025-04-04T17:23:00.979365Z node 1 :PERSQUEUE TRACE: [PQ: 
72075186224037959] Handle TEvPQ::TEvPartitionCounters PartitionId 4 2025-04-04T17:23:00.979552Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [1:7489509749662436066:3366], Recipient [1:7489509749662435886:3353]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-04T17:23:00.979584Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-04-04T17:23:00.979634Z node 1 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [1:7489509749662436066:3366], Recipient [1:7489509749662435886:3353]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-04-04T17:23:00.979654Z node 1 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-04-04T17:23:04.352905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509788563596231:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:04.353506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000c5c/r3tmp/tmplbbu1b/pdisk_1.dat 2025-04-04T17:23:04.737714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:04.766444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:04.768422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:04.771987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16503 TServer::EnableGrpc on GrpcPort 26046, node 1 2025-04-04T17:23:05.125246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:05.125282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:05.125291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:05.125438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16503 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:05.760212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:06.099216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-04-04T17:23:07.257340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509801448499014:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:07.257353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509801448499027:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:07.257419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489509801448499026:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:07.257496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:07.266072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-04-04T17:23:07.271535Z node 1 :TX_PROXY ERROR: Actor# [1:7489509801448499036:2447] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:23:07.276126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509801448499035:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:23:07.276131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489509801448499034:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:23:07.333523Z node 1 :TX_PROXY ERROR: Actor# [1:7489509801448499083:2478] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:07.357954Z node 1 :TX_PROXY ERROR: Actor# [1:7489509801448499101:2486] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:08.355945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:23:08.722411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:23:09.148785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:23:09.353082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509788563596231:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:09.353147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:23:09.516131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:23:09.901754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:23:10.728250Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401654:2803] Handshake: worker# [1:7489509792858564123:2294] 2025-04-04T17:23:10.730976Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401654:2803] Create read session: session# [1:7489509814333401655:2293] 2025-04-04T17:23:10.731195Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401654:2803] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:23:10.755771Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401654:2803] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 0 Data: 9b Codec: RAW }] } } 2025-04-04T17:23:10.756048Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401654:2803] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:23:10.819218Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401654:2803] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 
Data: 9b Codec: RAW }] } } 2025-04-04T17:23:10.914117Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401739:2835] Handshake: worker# [1:7489509792858564123:2294] 2025-04-04T17:23:10.917514Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401739:2835] Create read session: session# [1:7489509814333401740:2293] 2025-04-04T17:23:10.917688Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401739:2835] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2025-04-04T17:23:10.926671Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7489509814333401739:2835] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 9b Codec: RAW }] } } >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable |94.3%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> TOlap::CustomDefaultPresets >> TOlap::CreateStore >> TOlap::CreateTableWithNullableKeys >> TOlap::Decimal >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> TOlap::StoreStats >> TOlap::AlterStore >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> TOlap::CreateStoreWithDirs >> TSequence::AlterSequence >> TOlap::CreateDropStandaloneTable |94.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2025-04-04T17:22:55.109688Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509748473200468:2141];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.109801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001832/r3tmp/tmpE1NjQg/pdisk_1.dat 2025-04-04T17:22:55.574034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.574190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.584994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:55.586759Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11341, node 1 2025-04-04T17:22:55.816317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.816344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.816350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.816491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13078 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.368199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.389766Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:22:56.462453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13078 2025-04-04T17:22:56.653881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
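The unittest transcripts above and below follow one fixed entry shape — "2025-04-04T17:22:56.666539Z node 1 :FLAT_TX_SCHEMESHARD WARN: <message>" — so the WARN/ERROR entries can be pulled out of the noise mechanically. A minimal Python sketch, assuming only that visible shape (the regex and the "severities" helper are illustrative aids for reading this log, not part of YDB or the test harness):

import re

# Pattern mirrors the entry shape seen in this log:
#   2025-04-04T17:22:56.666539Z node 1 :FLAT_TX_SCHEMESHARD WARN: <message>
ENTRY = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)\s+"
    r"node (?P<node>\d+)\s+:(?P<component>\w+)\s+"
    r"(?P<sev>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):"
)

def severities(log_text, wanted=("WARN", "ERROR")):
    # Yields (timestamp, node, component, severity) for each matching entry.
    for m in ENTRY.finditer(log_text):
        if m.group("sev") in wanted:
            yield (m.group("ts"), int(m.group("node")),
                   m.group("component"), m.group("sev"))
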
2025-04-04T17:22:56.666539Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-04T17:22:56.912115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:59.776682Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509767435790876:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:59.776901Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001832/r3tmp/tmpRIS94o/pdisk_1.dat 2025-04-04T17:22:59.996225Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:00.014158Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:00.015041Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:00.022030Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6653, node 4 2025-04-04T17:23:00.199296Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:00.199327Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:00.199336Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:00.199573Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:00.492539Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
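The acknowledgements dumped below repeat one shape: records { sequence_number: "NN" shard_id: "shard-000000" }. A quick sanity check on such a dump is that sequence numbers run gap-free per shard; a minimal sketch under that reading ("check_contiguous" and its regex are illustrative, not test-harness code, and assume the records appear in emission order as they do here):

import re
from collections import defaultdict

RECORD = re.compile(r'records \{ sequence_number: "(\d+)" shard_id: "([^"]+)" \}')

def check_contiguous(dump):
    # Group sequence numbers by shard, then verify each run has no gaps.
    seqs = defaultdict(list)
    for seq, shard in RECORD.findall(dump):
        seqs[shard].append(int(seq))
    return {shard: nums == list(range(nums[0], nums[0] + len(nums)))
            for shard, nums in seqs.items()}
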
2025-04-04T17:23:00.556136Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22219 2025-04-04T17:23:00.768195Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.941375Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:23:00.990803Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } 
records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: " ... rd_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: 
"105" shard_id: "shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } 2025-04-04T17:23:04.777081Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489509767435790876:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:04.777164Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743787380901-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743787380,"finish":1743787380},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787380}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743787380967-3","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787380,"finish":1743787381},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787381}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743787381013-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787381,"finish":1743787382},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787382}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743787382045-5","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787382,"finish":1743787383},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787383}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743787383060-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787383,"finish":1743787384},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787384}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743787384077-7","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787384,"finish":1743787385},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787385}' 2025-04-04T17:23:08.354491Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509803272982900:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:08.354603Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001832/r3tmp/tmp7JzMaR/pdisk_1.dat 2025-04-04T17:23:08.476413Z node 7 :IMPORT WARN: Table profiles were not 
loaded 2025-04-04T17:23:08.516450Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:08.516550Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:08.518827Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3095, node 7 2025-04-04T17:23:08.563004Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:08.563028Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:08.563035Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:08.563183Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:08.834773Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:08.887150Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:28362 2025-04-04T17:23:09.065333Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... |94.4%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 15031159635757850177 SEND TEvPut with key [1:1:1:0:0:100:0] 2025-04-04T17:23:11.696014Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-04T17:23:11.696509Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-04-04T17:23:11.768183Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 15408916727657308936 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2025-04-04T17:23:11.611503Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:288:57] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2025-04-04T17:23:11.611687Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:215:16] ServerId# [1:293:62] TabletId# 72057594037932033 PipeClientId# [8:215:16] 2025-04-04T17:23:11.611790Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:291:60] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2025-04-04T17:23:11.611877Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:197:16] ServerId# [1:290:59] TabletId# 72057594037932033 PipeClientId# [5:197:16] 2025-04-04T17:23:11.611981Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:289:58] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2025-04-04T17:23:11.612074Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:287:56] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2025-04-04T17:23:11.612170Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:292:61] TabletId# 72057594037932033 PipeClientId# [7:209:16] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> IndexBuildTest::RejectsCancel [GOOD] >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 14374938440314549656 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-04T17:23:11.850123Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:288:57] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2025-04-04T17:23:11.850398Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:215:16] ServerId# [1:293:62] TabletId# 72057594037932033 PipeClientId# [8:215:16] 2025-04-04T17:23:11.850558Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:291:60] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2025-04-04T17:23:11.850664Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:197:16] ServerId# [1:290:59] TabletId# 72057594037932033 PipeClientId# [5:197:16] 2025-04-04T17:23:11.850821Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:289:58] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2025-04-04T17:23:11.850930Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:287:56] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2025-04-04T17:23:11.851089Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:292:61] TabletId# 72057594037932033 PipeClientId# [7:209:16] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 3509959991044646506 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2025-04-04T17:23:12.369587Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-04-04T17:23:12.370083Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-04-04T17:23:12.450510Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 16237110653409085634 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-04T17:23:12.026934Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected 
NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> TOlap::Decimal [GOOD] >> DataStreams::ListStreamsValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:22:47.303097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:22:47.303173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:47.303220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:22:47.303245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:22:47.303277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:22:47.303295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:22:47.303336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:22:47.303386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:22:47.303612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:22:47.367344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:22:47.367401Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:47.379191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:22:47.379425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:22:47.379570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:22:47.390488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:22:47.390784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:22:47.391403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.392202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:22:47.397114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.398452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:22:47.398521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.398722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:47.398773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.398812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:47.398964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.404913Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:47.524749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:47.524973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.525172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:47.525389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:47.525437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.528113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.528289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:47.528559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.528618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:47.528658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:47.528720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:47.530933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.531027Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:47.531067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:47.533011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.533067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.533138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.533205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.537003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:47.539098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:47.539360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:47.540457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.540621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:47.540685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.540981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:47.541039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.541245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:47.541333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:47.543445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:47.543494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.543669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.543710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:47.544133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.544190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:47.544296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.544354Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.544402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.544446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.544486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:47.544531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.544582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:47.544616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:47.544685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:47.544722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:47.544753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:47.546693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.546802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.546857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
uildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1170:3024], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:23:13.437137Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T17:23:13.438782Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-04-04T17:23:13.438862Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1170:3024], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:23:13.438906Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-04-04T17:23:13.439063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:23:13.439106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1264:3107] TestWaitNotification: OK eventTxId 102 2025-04-04T17:23:13.441447Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-04T17:23:13.441587Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2025-04-04T17:23:13.443901Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-04-04T17:23:13.444122Z node 2 :BUILD_INDEX 
DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } 2025-04-04T17:23:13.446431Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:13.446679Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 275us result status StatusSuccess 2025-04-04T17:23:13.447152Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:13.449279Z node 2 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:23:13.449532Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 273us result status StatusSuccess 2025-04-04T17:23:13.450122Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 6897394790825403967 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2025-04-04T17:23:11.954922Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-04-04T17:19:16.440657Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:16.523874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:16.548543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:16.548791Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:16.557066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:16.557300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:16.557534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:16.557656Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:16.557821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:16.557953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:16.558051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:16.558165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:16.558303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:16.558427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.558563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:16.558682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:16.588307Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:16.588752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:16.588828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:16.589016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.589187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:16.589265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:16.589308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:16.589394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
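The TX_COLUMNSHARD entries here walk each normalizer through a small lifecycle — normalizer_register, normalizer_switched, normalizer_finished — keyed by CLASS_NAME. Tallying those events is a quick way to spot a normalizer that registered but never finished. A minimal sketch matching only the key=value shape visible in these lines ("normalizer_counts" is an illustrative reading aid, not YDB code):

import re
from collections import Counter

EVENT = re.compile(
    r"event=(normalizer_register|normalizer_switched|normalizer_finished)"
    r";description=CLASS_NAME=(\w+)"
)

def normalizer_counts(log_text):
    # Counts (class_name, lifecycle_event) pairs,
    # e.g. ("Granules", "normalizer_finished").
    return Counter((cls, event) for event, cls in EVENT.findall(log_text))
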
2025-04-04T17:19:16.589454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:16.589511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:16.589569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:16.589759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.589828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:16.589872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:16.589902Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:16.590004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:16.590082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:16.590152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:16.590189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:16.590287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:16.590327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:16.590357Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:16.590408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:16.590449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:16.590481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:16.590912Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-04T17:19:16.591036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=65; 2025-04-04T17:19:16.591119Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-04-04T17:19:16.591191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-04T17:19:16.591365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:16.591437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:16.591473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:16.591684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:16.591750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.591788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.591965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:16.592012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:16.592051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:16.592264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:16.592314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:16.592344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:16.592483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:16.592525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:16.592590Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chun
k_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6
;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;pa
th_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-04-04T17:23:12.773639Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11161:12788];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:23:12.775551Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11161:12788];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> TOlap::CustomDefaultPresets [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> DataStreams::TestListShards1Shard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 14742908571957539644 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2025-04-04T17:23:11.860079Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:6324:829] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:13.919948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, 
MaxRate# 1 2025-04-04T17:23:13.920095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.920144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:13.920182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:13.921111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:13.921164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:13.921243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.921341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:13.922502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:14.004928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:14.004980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:14.016100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:14.016333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:14.016474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:14.026268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:14.026550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:14.027286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.028159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:14.033766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.035076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.035136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.035356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.035405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.035455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.035564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.041843Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.165159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction 
{ WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.165396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.165617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.165836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.165889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.168099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.168244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.168427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.168487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.168523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.168572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.170522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.170581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.170622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.172366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.172417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.172498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.172555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.176332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.178118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.178334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.179257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.179393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.179439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.179704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.179781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.179974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.180049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:14.182053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.182126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.182313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.182370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.182746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.182792Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.182912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.182948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.182996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.183064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.183103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.183144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.183180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.183213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.183293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.183333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.183372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.185138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.185260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.185300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tionPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.379525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.379587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-04-04T17:23:14.379755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-04-04T17:23:14.379911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.379980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:14.381352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:211;event=finished_tx;tx_id=101; FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-04-04T17:23:14.382896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.382939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.383133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:14.383294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.383344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:23:14.383388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-04-04T17:23:14.383770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.383820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:23:14.383946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult 
tabletId: 72075186233409546 2025-04-04T17:23:14.384641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:23:14.384739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:23:14.384795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:23:14.384851Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:23:14.384892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.386229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:23:14.386339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:23:14.386369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:23:14.386398Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:23:14.386447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:23:14.386548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-04-04T17:23:14.388052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-04T17:23:14.388668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:23:14.389991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:23:14.412503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2025-04-04T17:23:14.412563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:23:14.412692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2025-04-04T17:23:14.412787Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvColumnShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2025-04-04T17:23:14.413118Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-04-04T17:23:14.413178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-04-04T17:23:14.413276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:23:14.415944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.416332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.416451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.416493Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:23:14.416594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:23:14.416627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:23:14.416663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:23:14.416701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:23:14.416757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-04-04T17:23:14.416822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2349] message: TxId: 101 2025-04-04T17:23:14.416868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:23:14.416907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:23:14.416942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-04-04T17:23:14.417079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:14.418755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:23:14.418806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:371:2350] TestWaitNotification: OK eventTxId 101 2025-04-04T17:23:14.419385Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:14.419591Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 258us result status StatusSuccess 2025-04-04T17:23:14.420059Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableWithNullableKeys [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:13.919917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:13.920097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.920138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:13.920168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:13.921152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:13.921200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:13.921274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.921355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:13.922472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:14.003200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:14.003254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:14.013911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:14.014127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:14.014318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:14.022401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:14.022698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:14.023408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.024256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:14.029173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.030873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.030919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.031004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.036648Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.144073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.145947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.148215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.150610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.150710Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.153762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.153966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.154273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.154354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.154454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.154501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.156640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.156698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.156729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.158721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.158759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.158828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.158882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.163619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.165278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.165410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.166369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.166489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.166523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:23:14.168112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.168210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.168451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.168534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:14.170712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.170755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.170932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.170975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.171309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.171351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.171463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.171524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.171575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.171610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.171653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.171689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.171723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.171758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.171832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.171871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.171907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.173606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.173714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.173756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TProposedWaitParts operationId# 104:0 ProgressState at schemeshard: 72057594046678944 2025-04-04T17:23:14.640534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409547 2025-04-04T17:23:14.640949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:14.641040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:14.641080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:23:14.641117Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:23:14.641158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:23:14.641419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:14.641461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:14.641481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:23:14.641503Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T17:23:14.641519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:23:14.642562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:14.642662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:14.642695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:23:14.642720Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-04-04T17:23:14.642744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:23:14.642802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-04-04T17:23:14.643685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 
72057594046678944:2 msg type: 275382275 2025-04-04T17:23:14.645032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:23:14.645404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:23:14.646032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:23:14.657741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 104 MinStep: 0 Step: 5000005 2025-04-04T17:23:14.657795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409547, partId: 0 2025-04-04T17:23:14.657895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 104 MinStep: 0 Step: 5000005 2025-04-04T17:23:14.657944Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvColumnShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409547 TxId: 104 MinStep: 0 Step: 5000005 2025-04-04T17:23:14.658205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-04-04T17:23:14.658235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409547, partId: 0 2025-04-04T17:23:14.658356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-04-04T17:23:14.658398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 104 2025-04-04T17:23:14.660655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.660970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.661051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.661086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 104:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.661142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:23:14.661216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:23:14.661247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:23:14.661286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:23:14.661314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:23:14.661346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-04-04T17:23:14.661410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2349] message: TxId: 
104 2025-04-04T17:23:14.661444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:23:14.661478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:23:14.661509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:23:14.661592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:23:14.663118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:23:14.663214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:23:14.663256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:608:2574] 2025-04-04T17:23:14.663676Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-04-04T17:23:14.664126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:467:2442];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:23:14.666845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:23:14.667450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2025-04-04T17:23:14.667882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:14.667929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:23:14.667986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:23:14.669730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:23:14.669786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:23:14.669902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-04-04T17:23:14.670375Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/MyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:14.670549Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/MyTable" took 204us result status StatusPathDoesNotExist 2025-04-04T17:23:14.670668Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/MyTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 3])" Path: 
"/MyRoot/MyDir/MyTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:23:14.671071Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-04-04T17:23:14.671136Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 78us result status StatusPathDoesNotExist 2025-04-04T17:23:14.671209Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:13.919976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:13.920111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.920160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:13.920203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:13.921135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:13.921191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:13.921266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.921369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:13.922500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:13.987596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:13.987652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:13.997787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:13.997987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:13.998221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:14.007025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:14.007324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:14.010163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.012081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:14.021351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.030475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.030584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.030727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.036420Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.156856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.157113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.157349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.157561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.157631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.159841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.160030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.160228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.160294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.160331Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.160379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.162531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.162602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.162662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.164728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.164794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.164857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.164918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.169033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.171010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.171248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.172386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.172533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.172589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.172882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.172965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.173185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.173268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:14.175738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.175795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.176014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.176075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.176490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.176551Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.176675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.176714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.176759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.176801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.176845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.176883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.176921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.176967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.177033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.177075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.177109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.179107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.179248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.179293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tate->FrontStep: 5000003 2025-04-04T17:23:14.530717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.530788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:14.531090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:23:14.531294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.531357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:23:14.531422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-04T17:23:14.531875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.531953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:23:14.532057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-04T17:23:14.533152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:14.533278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:14.533328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:23:14.533380Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:23:14.533451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:23:14.535295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:14.535411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:14.535452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:23:14.535491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-04-04T17:23:14.535529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:23:14.535624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:23:14.537530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-04T17:23:14.537633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 2025-04-04T17:23:14.537725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2025-04-04T17:23:14.538270Z node 1 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2025-04-04T17:23:14.538491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.538645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2025-04-04T17:23:14.540410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:14.540953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:14.542747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.555396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2025-04-04T17:23:14.555460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:23:14.555604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2025-04-04T17:23:14.555699Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvColumnShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2025-04-04T17:23:14.556100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-04-04T17:23:14.556184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:23:14.556307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:23:14.559675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.560284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.560532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2025-04-04T17:23:14.560622Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:23:14.560789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:23:14.560840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:14.560899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:23:14.560948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:14.560999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:23:14.561083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2349] message: TxId: 102 2025-04-04T17:23:14.561153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:14.561226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:23:14.561268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:23:14.561414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:23:14.563731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:23:14.563825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:426:2404] TestWaitNotification: OK eventTxId 102 2025-04-04T17:23:14.564521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:14.564808Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 309us result status StatusSuccess 2025-04-04T17:23:14.565580Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 
PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TOlap::CreateTableTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2025-04-04T17:22:55.108642Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509748681477740:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.108709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180a/r3tmp/tmpzOUxVJ/pdisk_1.dat 2025-04-04T17:22:55.618578Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:55.636844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.636929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.663780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12012, node 1 2025-04-04T17:22:55.850627Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.850647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.850657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.850776Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20681 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.378820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.504494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:20681 2025-04-04T17:22:56.713092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:58.574610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:58.776154Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037890:1][1:7489509761566381262:2372] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-04-04T17:22:58.950389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:22:59.133612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:22:59.160899Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-04-04T17:22:59.160922Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-04-04T17:22:59.160931Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-04-04T17:22:59.160940Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-04-04T17:22:59.160948Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-04-04T17:22:59.160956Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-04-04T17:22:59.160982Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-04-04T17:22:59.160995Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-04-04T17:22:59.161002Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-04-04T17:22:59.161010Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-04-04T17:22:59.161017Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-04-04T17:22:59.161027Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-04-04T17:22:59.161038Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-04-04T17:22:59.161045Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-04-04T17:22:59.161052Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-04-04T17:22:59.161087Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-04-04T17:22:59.175830Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2025-04-04T17:22:59.175881Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-04-04T17:22:59.175918Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2025-04-04T17:22:59.175940Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found 2025-04-04T17:22:59.175959Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2025-04-04T17:22:59.176005Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,12) wasn't found 2025-04-04T17:22:59.176024Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-04-04T17:22:59.176049Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2025-04-04T17:22:59.176116Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found 2025-04-04T17:22:59.176133Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,17) wasn't found 2025-04-04T17:22:59.176150Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found 2025-04-04T17:22:59.176169Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,11) wasn't found 2025-04-04T17:22:59.176197Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2025-04-04T17:23:00.659038Z node 4 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509769639954770:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:00.659122Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180a/r3tmp/tmpAkUMQZ/pdisk_1.dat 2025-04-04T17:23:00.779958Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:00.807116Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:00.807199Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:00.809853Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23402, node 4 2025-04-04T17:23:00.873369Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:00.873389Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:00.873396Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:00.873529Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:01.097727Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:01.148821Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15935 2025-04-04T17:23:01.308522Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
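The schemeshard WARN records that recur throughout this section — "Operation part proposed ok, but propose itself is undo unsafe" — flag suboperations whose propose step cannot be rolled back once accepted. A minimal triage sketch that tallies them by suboperation type; the record shape and the sample line are taken from this transcript only and are not a stable log contract:

```python
import re
from collections import Counter

# Tally ":FLAT_TX_SCHEMESHARD WARN: ... propose itself is undo unsafe" records
# by suboperation type. Regex mirrors the record shape seen in this transcript.
UNDO_UNSAFE = re.compile(
    r"undo unsafe, suboperation type: (?P<op>ESchemeOp\w+), opId: (?P<opid>\d+:\d+)"
)

def tally_undo_unsafe(lines):
    counts = Counter()
    for line in lines:
        m = UNDO_UNSAFE.search(line)
        if m:
            counts[m.group("op")] += 1
    return counts

sample = [
    "2025-04-04T17:23:01.501368Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part "
    "proposed ok, but propose itself is undo unsafe, suboperation type: "
    "ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480",
]
print(tally_undo_unsafe(sample))  # Counter({'ESchemeOpAlterPersQueueGroup': 1})
```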
2025-04-04T17:23:01.501368Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:23:01.568996Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:01.654618Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:23:05.090758Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509791911605075:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:05.094497Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180a/r3tmp/tmpWyiEei/pdisk_1.dat 2025-04-04T17:23:05.265493Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:05.321246Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:05.321411Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:05.324666Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30330, node 7 2025-04-04T17:23:05.383260Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:05.383303Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:05.383314Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:05.383494Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:05.677150Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
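Each node bring-up above follows the same HIVE progression: VolatileState moves Unknown -> Disconnected -> Connecting -> Connected before the gRPC port is announced. A small checker for that chain, keyed on the record shape seen in this transcript (an assumption, not a documented format):

```python
import re

# Verify a node's HIVE VolatileState transitions occur in the expected order.
TRANSITION = re.compile(
    r"Node\((?P<node>\d+),.*?VolatileState: (?P<src>\w+) -> (?P<dst>\w+)"
)
EXPECTED = [("Unknown", "Disconnected"),
            ("Disconnected", "Connecting"),
            ("Connecting", "Connected")]

def node_connected(lines, node):
    seen = [(m.group("src"), m.group("dst"))
            for m in (TRANSITION.search(line) for line in lines)
            if m and m.group("node") == node]
    return seen == EXPECTED

log = [
    "HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected",
    "HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting",
    "HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected",
]
print(node_connected(log, "7"))  # True
```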
2025-04-04T17:23:05.747001Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4898 2025-04-04T17:23:05.957289Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:06.234075Z node 7 :TX_PROXY ERROR: Actor# [7:7489509796206574536:3497] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:09.968552Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509810401624132:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:09.968795Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00180a/r3tmp/tmpWvkY5d/pdisk_1.dat 2025-04-04T17:23:10.153448Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:10.198715Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:10.198819Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:10.202569Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14328, node 10 2025-04-04T17:23:10.260604Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:10.260630Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:10.260639Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:10.260761Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
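The TX_PROXY error above ("Check failed: path ... error: path exist, request accepts it") embeds a structured reason string; the backslash-escaped quotes come from its being quoted inside the surrounding protobuf text. A sketch that recovers the path, the error text, and the path id from the unescaped message, with the format inferred from this transcript:

```python
import re

# Parse the schemeshard "Check failed" reason string into structured fields.
REASON = re.compile(
    r"Check failed: path: '(?P<path>[^']+)', error: (?P<error>[^(]+?)\s*"
    r"\(id: \[OwnerId: (?P<owner>\d+), LocalPathId: (?P<local>\d+)\]"
)

msg = ("Check failed: path: '/Root/stream_TestCreateExistingStream', error: "
       "path exist, request accepts it (id: [OwnerId: 72057594046644480, "
       "LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)")
m = REASON.search(msg)
print(m.group("path"), "|", m.group("error"), "|", m.group("owner"), m.group("local"))
# /Root/stream_TestCreateExistingStream | path exist, request accepts it | 72057594046644480 2
```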
2025-04-04T17:23:10.579366Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:10.643054Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:31394 2025-04-04T17:23:10.870367Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... >> TOlap::CreateDropTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2025-04-04T17:22:55.237444Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509748064140332:2153];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.241129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001833/r3tmp/tmp2phgsY/pdisk_1.dat 2025-04-04T17:22:55.765659Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:55.791717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.791835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.799773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62037, node 1 2025-04-04T17:22:55.962437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.962462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.962473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.962592Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
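Verdict markers such as ">> TOlap::CreateDropTable [GOOD]" punctuate this report between log blocks. A minimal scraper for them; the marker shape is taken from this transcript and may not cover every form the test runner emits:

```python
import re

# Collect (test name, status) pairs from ">> Suite::Test [STATUS]" markers.
VERDICT = re.compile(r">> (?P<name>[\w:]+) \[(?P<status>[A-Z]+)\]")

def verdicts(text):
    return [(m.group("name"), m.group("status")) for m in VERDICT.finditer(text)]

print(verdicts(">> TOlap::CreateDropTable [GOOD] "
               ">> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD]"))
# [('TOlap::CreateDropTable', 'GOOD'),
#  ('TSequence::CreateTableWithDefaultFromSequenceAndIndex', 'GOOD')]
```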
2025-04-04T17:22:56.305791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.480795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5919 2025-04-04T17:22:56.691171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:57.044809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2025-04-04T17:22:57.136536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:22:57.244393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:22:57.264342Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:22:57.264389Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 
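The PutRecords result earlier in this block is printed as one flattened text-proto of thirty "records { ... }" entries. A sketch that recovers the (sequence_number, shard_id) pairs and checks that sequence numbers are contiguous per shard, with field names taken from the output shown above:

```python
import re

# Extract record pairs from the flattened text-proto and verify per-shard
# sequence numbers form an unbroken run.
RECORD = re.compile(r'records { sequence_number: "(\d+)" shard_id: "([\w-]+)" }')

def contiguous_per_shard(text):
    by_shard = {}
    for seq, shard in RECORD.findall(text):
        by_shard.setdefault(shard, []).append(int(seq))
    return {shard: seqs == list(range(seqs[0], seqs[0] + len(seqs)))
            for shard, seqs in by_shard.items()}

sample = ('encryption_type: NONE '
          'records { sequence_number: "0" shard_id: "shard-000000" } '
          'records { sequence_number: "1" shard_id: "shard-000000" }')
print(contiguous_per_shard(sample))  # {'shard-000000': True}
```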
2025-04-04T17:22:57.264442Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
2025-04-04T17:22:57.275086Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found
2025-04-04T17:22:57.275171Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found
2025-04-04T17:22:57.275221Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743787376910-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1743787376,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743787376910-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787376,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1743787377190-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743787377,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1743787377190-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743787377,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037890","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743787377186-5","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743787377,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743787377186-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743787377,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743787376910-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1743787376,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743787376910-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787376,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1743787377190-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743787377,"finish":1743787377},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1743787377}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1743787377190-4","schema":"ydb.serverless.v1","tags":{"ydb_size" ... d: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:23:06.134544Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:23:06.200835Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:28123
2025-04-04T17:23:06.413394Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR
E0000 00:00:1743787386.607212 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.607467 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.614347 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.614523 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.620083 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.620210 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.625830 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.625935 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
2025-04-04T17:23:06.641303Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-04-04T17:23:06.695336Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480
E0000 00:00:1743787386.749645 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.749752 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
2025-04-04T17:23:06.757725Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480
E0000 00:00:1743787386.806988 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.807112 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
2025-04-04T17:23:06.814415Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480
E0000 00:00:1743787386.867401 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.867503 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.877959 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.878066 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
2025-04-04T17:23:06.911919Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:23:06.928280Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found
2025-04-04T17:23:06.929053Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found
2025-04-04T17:23:06.929105Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found
2025-04-04T17:23:06.929137Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found
2025-04-04T17:23:06.929155Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found
2025-04-04T17:23:06.929169Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found
2025-04-04T17:23:06.938383Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found
2025-04-04T17:23:06.938483Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found
2025-04-04T17:23:06.938556Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found
2025-04-04T17:23:06.938610Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found
2025-04-04T17:23:06.938652Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found
2025-04-04T17:23:06.938689Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found
E0000 00:00:1743787386.945954 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787386.946055 723313 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
2025-04-04T17:23:10.058149Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509814472998195:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:23:10.058325Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001833/r3tmp/tmp7dSMaG/pdisk_1.dat
2025-04-04T17:23:10.239953Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:23:10.281545Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:23:10.281651Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:23:10.284906Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7658, node 10
2025-04-04T17:23:10.340797Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:23:10.340827Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:23:10.340838Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:23:10.341022Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20020
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:23:10.681787Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:23:10.751335Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:20020
2025-04-04T17:23:11.013624Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
E0000 00:00:1743787391.184909 725058 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787391.196460 725058 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787391.205014 725058 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787391.214170 725058 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
E0000 00:00:1743787391.221239 725058 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn
>> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD]
>> DataShardVolatile::UpsertBrokenLockArbiter-UseSink
>> TOlap::CreateTable [GOOD]
>> TOlap::AlterTtl [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:23:13.919952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:23:13.920077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:23:13.920120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:23:13.920154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:23:13.921125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:23:13.921171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:23:13.921240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:23:13.921355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:23:13.922485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:23:13.996412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:23:13.996468Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:23:14.005483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:23:14.005681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:23:14.005826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:14.015161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:14.015405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:14.015925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.016679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:14.022341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.030514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.030570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.030618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.030754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.037197Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.171220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.171448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.171916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.172164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.172222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.174691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.174853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.175040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.175123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.175181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.175218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.177071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.177126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.177164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.178947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.179028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.179094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.179156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.182869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.184772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.184960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.185974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.186104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.186149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.186470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.186567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.186743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.186827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:14.189051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-04-04T17:23:14.189121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.189288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.189325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.189668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.189731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.189827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.189878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.189927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.189970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.190009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.190047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.190084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.190115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.190186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.190230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.190288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.192175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.192280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.192315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
INFO: TDropOlapStore TPropose operationId# 107:0 HandleReply TEvOperationPlan at schemeshard: 72057594046678944, stepId: 5000007 2025-04-04T17:23:15.546636Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 129 2025-04-04T17:23:15.546761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:15.546816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: Erasing txId 107 2025-04-04T17:23:15.548294Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:15.548327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:15.548445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:15.548562Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:15.548589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-04-04T17:23:15.548618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-04-04T17:23:15.548658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.548706Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId# 107:0 ProgressState at schemeshard: 72057594046678944 2025-04-04T17:23:15.548773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId# 107:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-04T17:23:15.549267Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:23:15.549323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:23:15.549353Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-04T17:23:15.549386Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:23:15.549410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:23:15.549768Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:23:15.549832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-04-04T17:23:15.549853Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-04-04T17:23:15.549872Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-04-04T17:23:15.549893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:15.549938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-04-04T17:23:15.551644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 107:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-04T17:23:15.551723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 0, tablet: 72075186233409546 2025-04-04T17:23:15.552319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2025-04-04T17:23:15.552359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409546, partId: 0 2025-04-04T17:23:15.552440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2025-04-04T17:23:15.552485Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 130 2025-04-04T17:23:15.553333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:23:15.553516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-04-04T17:23:15.554148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.554276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.554316Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 107:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:15.554373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:15.554473Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-04T17:23:15.554505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:23:15.554538Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-04-04T17:23:15.554588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:23:15.554624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-04-04T17:23:15.554653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-04-04T17:23:15.554691Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-04-04T17:23:15.554730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-04-04T17:23:15.554814Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:23:15.556246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:23:15.556559Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:23:15.556748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:15.557256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409546 2025-04-04T17:23:15.557819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186233409546;self_id=[2:307:2294];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:23:15.560227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:15.560274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:15.560330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:15.562340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:23:15.562409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:23:15.563273Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-04-04T17:23:15.563743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-04-04T17:23:15.563782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-04-04T17:23:15.564202Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-04-04T17:23:15.564275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-04-04T17:23:15.564304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:639:2616] TestWaitNotification: OK eventTxId 107 2025-04-04T17:23:15.564802Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:15.564934Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 167us result status StatusPathDoesNotExist 2025-04-04T17:23:15.565045Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path 
hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:23:15.565512Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-04-04T17:23:15.565577Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 69us result status StatusPathDoesNotExist 2025-04-04T17:23:15.565633Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataStreams::TestPutRecords [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:08.874116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:08.874265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:08.874317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:08.874351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:08.875205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:08.875239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:08.875320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:08.875402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:08.876416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:08.946135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:08.946202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:08.955677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-04-04T17:23:08.955870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:08.956034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:08.965991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:08.966307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:08.968674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:08.970397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:08.980624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:08.988527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:08.988599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:08.988774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:08.988822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:08.988927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:08.989028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:08.994209Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:09.122139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:09.124154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.125821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:09.130328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:09.130412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.133166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:09.133317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:09.133503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:23:09.133560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:09.133679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:09.133734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:09.135824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.135883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:09.135922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:09.137830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.137908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.137947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:09.138001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.142466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:09.144631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:09.144898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:09.145941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:09.146074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:09.146135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:09.147367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:09.147431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:09.147665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:09.147765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-04T17:23:09.150159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:09.150208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:09.150396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:09.150435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:09.150777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.150827Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:09.150933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:09.150981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.151032Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:09.151090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.151133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:09.151170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.151202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:09.151231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:09.151290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:09.151324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:09.151383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:09.153298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:09.153404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:09.153439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
peration in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:23:15.662571Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:23:15.662589Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:23:15.662627Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-04-04T17:23:15.662645Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:15.663246Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.663274Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2025-04-04T17:23:15.663343Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:339:2318] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-04T17:23:15.663673Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:23:15.663701Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:23:15.663730Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-04-04T17:23:15.663763Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:15.663993Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:23:15.664131Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:15.664163Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-04T17:23:15.664200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-04T17:23:15.664243Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-04-04T17:23:15.664284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-04-04T17:23:15.664325Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-04-04T17:23:15.664674Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.664698Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-04-04T17:23:15.664742Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:341:2319] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-04-04T17:23:15.665303Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:15.665331Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.665420Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:23:15.665442Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:23:15.665474Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.665505Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:15.665665Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:23:15.665735Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:15.665753Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-04T17:23:15.665776Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-04T17:23:15.665812Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-04-04T17:23:15.665830Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-04T17:23:15.665850Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-04-04T17:23:15.665896Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:414:2372] message: TxId: 102 2025-04-04T17:23:15.665945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-04-04T17:23:15.665988Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:23:15.666032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:23:15.666114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:23:15.666150Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-04T17:23:15.666167Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-04T17:23:15.666187Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:23:15.666203Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-04-04T17:23:15.666217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-04-04T17:23:15.666263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:23:15.666288Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-04-04T17:23:15.666305Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-04-04T17:23:15.666332Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-04T17:23:15.666710Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:15.666732Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.666972Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [7:123:2149], Recipient [7:123:2149]: 
NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-04-04T17:23:15.667008Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-04-04T17:23:15.667077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:15.667115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-04T17:23:15.667177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:15.667381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:15.667402Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.667427Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:15.667475Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.667515Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:15.667537Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.668584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:15.668613Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.669811Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.669875Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:15.669935Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:414:2372] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-04-04T17:23:15.670026Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:23:15.670061Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:520:2471] 2025-04-04T17:23:15.670162Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:23:15.670316Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:522:2473], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:23:15.670341Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:23:15.670357Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-04T17:23:15.670735Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:597:2548], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-04-04T17:23:15.670776Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-04-04T17:23:15.670853Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:15.671032Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 185us result status StatusPathDoesNotExist 2025-04-04T17:23:15.671177Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:13.919931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:13.920073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.920119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:13.920158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:13.921120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:13.921176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:13.921251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.921362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:13.922462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:14.008868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:14.008931Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:23:14.020686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:14.020953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:14.021128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:14.033789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:14.034102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:14.034991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.035905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:14.041391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.042868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.042952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.043216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.043273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.043332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.043470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.050595Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.203510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.203780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.204027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.204291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.204360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.207225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.207403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.207629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.207701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.207758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.207798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.210285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.210352Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.210402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.212496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.212569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.212607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.212653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.222158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.223817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.223987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.224813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.224919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.224957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.225202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.225272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.225429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.225513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:14.227693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.227768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.227971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.228017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.228382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.228468Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.228586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.228627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.228676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.228721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.228767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.228810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.228849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.228884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.228962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.229002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.229037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.231271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.231391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.231435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
25-04-04T17:23:15.615754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-04-04T17:23:15.615788Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#111:2 progress is 3/3 2025-04-04T17:23:15.615817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-04-04T17:23:15.615848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 111, ready parts: 3/3, is published: true 2025-04-04T17:23:15.615907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:340:2319] message: TxId: 111 2025-04-04T17:23:15.615950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2025-04-04T17:23:15.615993Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:0 2025-04-04T17:23:15.616020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:0 2025-04-04T17:23:15.616082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-04T17:23:15.616107Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:1 2025-04-04T17:23:15.616121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:1 2025-04-04T17:23:15.616142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-04T17:23:15.616156Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:2 2025-04-04T17:23:15.616169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:2 2025-04-04T17:23:15.616216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-04-04T17:23:15.617422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-04-04T17:23:15.617463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [2:604:2581] TestWaitNotification: OK eventTxId 111 2025-04-04T17:23:15.618067Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:15.618280Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/DirA/DirB/NestedTable" took 230us result status StatusSuccess 2025-04-04T17:23:15.618585Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" PathDescription { Self { Name: "NestedTable" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 111 CreateStep: 5000006 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "NestedTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 112 2025-04-04T17:23:15.621000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore/MyDir" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TableWithTiers" Schema { Columns { Name: "timestamp" Type: "Timestamp" } Columns { Name: "data" Type: "Utf8" } KeyColumnNames: "timestamp" } ColumnShardCount: 1 } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:15.621260Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/MyDir/TableWithTiers, opId: 112:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.621523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: MyDir, child name: TableWithTiers, child id: [OwnerId: 72057594046678944, LocalPathId: 9], at schemeshard: 72057594046678944 2025-04-04T17:23:15.621571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2025-04-04T17:23:15.621690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2025-04-04T17:23:15.621867Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 112:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:15.621907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 112:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.621996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-04-04T17:23:15.622034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-04-04T17:23:15.623650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 112, response: Status: StatusAccepted TxId: 112 SchemeshardId: 72057594046678944 PathId: 9, at schemeshard: 72057594046678944 2025-04-04T17:23:15.623840Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 112, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/MyDir/ 2025-04-04T17:23:15.624060Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:15.624111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:23:15.624283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-04-04T17:23:15.624378Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:15.624444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-04-04T17:23:15.624480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 112, path id: 9 2025-04-04T17:23:15.624762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.624816Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId# 112:0 ProgressState at tabletId# 72057594046678944 2025-04-04T17:23:15.624974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId# 112:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-04-04T17:23:15.625696Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:23:15.625799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:23:15.625833Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:23:15.625889Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-04-04T17:23:15.625938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:23:15.626756Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 112 2025-04-04T17:23:15.626835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 
112 2025-04-04T17:23:15.626862Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-04-04T17:23:15.626892Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 1 2025-04-04T17:23:15.626926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-04-04T17:23:15.627003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-04-04T17:23:15.629116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2025-04-04T17:23:15.629257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 0, tablet: 72075186233409546 2025-04-04T17:23:15.630945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-04-04T17:23:15.631073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 TestModificationResult got TxId: 112, wait until txId: 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:13.919950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:13.920084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.920141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:13.920180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:13.921076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:13.921133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:13.921233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.921362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:13.922448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:14.006984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:14.007061Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:14.018796Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:14.019088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:14.019256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:14.030532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:14.030832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:14.031546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.032422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:14.037990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.039456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.039524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.039762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.039821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.039880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.040015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.047347Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.179398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.179653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.179894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.180133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.180188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.182459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.182593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.182743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-04-04T17:23:14.182799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.182858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.182887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.184879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.184948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.184993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.186901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.186971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.187014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.187102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.190283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.192127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.192314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.193327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.193466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.193527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.193800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.193888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.194078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.194176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-04-04T17:23:14.196709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.196763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.196959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.197002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.197325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.197381Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.197481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.197516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.197562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.197604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.197650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.197693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.197731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.197761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.197843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.197886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.197921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.199868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.199991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.200040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
us: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-04-04T17:23:15.620057Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-04T17:23:15.620113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 106:0, left await: 0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.620150Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2025-04-04T17:23:15.621791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.621928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.621988Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2025-04-04T17:23:15.622049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-04-04T17:23:15.622162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:15.623518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-04-04T17:23:15.623619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000007 2025-04-04T17:23:15.632941Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=tx_controller.cpp:211;event=finished_tx;tx_id=106; 2025-04-04T17:23:15.633203Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:15.633312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:15.633363Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000007 2025-04-04T17:23:15.633952Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 128 -> 129 2025-04-04T17:23:15.634142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:15.634194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 
2025-04-04T17:23:15.640648Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:15.640714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:15.640925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:23:15.641102Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:15.641148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-04-04T17:23:15.641191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 3 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-04-04T17:23:15.641630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.641685Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:23:15.641752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-04-04T17:23:15.645530Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:15.645628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:15.645656Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:23:15.645693Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-04-04T17:23:15.645739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:23:15.646440Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:15.646493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:15.646510Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:23:15.646530Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2025-04-04T17:23:15.646551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2025-04-04T17:23:15.646599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-04T17:23:15.649505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-04-04T17:23:15.650893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T17:23:15.651444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T17:23:15.651640Z node 2 :TX_TIERING ERROR: fline=manager.cpp:158;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2025-04-04T17:23:15.663843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 Step: 5000007 2025-04-04T17:23:15.663890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-04T17:23:15.663970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 Step: 5000007 2025-04-04T17:23:15.664030Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvColumnShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 Step: 5000007 FAKE_COORDINATOR: Erasing txId 106 2025-04-04T17:23:15.664357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-04-04T17:23:15.664408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-04-04T17:23:15.664535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-04-04T17:23:15.666871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.667204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.667315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:15.667351Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-04-04T17:23:15.667463Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:23:15.667508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:23:15.667546Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-04-04T17:23:15.667575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:23:15.667608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-04-04T17:23:15.667664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:340:2319] 
message: TxId: 106 2025-04-04T17:23:15.667700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-04-04T17:23:15.667738Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-04-04T17:23:15.667781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-04-04T17:23:15.667891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:23:15.669437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:23:15.669493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:545:2522] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> TOlap::CreateTableTtl [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> TSchemeShardExtSubDomainTest::Create >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2025-04-04T17:22:55.161333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509750474528650:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.161384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001800/r3tmp/tmprRowXA/pdisk_1.dat 2025-04-04T17:22:55.690601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.690734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.701899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:22:55.721981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19596, node 1 2025-04-04T17:22:55.823313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.823334Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.823347Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.823438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:3911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.314180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.399700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3911 2025-04-04T17:22:56.607685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.622055Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-04-04T17:23:00.046998Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509772543608021:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:00.047092Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001800/r3tmp/tmp9OJ6qF/pdisk_1.dat 2025-04-04T17:23:00.230154Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:00.268610Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:00.268774Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:00.273141Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11149, node 4 2025-04-04T17:23:00.388093Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:00.388126Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:00.388135Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:00.388278Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12661 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:00.640793Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.698738Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:12661 2025-04-04T17:23:00.910776Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:01.107286Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } 
records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2025-04-04T17:23:01.167988Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:01.230906Z :INFO: [/Root/] [/Root/] [8319abd5-bda610b4-1fedcde0-27c393b3] Starting read session 2025-04-04T17:23:01.232194Z :DEBUG: [/Root/] [/Root/] [8319abd5-bda610b4-1fedcde0-27c393b3] Starting session to cluster null (localhost:11149) 2025-04-04T17:23:01.236027Z :DEBUG: [/Root/] [/Root/] [8319abd5-bda610b4-1fedcde0-27c393b3] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:23:01.236092Z :DEBUG: [/Root/] [/Root/] [8319abd5-bda610b4-1fedcde0-27c393b3] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:23:01.236163Z :DEBUG: [/Root/] [/Root/] [8319abd5-bda610b4-1fedcde0-27c393b3] [null] Reconnecting session to cluster null in 0.000000s 2025-04-04T17:23:01.248508Z node 4 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-04T17:23:01.248538Z node 4 :PQ_READ_PROXY DEBUG: new session created cookie 1 2025-04-04T17:23:01.248693Z :DEBUG: [/Root/] [/Root/] [8319abd5-bda610b4-1fedcde0-27c393b3] [null] Successfully connected. Initializing session 2025-04-04T17:23:01.279519Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-04-04T17:23:01.279816Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_13423530364869235474_v1 read init: from# ipv6:[::1]:41654, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-04-04T17:23:01.280748Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_13423530364869235474_v1 auth for : user1 2025-04-04T17:23:01.283477Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_13423530364869235474_v1 Handle describe topics response 2025-04-04T17:23:01.283582Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_13423530364869235474_v1 auth is DEAD 2025-04-04T17:23:01.284755Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_13423530364869235474_v1 auth ok: topics# 1, initDone# 0 2025-04-04T17:23:01.286384Z :INFO: [/Root/] [/Root/] [8319abd5-bda610b4-1fedcde0-27c393b3] [null] Server session id: user1_4_1_13423530364869235474_v1 2025-04-04T17:23:01.287500Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_13423530364869235474_v1 register session: topic# /Root/stream_TestPutReco ... cId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) EndOffset 2 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-04-04T17:23:09.711289Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:23:09.711488Z :DEBUG: [/Root/] Decompression task done. 
Partition/PartitionSessionId: 3 (0-2) 2025-04-04T17:23:09.711525Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2025-04-04T17:23:09.711681Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2025-04-04T17:23:09.711776Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2025-04-04T17:23:09.712726Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2025-04-04T17:23:09.712798Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2025-04-04T17:23:09.712852Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2025-04-04T17:23:09.787565Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:23:09.787716Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2025-04-04T17:23:09.787750Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2025-04-04T17:23:09.787800Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2025-04-04T17:23:09.787829Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-04-04T17:23:09.787851Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-04T17:23:09.787893Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-04T17:23:09.787934Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-04T17:23:09.787992Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2025-04-04T17:23:09.788039Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2025-04-04T17:23:09.788056Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2025-04-04T17:23:09.788097Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2025-04-04T17:23:09.788126Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2025-04-04T17:23:09.788144Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-04-04T17:23:09.788210Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2025-04-04T17:23:09.788207Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2025-04-04T17:23:09.788230Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2025-04-04T17:23:09.788941Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2025-04-04T17:23:09.790679Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2025-04-04T17:23:09.791516Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2025-04-04T17:23:09.795034Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2025-04-04T17:23:09.795853Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2025-04-04T17:23:09.795899Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] The application data is transferred to the client. Number of messages 6, size 5242883 bytes 2025-04-04T17:23:09.795971Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2025-04-04T17:23:09.797614Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2025-04-04T17:23:09.799139Z :DEBUG: [/Root/] Take Data. Partition 1. 
Read: {7, 0} (7-7) 2025-04-04T17:23:09.801466Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2025-04-04T17:23:09.801503Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] The application data is transferred to the client. Number of messages 3, size 3145728 bytes 2025-04-04T17:23:09.803651Z :INFO: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] Closing read session. Close timeout: 0.000000s 2025-04-04T17:23:09.803707Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:0:5:1:0 null:stream_TestPutRecordsCornerCases:2:4:0:0 null:stream_TestPutRecordsCornerCases:1:3:8:0 null:stream_TestPutRecordsCornerCases:4:2:1:0 null:stream_TestPutRecordsCornerCases:3:1:3:0 2025-04-04T17:23:09.803749Z :INFO: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 122 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:23:09.803836Z :NOTICE: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:23:09.803873Z :DEBUG: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] [null] Abort session to cluster 2025-04-04T17:23:09.804340Z :NOTICE: [/Root/] [/Root/] [a5681303-d533a21-9c2baa40-185664e1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:23:09.805099Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10253299015330968906_v1 grpc read failed 2025-04-04T17:23:09.805165Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10253299015330968906_v1 closed 2025-04-04T17:23:09.805259Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_10253299015330968906_v1 is DEAD 2025-04-04T17:23:11.152155Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509819222292955:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:11.152237Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001800/r3tmp/tmpZ7Q9WQ/pdisk_1.dat 2025-04-04T17:23:11.304508Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:11.348435Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:11.348542Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:11.353272Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27470, node 10 2025-04-04T17:23:11.438786Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:11.438820Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:11.438831Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:11.439004Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:11.793676Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:11.908108Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23708 2025-04-04T17:23:12.197885Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:12.466461Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2025-04-04T17:23:12.582017Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480
PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" }
PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004"
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:08.874116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:08.874233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:08.874299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:08.874337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:08.875218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit
10000 2025-04-04T17:23:08.875262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:08.875340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:08.875423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:08.876373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:08.961861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:08.961920Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:08.973843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:08.974145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:08.974348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:08.984343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:08.984642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:08.985249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:08.986094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:08.991268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:08.992634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:08.992705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:08.992904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:08.992949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:08.992996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:08.993120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:08.999378Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:09.129355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:09.129591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.129823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-04-04T17:23:09.130034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:09.130084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.133125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:09.133311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:09.133573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.133634Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:09.133710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:09.133755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:09.135823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.135899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:09.135942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:09.137830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.137883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.137923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:09.137994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.142572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:09.146968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:09.147150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:09.147996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:09.148147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:09.148204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:09.148475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:09.148535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:09.148692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:09.148767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:09.150737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:09.150785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:09.150918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:09.150954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:09.151258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:09.151302Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:09.151403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:09.151462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.151509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:09.151551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.151587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:09.151629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:09.151667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:09.151695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:09.151750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:09.151784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:09.151814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:09.153555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:09.153678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:09.153736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1311 } } 2025-04-04T17:23:16.420237Z node 7 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1311 } } 2025-04-04T17:23:16.420290Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:16.420822Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [7:206:2208], Recipient [7:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 6] Version: 3 } 2025-04-04T17:23:16.420868Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-04T17:23:16.420938Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 114 2025-04-04T17:23:16.421003Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 114 2025-04-04T17:23:16.421041Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 114 2025-04-04T17:23:16.421080Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 114, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-04-04T17:23:16.421108Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-04-04T17:23:16.421192Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2025-04-04T17:23:16.421235Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:16.421910Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1051:2989], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:23:16.421964Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:23:16.421991Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-04-04T17:23:16.422173Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:988:2934], Recipient [7:123:2149]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 988 RawX2: 30064774006 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-04T17:23:16.422207Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-04-04T17:23:16.422323Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, 
tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 988 RawX2: 30064774006 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-04T17:23:16.422372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-04-04T17:23:16.422558Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 988 RawX2: 30064774006 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-04T17:23:16.422665Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:23:16.422786Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 988 RawX2: 30064774006 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-04-04T17:23:16.422887Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:16.422939Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.422985Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-04-04T17:23:16.423028Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2025-04-04T17:23:16.423207Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:16.425637Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:16.426310Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-04T17:23:16.426373Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:16.428214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.428253Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:16.428376Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-04-04T17:23:16.428407Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:16.428540Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.428587Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:16.428628Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2025-04-04T17:23:16.428740Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:988:2934] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-04-04T17:23:16.429102Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient 
[7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T17:23:16.429143Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T17:23:16.429207Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.429264Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2025-04-04T17:23:16.429396Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:16.429428Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-04-04T17:23:16.429469Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-04T17:23:16.429511Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-04-04T17:23:16.429549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-04T17:23:16.429589Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-04-04T17:23:16.429671Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:392:2360] message: TxId: 114 2025-04-04T17:23:16.429737Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-04-04T17:23:16.429797Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2025-04-04T17:23:16.429845Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2025-04-04T17:23:16.429988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-04-04T17:23:16.432027Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:23:16.432132Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:392:2360] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-04-04T17:23:16.432309Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-04-04T17:23:16.432377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1019:2957] 2025-04-04T17:23:16.432627Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1021:2959], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:23:16.432665Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:23:16.432692Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-04-04T17:23:16.433891Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1060:2998], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-04-04T17:23:16.433965Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:23:16.436575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:16.436875Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.437351Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-04-04T17:23:16.437586Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:23:16.440316Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:16.440500Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-04-04T17:23:16.440570Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:15.947764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:15.947900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:15.947944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:15.947991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:15.948044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:15.948078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:15.948136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:15.948234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:15.948538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:16.007836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:16.007880Z node 1 :IMPORT WARN: Table profiles were not
loaded 2025-04-04T17:23:16.015426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:16.015596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:16.015721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:16.022876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:16.023071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:16.023542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:16.024044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:16.027421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:16.028397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:16.028436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:16.028605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:16.028640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:16.028669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:16.028761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.033419Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:16.134319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:16.134502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.134684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:16.134859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:16.134896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.136994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:16.137152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:16.137342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.137405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:16.137437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:16.137495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:16.139252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.139314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:16.139347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:16.140936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.140977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.141016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:16.141068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:16.143983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:16.145364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:16.145486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:16.146369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:16.146488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:16.146538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:16.146825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:16.146929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:16.147146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:16.147243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:16.148900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:16.148937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:16.149058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:16.149095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:16.149406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.149438Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:16.149521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:16.149553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:16.149590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:16.149616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:16.149646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:16.149679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:16.149702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:16.149726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:16.149778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:16.149804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:16.149825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:16.151085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:16.151158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:16.151189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-04-04T17:23:16.602178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.602217Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-04T17:23:16.602346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:23:16.602385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:23:16.602527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:23:16.602569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:23:16.602605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-04T17:23:16.602680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2349] message: TxId: 105 2025-04-04T17:23:16.602723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:23:16.602759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:23:16.602807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T17:23:16.602922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-04-04T17:23:16.604882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:23:16.604938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:602:2579] TestWaitNotification: OK eventTxId 105 2025-04-04T17:23:16.605656Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:16.605930Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 276us result status StatusSuccess 2025-04-04T17:23:16.606456Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-04-04T17:23:16.610016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:16.610331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.610723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-04-04T17:23:16.610819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2025-04-04T17:23:16.610985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-04-04T17:23:16.611261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:16.611321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.611427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:16.611477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-04-04T17:23:16.613611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 7, at schemeshard: 72057594046678944 2025-04-04T17:23:16.613794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2025-04-04T17:23:16.614042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:16.614077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:16.614284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-04-04T17:23:16.614376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:16.614412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-04-04T17:23:16.614447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 106, path id: 7 2025-04-04T17:23:16.614801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-04-04T17:23:16.614863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2025-04-04T17:23:16.615040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-04-04T17:23:16.615730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:16.615816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:16.615849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:23:16.615884Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-04-04T17:23:16.615921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:23:16.616902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:16.616992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-04-04T17:23:16.617025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-04-04T17:23:16.617051Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 1 2025-04-04T17:23:16.617078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-04-04T17:23:16.617134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-04-04T17:23:16.619350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2025-04-04T17:23:16.619483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-04-04T17:23:16.620619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-04-04T17:23:16.621712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency |94.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> 
PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-04-04T17:21:19.601048Z :WriteRAW INFO: Random seed for debugging is 1743787279601023 2025-04-04T17:21:19.932778Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509338026899823:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:19.938329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:19.976044Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509337426802768:2155];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:20.135037Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:21:20.150090Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:20.150173Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00171c/r3tmp/tmpszzFcM/pdisk_1.dat 2025-04-04T17:21:20.377899Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:20.390646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.390733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:20.394372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.394444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:20.408129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:20.409264Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:21:20.410486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62840, node 1 2025-04-04T17:21:20.590160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/00171c/r3tmp/yandexTwXKYN.tmp 2025-04-04T17:21:20.590199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/00171c/r3tmp/yandexTwXKYN.tmp 2025-04-04T17:21:20.590466Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/00171c/r3tmp/yandexTwXKYN.tmp 2025-04-04T17:21:20.590640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:20.822463Z INFO: TTestServer started on Port 25429 GrpcPort 62840 TClient is connected to server localhost:25429 PQClient connected to localhost:62840 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:21.148816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:21:23.196511Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509354606672189:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.196589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509354606672167:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.196995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.205504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:21:23.224317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509354606672196:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:21:23.310815Z node 2 :TX_PROXY ERROR: Actor# [2:7489509354606672224:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:23.520300Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509355206770119:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.519213Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489509354606672239:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.519499Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGUyZDExMzUtNjMwY2I0OWEtMWE5NDM2MWQtYzA0N2MzOTI=, ActorId: [2:7489509354606672165:2308], ActorState: ExecuteState, TraceId: 01jr0x4mqs544ey3phbwvfg9ge, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.521607Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTIwMWMxZC00ODhlMWQ3MS0zOWQ0YzNjYy1iYTYzOTZlNQ==, ActorId: [1:7489509355206770078:2336], ActorState: ExecuteState, TraceId: 01jr0x4mww1tpczx6krxxeyd1c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.521729Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.521973Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.545197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.659144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.776646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:62840", true, true, 1000); 2025-04-04T17:21:24.113744Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0x4nc56tgerqg7wmb28ysx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM0NjBhN2MtNGZjNDNkZDctNDQ0MTk5OGYtNTlkZDZiY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489509359501737807:2977] 2025-04-04T17:21:24.929352Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509338026899823:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:24.929422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:24.972311Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489509337426802768:2155];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:24.972382Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:21:30.149584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:62840 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T17:21:30.219982Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:62840 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 ... SER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T17:23:14.559920Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:23:14.562350Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:23:14.714877Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T17:23:14.715156Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7489509830359066729:2500] connected; active server actors: 1 2025-04-04T17:23:14.715237Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-04T17:23:14.715276Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T17:23:14.715605Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7489509830359066729:2500] disconnected; active server actors: 1 2025-04-04T17:23:14.715642Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7489509830359066729:2500] disconnected no session 
2025-04-04T17:23:14.863397Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T17:23:14.863445Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T17:23:14.863471Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7489509830359066679:2500] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-04-04T17:23:14.863513Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:23:14.864442Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7489509830359066751:2500], now have 1 active actors on pipe 2025-04-04T17:23:14.864564Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2025-04-04T17:23:14.864717Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:23:14.864764Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:23:14.864902Z node 16 :PERSQUEUE INFO: new Cookie src|80e43720-e48998ec-a5fef330-32699971_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T17:23:14.865026Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:23:14.865106Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:23:14.865601Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:23:14.865630Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:23:14.865779Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:23:14.866105Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|80e43720-e48998ec-a5fef330-32699971_0 2025-04-04T17:23:14.867010Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743787394866 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:23:14.867182Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|80e43720-e48998ec-a5fef330-32699971_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:23:14.867413Z :INFO: [] MessageGroupId [src] SessionId [src|80e43720-e48998ec-a5fef330-32699971_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:23:14.867465Z :INFO: [] MessageGroupId [src] SessionId [src|80e43720-e48998ec-a5fef330-32699971_0] Write session will now close 2025-04-04T17:23:14.867526Z :DEBUG: [] MessageGroupId [src] SessionId [src|80e43720-e48998ec-a5fef330-32699971_0] Write session: aborting 2025-04-04T17:23:14.868398Z :INFO: [] MessageGroupId [src] SessionId [src|80e43720-e48998ec-a5fef330-32699971_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:23:14.868481Z :DEBUG: [] MessageGroupId [src] SessionId [src|80e43720-e48998ec-a5fef330-32699971_0] Write session is aborting and will not restart 2025-04-04T17:23:14.868553Z :DEBUG: [] MessageGroupId [src] SessionId [src|80e43720-e48998ec-a5fef330-32699971_0] Write session: destroy 2025-04-04T17:23:14.868699Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|80e43720-e48998ec-a5fef330-32699971_0 grpc read done: success: 0 data: 2025-04-04T17:23:14.868729Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|80e43720-e48998ec-a5fef330-32699971_0 grpc read failed 2025-04-04T17:23:14.869181Z node 15 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|80e43720-e48998ec-a5fef330-32699971_0 2025-04-04T17:23:14.869217Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|80e43720-e48998ec-a5fef330-32699971_0 is DEAD 2025-04-04T17:23:14.869689Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:23:14.870052Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7489509830359066751:2500] destroyed 2025-04-04T17:23:14.870127Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:23:14.898946Z :INFO: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Starting read session 2025-04-04T17:23:14.898999Z :DEBUG: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Starting cluster discovery 2025-04-04T17:23:14.899273Z :INFO: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14716: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14716
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14716. " 2025-04-04T17:23:14.899317Z :DEBUG: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Restart cluster discovery in 0.008990s 2025-04-04T17:23:14.909457Z :DEBUG: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Starting cluster discovery 2025-04-04T17:23:14.909892Z :INFO: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14716: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14716
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14716. " 2025-04-04T17:23:14.909952Z :DEBUG: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Restart cluster discovery in 0.014339s 2025-04-04T17:23:14.925457Z :DEBUG: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Starting cluster discovery 2025-04-04T17:23:14.925675Z :INFO: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14716: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14716
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14716. " 2025-04-04T17:23:14.925709Z :DEBUG: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Restart cluster discovery in 0.027040s 2025-04-04T17:23:14.953417Z :DEBUG: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Starting cluster discovery 2025-04-04T17:23:14.953739Z :NOTICE: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14716: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14716
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14716. " } 2025-04-04T17:23:14.953936Z :NOTICE: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:14716: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14716
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:14716. " } 2025-04-04T17:23:14.954092Z :INFO: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Closing read session. Close timeout: 0.000000s 2025-04-04T17:23:14.954188Z :NOTICE: [/Root] [/Root] [6a7e6ee2-d9ef1ed3-7c6a332f-12ad4a55] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:23:15.275755Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7489509834654034079:2515], TxId: 281474976710684, task: 1, CA Id [15:7489509834654034077:2515]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-04T17:23:15.307803Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7489509834654034079:2515], TxId: 281474976710684, task: 1, CA Id [15:7489509834654034077:2515]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:23:15.352302Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7489509834654034079:2515], TxId: 281474976710684, task: 1, CA Id [15:7489509834654034077:2515]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:23:15.420423Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7489509834654034079:2515], TxId: 281474976710684, task: 1, CA Id [15:7489509834654034077:2515]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:23:15.518687Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7489509834654034079:2515], TxId: 281474976710684, task: 1, CA Id [15:7489509834654034077:2515]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:23:15.684556Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7489509834654034079:2515], TxId: 281474976710684, task: 1, CA Id [15:7489509834654034077:2515]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TCdcStreamTests::VirtualTimestamps >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> TCdcStreamTests::Basic >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> 
TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |94.5%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TGRpcStreamingTest::ClientNeverWrites >> TGRpcStreamingTest::WritesDoneFromClient >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TGRpcStreamingTest::ReadFinish >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TGRpcStreamingTest::ClientDisconnects >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TGRpcStreamingTest::SimpleEcho >> ResultFormatter::EmptyDict >> TGRpcStreamingTest::WriteAndFinishWorks >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> ResultFormatter::Primitive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> ResultFormatter::List [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> ResultFormatter::Struct [GOOD] >> ResultFormatter::Null [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> DataStreams::TestReservedConsumersMetering [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> 
TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::Basic [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> ResultFormatter::Dict [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> KqpLimits::QSReplySize+useSink >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> ResultFormatter::Decimal [GOOD] >> ResultFormatter::VariantStruct [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TCdcStreamTests::Attributes >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TGRpcStreamingTest::SimpleEcho [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TCdcStreamTests::Attributes [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent >> 
TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TCdcStreamTests::RetentionPeriod >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TCdcStreamTests::DocApi >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TCdcStreamTests::RetentionPeriod [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TCdcStreamTests::TopicPartitions >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TCdcStreamTests::DocApi [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] >> TCdcStreamTests::DocApiNegative >> ResultFormatter::FormatNonEmptySchema [GOOD] >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Void [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::Pg >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |94.5%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.664624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.664706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.664734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.664788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.664828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.664855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.664899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.664948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.665184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.717956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:17.717999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.725357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.725514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.725652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.731652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.731839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:17.732283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.732789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.736485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-04-04T17:23:17.737419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:23:17.737460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.737627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:23:17.737669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:23:17.737704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:23:17.737803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.742508Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:23:17.820870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:23:17.821049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.821223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:23:17.821381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:23:17.821420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.822934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.823067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:23:17.823182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.823219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:23:17.823245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:23:17.823289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:23:17.824743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.824779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:23:17.824803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:23:17.826014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.826051Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.826089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.826123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.828778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:23:17.829948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:23:17.830080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:23:17.830808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.830883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:23:17.830928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.831118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:23:17.831154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.831268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:23:17.831354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:23:17.832769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:23:17.832816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:23:17.832918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.832946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:23:17.833150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.833183Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:23:17.833249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:17.833287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.833312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:17.833341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.833366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:23:17.833390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.833414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:23:17.833444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:23:17.833484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:23:17.833518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:23:17.833545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:23:17.834685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:17.834760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:17.834785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
Z node 7 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [7:123:2149] sender: [7:239:2058] recipient: [7:15:2062]
2025-04-04T17:23:21.968762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:23:21.968934Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:21.969092Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:23:21.969254Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:23:21.969297Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:21.971187Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:21.971301Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:23:21.971466Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:21.971518Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:23:21.971556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:23:21.971592Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:23:21.972836Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:21.972892Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:23:21.972935Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:23:21.974155Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:21.974191Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:21.974263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:21.974315Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:23:21.974430Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:23:21.975460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:23:21.975638Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:23:21.976315Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:21.976418Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773228 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:23:21.976461Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:21.976675Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:23:21.976727Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:21.976907Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:23:21.976976Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:23:21.978272Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:23:21.978319Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:23:21.978460Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:21.978506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:23:21.978815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:21.978862Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:23:21.978968Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:21.979004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:21.979040Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:21.979093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:21.979130Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:23:21.979171Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:21.979202Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:23:21.979233Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:23:21.979289Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:23:21.979325Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:23:21.979363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:23:21.979772Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:21.979867Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:21.979904Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1
2025-04-04T17:23:21.979940Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3
2025-04-04T17:23:21.979979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:23:21.980059Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-04T17:23:21.982027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-04T17:23:21.982467Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
TestModificationResults wait txId: 101
2025-04-04T17:23:21.982969Z node 7 :TX_PROXY DEBUG: actor# [7:269:2260] Bootstrap
2025-04-04T17:23:22.003840Z node 7 :TX_PROXY DEBUG: actor# [7:269:2260] Become StateWork (SchemeCache [7:274:2265])
2025-04-04T17:23:22.006740Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:23:22.006929Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true }
2025-04-04T17:23:22.006981Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1
2025-04-04T17:23:22.007179Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944
2025-04-04T17:23:22.007237Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944
2025-04-04T17:23:22.008032Z node 7 :TX_PROXY DEBUG: actor# [7:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-04T17:23:22.011146Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:23:22.011307Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER DATABASE, path: /MyRoot/USER_1
2025-04-04T17:23:22.011561Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
TestModificationResult got TxId: 101, wait until txId: 101
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:23:17.443294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:23:17.443396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:23:17.443447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:23:17.443485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:23:17.444715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:23:17.444749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:23:17.444801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:23:17.444893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:23:17.446197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:23:17.509806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:23:17.509858Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:23:17.518591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:23:17.518797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:23:17.518937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:23:17.526893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:23:17.527107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:23:17.529727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.530515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:23:17.537651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.545357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:23:17.545448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.545671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:23:17.545739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:23:17.545823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:23:17.545976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.553134Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:23:17.669987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:23:17.670177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.670422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:23:17.670650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:23:17.670723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.672833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.672947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:23:17.673069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.673131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:23:17.673161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:23:17.673187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:23:17.674509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.674558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:23:17.674590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:23:17.675752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.675795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.675844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.675888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.678404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:23:17.679697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:23:17.679854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:23:17.680596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.680696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:23:17.680783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.681033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:23:17.681090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.681231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:23:17.681296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:23:17.682832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:23:17.682873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:23:17.682996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.683025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:23:17.683294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.683330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:23:17.683418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:17.683449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.683479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:17.683502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.683532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:23:17.683584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.683621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:23:17.683647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:23:17.683691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:23:17.683732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:23:17.683761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:23:17.685068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:17.685148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:17.685174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
chemeshard: 72057594046678944, txId: 103, path id: 2
2025-04-04T17:23:22.130971Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:23:22.131041Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0
2025-04-04T17:23:22.131103Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240
2025-04-04T17:23:22.132020Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:23:22.132152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:23:22.132209Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-04T17:23:22.132261Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6
2025-04-04T17:23:22.132328Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7
2025-04-04T17:23:22.132431Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true
2025-04-04T17:23:22.135119Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615
2025-04-04T17:23:22.135211Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-04-04T17:23:22.135325Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:394:2365], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944
2025-04-04T17:23:22.135437Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546
2025-04-04T17:23:22.135470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1]
2025-04-04T17:23:22.135604Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546
2025-04-04T17:23:22.135638Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:489:2431], at schemeshard: 72075186234409546, txId: 0, path id: 1
2025-04-04T17:23:22.136544Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0
2025-04-04T17:23:22.136641Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:23:22.136686Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState
2025-04-04T17:23:22.136827Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-04-04T17:23:22.136869Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:23:22.136924Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1
2025-04-04T17:23:22.136963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:23:22.137025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true
2025-04-04T17:23:22.137079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1
2025-04-04T17:23:22.137118Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0
2025-04-04T17:23:22.137158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0
2025-04-04T17:23:22.137237Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
2025-04-04T17:23:22.137556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103
2025-04-04T17:23:22.137617Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
TestModificationResult got TxId: 103, wait until txId: 103
TestWaitNotification wait txId: 103
2025-04-04T17:23:22.139521Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-04-04T17:23:22.139573Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-04-04T17:23:22.139996Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-04-04T17:23:22.140098Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-04-04T17:23:22.140141Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:565:2505]
TestWaitNotification: OK eventTxId 103
2025-04-04T17:23:22.140673Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:23:22.140876Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 242us result status StatusSuccess
2025-04-04T17:23:22.141290Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:23:22.141807Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-04-04T17:23:22.141977Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 194us result status StatusSuccess
2025-04-04T17:23:22.142323Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:23:22.142855Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546
2025-04-04T17:23:22.143080Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 197us result status StatusSuccess
2025-04-04T17:23:22.143406Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:23:17.443380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:23:17.443474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:23:17.443509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:23:17.443540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:23:17.444735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:23:17.444763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:23:17.444812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:23:17.444882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:23:17.446233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:23:17.523660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:23:17.523708Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:23:17.532158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:23:17.532451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:23:17.532652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:23:17.542578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:23:17.542861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:23:17.543604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.544386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:23:17.549171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.550502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:23:17.550571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.550755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-04-04T17:23:17.550806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:23:17.550858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-04-04T17:23:17.550996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.557597Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062]
2025-04-04T17:23:17.663633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-04-04T17:23:17.663828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.664012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-04-04T17:23:17.664195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-04-04T17:23:17.664239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.666348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.666499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-04-04T17:23:17.666648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.666705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-04-04T17:23:17.666745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-04-04T17:23:17.666775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-04-04T17:23:17.668453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.668506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-04-04T17:23:17.668561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-04-04T17:23:17.670293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.670350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.670412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.670475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.674157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-04-04T17:23:17.675918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-04-04T17:23:17.676081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-04-04T17:23:17.677038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:17.677160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-04-04T17:23:17.677207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.677501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-04-04T17:23:17.677581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-04-04T17:23:17.677754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-04-04T17:23:17.677834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-04-04T17:23:17.679879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:23:17.679927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-04-04T17:23:17.680089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:23:17.680131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-04-04T17:23:17.680537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-04-04T17:23:17.680601Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-04-04T17:23:17.680714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:17.680749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.680794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-04-04T17:23:17.680832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.680875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-04-04T17:23:17.680921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-04-04T17:23:17.680956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-04-04T17:23:17.680991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-04-04T17:23:17.681048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:23:17.681085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-04-04T17:23:17.681119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-04-04T17:23:17.683004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:17.683146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-04-04T17:23:17.683190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
shard: 72057594046678944, txId: 103, path id: 1
2025-04-04T17:23:22.143529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2
2025-04-04T17:23:22.143607Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:23:22.143662Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState
2025-04-04T17:23:22.143734Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240
2025-04-04T17:23:22.144908Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:23:22.145009Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:23:22.145050Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-04T17:23:22.145085Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-04-04T17:23:22.145121Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-04-04T17:23:22.145883Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:23:22.145970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103
2025-04-04T17:23:22.145999Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103
2025-04-04T17:23:22.146029Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-04-04T17:23:22.146059Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7
2025-04-04T17:23:22.146121Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true
2025-04-04T17:23:22.149073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944
2025-04-04T17:23:22.149137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944
2025-04-04T17:23:22.149170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944
2025-04-04T17:23:22.149194Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944
2025-04-04T17:23:22.149741Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944
2025-04-04T17:23:22.149797Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState
2025-04-04T17:23:22.149952Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:23:22.150004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.150057Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:23:22.150104Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.150175Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:23:22.150230Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.150319Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:23:22.150370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:23:22.150598Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:22.151552Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:23:22.151948Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:23:22.152297Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:23:22.152484Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-04T17:23:22.152645Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:22.152973Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186234409547 2025-04-04T17:23:22.154268Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:23:22.154480Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:23:22.154915Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-04T17:23:22.155775Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:22.158655Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:23:22.158816Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:22.159307Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 Forgetting tablet 72075186234409546 Forgetting tablet 72075186234409548 
2025-04-04T17:23:22.160417Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:23:22.160554Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:23:22.160945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:22.160989Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:22.161098Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:23:22.161975Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:22.162017Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:22.162081Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:22.164362Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:23:22.164419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:23:22.164514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:23:22.164540Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-04T17:23:22.164592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:23:22.164615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-04T17:23:22.166302Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:23:22.166356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-04T17:23:22.166516Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:23:22.166599Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:23:22.166839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:23:22.166882Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:23:22.167295Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:23:22.167386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:23:22.167422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 103: satisfy waiter [7:584:2524] TestWaitNotification: OK eventTxId 103 2025-04-04T17:23:22.167927Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:22.168142Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 257us result status StatusPathDoesNotExist 2025-04-04T17:23:22.168298Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-04-04T17:23:20.111286Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509854248488175:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:20.111360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00090a/r3tmp/tmpARWsnD/pdisk_1.dat 2025-04-04T17:23:20.457185Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:20.519444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:20.519608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:20.524324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:20.543747Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream accepted Name# Session ok# true peer# ipv6:[::1]:34460 2025-04-04T17:23:20.544456Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade attach Name# Session actor# [1:7489509854248488695:2255] peer# ipv6:[::1]:34460 2025-04-04T17:23:20.545162Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream done notification Name# Session ok# true peer# ipv6:[::1]:34460 2025-04-04T17:23:20.545237Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-04T17:23:20.547508Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-04-04T17:23:20.547568Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] deregistering request Name# Session peer# unknown (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-04-04T17:23:20.111331Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509856299507158:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:20.111385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000888/r3tmp/tmpYYLckC/pdisk_1.dat 2025-04-04T17:23:20.464171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:20.509205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:20.509406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:20.511382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:20.544852Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream accepted Name# Session ok# true peer# ipv6:[::1]:60266 2025-04-04T17:23:20.545199Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade attach Name# Session actor# [1:7489509856299507683:2255] peer# ipv6:[::1]:60266 2025-04-04T17:23:20.545230Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade read Name# Session peer# ipv6:[::1]:60266 2025-04-04T17:23:20.545284Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade finish Name# Session peer# ipv6:[::1]:60266 grpc status# (0) message# 2025-04-04T17:23:20.545583Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] read finished Name# Session ok# false data# peer# ipv6:[::1]:60266 2025-04-04T17:23:20.545616Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream done notification Name# Session ok# true peer# ipv6:[::1]:60266 2025-04-04T17:23:20.545638Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream finished Name# Session ok# true peer# ipv6:[::1]:60266 grpc status# (0) message# 2025-04-04T17:23:20.545676Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] deregistering request Name# Session peer# ipv6:[::1]:60266 (finish done) 2025-04-04T17:23:20.545709Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.443503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.443624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.443669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.443703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.444777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.444815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.444881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.444970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.446232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.507591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:17.507640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.518576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.518737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.518914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.526391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.526663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:17.529712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.530440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.536922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.545043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.545100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.545298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:17.545344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.545386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:17.545504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.550976Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:17.652291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:17.652445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.653854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:17.655746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:17.655807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.658376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.658502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:17.658639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.658682Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:17.658739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:17.658761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:17.660093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.660132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:17.660165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:17.661329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.661359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.661398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.661434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.664607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:17.665750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:17.665897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:17.666630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.666725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 
132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:17.666762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.667929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:17.667992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.668141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:17.668197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:17.669586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.669621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.669725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.669760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:17.669959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.669994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:17.670060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.670083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.670107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.670126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.670161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:17.670204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.670230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:17.670272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:17.670310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:17.670351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:17.670372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:17.671558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.671666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.671705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:22.882548Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_0', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter), operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-04-04T17:23:22.884426Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.884499Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxAlterExtSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:22.884556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 ProgressState no shards to create, do next state 2025-04-04T17:23:22.884601Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-04-04T17:23:22.886437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.886503Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:22.886548Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-04-04T17:23:22.888216Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.888275Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.888337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-04-04T17:23:22.888397Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.888562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:22.890116Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-04-04T17:23:22.890307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-04-04T17:23:22.890695Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:22.890840Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 
72057594046316545 AckTo { RawX1: 134 RawX2: 30064773228 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:22.890920Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-04T17:23:22.891246Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-04-04T17:23:22.891312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-04-04T17:23:22.891463Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:22.891654Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:394:2365], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 103 2025-04-04T17:23:22.893776Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-04-04T17:23:22.893909Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-04-04T17:23:22.894093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-04T17:23:22.894358Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:22.894417Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:22.894658Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:22.894716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T17:23:22.895186Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.895256Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-04-04T17:23:22.895301Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-04-04T17:23:22.896024Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:23:22.896138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:23:22.896183Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:23:22.896230Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-04T17:23:22.896282Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:23:22.896375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-04T17:23:22.899470Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-04-04T17:23:22.899522Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-04-04T17:23:22.899670Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-04-04T17:23:22.899709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:489:2431], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-04-04T17:23:22.900197Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-04T17:23:22.900281Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:22.900401Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:394:2365], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:23:22.900860Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-04-04T17:23:22.901414Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.901469Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:23:22.901636Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:23:22.901686Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.901743Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:23:22.901793Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.901848Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:23:22.901905Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.901956Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 103:0 2025-04-04T17:23:22.901997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:23:22.902085Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:22.902475Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:23:22.902547Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-04-04T17:23:22.904629Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:23:22.904687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:23:22.905201Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:23:22.905330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:23:22.905407Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:567:2507] TestWaitNotification: OK eventTxId 103 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-04-04T17:23:20.111292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509856707928567:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:20.111354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006d1/r3tmp/tmpeI8UdE/pdisk_1.dat 2025-04-04T17:23:20.497989Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:20.516552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:20.516882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:20.522901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:20.548169Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream accepted Name# Session ok# true peer# ipv6:[::1]:59642 2025-04-04T17:23:20.548529Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade attach Name# Session actor# [1:7489509856707929091:2255] peer# ipv6:[::1]:59642 2025-04-04T17:23:20.548563Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade read Name# Session peer# ipv6:[::1]:59642 2025-04-04T17:23:20.548723Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] read finished Name# Session ok# true data# peer# ipv6:[::1]:59642 2025-04-04T17:23:20.548766Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2025-04-04T17:23:20.548790Z node 1 :GRPC_SERVER DEBUG: 
[0x51f00002ca80] facade write Name# Session data# peer# ipv6:[::1]:59642 2025-04-04T17:23:20.549005Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade finish Name# Session peer# ipv6:[::1]:59642 grpc status# (0) message# 2025-04-04T17:23:20.549091Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] write finished Name# Session ok# true peer# ipv6:[::1]:59642 2025-04-04T17:23:20.549358Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream finished Name# Session ok# true peer# ipv6:[::1]:59642 grpc status# (0) message# 2025-04-04T17:23:20.549424Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream done notification Name# Session ok# true peer# ipv6:[::1]:59642 2025-04-04T17:23:20.549433Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] deregistering request Name# Session peer# ipv6:[::1]:59642 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.443371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.443482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.443528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.443563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.444786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.444834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.444906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.444992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.446232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.528623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:17.528674Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.539083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.539329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.539503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.546398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.546591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-04T17:23:17.547084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.547666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.551540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.552545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.552590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.552751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:17.552804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.552841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:17.552928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.557567Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:17.654824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:17.655027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.655243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:17.655752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:17.655807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.658976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.659139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:17.659279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.659326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:17.659375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:17.659407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:17.661043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.661113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:17.661155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:17.662590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.662652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.662700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.662746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.665518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:17.667148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:17.667304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:17.668258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.668368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:17.668414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.668644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:17.668693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.668864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:17.668929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:17.670604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.670638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.670767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-04T17:23:17.670799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:17.671044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.671110Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:17.671192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.671223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.671262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.671300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.671330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:17.671357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.671387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:17.671413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:17.671466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:17.671494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:17.671518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:17.672805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.672891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.672918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3.022421Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-04-04T17:23:23.022535Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:23:23.022572Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:23:23.022607Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-04-04T17:23:23.022639Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:23:23.022677Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-04-04T17:23:23.022716Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-04-04T17:23:23.022756Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-04-04T17:23:23.022802Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-04-04T17:23:23.022938Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:23.024122Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:23:23.024316Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-04-04T17:23:23.024452Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:23:23.024577Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-04-04T17:23:23.024667Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:23.024873Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:23:23.025739Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-04-04T17:23:23.025871Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:23:23.026006Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186234409547 2025-04-04T17:23:23.026359Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:23:23.026450Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:23.027406Z node 6 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:23.030546Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 
72075186234409548 2025-04-04T17:23:23.031022Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409549 2025-04-04T17:23:23.031083Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409550 Forgetting tablet 72075186234409546 2025-04-04T17:23:23.031677Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:23:23.031862Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186234409548 2025-04-04T17:23:23.033709Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:23.033780Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:23.033908Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:23:23.034636Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:23.034690Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:23.034755Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:23.036733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:23:23.036782Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:23:23.036867Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:23:23.036884Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-04-04T17:23:23.036927Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:23:23.036950Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-04-04T17:23:23.038631Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:23:23.038685Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-04-04T17:23:23.038751Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:23:23.038854Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-04-04T17:23:23.039160Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-04-04T17:23:23.039197Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 
2025-04-04T17:23:23.039584Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-04-04T17:23:23.039668Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-04-04T17:23:23.039704Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:787:2695] TestWaitNotification: OK eventTxId 105 2025-04-04T17:23:23.040130Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:23.040306Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 211us result status StatusPathDoesNotExist 2025-04-04T17:23:23.040435Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:23:23.040926Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:23.041065Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 167us result status StatusPathDoesNotExist 2025-04-04T17:23:23.041159Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:23:23.041569Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:23.041697Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 148us result status StatusSuccess 2025-04-04T17:23:23.041954Z node 6 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.512948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.513072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.513120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.513157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.513206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.513256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.513326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.513415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.513800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.603761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:17.603822Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.617884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.618224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.618507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.629723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.630071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:17.630811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.631758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.637754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.639243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.639309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.639522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:17.639564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.639602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:17.639717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.645794Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:17.804066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:17.804298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.804520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:17.804766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:17.804857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.807045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.807201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:17.807398Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.807481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:17.807519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:17.807553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:17.809372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.809441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:17.809487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:17.811261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.811314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.811375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.811423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.815755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:17.817750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:17.817915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:17.818996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.819142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:17.819193Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.819523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:17.819583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.819775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:17.819870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : 
[OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:17.821870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.821939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.822124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.822172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:17.822555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.822607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:17.822721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.822757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.822806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.822849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.822894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:17.822951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.822994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:17.823032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:17.823107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:17.823148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:17.823187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:17.825435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.825537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.825576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2.539194Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.539429Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:22.539470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:22.539614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:22.539779Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:22.539818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-04T17:23:22.539861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-04-04T17:23:22.540125Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.540186Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-04-04T17:23:22.540245Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-04-04T17:23:22.541334Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:23:22.541423Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:23:22.541456Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:23:22.541492Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:23:22.541529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:22.542278Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:23:22.542371Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:23:22.542399Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:23:22.542434Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:23:22.542467Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:22.542533Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-04T17:23:22.544630Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:23:22.544728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:23:22.544760Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:23:22.544897Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:23:22.544950Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-04-04T17:23:22.545112Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:23:22.545163Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.545216Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-04-04T17:23:22.545264Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.545314Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-04-04T17:23:22.545368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-04-04T17:23:22.545421Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:23:22.545462Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:23:22.545659Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:23:22.546590Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:23:22.547816Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-04-04T17:23:22.549079Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:22.553254Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:22.553723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:23:22.554402Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-04-04T17:23:22.554772Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-04-04T17:23:22.556360Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:23:22.556596Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 
2] was 3 Forgetting tablet 72075186233409547 2025-04-04T17:23:22.557669Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:23:22.557831Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:23:22.558915Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:23:22.559112Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:22.559169Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:22.559297Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:23:22.559618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:22.559674Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:22.559759Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:22.561805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:23:22.561876Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:23:22.561962Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:23:22.561988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:23:22.563612Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:23:22.563680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:23:22.563826Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:23:22.563960Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:23:22.564207Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:23:22.564265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:23:22.564733Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:23:22.564830Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:23:22.564881Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:540:2489] 
TestWaitNotification: OK eventTxId 103 2025-04-04T17:23:22.565453Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:22.565671Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 259us result status StatusPathDoesNotExist 2025-04-04T17:23:22.565891Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-04-04T17:23:20.114393Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509856816898346:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:20.114532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a7e/r3tmp/tmp2W00RU/pdisk_1.dat 2025-04-04T17:23:20.464147Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:20.533702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:20.534003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:20.536275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:20.545290Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream accepted Name# Session ok# true peer# ipv6:[::1]:49734 2025-04-04T17:23:20.545636Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade attach Name# Session actor# [1:7489509856816898731:2255] peer# ipv6:[::1]:49734 2025-04-04T17:23:20.545684Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade read Name# Session peer# ipv6:[::1]:49734 2025-04-04T17:23:20.545743Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade write Name# Session data# peer# ipv6:[::1]:49734 2025-04-04T17:23:20.545995Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] facade finish Name# Session peer# ipv6:[::1]:49734 grpc status# (0) message# 2025-04-04T17:23:20.546461Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] write finished Name# Session ok# true peer# ipv6:[::1]:49734 2025-04-04T17:23:20.546688Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-04T17:23:20.546740Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] read finished Name# Session ok# false data# peer# ipv6:[::1]:49734 2025-04-04T17:23:20.546869Z node 1 
:GRPC_SERVER DEBUG: [0x51f00002ca80] stream finished Name# Session ok# true peer# ipv6:[::1]:49734 grpc status# (0) message# 2025-04-04T17:23:20.546870Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] stream done notification Name# Session ok# true peer# ipv6:[::1]:49734 2025-04-04T17:23:20.546925Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ca80] deregistering request Name# Session peer# ipv6:[::1]:49734 (finish done) 2025-04-04T17:23:20.546981Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-04T17:23:20.547003Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-04-04T17:23:20.111288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509854448822621:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:20.111331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007af/r3tmp/tmpac2SjN/pdisk_1.dat 2025-04-04T17:23:20.454845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:20.509536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:20.509635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:20.511221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:20.543884Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:60306 2025-04-04T17:23:20.544223Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7489509854448823147:2255] peer# ipv6:[::1]:60306 2025-04-04T17:23:20.544262Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:60306 2025-04-04T17:23:20.544493Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:60306 grpc status# (0) message# 2025-04-04T17:23:20.544990Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:60306 2025-04-04T17:23:20.545090Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-04-04T17:23:20.545298Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:60306 2025-04-04T17:23:20.545334Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:60306 grpc status# (0) message# 2025-04-04T17:23:20.545355Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:60306 2025-04-04T17:23:20.545378Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:60306 (finish done) 2025-04-04T17:23:20.545409Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-04-04T17:23:20.113539Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509857598010752:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:20.113728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007e8/r3tmp/tmp0HwMkn/pdisk_1.dat 2025-04-04T17:23:20.460646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:20.507076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:20.508082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:20.511205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:23:20.543756Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream accepted Name# Session ok# true peer# ipv6:[::1]:39458 2025-04-04T17:23:20.544045Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade attach Name# Session actor# [1:7489509857598011285:2255] peer# ipv6:[::1]:39458 2025-04-04T17:23:20.544078Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade read Name# Session peer# ipv6:[::1]:39458 2025-04-04T17:23:20.544219Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] read finished Name# Session ok# false data# peer# ipv6:[::1]:39458 2025-04-04T17:23:20.544310Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-04-04T17:23:20.544365Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] facade finish Name# Session peer# ipv6:[::1]:39458 grpc status# (9) message# Everything is A-OK 2025-04-04T17:23:20.546546Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream done notification Name# Session ok# true peer# ipv6:[::1]:39458 2025-04-04T17:23:20.546674Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] stream finished Name# Session ok# true peer# ipv6:[::1]:39458 grpc status# (9) message# Everything is A-OK 2025-04-04T17:23:20.546717Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-04-04T17:23:20.546720Z node 1 :GRPC_SERVER DEBUG: [0x51f000029280] deregistering request Name# Session peer# ipv6:[::1]:39458 (finish done) >> ResultFormatter::Pg [GOOD] >> TCdcStreamTests::DocApiNegative [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TCdcStreamTests::Negative >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.443387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.443526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-04-04T17:23:17.443600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.443663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.444779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.444854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.444960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.445071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.446182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.529999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:17.530066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.541438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.541682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.541892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.552092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.552418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:17.553071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.553866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.559200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.560455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.560533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.560726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:17.560808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.560900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:17.561051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.567592Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:17.663246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" 
Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:17.663450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.663640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:17.663810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:17.663863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.665557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.665684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:17.665842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.665891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:17.665922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:17.665947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:17.667366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.667407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:17.667433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:17.668689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.668723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.668771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.668830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.671513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:17.672784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:17.672921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:17.673685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.673789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:17.673834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.674073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:17.674114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.674260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:17.674331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:17.675815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.675852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.675979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.676010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:17.676263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.676320Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:17.676397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.676426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.676464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.676499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.676533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:17.676573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.676601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:17.676632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:17.676676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:17.676705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:17.676762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:17.677997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.678081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.678120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... D INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:23:23.031801Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:23.031866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:23:23.034364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:23:23.034513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-04T17:23:23.035547Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:23.035696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773228 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:23.035770Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:23:23.035871Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:23.035916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:23.035963Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-04-04T17:23:23.036614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:23.037115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:23:23.038604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.038669Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:23.038796Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 
2025-04-04T17:23:23.039103Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:23.039180Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:23:23.040910Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:23.040953Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:23.041079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:23.041227Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:23.041260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-04-04T17:23:23.041294Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:23:23.041575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.041622Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-04-04T17:23:23.041669Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-04-04T17:23:23.042515Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:23.042595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:23.042623Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:23:23.042654Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:23:23.042687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:23.043119Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:23.043205Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:23.043230Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:23:23.043257Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 
2025-04-04T17:23:23.043284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:23:23.043339Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:23:23.046070Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.046128Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:23:23.046314Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:23:23.046366Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:23.046415Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:23:23.046457Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:23.046507Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:23:23.046562Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:23.046613Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:23:23.046655Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:23:23.046733Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:23:23.047561Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:23.047632Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:23.047708Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:23:23.048236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:23.048301Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:23.048374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:23.048769Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:23.049051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:23.051037Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-04-04T17:23:23.051134Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:23:23.051371Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:23:23.051419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:23:23.051874Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:23:23.052003Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:23:23.052048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:343:2334] TestWaitNotification: OK eventTxId 102 2025-04-04T17:23:23.052578Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:23.052780Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 245us result status StatusPathDoesNotExist 2025-04-04T17:23:23.052943Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:13.941431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:13.941558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.941614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:13.941649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:13.941693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:13.941715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:13.941766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:13.941883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:13.942172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:14.021427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:14.021488Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:14.032517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:14.032714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:14.032872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:14.042381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:14.042667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:14.043422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.044391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:14.049858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.051344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.051422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.051634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.051682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.051730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.051916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.061310Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.223085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.223345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.223579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.223789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.223837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.225945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.226083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.226286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.226351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.226405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.226438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.228174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.228243Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.228283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.229827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.229879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.229920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.229969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.233953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.235634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.235883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.236795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.236921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.236969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.237218Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.237297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.237473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.237543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:14.240001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.240061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.240338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.240392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.240771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.240845Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.240951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.240988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.241035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.241083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.241123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.241181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.241228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.241263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.241334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.241377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.241409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.243884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.244024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.244069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2025-04-04T17:23:21.275930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:23:21.275963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-04-04T17:23:21.276017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:23:21.276124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2025-04-04T17:23:21.276141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2025-04-04T17:23:21.276183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2025-04-04T17:23:21.276196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2025-04-04T17:23:21.276237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2025-04-04T17:23:21.276250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2025-04-04T17:23:21.276281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-04-04T17:23:21.276294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-04-04T17:23:21.276332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2025-04-04T17:23:21.276347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2025-04-04T17:23:21.276384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2025-04-04T17:23:21.276397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2025-04-04T17:23:21.276457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2025-04-04T17:23:21.276471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 2025-04-04T17:23:21.276499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2025-04-04T17:23:21.276512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2025-04-04T17:23:21.279598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2025-04-04T17:23:21.279624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2025-04-04T17:23:21.280423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-04-04T17:23:21.280454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-04-04T17:23:21.280565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:23:21.280588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:23:21.280633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:23:21.280647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:23:21.280703Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-04-04T17:23:21.280717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-04-04T17:23:21.280771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-04-04T17:23:21.280785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-04-04T17:23:21.280829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-04-04T17:23:21.280845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-04-04T17:23:21.280895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-04-04T17:23:21.280913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-04-04T17:23:21.281555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-04-04T17:23:21.281575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-04-04T17:23:21.281627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-04-04T17:23:21.281640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-04-04T17:23:21.281677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-04-04T17:23:21.281690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-04-04T17:23:21.281727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-04-04T17:23:21.281740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-04-04T17:23:21.281937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-04-04T17:23:21.281959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-04-04T17:23:21.282012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-04-04T17:23:21.282027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-04-04T17:23:21.282413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2025-04-04T17:23:21.282436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-04-04T17:23:21.282543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-04-04T17:23:21.282558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-04-04T17:23:21.284212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-04-04T17:23:21.284244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-04-04T17:23:21.284315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-04-04T17:23:21.284329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-04-04T17:23:21.284359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-04-04T17:23:21.284371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-04-04T17:23:21.284537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-04-04T17:23:21.284556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-04-04T17:23:21.284795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-04-04T17:23:21.284812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-04-04T17:23:21.284986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2025-04-04T17:23:21.285002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2025-04-04T17:23:21.285321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2025-04-04T17:23:21.285338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-04-04T17:23:21.285473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2025-04-04T17:23:21.285488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-04-04T17:23:21.285930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2025-04-04T17:23:21.285965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-04-04T17:23:21.286031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2025-04-04T17:23:21.286054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-04-04T17:23:21.286130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2025-04-04T17:23:21.286154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2025-04-04T17:23:21.288454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2025-04-04T17:23:21.288480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-04-04T17:23:21.288524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:43 2025-04-04T17:23:21.288540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-04-04T17:23:21.288575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2025-04-04T17:23:21.288601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-04-04T17:23:21.288735Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2025-04-04T17:23:21.289129Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:21.289287Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 178us result status StatusPathDoesNotExist 2025-04-04T17:23:21.289389Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:23:21.289737Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-04-04T17:23:21.289794Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 57us result status StatusPathDoesNotExist 2025-04-04T17:23:21.289863Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2025-04-04T17:22:55.152652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509749840873166:2273];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.152729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001818/r3tmp/tmpFtFyBW/pdisk_1.dat 2025-04-04T17:22:55.607953Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:55.625243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.625325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.630204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26904, node 1 2025-04-04T17:22:55.811010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.811046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.811056Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.811209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15806 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.351765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.444238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15806 2025-04-04T17:22:56.627171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.916430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:22:56.950952Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T17:22:56.952238Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T17:22:56.952269Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-04-04T17:22:56.952285Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T17:22:59.803899Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509766439582029:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:59.804054Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001818/r3tmp/tmpTd5X34/pdisk_1.dat 2025-04-04T17:22:59.968635Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:00.011072Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:00.011204Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:00.014712Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10279, 
node 4 2025-04-04T17:23:00.188916Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:00.188967Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:00.188984Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:00.189140Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:00.469033Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.528538Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22629 2025-04-04T17:23:00.711787Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:00.727158Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-04T17:23:00.904410Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:23:00.964120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:00.980864Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-04-04T17:23:00.980913Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-04-04T17:23:00.980929Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-04-04T17:23:00.980955Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-04-04T17:23:00.989050Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-04T17:23:00.989143Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-04-04T17:23:00.989205Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-04-04T17:23:00.989298Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-04T17:23:04.624568Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509788046189802:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:04.625020Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001818/r3tmp/tmpup3hFA/pdisk_1.dat 2025-04-04T17:23:04.794619Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:04.841034Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:04.841144Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:04.844386Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16184, node 7 2025-04-04T17:23:04.908664Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:04.908698Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:04.908707Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:04.908875Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26542 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:05.191180Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:05.277877Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:26542 2025-04-04T17:23:05.485430Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:05.684569Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:23:05.738942Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:05.786259Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:23:05.804046Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-04-04T17:23:05.804079Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-04-04T17:23:05.804093Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-04-04T17:23:05.806118Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-04-04T17:23:05.809300Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-04-04T17:23:05.809513Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-04-04T17:23:05.809546Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-04-04T17:23:05.809608Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-04-04T17:23:09.151833Z 
node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509809935593163:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:09.151965Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001818/r3tmp/tmpH4hgNo/pdisk_1.dat 2025-04-04T17:23:09.250772Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:09.277859Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:09.277933Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:09.280419Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21042, node 10 2025-04-04T17:23:09.335781Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:09.335804Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:09.335811Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:09.335919Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:09.641274Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:09.696233Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:28609 2025-04-04T17:23:09.882306Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:09.901740Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-04T17:23:14.151902Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489509809935593163:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:14.152021Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2025-04-04T17:22:55.129721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509749178954466:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.134007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001823/r3tmp/tmpOR6hsx/pdisk_1.dat 2025-04-04T17:22:55.560664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.560792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.576849Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:55.583198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64974, node 1 2025-04-04T17:22:55.811149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.811174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.811182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.811280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.371347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:56.475697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5293 2025-04-04T17:22:56.673936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:57.030581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: "shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" 
shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } 2025-04-04T17:23:00.128958Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509749178954466:2105];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:00.129063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: 
"shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000009" } records { sequence_number: "18" shard_id: "shard-000001" } records { sequence_number: "19" shard_id: "shard-000009" } records { sequence_number: "19" shard_id: "shard-000004" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "20" shard_id: "shard-000001" } records { sequence_number: "20" shard_id: "shard-000009" } records { sequence_number: "20" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000005" } records { sequence_number: "8" shard_id: "shard-000008" } records { sequence_number: "21" shard_id: "shard-000004" } rec ... older_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787394324-170","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1743787394,"finish":1743787394},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787394}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787394324-171","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1743787394,"finish":1743787394},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787394}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743787394324-172","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743787394,"finish":1743787394},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787394}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743787394357-173","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743787394,"finish":1743787395},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787395}' Got line 
from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787394357-174","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743787394,"finish":1743787395},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787395}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787394357-175","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743787394,"finish":1743787395},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787395}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743787394357-176","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787394,"finish":1743787395},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787395}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743787395384-177","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743787395,"finish":1743787396},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787396}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787395384-178","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743787395,"finish":1743787396},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787396}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787395384-179","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743787395,"finish":1743787396},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787396}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743787395384-180","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787395,"finish":1743787396},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787396}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743787396399-181","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743787396,"finish":1743787397},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787397}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787396399-182","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743787396,"finish":1743787397},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787397}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787396399-183","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743787396,"finish":1743787397},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787397}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743787396399-184","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787396,"finish":1743787397},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787397}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743787397451-185","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743787397,"finish":1743787398},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787398}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787397451-186","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743787397,"finish":1743787398},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787398}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787397451-187","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743787397,"finish":1743787398},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787398}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743787397451-188","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787397,"finish":1743787398},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787398}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743787398465-189","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743787398,"finish":1743787399},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787399}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787398465-190","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743787398,"finish":1743787399},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787399}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743787398465-191","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743787398,"finish":1743787399},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743787399}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743787398465-192","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743787398,"finish":1743787399},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743787399}' |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.443345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.443460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.443506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.443542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.444765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.444810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.444910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.444986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.446196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.525360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:17.525418Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.536012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.536245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.536407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.545476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.545772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:17.546455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-04-04T17:23:17.547173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.551788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.552868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.552923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.553076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:17.553123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.553163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:17.553277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.558982Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:17.685471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:17.685679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.685878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:17.686114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:17.686165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.688135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.688259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:17.688426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.688489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:17.688536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:17.688573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:17.690265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.690315Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:17.690346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:17.691967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.692011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.692058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.692098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.695564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:17.697229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:17.697416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:17.698380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.698493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:17.698537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.698813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:17.698862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.699013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:17.699092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:17.700960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.701017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.701171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.701221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:17.701512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.701558Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:17.701640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.701673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.701714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.701750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.701785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:17.701833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.701872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:17.701899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:17.701981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:17.702023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:17.702051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:17.703685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.703774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.703833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-04T17:23:23.280014Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-04-04T17:23:23.281572Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:23:23.281755Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:23:23.281790Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:23:23.282095Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:23:23.282134Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:23:23.282177Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:23:23.314929Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-04-04T17:23:23.315111Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-04-04T17:23:23.315169Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-04-04T17:23:23.315229Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-04T17:23:23.315271Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-04T17:23:23.316807Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.316935Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.316979Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.317019Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-04-04T17:23:23.317066Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:23:23.317185Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:23.318404Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:23:23.318535Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-04T17:23:23.318821Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:23.318933Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 34359740523 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:23.318998Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-04T17:23:23.319244Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-04-04T17:23:23.319292Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-04-04T17:23:23.319393Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:23.319476Z node 8 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:364:2339], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:23:23.321057Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:23.321108Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:23.321265Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:23.321304Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:23:23.321605Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.321655Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-04-04T17:23:23.321692Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-04-04T17:23:23.322310Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:23:23.322432Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 
72057594046678944, cookie: 102 2025-04-04T17:23:23.322476Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:23:23.322522Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:23:23.322565Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:23:23.322653Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-04-04T17:23:23.324694Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:23:23.324740Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:23:23.324868Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:23:23.324910Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:23.324956Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:23:23.324991Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:23.325028Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:23:23.325089Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:304:2295] message: TxId: 102 2025-04-04T17:23:23.325138Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:23:23.325177Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:23:23.325208Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:23:23.325362Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:23.325880Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:23:23.327185Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:23:23.327232Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:510:2449] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-04T17:23:23.329560Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:23.329692Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-04-04T17:23:23.329731Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-04-04T17:23:23.329834Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 
103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-04T17:23:23.329873Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-04-04T17:23:23.331674Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:23.331819Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.443342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.443461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.443510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.443545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.444772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.444817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.444878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.444968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.446199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.516314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-04T17:23:17.516359Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.523680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.523826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.523944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.529961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.530130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:17.530627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.531198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.536819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.545009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.545063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.545201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:17.545256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.545376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:17.545486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.550263Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:17.650684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:17.652083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.653843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:17.655747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:17.655798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.658376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.658506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-04T17:23:17.658639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.658761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:17.658796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:17.658818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:17.660099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.660135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:17.660157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:17.661363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.661394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.661424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.661476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.664722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:17.665947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:17.666059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:17.666729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.666840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:17.666877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.667921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:17.667969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.668115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:17.668200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:17.669585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.669620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.669736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.669760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:17.669967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.670003Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:17.670069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.670098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.670128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.670149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.670174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:17.670229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.670272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:17.670296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:17.670331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:17.670355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:17.670375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:17.671521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.671588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.671611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773228 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:23.474352Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 104:0, stepId:5000005, at schemeshard: 72057594046678944 2025-04-04T17:23:23.474526Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:23:23.474561Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:23:23.474599Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:23:23.474631Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:23:23.474693Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:23:23.474751Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T17:23:23.474876Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:356:2334], msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72057594046678944 2025-04-04T17:23:23.474919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:23:23.474960Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:23:23.475000Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:23:23.475060Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-04-04T17:23:23.475123Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-04-04T17:23:23.475162Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-04-04T17:23:23.476522Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-04-04T17:23:23.476618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-04-04T17:23:23.476770Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-04-04T17:23:23.476929Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:23.476969Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:23:23.477147Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:23.477188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 2025-04-04T17:23:23.477735Z node 7 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:23.477819Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:23:23.477859Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:23:23.477901Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-04-04T17:23:23.477942Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:23:23.478011Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-04T17:23:23.479857Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-04T17:23:23.479938Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:23.480031Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:356:2334], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:23:23.480164Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:23:23.480188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:23:23.480276Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:23:23.480300Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:445:2397], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-04-04T17:23:23.480760Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-04-04T17:23:23.480823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:23:23.480961Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, 
msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:23:23.481154Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:23:23.481192Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:23:23.481533Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:23:23.481604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:23:23.481640Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:545:2495] TestWaitNotification: OK eventTxId 104 2025-04-04T17:23:23.482048Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:23:23.482196Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 172us result status StatusSuccess 2025-04-04T17:23:23.482580Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:23.483173Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-04-04T17:23:23.483321Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 163us result status StatusSuccess 2025-04-04T17:23:23.483612Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" 
PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |94.6%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-04-04T17:19:12.150649Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:12.254504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:12.277180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:12.277389Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:12.284815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:12.285057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:12.285319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:12.285439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:12.285561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:12.285683Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:12.285798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:12.285913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:12.286079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:12.286207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.286384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:12.286488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:12.308790Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:12.309022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:12.309104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:12.309285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.309435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:12.309502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:12.309554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:12.309683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:12.309756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:12.309803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:12.309841Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:12.310055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.310132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:12.310175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:12.310206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:12.310349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:12.310437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:12.310502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:12.310529Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:12.310623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:12.310670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:12.310713Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:12.310767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:12.310805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:12.310837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:12.311246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-04T17:19:12.311354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-04T17:19:12.311439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T17:19:12.311525Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T17:19:12.311693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:12.311752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:12.311785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:12.311937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:12.311978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.312002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.312107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:12.312134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:12.312164Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:12.312307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:12.312336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:12.312361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:12.312460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:12.312489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:12.312533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BL
OB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLO
B:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_
size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-04-04T17:23:22.949866Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11455:13082];fline=general_compaction.cpp:131;event=blobs_created;appended=1;switched=2; 2025-04-04T17:23:22.952006Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11455:13082];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream |94.7%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl >> TColumnShardTestSchema::ColdTiersWithStat >> TColumnShardTestSchema::ExportWithLostAnswer >> TColumnShardTestSchema::RebootInternalTTL |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke >> TColumnShardTestSchema::RebootExternalTTL >> TColumnShardTestSchema::RebootHotTiersTtl >> TColumnShardTestSchema::EnableColdTiersAfterTtl >> TColumnShardTestSchema::RebootOneTier >> TColumnShardTestSchema::RebootHotTiersRevCompression >> TColumnShardTestSchema::InternalTTL >> TColumnShardTestSchema::OneColdTier >> TColumnShardTestSchema::RebootDrop >> TColumnShardTestSchema::RebootHotTiersTtlWithStat >> TColumnShardTestSchema::OneTierExternalTtl >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TColumnShardTestSchema::ColdTiers >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> TColumnShardTestSchema::HotTiersRevCompression >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> ColumnShardTiers::DSConfigs [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer >> TColumnShardTestSchema::DropWriteRace >> TColumnShardTestSchema::HotTiers >> TColumnShardTestSchema::CreateTable >> TColumnShardTestSchema::ExternalTTL_Types >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> TColumnShardTestSchema::ForgetAfterFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:17.443351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:17.443466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.443513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 
0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:17.443570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:17.444787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:17.444840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:17.444913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:17.445013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:17.446201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:17.527761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:17.527829Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:17.539014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:17.539276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:17.539485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:17.548877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:17.549214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:17.549904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.550658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:17.555484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.556745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.556813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.557019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:17.557075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.557125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:17.557257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.563577Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:17.680228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-04-04T17:23:17.680422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.680591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:17.680775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:17.680819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.682630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.682760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:17.682889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.682940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:17.682974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:17.683003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:17.684558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.684607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:17.684639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:17.685952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.685983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.686039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.686072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.688648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:17.689930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:17.690059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:17.690821Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:17.690928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:17.690966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.691203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:17.691241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:17.691361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:17.691414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:17.693023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:17.693059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:17.693209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:17.693257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:17.693500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:17.693536Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:17.693617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.693645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.693672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:17.693717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.693751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:17.693782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:17.693811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:17.693845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:17.693892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:17.693920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:17.693955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:17.695311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.695406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:17.695442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-04-04T17:23:27.825608Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-04-04T17:23:27.826068Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-04-04T17:23:27.826126Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-04-04T17:23:27.826282Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-04-04T17:23:27.826352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-04-04T17:23:27.828903Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-04-04T17:23:27.829106Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-04-04T17:23:27.829366Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-04-04T17:23:27.829417Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-04-04T17:23:27.829628Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-04-04T17:23:27.829709Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-04-04T17:23:27.829760Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:727:2629], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-04-04T17:23:27.829823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:727:2629], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-04-04T17:23:27.830425Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-04T17:23:27.830498Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-04-04T17:23:27.830752Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } 
BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-04T17:23:27.831418Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-04T17:23:27.831519Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-04-04T17:23:27.831560Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-04T17:23:27.831607Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-04-04T17:23:27.831654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-04-04T17:23:27.832295Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-04T17:23:27.832359Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-04-04T17:23:27.832380Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-04-04T17:23:27.832406Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-04-04T17:23:27.832429Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-04-04T17:23:27.832483Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-04-04T17:23:27.834916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-04-04T17:23:27.835058Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-04-04T17:23:27.835125Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-04T17:23:27.836033Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-04-04T17:23:27.836259Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-04-04T17:23:27.836387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 
message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-04T17:23:27.836447Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-04-04T17:23:27.836560Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-04T17:23:27.836626Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-04-04T17:23:27.836728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-04-04T17:23:27.836827Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-04-04T17:23:27.837933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-04T17:23:27.838092Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-04-04T17:23:27.840761Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-04T17:23:27.841096Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-04-04T17:23:27.841150Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-04-04T17:23:27.841228Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-04-04T17:23:27.841575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 676 RawX2: 30064773661 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 
ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-04-04T17:23:27.844785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2025-04-04T17:23:27.844963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-04-04T17:23:27.869902Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-04-04T17:23:27.872082Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-04T17:23:27.872262Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] >> TColumnShardTestSchema::HotTiersTtlWithStat |94.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: 2025-04-04T17:21:28.765117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:28.765569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:28.765632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00117d/r3tmp/tmpCJ3OCg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26407, node 1 TClient is connected to server localhost:23965 2025-04-04T17:21:29.636256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:29.689528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:29.693684Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:29.693745Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:29.693789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:29.695084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:29.735901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:29.736735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:29.749868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-04T17:21:41.983104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:41.983266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:41.987796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-04T17:21:42.189974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:872:2711], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:42.190072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:42.190374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:877:2716], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:42.194665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-04T17:21:42.319375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2718], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:21:42.916472Z node 1 :TX_PROXY ERROR: Actor# [1:975:2785] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:43.442481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:43.926582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-04T17:21:44.680429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.451398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.938532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:21:47.105923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:21:47.406347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:02.760762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-04-04T17:22:04.621388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:04.621464Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey",
AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:04.936205Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-04-04T17:22:04.936299Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-04T17:22:04.936352Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-04-04T17:22:04.936432Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-04T17:22:04.936689Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-04-04T17:22:04.936822Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-04T17:22:04.936875Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-04-04T17:22:04.936940Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-04T17:22:04.937012Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:22:04.938832Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-04-04T17:22:04.940116Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-04-04T17:22:04.940280Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-04-04T17:22:04.940443Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-04T17:22:04.940529Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-04T17:22:04.940598Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:16.260093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715723:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION= ...
:Tier '/Root/tier2' stopped at tablet 0 2025-04-04T17:23:04.765575Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-04T17:23:04.765629Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-04T17:23:15.899223Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:15.899348Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:15.899377Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:15.899399Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:15.899434Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:15.899701Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:15.899743Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-04T17:23:15.899798Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T17:23:15.899850Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-04T17:23:15.899893Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:15.899948Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-04T17:23:15.900003Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:15.900771Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:15.900808Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-04T17:23:15.900837Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T17:23:15.900860Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-04T17:23:15.900882Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:15.900912Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-04T17:23:15.900940Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:15.901036Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:15.901061Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-04-04T17:23:15.901083Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-04-04T17:23:15.901102Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-04-04T17:23:15.901119Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet
72075186224037892 2025-04-04T17:23:15.901144Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-04-04T17:23:15.901177Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:15.901212Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:15.901234Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-04-04T17:23:15.901253Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-04-04T17:23:15.901273Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-04-04T17:23:15.901290Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-04T17:23:15.901324Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-04-04T17:23:15.901351Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:15.901374Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:15.901392Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-04-04T17:23:15.901412Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-04-04T17:23:15.901439Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-04-04T17:23:15.901462Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-04T17:23:15.901492Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-04-04T17:23:15.901522Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:15.902171Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3109:4415];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-04T17:23:15.902275Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3114:4417];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-04T17:23:15.902335Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3122:4421];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-04T17:23:26.852251Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:26.852604Z node 1 :TX_TIERING DEBUG:
fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:26.853142Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:26.853196Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-04T17:23:26.853253Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:26.853331Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:26.853495Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:26.853535Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:26.853569Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:26.853646Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:26.853785Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:26.853818Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-04T17:23:26.853851Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:26.853896Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:26.853982Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:26.854007Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-04T17:23:26.854034Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:26.854067Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:26.854480Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:26.854518Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-04-04T17:23:26.854546Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-04T17:23:26.854602Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:26.854634Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:26.854659Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-04-04T17:23:26.854683Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-04T17:23:26.854724Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:26.854753Z node 1 :TX_TIERING DEBUG:
fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:26.854777Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-04-04T17:23:26.854803Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-04T17:23:26.854839Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:26.855174Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3109:4415];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-04T17:23:26.855258Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3114:4417];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-04T17:23:26.855313Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3122:4421];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: 2025-04-04T17:21:28.529449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:28.530120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:28.530215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001195/r3tmp/tmpgu5YWH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13640, node 1 TClient is connected to server localhost:9730 2025-04-04T17:21:29.639710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:29.688254Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:29.699057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:29.699125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:29.699185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:29.699678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:29.735922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:29.736745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:29.749988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-04T17:21:41.520331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:41.520466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:757:2632], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:41.520571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:41.537414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:21:41.555574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:761:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:21:41.621070Z node 1 :TX_PROXY ERROR: Actor# [1:812:2667] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:42.050665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-04-04T17:21:43.059532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:43.499634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-04-04T17:21:44.347206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.118926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:21:45.581390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:21:47.070750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:21:47.398589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:21:51.707594Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jr0x56k76xvqcxhdcvd8hy2r", SessionId: ydb://session/3?node_id=1&id=ZGUxYmNkMTItYTNjMjNiMmUtNWJlYWVhNTMtOTAyMDU0Yw==, Slow query, duration: 10.196249s, status: STATUS_CODE_UNSPECIFIED, user: root@builtin, results: 0b, text: "\n UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`);\n UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`);\n ", parameters: 0b REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:02.771947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at
schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-04-04T17:22:04.565117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:22:04.565191Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:04.868832Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-04-04T17:22:04.868917Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-04T17:22:04.868961Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-04-04T17:22:04.869025Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-04-04T17:22:04.869225Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-04-04T17:22:04.869422Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-04-04T17:22:04.869458Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-04-04T17:22:04.869500Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-04-04T17:22:04.869554Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:22:04.870727Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-04-04T17:22:04.871839Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-04-04T17:22:04.871992Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-04-04T17:22:04.872144Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-04-04T17:22:04.872226Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-04T17:22:04.872287Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:15.872723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715723:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH (
SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025 ... :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T17:23:05.083736Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-04-04T17:23:05.083799Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-04-04T17:23:16.188466Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:16.188653Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:16.188701Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:16.188738Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:16.188784Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-04-04T17:23:16.188936Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:16.188990Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-04T17:23:16.189055Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T17:23:16.189121Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-04T17:23:16.189170Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:16.189256Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-04T17:23:16.189330Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:16.189455Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:16.189490Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-04-04T17:23:16.189528Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-04-04T17:23:16.189565Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-04-04T17:23:16.189595Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-04T17:23:16.189632Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-04-04T17:23:16.189686Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:16.189722Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:16.189752Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-04-04T17:23:16.189780Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-04-04T17:23:16.189809Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-04-04T17:23:16.189835Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-04T17:23:16.189869Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-04-04T17:23:16.189911Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:16.189950Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:16.189977Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-04-04T17:23:16.190008Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-04-04T17:23:16.190037Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-04-04T17:23:16.190066Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-04T17:23:16.190100Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-04-04T17:23:16.190140Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:16.190186Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-04-04T17:23:16.190212Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-04-04T17:23:16.190260Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-04-04T17:23:16.190293Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-04-04T17:23:16.190320Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:16.190355Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-04-04T17:23:16.190393Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:16.190822Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3141:4440];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-04T17:23:16.190941Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3144:4442];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-04-04T17:23:16.191015Z node 1 :TX_TIERING DEBUG:
tablet_id=72075186224037894;self_id=[1:3147:4444];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-04-04T17:23:27.141243Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:27.141809Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:27.141880Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-04T17:23:27.141937Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:27.142029Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:27.142261Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:27.142310Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:27.142344Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:27.142425Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:27.142585Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-04-04T17:23:27.143173Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:27.143219Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-04-04T17:23:27.143261Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-04-04T17:23:27.143327Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:27.143367Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:27.143396Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-04-04T17:23:27.143426Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-04-04T17:23:27.143468Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:27.143502Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:27.143529Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-04-04T17:23:27.143555Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-04-04T17:23:27.143594Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:27.143637Z node 1 :TX_TIERING DEBUG:
fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:27.143666Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-04T17:23:27.143696Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:27.143735Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:27.143776Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-04-04T17:23:27.143803Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-04-04T17:23:27.143827Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-04-04T17:23:27.143862Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-04-04T17:23:27.144418Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3141:4440];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-04T17:23:27.144515Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3144:4442];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-04-04T17:23:27.144570Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3147:4444];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:234;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> TColumnShardTestSchema::DropWriteRace [GOOD] >> TColumnShardTestSchema::ExportAfterFail >> TColumnShardTestSchema::CreateTable [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2025-04-04T17:23:28.968270Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:29.046132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:29.062804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:29.063046Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:29.069431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:29.069611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:29.069816Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:29.069898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:29.069962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:29.070036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:29.070119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:29.070189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:29.070307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:29.070384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.070473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:29.070577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:29.091985Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:29.092158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:29.092210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:29.092364Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.092506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:29.092569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:29.092601Z node 1 :TX_COLUMNSHARD NOTICE:
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:29.092692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:29.092745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:29.092786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:29.092820Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:29.092990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.093036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:29.093064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:29.093083Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:29.093180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:29.093223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:29.093251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:29.093271Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:29.093326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:29.093356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:29.093385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:29.093423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:29.093452Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:29.093477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:29.093787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-04T17:23:29.093961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=127; 2025-04-04T17:23:29.094025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-04-04T17:23:29.094097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-04T17:23:29.094301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:29.094365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:29.094411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:29.094590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:29.094620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.094641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.094745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:29.094773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:29.094824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:29.094969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:29.095024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:29.095058Z node 1 :TX_COLUMNSHARD NOTICE:
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:23:29.095185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:23:29.095220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:23:29.095269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=14; 2025-04-04T17:23:29.332269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=50; 2025-04-04T17:23:29.332312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-04T17:23:29.332417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=65; 2025-04-04T17:23:29.332467Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-04T17:23:29.332533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-04T17:23:29.332651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=74; 2025-04-04T17:23:29.332728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=27; 2025-04-04T17:23:29.332772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=4500; 2025-04-04T17:23:29.332936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:23:29.333008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:23:29.333101Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:23:29.333431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:23:29.333500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;fline=columnshard_impl.cpp:519;problem=Background activities cannot be started: no index at tablet; 2025-04-04T17:23:29.333752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:23:29.333897Z node 1 :TX_COLUMNSHARD DEBUG:
tablet_id=9437184;self_id=[1:137:2169];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:23:29.333982Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:23:29.334014Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-04T17:23:29.334042Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:23:29.334095Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:23:29.334153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:519;problem=Background activities cannot be started: no index at tablet; 2025-04-04T17:23:30.024028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004793728;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;fline=schema.h:34;event=sync_schema; 2025-04-04T17:23:30.036348Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;this=88923004793728;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;int_this=89197881128640;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2025-04-04T17:23:30.036437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;this=88923004793728;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;int_this=89197881128640;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:98:2133]; 2025-04-04T17:23:30.036504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;this=88923004793728;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:0;;int_this=89197881128640;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2025-04-04T17:23:30.036782Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000001 at tablet 9437184, mediator 0 2025-04-04T17:23:30.036835Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-04-04T17:23:30.037163Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-04-04T17:23:30.041757Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T17:23:30.041893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-04-04T17:23:30.041935Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:143;event=RegisterTable;path_id=1; 2025-04-04T17:23:30.047977Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:487;event=OnTieringModified;path_id=1; 2025-04-04T17:23:30.048184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:211;event=finished_tx;tx_id=101; 2025-04-04T17:23:30.072060Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-04-04T17:23:30.072229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2025-04-04T17:23:30.089796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2025-04-04T17:23:30.092045Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-04-04T17:23:30.092417Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-04T17:23:30.104937Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-04-04T17:23:30.105170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:23:30.131934Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;this=88923005121888;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;int_this=89197881206208;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2025-04-04T17:23:30.132021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;this=88923005121888;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;int_this=89197881206208;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:98:2133]; 2025-04-04T17:23:30.132081Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;this=88923005121888;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:98:2133];cookie=00:2;;int_this=89197881206208;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103;
2025-04-04T17:23:30.132344Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000002 at tablet 9437184, mediator 0
2025-04-04T17:23:30.132393Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184
2025-04-04T17:23:30.132686Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184
2025-04-04T17:23:30.132789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=tx_controller.cpp:211;event=finished_tx;tx_id=103;
2025-04-04T17:23:30.145760Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184
2025-04-04T17:23:30.145919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0;
2025-04-04T17:23:30.146210Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000003 at tablet 9437184, mediator 0
2025-04-04T17:23:30.146299Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] execute at tablet 9437184
2025-04-04T17:23:30.146597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:83;progress_tx_id=102;lock_id=1;broken=0;
2025-04-04T17:23:30.146777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;commit_tx_id=102;commit_lock_id=1;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 };
2025-04-04T17:23:30.146929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:211;event=finished_tx;tx_id=102;
2025-04-04T17:23:30.159373Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] complete at tablet 9437184
2025-04-04T17:23:30.159509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=102;lock_id=1;broken=0;
2025-04-04T17:23:30.159651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0;
|94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD]
>> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD]
Test command err:
2025-04-04T17:23:28.976968Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-04-04T17:23:29.077278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-04-04T17:23:29.103447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-04-04T17:23:29.103771Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-04-04T17:23:29.112842Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:29.113084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:29.113334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:29.113452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:29.113608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:29.113730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:29.113876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:29.114014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:29.114165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:29.114331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.114446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:29.114549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:29.143650Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:29.144240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:29.144301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:29.144493Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.144675Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:29.144749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:29.144825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:29.144935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:29.145009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:29.145051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:29.145128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:29.145299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.145371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:29.145416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:29.145445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:29.145558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:29.145613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:29.145659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:29.145694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:29.145757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:29.145799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:29.145832Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-04-04T17:23:29.145900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:29.145941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:29.145997Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:29.146472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-04T17:23:29.146579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-04-04T17:23:29.146661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-04T17:23:29.146749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-04T17:23:29.146921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:29.146988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:29.147023Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:29.147308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:29.147366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.147402Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.147577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:29.147637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:29.147694Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:29.147907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:29.147962Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:29.147993Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:23:29.148137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:23:29.148195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:23:29.148250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... s { Id: 1 Name: "k0" TypeId: 4608 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-04T17:23:30.529488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=118;this=88923004875712;method=TTxController::StartProposeOnExecute;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;fline=schema.h:34;event=sync_schema; 2025-04-04T17:23:30.541945Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;this=88923004875712;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;int_this=89197881199680;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2025-04-04T17:23:30.542035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;this=88923004875712;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;int_this=89197881199680;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:166:2191]; 2025-04-04T17:23:30.542089Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;this=88923004875712;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=019:0;;int_this=89197881199680;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2025-04-04T17:23:30.542440Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2025-04-04T17:23:30.542511Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2025-04-04T17:23:30.542869Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2025-04-04T17:23:30.542987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tables_manager.cpp:245;method=RegisterTable;path_id=19; 2025-04-04T17:23:30.543031Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine.h:143;event=RegisterTable;path_id=19; 2025-04-04T17:23:30.543484Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine_logs.cpp:487;event=OnTieringModified;path_id=19; 2025-04-04T17:23:30.543617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tx_controller.cpp:211;event=finished_tx;tx_id=118; 2025-04-04T17:23:30.555890Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2025-04-04T17:23:30.556042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-04T17:23:30.557625Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004879072;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=020:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type 
NKikimrSchemeOp.TOlapColumnDescription; 2025-04-04T17:23:30.570154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=020:0;;this=88923004879072;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:166:2191]; 2025-04-04T17:23:30.570280Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=020:0;;this=88923004879072;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-04T17:23:30.571759Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004880864;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=021:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-04T17:23:30.584179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=021:0;;this=88923004880864;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:166:2191]; 2025-04-04T17:23:30.584246Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=021:0;;this=88923004880864;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" 
TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-04-04T17:23:30.585667Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004882656;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=022:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-04-04T17:23:30.597755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=022:0;;this=88923004882656;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:166:2191]; 2025-04-04T17:23:30.597813Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=022:0;;this=88923004882656;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:166:2191];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> TColumnShardTestSchema::RebootColdTiers >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> TColumnShardTestSchema::RebootExportAfterFail >> TColumnShardTestSchema::InternalTTL_Types >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> IndexBuildTest::CancellationNoTable [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> 
TCdcStreamWithInitialScanTests::WithoutPqTransactions
>> TCdcStreamTests::MoveTableShouldFail [GOOD]
>> TCdcStreamTests::CheckSchemeLimits
>> KqpLimits::QSReplySize+useSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:22:47.384852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:22:47.384990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:22:47.385063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:22:47.385112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:22:47.385182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-04-04T17:22:47.385219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-04-04T17:22:47.385307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:22:47.385408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-04-04T17:22:47.385795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-04-04T17:22:47.458475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-04-04T17:22:47.458535Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:22:47.469535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-04-04T17:22:47.469728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-04-04T17:22:47.469872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-04-04T17:22:47.478908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-04-04T17:22:47.479226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-04-04T17:22:47.479797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-04-04T17:22:47.480534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-04-04T17:22:47.486812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-04-04T17:22:47.488162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-04-04T17:22:47.488222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard
DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.488397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:22:47.488449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.488485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:22:47.488590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.495119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:22:47.620174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:22:47.620391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.620592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:22:47.620857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:22:47.620906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.622808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.622953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:22:47.623146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.623203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:22:47.623261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:22:47.623307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:22:47.624869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.624913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:22:47.624947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:22:47.626687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.626731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.626779Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.626836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.630671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:22:47.632119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:22:47.632306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:22:47.633262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:22:47.633384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:22:47.633448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.633686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:22:47.633735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:22:47.633898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:22:47.633973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:22:47.635776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:22:47.635820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:22:47.636023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:22:47.636076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:22:47.636398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:22:47.636439Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:22:47.636538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.636573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-04T17:22:47.636610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:22:47.636646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.636677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:22:47.636714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:22:47.636761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:22:47.636791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:22:47.636846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:22:47.636880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:22:47.636909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:22:47.638602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.638695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:22:47.638749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... less db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:33.991885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:33.991964Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:33.998092Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:131:2154] sender: [2:239:2058] recipient: [2:15:2062] 2025-04-04T17:23:34.006373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:34.006544Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:34.006733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:34.006890Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:34.006930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:34.009007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2025-04-04T17:23:34.009123Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:34.009273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:34.009319Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:34.009351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:34.009379Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:34.011150Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:34.011213Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:34.011244Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:34.013055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:34.013121Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:34.013167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:34.013218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:34.013370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:34.015281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:34.015469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:34.016494Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:34.016619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 8589936743 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:34.016667Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:34.016945Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:34.017008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:34.017193Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:34.017272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:34.019560Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:34.019619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:34.019807Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:34.019857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:34.020198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:34.020262Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:34.020384Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:34.020423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:34.020464Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:34.020502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:34.020546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:34.020589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:34.020628Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:34.020659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:34.020733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:34.020777Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:34.020812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:34.021421Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:34.021529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:34.021571Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-04-04T17:23:34.021610Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-04-04T17:23:34.021655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:34.021750Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0
2025-04-04T17:23:34.025429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1
2025-04-04T17:23:34.025952Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944
2025-04-04T17:23:34.026533Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Bootstrap
2025-04-04T17:23:34.047757Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] Become StateWork (SchemeCache [2:274:2265])
2025-04-04T17:23:34.048344Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_batch_rows: 2 max_shards_in_flight: 2 }
2025-04-04T17:23:34.048511Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2
2025-04-04T17:23:34.049023Z node 2 :TX_PROXY DEBUG: actor# [2:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617
2025-04-04T17:23:34.051071Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617
BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2
TestWaitNotification wait txId: 101
2025-04-04T17:23:34.051383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion
2025-04-04T17:23:34.051434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101
2025-04-04T17:23:34.051756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944
2025-04-04T17:23:34.051836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-04-04T17:23:34.051863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:281:2272]
TestWaitNotification: OK eventTxId 101
2025-04-04T17:23:34.052175Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: ""
2025-04-04T17:23:34.052243Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0"
BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0"
>> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD]
>> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink
|94.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log}
>> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD]
>> TCdcStreamWithInitialScanTests::WithPqTransactions
>> TColumnShardTestSchema::Drop
>> TCdcStreamTests::CheckSchemeLimits [GOOD]
>> TCdcStreamTests::MeteringServerless
|94.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log}
>> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD]
>> TCdcStreamWithInitialScanTests::AlterStream
>> DataStreams::TestGetRecordsWithBigSeqno [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD]
Test command err:
2025-04-04T17:22:55.113134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509747763929655:2076];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:55.113245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001822/r3tmp/tmp7Q0ka5/pdisk_1.dat
2025-04-04T17:22:55.645158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:22:55.645281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:22:55.646614Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:22:55.655786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27229, node 1
2025-04-04T17:22:55.815052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:22:55.815076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:22:55.815085Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:22:55.815182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:25655
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:22:56.285149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:56.400610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:25655
2025-04-04T17:22:56.602689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:22:57.052483Z node 1 :PERSQUEUE ERROR: [PQ: 72075186224037888, Partition: 0, State: StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000
2025-04-04T17:22:59.901766Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509766182227339:2075];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:22:59.901810Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001822/r3tmp/tmpHkYOy3/pdisk_1.dat
2025-04-04T17:23:00.169193Z node 4 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 16113, node 4
2025-04-04T17:23:00.304190Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:23:00.304421Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:23:00.319283Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:23:00.323621Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:23:00.323657Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:23:00.323692Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:23:00.323841Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1299
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:23:00.607367Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:23:00.669647Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:1299 2025-04-04T17:23:00.878624Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:04.901444Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489509766182227339:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:04.901505Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:23:15.135870Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:23:15.135904Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:30.108087Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509898728256350:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:30.108183Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001822/r3tmp/tmpX8cvC5/pdisk_1.dat 2025-04-04T17:23:30.242990Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:30.285081Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:30.285204Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:30.288537Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2706, node 7 2025-04-04T17:23:30.343929Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:30.343960Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:30.343973Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:30.344137Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:23:30.651986Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:30.697486Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22370 2025-04-04T17:23:30.870915Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:34.911835Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509916548103250:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:34.911904Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001822/r3tmp/tmpWEq17S/pdisk_1.dat 2025-04-04T17:23:35.046349Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:35.073197Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:35.073296Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:35.075689Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4747, node 10 2025-04-04T17:23:35.153883Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:35.153914Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:35.153922Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:35.154107Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:35.486360Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:35.542123Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22581 2025-04-04T17:23:35.800593Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27881, MsgBus: 7789 2025-04-04T17:16:49.846547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489508175465381835:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:49.846605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001626/r3tmp/tmp0hxPT8/pdisk_1.dat 2025-04-04T17:16:50.454528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:16:50.463241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:16:50.463340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27881, node 1 2025-04-04T17:16:50.476413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:16:50.525308Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:50.542122Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:16:50.650458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:16:50.650480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:16:50.650486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:16:50.650608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7789 TClient is connected to server localhost:7789 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:16:51.254856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.296550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:51.506382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:16:51.818474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:16:51.933319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:16:53.234979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508192645253758:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.235080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.533295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.601649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.650727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.713324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.763717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.859443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:16:53.911917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508192645254634:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.911988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.912076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489508192645254639:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:16:53.914990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:16:53.933678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489508192645254641:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:16:53.996890Z node 1 :TX_PROXY ERROR: Actor# [1:7489508192645254718:4768] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:16:54.847816Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489508175465381835:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:16:54.847887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:16:54.871785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:17:05.444494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:17:05.444530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:17:06.243927Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 450 DataSize: 471881910 IndexSize: 11509 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 44 2025-04-04T17:17:10.023344Z node 1 :TX_DATASHARD ERROR: CPU usage 68.9133 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-04-04T17:17:17.323050Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 810 DataSize: 849388590 IndexSize: 20657 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 80 2025-04-04T17:17:27.863533Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 1080 DataSize: 1132521480 IndexSize: 27296 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 107 2025-04-04T17:17:38.105229Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 1410 DataSize: 1478570070 IndexSize: 35620 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 140 2025-04-04T17:17:48.387707Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 1830 DataSize: 1918997250 IndexSize: 46046 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 182 2025-04-04T17:17:59.390409Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 2340 DataSize: 2453798220 IndexSize: 58981 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 233 2025-04-04T17:18:05.760460Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037920, for tableId 16 don't have enough keys: RowCount: 1180 DataSize: 1237387860 IndexSize: 35324 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 118 2025-04-04T17:18:05.769026Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037921, for tableId 16 don't have enough keys: RowCount: 1520 DataSize: 1593921360 IndexSize: 47583 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 151 2025-04-04T17:18:05.813000Z node 1 :OPS_COMPACT ERROR: 
Compact{72075186224037919.1.355, eph 87} end=2, 9 blobs 0r (max 600), put Spent{time=0.754s,wait=0.025s,interrupts=3} 2025-04-04T17:18:05.813156Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:374} Compact 227 on TGenCompactionParams{1001: gen 2 epoch 0, 4 parts} step 355, product {0 parts epoch 0} thrown 2025-04-04T17:18:10.430009Z node 1 :TX_DATASHARD ERROR: CPU usage 73.812 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: 2025-04-04T17:18:17.465675Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037921, for tableId 16 don't have enough keys: RowCount: 1880 DataSize: 1971428040 IndexSize: 56731 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 187 2025-04-04T17:18:20.608141Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037920, for tableId 16 don't have enough keys: RowCount: 1180 DataSize: 1237387860 IndexSize: 35324 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 118 2025-04-04T17:18:20.774778Z node 1 :TX_DATASHARD ERROR: CPU usage 90.5656 is higher th ... 075186224037890 errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710759) | 2025-04-04T17:23:20.834786Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710759 at tablet 72075186224037890 status: ERROR errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710759) | 2025-04-04T17:23:20.835021Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489509856188135872:2337] TxId: 281474976710759. Ctx: { TraceId: 01jr0x871r0kaxekn1akv38dfr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDY2ODU1MDEtMTk5OWFkNTAtNmUzYjcxYjItMjIzMWM1M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976710759; 2025-04-04T17:23:20.835541Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDY2ODU1MDEtMTk5OWFkNTAtNmUzYjcxYjItMjIzMWM1M2M=, ActorId: [3:7489509242007800686:2337], ActorState: ExecuteState, TraceId: 01jr0x871r0kaxekn1akv38dfr, Create QueryResponse for error on request, msg: Got out of space. 
Successfully inserted 30 x 0 lines, each of size 1048576bytes Trying to start YDB, gRPC: 23231, MsgBus: 23740 2025-04-04T17:23:21.841578Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509859669916363:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:21.841642Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001626/r3tmp/tmpzkrAKi/pdisk_1.dat 2025-04-04T17:23:21.981552Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:21.992572Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:21.992676Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:21.994905Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23231, node 4 2025-04-04T17:23:22.037655Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:22.037682Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:22.037690Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:22.037854Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23740 TClient is connected to server localhost:23740 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:22.520224Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:22.536913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:22.607848Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:22.749928Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:22.815253Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:24.636087Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509872554820039:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:24.636191Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:24.668967Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:24.701808Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:23:24.734113Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:23:24.764413Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:23:24.792616Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:23:24.832814Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:23:24.906739Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509872554820553:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:24.906800Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489509872554820558:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:24.906816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:23:24.909688Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:23:24.918146Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489509872554820560:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:23:24.999771Z node 4 :TX_PROXY ERROR: Actor# [4:7489509872554820617:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:23:25.696677Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:26.841913Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489509859669916363:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:26.841984Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:23:34.576770Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489509911209527621:2641], SessionActorId: [4:7489509911209527603:2641], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[4:7489509911209527603:2641]. isRollback=0 2025-04-04T17:23:34.667359Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA1NzI5MDAtOWQ4YTc2M2ItNDZlYWRmYmQtMjQ4N2FkYTE=, ActorId: [4:7489509911209527603:2641], ActorState: ExecuteState, TraceId: 01jr0x8kmk9w8yrv728x5js0h1, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [4:7489509911209527622:2641] from: [4:7489509911209527621:2641] 2025-04-04T17:23:34.667522Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7489509911209527622:2641] TxId: 281474976715672. Ctx: { TraceId: 01jr0x8kmk9w8yrv728x5js0h1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZjA1NzI5MDAtOWQ4YTc2M2ItNDZlYWRmYmQtMjQ4N2FkYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Stream write queries aren't allowed., code: 2029 } 2025-04-04T17:23:34.667707Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489509911209527630:2654], TxId: 281474976715672, task: 5. Ctx: { TraceId : 01jr0x8kmk9w8yrv728x5js0h1. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ZjA1NzI5MDAtOWQ4YTc2M2ItNDZlYWRmYmQtMjQ4N2FkYTE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7489509911209527622:2641], status: PRECONDITION_FAILED, reason: {
<main>: Error: Terminate execution } 2025-04-04T17:23:34.667712Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7489509911209527627:2651], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jr0x8kmk9w8yrv728x5js0h1. SessionId : ydb://session/3?node_id=4&id=ZjA1NzI5MDAtOWQ4YTc2M2ItNDZlYWRmYmQtMjQ4N2FkYTE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7489509911209527622:2641], status: PRECONDITION_FAILED, reason: {
<main>: Error: Terminate execution } 2025-04-04T17:23:34.672689Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA1NzI5MDAtOWQ4YTc2M2ItNDZlYWRmYmQtMjQ4N2FkYTE=, ActorId: [4:7489509911209527603:2641], ActorState: ExecuteState, TraceId: 01jr0x8kmk9w8yrv728x5js0h1, Create QueryResponse for error on request, msg:
<main>: Error: Stream write queries aren't allowed., code: 2029 >> TOlap::StoreStats [GOOD] >> TColumnShardTestSchema::HotTiersWithStat
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.033187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.033366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:14.033409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.033451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:14.033570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.040154Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:14.169188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:14.169436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.169659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:14.169919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:14.169970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.172312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.172495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:14.172673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.172732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:14.172767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:14.172823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:14.174826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.174885Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:14.174919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:14.176831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:23:14.176877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.176934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.176990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.180471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:14.182203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:14.182395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:14.183437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:14.183552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:14.183663Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.183935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:14.184017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:14.184205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:14.184286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:14.186299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:14.186350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:14.186532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:14.186574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:14.186915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:14.186983Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:14.187105Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.187142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.187184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:14.187220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.187262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:14.187296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:14.187329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:14.187357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:14.187426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:14.187463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:14.187496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:14.189114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.189214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:14.189251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 7280144 RowCount: 554151 IndexSize: 0 LastAccessTime: 1743787405453 LastUpdateTime: 1743787405453 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 4 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 7280144 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 548 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 712 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 7280144 RowCount: 554151 IndexSize: 0 LastAccessTime: 1743787405453 LastUpdateTime: 1743787405453 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 4 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 7280144 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 548 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 712 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944 2025-04-04T17:23:41.496880Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:23:41.497148Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 279us result status StatusSuccess 2025-04-04T17:23:41.497621Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 7280144 RowCount: 554151 IndexSize: 0 LastAccessTime: 1743787405453 LastUpdateTime: 1743787405453 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 548 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 712 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } 
Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 7280144 RowCount: 554151 IndexSize: 0 LastAccessTime: 1743787405453 LastUpdateTime: 1743787405453 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 548 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 712 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 7280144 DataSize: 7280144 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |94.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootDrop [GOOD] Test command err: 2025-04-04T17:23:27.548010Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:27.708927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:27.734097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:27.734393Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:27.742323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:27.742535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:27.742797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:27.742919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:27.743015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:27.743165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:27.743292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:27.743404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:27.743534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:27.743649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.743776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:27.743914Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:27.771834Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:27.772000Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:27.772042Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:27.772213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.772366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:27.772437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:27.772475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:27.772582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:27.772653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:27.772705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:27.772750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:27.772909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.772970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:27.773003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:27.773029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:27.773126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:27.773180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:27.773232Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:27.773263Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:27.773327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:27.773364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:27.773392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:27.773435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:27.773466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:27.773492Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:27.773842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-04-04T17:23:27.773944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=58; 2025-04-04T17:23:27.774015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T17:23:27.774102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-04T17:23:27.774302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:27.774362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:27.774397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:27.774575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:27.774616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.774644Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.774826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:27.774870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:27.774908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:27.775105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:27.775161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:27.775189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:23:27.775315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:23:27.775361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:23:27.775416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... ons_count=7};; 2025-04-04T17:23:42.062391Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:23:42.062818Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:23:42.062862Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
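The TTxUpdateSchema trace above walks a fixed pipeline of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on), each one logging normalizer_init, a "chunks found" count, normalizer_finished, and a normalizer_switched handoff to the next stage. A minimal sketch of that sequential-pipeline shape follows; the types and names are invented for illustration, not YDB's actual normalizer classes:

```cpp
// Minimal sketch, assuming invented types: just the sequential
// init -> run -> finished -> switched cadence visible in the
// TTxUpdateSchema trace above, not YDB's real normalizer API.
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct Normalizer {
    std::string name;                  // e.g. "Granules", "Chunks", "TablesCleaner"
    std::function<std::size_t()> run;  // repairs legacy state, returns chunks found
};

int main() {
    std::vector<Normalizer> pipeline = {
        {"Granules",      [] { return std::size_t{0}; }},
        {"Chunks",        [] { return std::size_t{0}; }},
        {"TablesCleaner", [] { return std::size_t{0}; }},
    };
    for (std::size_t i = 0; i < pipeline.size(); ++i) {
        std::cout << "event=normalizer_init;type=" << pipeline[i].name << '\n';
        std::cout << "message=" << pipeline[i].run() << " chunks found\n";
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << pipeline[i].name << '\n';
        if (i + 1 < pipeline.size()) {
            std::cout << "event=normalizer_switched;description=CLASS_NAME="
                      << pipeline[i + 1].name << '\n';
        }
    }
    return 0;
}
```

Running it prints the same init/finished/switched cadence the tablet emits during TxInitSchema and TTxUpdateSchema on a fresh tablet, where every stage reports "0 chunks found".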
2025-04-04T17:23:42.062895Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:23:42.062955Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:23:42.063035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:23:42.063127Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=1; 2025-04-04T17:23:42.063205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:23:42.063264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:23:42.063329Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:23:42.063388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:23:42.063503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-04T17:23:42.063572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:23:42.064502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=7;path_id=1; 2025-04-04T17:23:42.065288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T17:23:42.240002Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000003:max} readable: {1000000004:max} at tablet 9437184 2025-04-04T17:23:42.240193Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:23:42.242789Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 9 } } } ; 2025-04-04T17:23:42.242908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 9 } } } ; 2025-04-04T17:23:42.243207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"9"},"fetch":"9"}]}; 2025-04-04T17:23:42.243354Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:23:42.244679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:949:2950];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1009:3002];trace_detailed=; 2025-04-04T17:23:42.246110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:85;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-04T17:23:42.246434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-04T17:23:42.247003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:42.247150Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:42.247249Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:42.247303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1009:3002] 
finished for tablet 9437184 2025-04-04T17:23:42.247761Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1002:2996];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743787422244590,"name":"_full_task","f":1743787422244590,"d_finished":0,"c":0,"l":1743787422247378,"d":2788},"events":[{"name":"bootstrap","f":1743787422244897,"d_finished":1704,"c":1,"l":1743787422246601,"d":1704},{"a":1743787422246976,"name":"ack","f":1743787422246976,"d_finished":0,"c":0,"l":1743787422247378,"d":402},{"a":1743787422246955,"name":"processing","f":1743787422246955,"d_finished":0,"c":0,"l":1743787422247378,"d":423},{"name":"ProduceResults","f":1743787422246582,"d_finished":262,"c":2,"l":1743787422247279,"d":262},{"a":1743787422247282,"name":"Finish","f":1743787422247282,"d_finished":0,"c":0,"l":1743787422247378,"d":96}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:42.247869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1002:2996];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:23:42.248246Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1002:2996];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1743787422244590,"name":"_full_task","f":1743787422244590,"d_finished":0,"c":0,"l":1743787422247917,"d":3327},"events":[{"name":"bootstrap","f":1743787422244897,"d_finished":1704,"c":1,"l":1743787422246601,"d":1704},{"a":1743787422246976,"name":"ack","f":1743787422246976,"d_finished":0,"c":0,"l":1743787422247917,"d":941},{"a":1743787422246955,"name":"processing","f":1743787422246955,"d_finished":0,"c":0,"l":1743787422247917,"d":962},{"name":"ProduceResults","f":1743787422246582,"d_finished":262,"c":2,"l":1743787422247279,"d":262},{"a":1743787422247282,"name":"Finish","f":1743787422247282,"d_finished":0,"c":0,"l":1743787422247917,"d":635}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:42.248330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:23:42.243307Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:23:42.248382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:23:42.248496Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1009:3002];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TColumnShardTestSchema::RebootForgetAfterFail >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> TColumnShardTestSchema::RebootInternalTTL [GOOD] >> TColumnShardTestSchema::RebootHotTiersWithStat >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> TColumnShardTestSchema::RebootExternalTTL [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootInternalTTL [GOOD] Test command err: 2025-04-04T17:23:27.548253Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:27.700476Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:27.706016Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:27.708936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:27.737138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:27.737462Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:27.746501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:27.746748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:27.746993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:27.747222Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:27.747360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:27.747500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:27.747625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:27.747757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:27.747905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:27.748043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.748163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:27.748289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:27.774050Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:27.778234Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:27.778450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:27.778503Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:27.778698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.778884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:27.778962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:27.779010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 
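For reference, the ColumnTable described near the top of this excerpt declares Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } }, i.e. a 64-bit hash of the key column selects the owning ColumnShard. The sketch below shows that routing idea only; the FNV-1a hash and modulo placement are stand-ins, not YDB's actual consistency-hash function or its interval layout, and the timestamp value is hypothetical:

```cpp
// Illustrative sketch only, not YDB's hash. With ColumnShardCount: 1,
// as in the describe output above, every row lands on the single
// tablet 72075186233409546 regardless of the hash value.
#include <cstdint>
#include <iostream>
#include <vector>

uint64_t Fnv1a64(const void* data, size_t len) {
    const auto* p = static_cast<const uint8_t*>(data);
    uint64_t h = 1469598103934665603ull;  // FNV-1a offset basis
    for (size_t i = 0; i < len; ++i) { h ^= p[i]; h *= 1099511628211ull; }
    return h;
}

uint64_t PickShard(int64_t timestamp_us, const std::vector<uint64_t>& shards) {
    // Hash the key column's bytes, then map onto the shard list.
    return shards[Fnv1a64(&timestamp_us, sizeof timestamp_us) % shards.size()];
}

int main() {
    std::vector<uint64_t> shards{72075186233409546ull};  // from the describe output
    std::cout << PickShard(1743787405453000, shards) << '\n';  // hypothetical key
    return 0;
}
```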
2025-04-04T17:23:27.779159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:27.779243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:27.779303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:27.779350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:27.779534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.779603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:27.779646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:27.779699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:27.779799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:27.779861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:27.779912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:27.779946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:27.780021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:27.780058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:27.780088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:27.780143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:27.780190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:27.780223Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:27.780629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-04-04T17:23:27.780741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T17:23:27.780821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-04T17:23:27.780922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-04-04T17:23:27.781121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:27.781184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:27.781228Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:27.781487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:27.781546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.781584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.781755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:27.781820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:27.781855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:27.782072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:27.782122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:27.782178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... 
_id=[1:1031:3027];bytes=362872;rows=45359;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-04-04T17:23:43.965140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.965269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.965308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:43.965373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:23:43.965530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:43.965673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.965717Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:43.965825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=34641; 2025-04-04T17:23:43.965883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=277128;num_rows=34641;batch_columns=saved_at; 2025-04-04T17:23:43.966044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1031:3027];bytes=277128;rows=34641;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-04-04T17:23:43.966164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.966313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.966426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.966553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:43.966630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.966696Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.966731Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: Scan [1:1032:3028] finished for tablet 9437184 2025-04-04T17:23:43.967244Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1031:3027];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.682},{"events":["l_ProduceResults","f_Finish"],"t":0.684},{"events":["l_ack","l_processing","l_Finish"],"t":0.685}],"full":{"a":1743787423281752,"name":"_full_task","f":1743787423281752,"d_finished":0,"c":0,"l":1743787423966787,"d":685035},"events":[{"name":"bootstrap","f":1743787423282018,"d_finished":4136,"c":1,"l":1743787423286154,"d":4136},{"a":1743787423966537,"name":"ack","f":1743787423964386,"d_finished":1962,"c":2,"l":1743787423966452,"d":2212},{"a":1743787423966524,"name":"processing","f":1743787423288766,"d_finished":180649,"c":18,"l":1743787423966455,"d":180912},{"name":"ProduceResults","f":1743787423283934,"d_finished":5016,"c":22,"l":1743787423966714,"d":5016},{"a":1743787423966716,"name":"Finish","f":1743787423966716,"d_finished":0,"c":0,"l":1743787423966787,"d":71},{"name":"task_result","f":1743787423288794,"d_finished":178206,"c":16,"l":1743787423964200,"d":178206}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.967359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1031:3027];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:23:43.967810Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1031:3027];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.682},{"events":["l_ProduceResults","f_Finish"],"t":0.684},{"events":["l_ack","l_processing","l_Finish"],"t":0.685}],"full":{"a":1743787423281752,"name":"_full_task","f":1743787423281752,"d_finished":0,"c":0,"l":1743787423967407,"d":685655},"events":[{"name":"bootstrap","f":1743787423282018,"d_finished":4136,"c":1,"l":1743787423286154,"d":4136},{"a":1743787423966537,"name":"ack","f":1743787423964386,"d_finished":1962,"c":2,"l":1743787423966452,"d":2832},{"a":1743787423966524,"name":"processing","f":1743787423288766,"d_finished":180649,"c":18,"l":1743787423966455,"d":181532},{"name":"ProduceResults","f":1743787423283934,"d_finished":5016,"c":22,"l":1743787423966714,"d":5016},{"a":1743787423966716,"name":"Finish","f":1743787423966716,"d_finished":0,"c":0,"l":1743787423967407,"d":691},{"name":"task_result","f":1743787423288794,"d_finished":178206,"c":16,"l":1743787423964200,"d":178206}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:43.967902Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:23:43.280623Z;index_granules=0;index_portions=2;index_batches=1720;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5265968;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265968;selected_rows=0; 2025-04-04T17:23:43.967973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:23:43.968269Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TColumnShardTestSchema::RebootOneTier [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExternalTTL [GOOD] Test command err: 2025-04-04T17:23:27.548227Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:27.698774Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:27.706006Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: 
NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:27.708946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:27.737010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:27.737365Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:27.746361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:27.746592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:27.746868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:27.747089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:27.747217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:27.747332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:27.747455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:27.747593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:27.747735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:27.747860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.747968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:27.748105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:27.773802Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:27.777756Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 
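The scan_finish/scan_finished records in the RebootInternalTTL output above embed a JSON stats blob per stage ("bootstrap", "ack", "processing", "ProduceResults", "Finish", "task_result"). Reading the fields as f = first-seen microsecond timestamp, l = last-seen, d = l - f, c = invocation count, and d_finished = accumulated busy time is an interpretation, but it is consistent with the numbers logged: for "bootstrap", 1743787423286154 - 1743787423282018 = 4136, matching d_finished. A small sketch under that assumption:

```cpp
// Hedged reading of the per-stage scan stats JSON emitted above.
// Field meanings are inferred from the logged values, not from a spec:
//   f = first-seen us timestamp, l = last-seen, d = l - f,
//   c = invocation count, d_finished = summed finished duration (us).
#include <cstdint>
#include <iostream>

struct StageStat {
    const char* name;
    uint64_t f, l;        // first/last microsecond timestamps
    uint64_t d_finished;  // accumulated busy time, microseconds
    uint32_t c;           // invocation count
};

int main() {
    // Values copied from the RebootInternalTTL "bootstrap" entry above.
    StageStat bootstrap{"bootstrap", 1743787423282018, 1743787423286154, 4136, 1};
    std::cout << bootstrap.name << ": span d = " << (bootstrap.l - bootstrap.f)
              << "us, busy = " << bootstrap.d_finished
              << "us over " << bootstrap.c << " call(s)\n";  // prints d = 4136us
    return 0;
}
```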
2025-04-04T17:23:27.777954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:27.778012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:27.778190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.778384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:27.778459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:27.778503Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:27.778596Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:27.778667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:27.778720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:27.778773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:27.778974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.779039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:27.779098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:27.779137Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:27.779234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:27.779313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:27.779357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:27.779389Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:27.779461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:27.779497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:27.779530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:27.779579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:27.779620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:27.779649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:27.780084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T17:23:27.780187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-04T17:23:27.780266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-04T17:23:27.780362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-04T17:23:27.780544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:27.780602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:27.780647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:27.780864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:27.780916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.780948Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.781146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-04T17:23:27.781205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:27.781242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:27.781445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:27.781494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:27.781575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... t=send_data;compute_actor_id=[1:1031:3027];bytes=362872;rows=45359;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-04-04T17:23:44.602952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.603115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.603167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:44.603221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:23:44.603405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:44.603574Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.603626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:44.603785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=34641; 2025-04-04T17:23:44.603859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=277128;num_rows=34641;batch_columns=saved_at; 2025-04-04T17:23:44.604042Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1031:3027];bytes=277128;rows=34641;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-04-04T17:23:44.604194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.604338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.604464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.604610Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:44.604699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.604777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.604815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1032:3028] finished for tablet 9437184 2025-04-04T17:23:44.605338Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1031:3027];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.496},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.499}],"full":{"a":1743787424105445,"name":"_full_task","f":1743787424105445,"d_finished":0,"c":0,"l":1743787424604878,"d":499433},"events":[{"name":"bootstrap","f":1743787424105719,"d_finished":3292,"c":1,"l":1743787424109011,"d":3292},{"a":1743787424604591,"name":"ack","f":1743787424602067,"d_finished":2310,"c":2,"l":1743787424604496,"d":2597},{"a":1743787424604577,"name":"processing","f":1743787424111111,"d_finished":124719,"c":18,"l":1743787424604499,"d":125020},{"name":"ProduceResults","f":1743787424107261,"d_finished":5152,"c":22,"l":1743787424604797,"d":5152},{"a":1743787424604800,"name":"Finish","f":1743787424604800,"d_finished":0,"c":0,"l":1743787424604878,"d":78},{"name":"task_result","f":1743787424111130,"d_finished":121986,"c":16,"l":1743787424601828,"d":121986}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.605436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1031:3027];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:23:44.605924Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1031:3027];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.496},{"events":["l_ProduceResults","f_Finish"],"t":0.499},{"events":["l_ack","l_processing","l_Finish"],"t":0.5}],"full":{"a":1743787424105445,"name":"_full_task","f":1743787424105445,"d_finished":0,"c":0,"l":1743787424605490,"d":500045},"events":[{"name":"bootstrap","f":1743787424105719,"d_finished":3292,"c":1,"l":1743787424109011,"d":3292},{"a":1743787424604591,"name":"ack","f":1743787424602067,"d_finished":2310,"c":2,"l":1743787424604496,"d":3209},{"a":1743787424604577,"name":"processing","f":1743787424111111,"d_finished":124719,"c":18,"l":1743787424604499,"d":125632},{"name":"ProduceResults","f":1743787424107261,"d_finished":5152,"c":22,"l":1743787424604797,"d":5152},{"a":1743787424604800,"name":"Finish","f":1743787424604800,"d_finished":0,"c":0,"l":1743787424605490,"d":690},{"name":"task_result","f":1743787424111130,"d_finished":121986,"c":16,"l":1743787424601828,"d":121986}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:44.606057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:23:44.104549Z;index_granules=0;index_portions=2;index_batches=1720;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5265968;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265968;selected_rows=0; 2025-04-04T17:23:44.606108Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:23:44.606459Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1032:3028];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;
>> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD]
>> TCdcStreamWithInitialScanTests::MeteringDedicated
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTier [GOOD]
Test command err:
2025-04-04T17:23:27.548136Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:27.692844Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:27.706003Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender
[1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:27.708923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:27.733349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:27.733663Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:27.741610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:27.741831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:27.742052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:27.742179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:27.742364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:27.742487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:27.742591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:27.742701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:27.742841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:27.742943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.743047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:27.743170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:27.766790Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:27.770683Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:27.770876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:27.770921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:27.771120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.771295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:27.771363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:27.771404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:27.771484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:27.771568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:27.771613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:27.771643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:27.771805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.771859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:27.771923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:27.771959Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:27.772037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:27.772085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:27.772126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:27.772151Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:27.772221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:27.772253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:27.772276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:27.772314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:27.772352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:27.772380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:27.772754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-04T17:23:27.772856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=58; 2025-04-04T17:23:27.772940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-04T17:23:27.773008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-04T17:23:27.773169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:27.773220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:27.773256Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:27.773486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:27.773530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.773557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.773704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-04T17:23:27.773763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:27.773791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:27.773949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:27.773985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:27.774013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... 1030:3026];bytes=350080;rows=43760;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-04T17:23:44.910559Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.910707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.910756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:44.910808Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:23:44.910995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:44.911180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.911230Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:44.911366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=36240; 2025-04-04T17:23:44.911435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=289920;num_rows=36240;batch_columns=timestamp; 2025-04-04T17:23:44.911615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1030:3026];bytes=289920;rows=36240;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-04T17:23:44.911780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.911906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.912025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.912181Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:44.912270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.912345Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.912383Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1031:3027] finished for tablet 9437184 2025-04-04T17:23:44.912895Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1030:3026];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.671},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.674}],"full":{"a":1743787424237996,"name":"_full_task","f":1743787424237996,"d_finished":0,"c":0,"l":1743787424912446,"d":674450},"events":[{"name":"bootstrap","f":1743787424238274,"d_finished":4350,"c":1,"l":1743787424242624,"d":4350},{"a":1743787424912160,"name":"ack","f":1743787424909665,"d_finished":2277,"c":2,"l":1743787424912056,"d":2563},{"a":1743787424912145,"name":"processing","f":1743787424245149,"d_finished":218571,"c":16,"l":1743787424912060,"d":218872},{"name":"ProduceResults","f":1743787424240299,"d_finished":5596,"c":20,"l":1743787424912365,"d":5596},{"a":1743787424912368,"name":"Finish","f":1743787424912368,"d_finished":0,"c":0,"l":1743787424912446,"d":78},{"name":"task_result","f":1743787424245175,"d_finished":215775,"c":14,"l":1743787424909398,"d":215775}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.912988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1030:3026];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:23:44.913481Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1030:3026];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.671},{"events":["l_ProduceResults","f_Finish"],"t":0.674},{"events":["l_ack","l_processing","l_Finish"],"t":0.675}],"full":{"a":1743787424237996,"name":"_full_task","f":1743787424237996,"d_finished":0,"c":0,"l":1743787424913042,"d":675046},"events":[{"name":"bootstrap","f":1743787424238274,"d_finished":4350,"c":1,"l":1743787424242624,"d":4350},{"a":1743787424912160,"name":"ack","f":1743787424909665,"d_finished":2277,"c":2,"l":1743787424912056,"d":3159},{"a":1743787424912145,"name":"processing","f":1743787424245149,"d_finished":218571,"c":16,"l":1743787424912060,"d":219468},{"name":"ProduceResults","f":1743787424240299,"d_finished":5596,"c":20,"l":1743787424912365,"d":5596},{"a":1743787424912368,"name":"Finish","f":1743787424912368,"d_finished":0,"c":0,"l":1743787424913042,"d":674},{"name":"task_result","f":1743787424245175,"d_finished":215775,"c":14,"l":1743787424909398,"d":215775}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:44.913576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:23:44.236776Z;index_granules=0;index_portions=2;index_batches=1721;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5175704;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5175704;selected_rows=0; 2025-04-04T17:23:44.913630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:23:44.913933Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1031:3027];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;
>> TColumnShardTestSchema::OneTier
>> TColumnShardTestSchema::ExternalTTL
>> TColumnShardTestSchema::Drop [GOOD]
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> DataStreams::TestGetRecordsWithCount [GOOD]
>> DataStreams::TestInvalidRetentionCombinations
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD]
Test command err:
2025-04-04T17:23:37.483924Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:37.567862Z node 1 :TX_COLUMNSHARD INFO:
tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:37.593506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:37.593831Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:37.601961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:37.602182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:37.602496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:37.602606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:37.602706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:37.602803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:37.602932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:37.603052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:37.603190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:37.603316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:37.603421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:37.603592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:37.632610Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:37.632808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:37.632873Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:37.633057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:37.633211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:37.633281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:37.633323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:37.633424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:37.633496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:37.633539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:37.633578Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:37.633756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:37.633823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:37.633892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:37.633929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:37.634042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:37.634104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:37.634142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:37.634171Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:37.634262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:37.634298Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:37.634321Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:37.634363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:37.634396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:37.634430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:37.634810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-04T17:23:37.634903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-04T17:23:37.634968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-04-04T17:23:37.635041Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-04T17:23:37.635223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:37.635277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:37.635322Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:37.635570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:37.635609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:37.635636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:37.635800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:37.635846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:37.635887Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:37.636094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:37.636134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:37.636168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:23:37.636305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:23:37.636350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:23:37.636401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 04T17:23:47.471148Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:122:2872:0]; 2025-04-04T17:23:47.471201Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:123:2872:0]; 2025-04-04T17:23:47.471247Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:124:2872:0]; 2025-04-04T17:23:47.471307Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:125:2872:0]; 2025-04-04T17:23:47.471364Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:126:2872:0]; 2025-04-04T17:23:47.471412Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:127:2872:0]; 2025-04-04T17:23:47.471463Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:128:2872:0]; 2025-04-04T17:23:47.471503Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:129:2872:0]; 2025-04-04T17:23:47.471548Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:130:2872:0]; 2025-04-04T17:23:47.471599Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:131:2864:0]; 2025-04-04T17:23:47.471667Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:132:2872:0]; 2025-04-04T17:23:47.471720Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:133:2872:0]; 2025-04-04T17:23:47.471787Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:134:2864:0]; 2025-04-04T17:23:47.471842Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:135:2864:0]; 2025-04-04T17:23:47.471908Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:136:2864:0]; 2025-04-04T17:23:47.471958Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:137:2856:0]; 2025-04-04T17:23:47.472002Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:138:2864:0]; 2025-04-04T17:23:47.472051Z node 1 
:S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:139:2864:0]; 2025-04-04T17:23:47.472094Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:140:2856:0]; 2025-04-04T17:23:47.472138Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:141:2856:0]; 2025-04-04T17:23:47.472202Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:142:2800:0]; 2025-04-04T17:23:47.472263Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:143:2752:0]; 2025-04-04T17:23:47.472316Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:144:2792:0]; 2025-04-04T17:23:47.472363Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:145:2792:0]; 2025-04-04T17:23:47.472407Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:146:2792:0]; 2025-04-04T17:23:47.472467Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:147:2784:0]; 2025-04-04T17:23:47.472524Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:148:2784:0]; 2025-04-04T17:23:47.472579Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:149:2784:0]; 2025-04-04T17:23:47.472625Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:150:2784:0]; 2025-04-04T17:23:47.472670Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:151:2784:0]; 2025-04-04T17:23:47.472717Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:152:2776:0]; 2025-04-04T17:23:47.472767Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:153:2768:0]; 2025-04-04T17:23:47.472816Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:154:9448:0]; 2025-04-04T17:23:47.476775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:85;ff_first=(column_ids=9;column_names=saved_at;);; 2025-04-04T17:23:47.477056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-04-04T17:23:47.579567Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T17:23:47.580216Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[22] (CS::GENERAL) apply at tablet 9437184 2025-04-04T17:23:47.605445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=909ebff8-117911f0-bbec7ad6-6fa0518e;fline=with_appended.cpp:24;event=skip_inserted_data;reason=table_removed;path_id=1; 2025-04-04T17:23:47.608270Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 464 2025-04-04T17:23:47.612330Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=10308;raw_bytes=8378;count=1;records=100} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5601076;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=5605344;raw_bytes=7864506;count=2;records=80000} evicted 
{blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:23:47.620325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:47.620464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:47.620550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:47.620592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:660:2677] finished for tablet 9437184 2025-04-04T17:23:47.620865Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:650:2667];stats={"p":[{"events":["f_bootstrap"],"t":0.01},{"events":["l_bootstrap","f_ProduceResults"],"t":0.012},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.155}],"full":{"a":1743787427464781,"name":"_full_task","f":1743787427464781,"d_finished":0,"c":0,"l":1743787427620633,"d":155852},"events":[{"name":"bootstrap","f":1743787427475617,"d_finished":1609,"c":1,"l":1743787427477226,"d":1609},{"a":1743787427620295,"name":"ack","f":1743787427620295,"d_finished":0,"c":0,"l":1743787427620633,"d":338},{"a":1743787427620269,"name":"processing","f":1743787427620269,"d_finished":0,"c":0,"l":1743787427620633,"d":364},{"name":"ProduceResults","f":1743787427477207,"d_finished":222,"c":2,"l":1743787427620570,"d":222},{"a":1743787427620573,"name":"Finish","f":1743787427620573,"d_finished":0,"c":0,"l":1743787427620633,"d":60}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:47.620914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:650:2667];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:23:47.621182Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:650:2667];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.01},{"events":["l_bootstrap","f_ProduceResults"],"t":0.012},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.155},{"events":["l_ack","l_processing","l_Finish"],"t":0.156}],"full":{"a":1743787427464781,"name":"_full_task","f":1743787427464781,"d_finished":0,"c":0,"l":1743787427620945,"d":156164},"events":[{"name":"bootstrap","f":1743787427475617,"d_finished":1609,"c":1,"l":1743787427477226,"d":1609},{"a":1743787427620295,"name":"ack","f":1743787427620295,"d_finished":0,"c":0,"l":1743787427620945,"d":650},{"a":1743787427620269,"name":"processing","f":1743787427620269,"d_finished":0,"c":0,"l":1743787427620945,"d":676},{"name":"ProduceResults","f":1743787427477207,"d_finished":222,"c":2,"l":1743787427620570,"d":222},{"a":1743787427620573,"name":"Finish","f":1743787427620573,"d_finished":0,"c":0,"l":1743787427620945,"d":372}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:47.621239Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:23:47.463762Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:23:47.621268Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:23:47.621337Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:660:2677];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TColumnShardTestSchema::RebootColdTiersWithStat >> TColumnShardTestSchema::RebootOneTierExternalTtl >> BsControllerConfig::MoveGroups [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2915:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] 
recipient: [1:2915:2116] Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2915:2116] 2025-04-04T17:23:08.634752Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:08.638937Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:08.639362Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T17:23:08.641445Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:08.641884Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T17:23:08.642501Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:08.642538Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:08.642906Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T17:23:08.650654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T17:23:08.650800Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T17:23:08.650930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T17:23:08.650998Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:08.651089Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:08.651147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-04-04T17:23:08.662868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T17:23:08.663024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:23:08.673746Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:23:08.673851Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:23:08.673912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:23:08.673960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T17:23:08.674062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T17:23:08.674130Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:23:08.674175Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:23:08.674221Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:23:08.684889Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:23:08.685038Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:23:08.695799Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:23:08.695960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T17:23:08.697167Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T17:23:08.697218Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T17:23:08.697419Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T17:23:08.697465Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T17:23:08.712644Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host 
{ Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T17:23:08.713986Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-04T17:23:08.714032Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-04T17:23:08.714048Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-04T17:23:08.714064Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-04T17:23:08.714078Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-04T17:23:08.714092Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-04T17:23:08.714105Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-04T17:23:08.714131Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-04T17:23:08.714148Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-04T17:23:08.714161Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-04T17:23:08.714186Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-04T17:23:08.714212Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-04T17:23:08.714228Z node 1 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-04T17:23:08.714267Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-04T17:23:08.714289Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-04T17:23:08.714319Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-04T17:23:08.714335Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-04T17:23:08.714348Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-04T17:23:08.714361Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-04T17:23:08.714394Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-04T17:23:08.714411Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 78:1000 Path# /dev/disk1 2025-04-04T17:23:39.977649Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-04-04T17:23:39.977676Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-04-04T17:23:39.977710Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-04-04T17:23:39.977739Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-04-04T17:23:39.977767Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-04-04T17:23:39.977794Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-04-04T17:23:39.977844Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-04-04T17:23:39.977873Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-04-04T17:23:39.977900Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-04-04T17:23:39.977928Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-04-04T17:23:39.977958Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-04-04T17:23:39.977988Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-04-04T17:23:39.978015Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-04-04T17:23:39.978045Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-04-04T17:23:39.978071Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-04-04T17:23:39.978099Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-04-04T17:23:39.978125Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-04-04T17:23:39.978152Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-04-04T17:23:39.978181Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-04-04T17:23:39.978209Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-04-04T17:23:39.978265Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2025-04-04T17:23:39.978298Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2025-04-04T17:23:39.978325Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2025-04-04T17:23:39.978354Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1000 Path# /dev/disk1 2025-04-04T17:23:39.978387Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2025-04-04T17:23:39.978415Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2025-04-04T17:23:39.978445Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2025-04-04T17:23:39.978475Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2025-04-04T17:23:39.978506Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2025-04-04T17:23:39.978537Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2025-04-04T17:23:39.978566Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2025-04-04T17:23:39.978591Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2025-04-04T17:23:39.978619Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2025-04-04T17:23:39.978644Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2025-04-04T17:23:39.978672Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2025-04-04T17:23:39.978701Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2025-04-04T17:23:39.978730Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2025-04-04T17:23:39.978764Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2025-04-04T17:23:39.978792Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2025-04-04T17:23:39.978821Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2025-04-04T17:23:39.978850Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2025-04-04T17:23:39.978880Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2025-04-04T17:23:39.978908Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2025-04-04T17:23:39.978937Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-04-04T17:23:39.978964Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2025-04-04T17:23:39.978990Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2025-04-04T17:23:39.979019Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-04-04T17:23:39.979047Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1000 Path# /dev/disk1 2025-04-04T17:23:39.979077Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2025-04-04T17:23:39.979122Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-04-04T17:23:39.979151Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2025-04-04T17:23:39.979179Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2025-04-04T17:23:39.979204Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-04-04T17:23:39.979231Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2025-04-04T17:23:39.979265Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2025-04-04T17:23:39.979322Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-04-04T17:23:39.979353Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2025-04-04T17:23:39.979380Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2025-04-04T17:23:39.979408Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-04-04T17:23:39.979438Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2025-04-04T17:23:39.979467Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2025-04-04T17:23:39.979494Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2025-04-04T17:23:39.979522Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2025-04-04T17:23:39.979551Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2025-04-04T17:23:39.979577Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2025-04-04T17:23:39.979603Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2025-04-04T17:23:39.979631Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2025-04-04T17:23:39.979657Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2025-04-04T17:23:40.304223Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.331445s 2025-04-04T17:23:40.304456Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.331698s 2025-04-04T17:23:40.340708Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-04T17:23:40.452247Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-04-04T17:23:40.469346Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-04T17:23:40.577490Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-04-04T17:23:40.594437Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-04-04T17:23:40.704668Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-04-04T17:23:40.723941Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> TColumnShardTestSchema::InternalTTL [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL [GOOD] Test command err: 2025-04-04T17:23:27.548209Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:27.693342Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:27.706073Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:27.708966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:27.740552Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:27.740874Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:27.751205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:27.751517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:27.751760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:27.751895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:27.752090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:27.752270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:27.752429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:27.752568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:27.752765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:27.752940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.753115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:27.753313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:27.781691Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:27.786806Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:27.787031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
2025-04-04T17:23:27.787125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:27.787350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.787588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:27.787688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:27.787748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:27.787875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:27.787989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:27.788079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:27.788124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:27.788379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.788477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:27.788554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:27.788612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:27.788751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:27.788839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:27.788898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:27.788942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:27.789059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 
2025-04-04T17:23:27.789115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:27.789160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:27.789224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:27.789281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:27.789334Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:27.789821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-04T17:23:27.789941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=60; 2025-04-04T17:23:27.790056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-04-04T17:23:27.790186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=65; 2025-04-04T17:23:27.790432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:27.790498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:27.790541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:27.790834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:27.790905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.790949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.791184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:27.791272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:27.791330Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:27.791595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:27.791665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:27.791718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... mp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.366366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.366396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:50.366420Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:23:50.366495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:50.366589Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.366618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:50.366691Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-04T17:23:50.366729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-04T17:23:50.366838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:684:2700];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-04-04T17:23:50.366915Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.366978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.367079Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.367194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:50.367263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.367313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.367337Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:691:2707] finished for tablet 9437184 2025-04-04T17:23:50.367699Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:684:2700];stats={"p":[{"events":["f_bootstrap"],"t":0.064},{"events":["f_ProduceResults"],"t":0.38},{"events":["l_bootstrap"],"t":0.536},{"events":["f_processing","f_task_result"],"t":0.556},{"events":["l_task_result"],"t":5.704},{"events":["f_ack"],"t":5.743},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.346}],"full":{"a":1743787424020806,"name":"_full_task","f":1743787424020806,"d_finished":0,"c":0,"l":1743787430367383,"d":6346577},"events":[{"name":"bootstrap","f":1743787424085166,"d_finished":472327,"c":1,"l":1743787424557493,"d":472327},{"a":1743787430367180,"name":"ack","f":1743787429764050,"d_finished":559911,"c":903,"l":1743787430367125,"d":560114},{"a":1743787430367161,"name":"processing","f":1743787424577772,"d_finished":2833134,"c":4515,"l":1743787430367127,"d":2833356},{"name":"ProduceResults","f":1743787424401746,"d_finished":1117032,"c":5420,"l":1743787430367324,"d":1117032},{"a":1743787430367326,"name":"Finish","f":1743787430367326,"d_finished":0,"c":0,"l":1743787430367383,"d":57},{"name":"task_result","f":1743787424577800,"d_finished":2204242,"c":3612,"l":1743787429724927,"d":2204242}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.367758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:684:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:23:50.368078Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:684:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.064},{"events":["f_ProduceResults"],"t":0.38},{"events":["l_bootstrap"],"t":0.536},{"events":["f_processing","f_task_result"],"t":0.556},{"events":["l_task_result"],"t":5.704},{"events":["f_ack"],"t":5.743},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.346}],"full":{"a":1743787424020806,"name":"_full_task","f":1743787424020806,"d_finished":0,"c":0,"l":1743787430367793,"d":6346987},"events":[{"name":"bootstrap","f":1743787424085166,"d_finished":472327,"c":1,"l":1743787424557493,"d":472327},{"a":1743787430367180,"name":"ack","f":1743787429764050,"d_finished":559911,"c":903,"l":1743787430367125,"d":560524},{"a":1743787430367161,"name":"processing","f":1743787424577772,"d_finished":2833134,"c":4515,"l":1743787430367127,"d":2833766},{"name":"ProduceResults","f":1743787424401746,"d_finished":1117032,"c":5420,"l":1743787430367324,"d":1117032},{"a":1743787430367326,"name":"Finish","f":1743787430367326,"d_finished":0,"c":0,"l":1743787430367793,"d":467},{"name":"task_result","f":1743787424577800,"d_finished":2204242,"c":3612,"l":1743787429724927,"d":2204242}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:23:50.368138Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:23:43.962485Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-04T17:23:50.368196Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:23:50.368367Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:19.832704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:19.832850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:19.832893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:19.832923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:19.834218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:19.834290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:19.834374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:19.834473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:19.836362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:19.931942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:19.931994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:19.943208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:19.943452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:19.943577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:19.953043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:19.953301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:19.953913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:19.954658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:19.959998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:19.961240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:19.961296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:19.961490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:19.961534Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:19.961579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:19.961701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:19.967269Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:20.108970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:20.109160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.109341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:20.109582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:20.109640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.111759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:20.111882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:20.112066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.112135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:20.112168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:20.112230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:20.114062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.114114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:20.114150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:20.116054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.116106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.116153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:20.116186Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.118961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:20.120696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:20.120868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:20.121785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:20.121898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:20.121934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:20.122142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:20.122175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:20.122365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:20.122450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:20.124211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:20.124251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:20.124401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:20.124443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:20.124702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.124729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:20.124825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:20.124854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.124894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:20.124928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.124967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:20.125001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.125050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:20.125085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:20.125143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:20.125175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:20.125201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:20.126602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:20.126680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:20.126738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4157Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-04-04T17:23:47.234183Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-04-04T17:23:47.234214Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-04-04T17:23:47.234257Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-04-04T17:23:47.234296Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2025-04-04T17:23:47.234747Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-04T17:23:47.234939Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-04T17:23:47.235003Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-04-04T17:23:47.235073Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2025-04-04T17:23:47.235155Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2025-04-04T17:23:47.236360Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-04T17:23:47.236454Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, 
msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-04-04T17:23:47.236483Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-04-04T17:23:47.236514Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2025-04-04T17:23:47.236545Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2025-04-04T17:23:47.236621Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-04-04T17:23:47.241196Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-04-04T17:23:47.242120Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-04-04T17:23:47.254718Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1303 } } 2025-04-04T17:23:47.254782Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-04-04T17:23:47.254916Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1303 } } 2025-04-04T17:23:47.255091Z node 19 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1303 } } 2025-04-04T17:23:47.256423Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 747 RawX2: 81604381261 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-04T17:23:47.256511Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-04-04T17:23:47.256705Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 747 RawX2: 81604381261 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-04T17:23:47.256806Z node 19 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2025-04-04T17:23:47.256963Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: 
NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 747 RawX2: 81604381261 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-04T17:23:47.257058Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2025-04-04T17:23:47.257121Z node 19 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-04T17:23:47.257198Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2025-04-04T17:23:47.257279Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2025-04-04T17:23:47.259853Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-04T17:23:47.261318Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-04T17:23:47.261779Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-04-04T17:23:47.261849Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2025-04-04T17:23:47.262045Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-04-04T17:23:47.262104Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-04T17:23:47.262178Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-04-04T17:23:47.262262Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-04T17:23:47.262331Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-04-04T17:23:47.262394Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-04-04T17:23:47.262465Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-04T17:23:47.262520Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-04T17:23:47.262622Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2025-04-04T17:23:47.262668Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2025-04-04T17:23:47.262690Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2025-04-04T17:23:47.262769Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2025-04-04T17:23:47.262823Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2025-04-04T17:23:47.262849Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2025-04-04T17:23:47.262882Z node 19 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-04-04T17:23:49.579764Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-04T17:23:49.580170Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 401us result status StatusNameConflict 2025-04-04T17:23:49.580460Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-04T17:23:51.595525Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-04-04T17:23:51.595941Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 456us result status StatusNameConflict 2025-04-04T17:23:51.596188Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> TColumnShardTestSchema::HotTiersTtl >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] Test command err: 2025-04-04T17:23:27.611156Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:27.692890Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:27.705927Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 
268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:27.708875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:27.726862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:27.727300Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:27.737357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:27.737552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:27.737723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:27.737868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:27.737959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:27.738039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:27.738105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:27.738220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:27.738355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:27.738426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.738498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:27.738567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:27.756500Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:27.761932Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:27.762110Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:27.762190Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:27.762389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.763755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:27.763828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:27.763867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:27.763954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:27.764028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:27.764064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:27.764097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:27.764234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.764287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:27.764317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:27.764341Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:27.764400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:27.764436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:27.764461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:27.764480Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:27.764532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:27.764557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:27.764575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:27.764608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:27.764634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:27.764655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:27.764954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-04-04T17:23:27.765040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-04T17:23:27.765112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-04-04T17:23:27.765169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-04-04T17:23:27.765350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:27.765392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:27.765423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:27.765551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:27.765578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.765600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.765687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-04-04T17:23:27.765727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:27.765748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:27.765869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:27.765898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:27.765934Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... umn_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.991401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.991423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:51.991448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:23:51.991524Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:51.991594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.991619Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:23:51.991665Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-04T17:23:51.991693Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-04T17:23:51.991788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:678:2694];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-04T17:23:51.991851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.991910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.991996Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.992083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:23:51.992139Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.992184Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan 
iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.992207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:685:2701] finished for tablet 9437184 2025-04-04T17:23:51.992612Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:678:2694];stats={"p":[{"events":["f_bootstrap"],"t":0.039},{"events":["f_ProduceResults"],"t":0.309},{"events":["l_bootstrap"],"t":0.475},{"events":["f_processing","f_task_result"],"t":0.489},{"events":["l_task_result"],"t":5.921},{"events":["f_ack"],"t":5.947},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.464}],"full":{"a":1743787425527730,"name":"_full_task","f":1743787425527730,"d_finished":0,"c":0,"l":1743787431992253,"d":6464523},"events":[{"name":"bootstrap","f":1743787425567505,"d_finished":436009,"c":1,"l":1743787426003514,"d":436009},{"a":1743787431992069,"name":"ack","f":1743787431475034,"d_finished":479336,"c":904,"l":1743787431992020,"d":479520},{"a":1743787431992058,"name":"processing","f":1743787426017545,"d_finished":2862714,"c":4520,"l":1743787431992021,"d":2862909},{"name":"ProduceResults","f":1743787425837720,"d_finished":1087005,"c":5426,"l":1743787431992196,"d":1087005},{"a":1743787431992197,"name":"Finish","f":1743787431992197,"d_finished":0,"c":0,"l":1743787431992253,"d":56},{"name":"task_result","f":1743787426017567,"d_finished":2291045,"c":3616,"l":1743787431449436,"d":2291045}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.992680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:678:2694];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:23:51.992995Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:678:2694];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.039},{"events":["f_ProduceResults"],"t":0.309},{"events":["l_bootstrap"],"t":0.475},{"events":["f_processing","f_task_result"],"t":0.489},{"events":["l_task_result"],"t":5.921},{"events":["f_ack"],"t":5.947},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.464}],"full":{"a":1743787425527730,"name":"_full_task","f":1743787425527730,"d_finished":0,"c":0,"l":1743787431992715,"d":6464985},"events":[{"name":"bootstrap","f":1743787425567505,"d_finished":436009,"c":1,"l":1743787426003514,"d":436009},{"a":1743787431992069,"name":"ack","f":1743787431475034,"d_finished":479336,"c":904,"l":1743787431992020,"d":479982},{"a":1743787431992058,"name":"processing","f":1743787426017545,"d_finished":2862714,"c":4520,"l":1743787431992021,"d":2863371},{"name":"ProduceResults","f":1743787425837720,"d_finished":1087005,"c":5426,"l":1743787431992196,"d":1087005},{"a":1743787431992197,"name":"Finish","f":1743787431992197,"d_finished":0,"c":0,"l":1743787431992715,"d":518},{"name":"task_result","f":1743787426017567,"d_finished":2291045,"c":3616,"l":1743787431449436,"d":2291045}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:23:51.993053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:23:45.485811Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-04T17:23:51.993088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:23:51.993270Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:685:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2915:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2915:2116] Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2915:2116] 2025-04-04T17:23:08.692860Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:08.697144Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:08.697490Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T17:23:08.699096Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:08.699468Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T17:23:08.700044Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:08.700082Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:08.700397Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T17:23:08.709078Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T17:23:08.709203Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T17:23:08.709361Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T17:23:08.709435Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:08.709517Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:08.709613Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-04-04T17:23:08.721737Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T17:23:08.721914Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:23:08.732667Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:23:08.732792Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:23:08.732856Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:23:08.732912Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T17:23:08.733024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T17:23:08.733084Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:23:08.733122Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:23:08.733166Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:23:08.744013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:23:08.744160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:23:08.754939Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:23:08.755145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T17:23:08.756168Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T17:23:08.756211Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T17:23:08.756382Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T17:23:08.756424Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T17:23:08.768456Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 
12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-04-04T17:23:08.769270Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-04-04T17:23:08.769313Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-04-04T17:23:08.769338Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-04-04T17:23:08.769359Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-04-04T17:23:08.769375Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-04-04T17:23:08.769390Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-04-04T17:23:08.769405Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-04-04T17:23:08.769432Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-04-04T17:23:08.769470Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-04-04T17:23:08.769506Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-04-04T17:23:08.769526Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-04-04T17:23:08.769541Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-04-04T17:23:08.769554Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-04-04T17:23:08.769568Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-04-04T17:23:08.769584Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-04-04T17:23:08.769619Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-04-04T17:23:08.769636Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-04-04T17:23:08.769651Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-04-04T17:23:08.769665Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-04-04T17:23:08.769679Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-04-04T17:23:08.769703Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-04-04T17:23:08.769720Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-04-04T17:23:08.769734Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-04-04T17:23:08.769760Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-04-04T17:23:08.769775Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-04-04T17:23:08.769799Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-04-04T17:23:08.769818Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-04-04T17:23:08.769836Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-04-04T17:23:08.769855Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-04-04T17:23:08.769870Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-04-04T17:23:08.769889Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-04-04T17:23:08.769902Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-04-04T17:23:08.769916Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Cr ... ER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-04-04T17:23:41.390340Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-04-04T17:23:41.390356Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-04-04T17:23:41.390372Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-04-04T17:23:41.390389Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-04-04T17:23:41.390405Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-04-04T17:23:41.390420Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-04-04T17:23:41.390441Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-04-04T17:23:41.390457Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-04-04T17:23:41.390479Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-04-04T17:23:41.390500Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-04-04T17:23:41.390517Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-04-04T17:23:41.390532Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-04-04T17:23:41.390548Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-04-04T17:23:41.390565Z node 161 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-04-04T17:23:41.390596Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-04-04T17:23:41.390632Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-04-04T17:23:41.390656Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2025-04-04T17:23:41.390691Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-04-04T17:23:41.390723Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-04-04T17:23:41.390741Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-04-04T17:23:41.696011Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.310678s 2025-04-04T17:23:41.696209Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.310897s 2025-04-04T17:23:41.749164Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-04-04T17:23:41.751542Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-04-04T17:23:41.751619Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-04-04T17:23:41.751648Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-04-04T17:23:41.751674Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-04-04T17:23:41.751705Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-04-04T17:23:41.751731Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-04-04T17:23:41.751759Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-04-04T17:23:41.751784Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-04-04T17:23:41.751811Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-04-04T17:23:41.751835Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-04-04T17:23:41.751860Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 
214:1001 Path# /dev/disk2 2025-04-04T17:23:41.751884Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-04-04T17:23:41.751910Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2025-04-04T17:23:41.751935Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-04-04T17:23:41.751960Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-04-04T17:23:41.751985Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-04-04T17:23:41.752011Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-04-04T17:23:41.752041Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-04-04T17:23:41.752071Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-04-04T17:23:41.752096Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-04-04T17:23:41.752122Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2025-04-04T17:23:41.752146Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2025-04-04T17:23:41.752172Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2025-04-04T17:23:41.752195Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-04-04T17:23:41.752221Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2025-04-04T17:23:41.752246Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2025-04-04T17:23:41.752272Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-04-04T17:23:41.752294Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2025-04-04T17:23:41.752324Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2025-04-04T17:23:41.752349Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2025-04-04T17:22:55.136346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509749438104887:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:55.136387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183a/r3tmp/tmpoEY4dX/pdisk_1.dat 
2025-04-04T17:22:55.571963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:55.572088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:55.590685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:55.593004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21643, node 1 2025-04-04T17:22:55.809845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:22:55.809871Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:22:55.809879Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:22:55.810000Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:22:56.282883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:22:56.397794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2732 2025-04-04T17:22:56.572513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:22:59.785958Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489509765949657894:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:22:59.786087Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183a/r3tmp/tmpjyAsaB/pdisk_1.dat 2025-04-04T17:22:59.952993Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:22:59.994322Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:22:59.994462Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:22:59.998669Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25298, node 4 2025-04-04T17:23:00.140114Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:00.140143Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:00.140163Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:00.140357Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:00.384680Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:00.461721Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:27959 2025-04-04T17:23:00.671440Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:23:00.695333Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-04-04T17:23:00.856483Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:23:00.885096Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7489509770244626432:2830], for# user2@builtin, access# DescribeSchema 2025-04-04T17:23:00.892501Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7489509770244626435:2831], for# user2@builtin, access# DescribeSchema 2025-04-04T17:23:00.900319Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:23:04.923815Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489509789596523863:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:04.923933Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183a/r3tmp/tmpco0PDW/pdisk_1.dat 2025-04-04T17:23:05.114163Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:05.155842Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:05.155979Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:05.159837Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12014, node 7 2025-04-04T17:23:05.259995Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:05.260023Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:05.260032Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:05.260215Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:23:05.557813Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:05.634262Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8615 2025-04-04T17:23:05.854916Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:09.923809Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489509789596523863:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:09.923945Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:23:20.087467Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:23:20.087499Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:48.478488Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489509976217253048:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:23:48.478561Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00183a/r3tmp/tmpV7nCGK/pdisk_1.dat 2025-04-04T17:23:48.637708Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:48.685762Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:23:48.685927Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:23:48.688954Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63550, node 10 2025-04-04T17:23:48.756827Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:23:48.756848Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:23:48.756883Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:23:48.757052Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25020 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:23:49.040456Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:23:49.118781Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25020 2025-04-04T17:23:49.390135Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: 2025-04-04T17:21:03.148006Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509269771660350:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:03.159904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001a71/r3tmp/tmp8u7RYT/pdisk_1.dat 2025-04-04T17:21:03.638626Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:03.661942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:03.662289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:03.671337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1780, node 1 2025-04-04T17:21:03.894903Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:03.894935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:03.894941Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:03.895062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:04.426580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:21:08.148121Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489509269771660350:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:08.148194Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:21:18.631797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:21:18.631822Z node 1 :IMPORT WARN: Table profiles were not loaded >> TColumnShardTestSchema::RebootOneColdTier >> TColumnShardTestSchema::RebootExportWithLostAnswer |94.9%| [TA] $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::RebootHotTiers >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::WriteSmall >> ColumnShardTiers::TieringUsage [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink >> TKeyValueTracingTest::WriteSmall [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xF9BEE90) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4134 (0xF14B8A6) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xF157558) std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846) NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186) NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D) ??+0 (0x7F2530E3AD90) __libc_start_main+128 (0x7F2530E3AE40) _start+41 (0xD0AA029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, 
bool)+592 (0xF9BEE90) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4134 (0xF14B8A6) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xF157248) std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846) NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186) NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D) ??+0 (0x7F7E151B5D90) __libc_start_main+128 (0x7F7E151B5E40) _start+41 (0xD0AA029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] Test command err: 2025-04-04T17:23:50.603002Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:50.691383Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:50.695991Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:50.696424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:50.723999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:50.724335Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:50.733496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:50.733745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:50.734052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:50.734200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:50.734335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:50.734461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:50.734589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:50.734718Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:50.734845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:50.734965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:50.735069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:50.735207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:50.760876Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:50.765311Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:50.765512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:50.765578Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:50.765777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:50.765971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:50.766050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:50.766105Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:50.766264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:50.766381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:50.766457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:50.766501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:50.766694Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:50.766792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:50.766842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:50.766877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:50.766980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:50.767040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:50.767081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:50.767132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:50.767210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:50.767253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:50.767288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:50.767348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:50.767400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:50.767434Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:50.767840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T17:23:50.767945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T17:23:50.768048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-04-04T17:23:50.768149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-04T17:23:50.768312Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:50.768364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:50.768424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:50.768683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:50.768735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:50.768770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:50.768932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:50.768995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:50.769031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:50.769213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:50.769282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:50.769319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... 
1:1027:3023];bytes=350080;rows=43760;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-04T17:24:07.238539Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.238636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.238666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:07.238703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:24:07.238815Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:07.238945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.238980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:07.239086Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=36240; 2025-04-04T17:24:07.239145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=289920;num_rows=36240;batch_columns=timestamp; 2025-04-04T17:24:07.239295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1027:3023];bytes=289920;rows=36240;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-04T17:24:07.239413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.239517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.239611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.239705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:07.239765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.239816Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.239840Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1028:3024] finished for tablet 9437184 2025-04-04T17:24:07.240253Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1027:3023];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.458},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.46}],"full":{"a":1743787446779004,"name":"_full_task","f":1743787446779004,"d_finished":0,"c":0,"l":1743787447239883,"d":460879},"events":[{"name":"bootstrap","f":1743787446779287,"d_finished":4027,"c":1,"l":1743787446783314,"d":4027},{"a":1743787447239690,"name":"ack","f":1743787447237823,"d_finished":1731,"c":2,"l":1743787447239630,"d":1924},{"a":1743787447239682,"name":"processing","f":1743787446785544,"d_finished":122803,"c":16,"l":1743787447239633,"d":123004},{"name":"ProduceResults","f":1743787446781143,"d_finished":4085,"c":20,"l":1743787447239828,"d":4085},{"a":1743787447239829,"name":"Finish","f":1743787447239829,"d_finished":0,"c":0,"l":1743787447239883,"d":54},{"name":"task_result","f":1743787446785572,"d_finished":120750,"c":14,"l":1743787447237673,"d":120750}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.240326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1027:3023];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:07.240703Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1027:3023];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.004},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.458},{"events":["l_ProduceResults","f_Finish"],"t":0.46},{"events":["l_ack","l_processing","l_Finish"],"t":0.461}],"full":{"a":1743787446779004,"name":"_full_task","f":1743787446779004,"d_finished":0,"c":0,"l":1743787447240358,"d":461354},"events":[{"name":"bootstrap","f":1743787446779287,"d_finished":4027,"c":1,"l":1743787446783314,"d":4027},{"a":1743787447239690,"name":"ack","f":1743787447237823,"d_finished":1731,"c":2,"l":1743787447239630,"d":2399},{"a":1743787447239682,"name":"processing","f":1743787446785544,"d_finished":122803,"c":16,"l":1743787447239633,"d":123479},{"name":"ProduceResults","f":1743787446781143,"d_finished":4085,"c":20,"l":1743787447239828,"d":4085},{"a":1743787447239829,"name":"Finish","f":1743787447239829,"d_finished":0,"c":0,"l":1743787447240358,"d":529},{"name":"task_result","f":1743787446785572,"d_finished":120750,"c":14,"l":1743787447237673,"d":120750}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:07.240760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:06.777860Z;index_granules=0;index_portions=2;index_batches=1721;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5175704;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5175704;selected_rows=0; 2025-04-04T17:24:07.240796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:07.241019Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1028:3024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> TKeyValueTracingTest::ReadSmall >> TColumnShardTestSchema::OneTier [GOOD] >> TKeyValueTracingTest::ReadHuge >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788010.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788010.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786810.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-04T17:23:31.926426Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:31.997907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:32.013749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:32.014034Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:32.020640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:32.020810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:32.020999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:32.021105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:32.021197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:32.021290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:32.021377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:32.021481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:32.021590Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:32.021667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.021730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:32.021795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:32.046000Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:32.046219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:32.046296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:32.046439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:32.046630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:32.046718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:32.046764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:32.046860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:32.046948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:32.047002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:32.047045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:32.047231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:32.047298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:32.047343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:32.047377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:32.047475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:32.047535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:32.047629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:32.047672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:32.047760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:32.047802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:32.047835Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:32.047889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:32.047931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:32.047969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:32.048372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-04-04T17:23:32.048482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=57; 2025-04-04T17:23:32.048570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-04T17:23:32.048699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-04T17:23:32.048885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:32.048951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:32.048993Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:32.049215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:32.049279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.049325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.049495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:32.049545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:32.049582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:32.049883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... ;);;;); 2025-04-04T17:24:09.127838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.127872Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:09.127907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:24:09.128030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:09.128110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.128136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:09.128194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-04-04T17:24:09.128224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-04-04T17:24:09.128331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1270:3279];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1271:3280]->[1:1270:3279] 2025-04-04T17:24:09.128426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.128494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.128553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.128629Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:09.128693Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.128756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.128782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1271:3280] finished for tablet 9437184 2025-04-04T17:24:09.129073Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1270:3279];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.009},{"events":["f_ack","l_task_result"],"t":0.595},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.599}],"full":{"a":1743787448528867,"name":"_full_task","f":1743787448528867,"d_finished":0,"c":0,"l":1743787449128820,"d":599953},"events":[{"name":"bootstrap","f":1743787448528994,"d_finished":7601,"c":1,"l":1743787448536595,"d":7601},{"a":1743787449128617,"name":"ack","f":1743787449124085,"d_finished":4111,"c":7,"l":1743787449128570,"d":4314},{"a":1743787449128610,"name":"processing","f":1743787448538109,"d_finished":258871,"c":56,"l":1743787449128571,"d":259081},{"name":"ProduceResults","f":1743787448531650,"d_finished":10449,"c":65,"l":1743787449128772,"d":10449},{"a":1743787449128773,"name":"Finish","f":1743787449128773,"d_finished":0,"c":0,"l":1743787449128820,"d":47},{"name":"task_result","f":1743787448538124,"d_finished":253993,"c":49,"l":1743787449123879,"d":253993}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:09.129120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1270:3279];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:09.129442Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1270:3279];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.009},{"events":["f_ack","l_task_result"],"t":0.595},{"events":["l_ProduceResults","f_Finish"],"t":0.599},{"events":["l_ack","l_processing","l_Finish"],"t":0.6}],"full":{"a":1743787448528867,"name":"_full_task","f":1743787448528867,"d_finished":0,"c":0,"l":1743787449129146,"d":600279},"events":[{"name":"bootstrap","f":1743787448528994,"d_finished":7601,"c":1,"l":1743787448536595,"d":7601},{"a":1743787449128617,"name":"ack","f":1743787449124085,"d_finished":4111,"c":7,"l":1743787449128570,"d":4640},{"a":1743787449128610,"name":"processing","f":1743787448538109,"d_finished":258871,"c":56,"l":1743787449128571,"d":259407},{"name":"ProduceResults","f":1743787448531650,"d_finished":10449,"c":65,"l":1743787449128772,"d":10449},{"a":1743787449128773,"name":"Finish","f":1743787449128773,"d_finished":0,"c":0,"l":1743787449129146,"d":373},{"name":"task_result","f":1743787448538124,"d_finished":253993,"c":49,"l":1743787449123879,"d":253993}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1271:3280]->[1:1270:3279] 2025-04-04T17:24:09.129506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:08.528531Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-04-04T17:24:09.129531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:09.129708Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TColumnShardTestSchema::ExternalTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
TColumnShardTestSchema::OneTier [GOOD] Test command err: 2025-04-04T17:23:47.149601Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:47.223294Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:47.227393Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:47.227756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:47.249813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:47.250035Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:47.255508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:47.255657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:47.255808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:47.255885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:47.255997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:47.256069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:47.256135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:47.256199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:47.256280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:47.256344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:47.256398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:47.256463Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:47.275483Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:47.279336Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:47.279521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:47.279569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:47.279738Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:47.279894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:47.279959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:47.280000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:47.280098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:47.280194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:47.280243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:47.280272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:47.280447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:47.280506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:47.280564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:47.280589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:47.280658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:47.280697Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:47.280725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:47.280747Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:47.280793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:47.280815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:47.280834Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:47.280888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:47.280924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:47.280955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:47.281346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-04T17:23:47.281464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-04T17:23:47.281556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-04T17:23:47.281664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-04T17:23:47.281852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:47.281920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:47.281962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:47.282222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:47.282293Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:47.282339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:47.282497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:47.282557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:47.282587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:47.282754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:47.282790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:47.282819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... mn_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:08.740174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:24:08.740233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:08.740288Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:08.740353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-04-04T17:24:08.740386Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-04-04T17:24:08.740470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:673:2689];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-04-04T17:24:08.740518Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:08.740780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.740856Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:680:2696] finished for tablet 9437184 2025-04-04T17:24:08.741273Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:673:2689];stats={"p":[{"events":["f_bootstrap"],"t":0.04},{"events":["f_ProduceResults"],"t":0.289},{"events":["l_bootstrap"],"t":0.453},{"events":["f_processing","f_task_result"],"t":0.467},{"events":["l_task_result"],"t":5.768},{"events":["f_ack"],"t":5.796},{"events":["l_ProduceResults","f_Finish"],"t":6.349},{"events":["l_ack","l_processing","l_Finish"],"t":6.35}],"full":{"a":1743787442390847,"name":"_full_task","f":1743787442390847,"d_finished":0,"c":0,"l":1743787448740903,"d":6350056},"events":[{"name":"bootstrap","f":1743787442430943,"d_finished":413384,"c":1,"l":1743787442844327,"d":413384},{"a":1743787448740724,"name":"ack","f":1743787448187709,"d_finished":513127,"c":904,"l":1743787448740691,"d":513306},{"a":1743787448740716,"name":"processing","f":1743787442858156,"d_finished":2836382,"c":4520,"l":1743787448740692,"d":2836569},{"name":"ProduceResults","f":1743787442680455,"d_finished":1099175,"c":5426,"l":1743787448740841,"d":1099175},{"a":1743787448740846,"name":"Finish","f":1743787448740846,"d_finished":0,"c":0,"l":1743787448740903,"d":57},{"name":"task_result","f":1743787442858175,"d_finished":2253938,"c":3616,"l":1743787448159123,"d":2253938}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.741343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:673:2689];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:08.741682Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:673:2689];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.04},{"events":["f_ProduceResults"],"t":0.289},{"events":["l_bootstrap"],"t":0.453},{"events":["f_processing","f_task_result"],"t":0.467},{"events":["l_task_result"],"t":5.768},{"events":["f_ack"],"t":5.796},{"events":["l_ProduceResults","f_Finish"],"t":6.349},{"events":["l_ack","l_processing","l_Finish"],"t":6.35}],"full":{"a":1743787442390847,"name":"_full_task","f":1743787442390847,"d_finished":0,"c":0,"l":1743787448741376,"d":6350529},"events":[{"name":"bootstrap","f":1743787442430943,"d_finished":413384,"c":1,"l":1743787442844327,"d":413384},{"a":1743787448740724,"name":"ack","f":1743787448187709,"d_finished":513127,"c":904,"l":1743787448740691,"d":513779},{"a":1743787448740716,"name":"processing","f":1743787442858156,"d_finished":2836382,"c":4520,"l":1743787448740692,"d":2837042},{"name":"ProduceResults","f":1743787442680455,"d_finished":1099175,"c":5426,"l":1743787448740841,"d":1099175},{"a":1743787448740846,"name":"Finish","f":1743787448740846,"d_finished":0,"c":0,"l":1743787448741376,"d":530},{"name":"task_result","f":1743787442858175,"d_finished":2253938,"c":3616,"l":1743787448159123,"d":2253938}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:08.741760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:02.353884Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-04-04T17:24:08.741796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:08.741963Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TKeyValueTracingTest::ReadSmall [FAIL] >> 
TKeyValueTracingTest::ReadHuge [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL [GOOD] Test command err: 2025-04-04T17:23:47.505362Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:47.574266Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:47.577732Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:47.578001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:47.594487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:47.594723Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:47.600735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:47.600913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:47.601084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:47.601171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:47.601290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:47.601371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:47.601434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:47.601513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:47.601593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:47.601665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:47.601742Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:47.601821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:47.618629Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:47.622085Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:47.622219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:47.622275Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:47.622400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:47.622564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:47.622613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:47.622643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:47.622700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:47.622748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:47.622779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:47.622800Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:47.622928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:47.622989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:47.623063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:47.623093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:47.623184Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:47.623241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:47.623277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:47.623295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:47.623336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:47.623356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:47.623372Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:47.623398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:47.623419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:47.623437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:47.623761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-04T17:23:47.623861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-04-04T17:23:47.623911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=20; 2025-04-04T17:23:47.623978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-04T17:23:47.624117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:47.624153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:47.624179Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:47.624310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:47.624339Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:47.624356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:47.624436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:47.624473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:47.624493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:47.624643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:47.624672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:47.624707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... tamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.856370Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.856404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:09.856432Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:24:09.856527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:09.856607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: 
timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.856637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:24:09.856704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-04T17:24:09.856744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-04T17:24:09.856873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:684:2700];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-04-04T17:24:09.856951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.857041Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.857152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.857256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:09.857324Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.857405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.857435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:691:2707] finished for tablet 9437184 2025-04-04T17:24:09.857906Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:684:2700];stats={"p":[{"events":["f_bootstrap"],"t":0.049},{"events":["f_ProduceResults"],"t":0.33},{"events":["l_bootstrap"],"t":0.531},{"events":["f_processing","f_task_result"],"t":0.545},{"events":["l_task_result"],"t":5.582},{"events":["f_ack"],"t":5.65},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.538}],"full":{"a":1743787443319218,"name":"_full_task","f":1743787443319218,"d_finished":0,"c":0,"l":1743787449857501,"d":6538283},"events":[{"name":"bootstrap","f":1743787443368858,"d_finished":482281,"c":1,"l":1743787443851139,"d":482281},{"a":1743787449857241,"name":"ack","f":1743787448969587,"d_finished":820131,"c":903,"l":1743787449857180,"d":820391},{"a":1743787449857230,"name":"processing","f":1743787443864328,"d_finished":3027321,"c":4515,"l":1743787449857181,"d":3027592},{"name":"ProduceResults","f":1743787443649272,"d_finished":1302143,"c":5420,"l":1743787449857421,"d":1302143},{"a":1743787449857423,"name":"Finish","f":1743787449857423,"d_finished":0,"c":0,"l":1743787449857501,"d":78},{"name":"task_result","f":1743787443864358,"d_finished":2137593,"c":3612,"l":1743787448901973,"d":2137593}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.857986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:684:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:09.858431Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:684:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.049},{"events":["f_ProduceResults"],"t":0.33},{"events":["l_bootstrap"],"t":0.531},{"events":["f_processing","f_task_result"],"t":0.545},{"events":["l_task_result"],"t":5.582},{"events":["f_ack"],"t":5.65},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":6.538}],"full":{"a":1743787443319218,"name":"_full_task","f":1743787443319218,"d_finished":0,"c":0,"l":1743787449858031,"d":6538813},"events":[{"name":"bootstrap","f":1743787443368858,"d_finished":482281,"c":1,"l":1743787443851139,"d":482281},{"a":1743787449857241,"name":"ack","f":1743787448969587,"d_finished":820131,"c":903,"l":1743787449857180,"d":820921},{"a":1743787449857230,"name":"processing","f":1743787443864328,"d_finished":3027321,"c":4515,"l":1743787449857181,"d":3028122},{"name":"ProduceResults","f":1743787443649272,"d_finished":1302143,"c":5420,"l":1743787449857421,"d":1302143},{"a":1743787449857423,"name":"Finish","f":1743787449857423,"d_finished":0,"c":0,"l":1743787449858031,"d":608},{"name":"task_result","f":1743787443864358,"d_finished":2137593,"c":3612,"l":1743787448901973,"d":2137593}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:24:09.858512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:03.275898Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-04T17:24:09.858560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:09.858757Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::UpdateWithoutHandshake 
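The TReplicaTest cases below exercise the scheme board replica's populator protocol: a populator first sends a handshake carrying an (owner, generation) pair, after which the replica rejects handshakes, updates, and generation commits from senders it has never seen or whose generation is older than the pending one. Here is a minimal C++ sketch of that gating logic, inferred only from the reject/accept lines in the traces below; the class and method names are hypothetical and do not match YDB's actual implementation:

```cpp
#include <cstdint>
#include <iostream>
#include <map>

// Hypothetical sketch of the generation gate the TReplicaTest traces
// exercise; names are illustrative, not YDB's real classes.
class TReplicaSketch {
public:
    // Handshake: remember the generation promised by this owner.
    void Handshake(uint64_t owner, uint64_t generation) {
        PendingGen[owner] = generation;
        std::cout << "Successful handshake: owner# " << owner
                  << ", generation# " << generation << "\n";
    }

    // Update: accept only if the sender completed a handshake and its
    // generation is not older than the pending one.
    bool Update(uint64_t owner, uint64_t generation) {
        auto it = PendingGen.find(owner);
        if (it == PendingGen.end()) {
            std::cout << "Reject update from unknown populator: owner# "
                      << owner << "\n";
            return false;
        }
        if (generation < it->second) {
            std::cout << "Reject update from stale populator: owner# " << owner
                      << ", generation# " << generation
                      << ", pending generation# " << it->second << "\n";
            return false;
        }
        return true;
    }

private:
    std::map<uint64_t, uint64_t> PendingGen; // owner -> pending generation
};

int main() {
    TReplicaSketch replica;
    replica.Update(1, 1);    // rejected: no handshake yet
    replica.Handshake(1, 1); // pending generation becomes 1
    replica.Update(1, 0);    // rejected: stale, generation 0 < pending 1
    replica.Update(1, 1);    // accepted
}
```

The sketch reproduces the order of events visible in TReplicaTest::UpdateWithoutHandshake and TReplicaTest::UpdateWithStaleGeneration: an update before any handshake is rejected as coming from an unknown populator, and after a handshake at generation 1 an update at generation 0 is rejected as stale.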
>> TReplicaTest::Subscribe
>> TReplicaTest::HandshakeWithStaleGeneration
>> TReplicaTest::UpdateWithoutHandshake [GOOD]
>> TReplicaTest::UpdateWithStaleGeneration
>> TReplicaTest::CommitWithoutHandshake [GOOD]
>> TReplicaTest::CommitWithStaleGeneration
>> TReplicaTest::Subscribe [GOOD]
>> TReplicaTest::SubscribeUnknownPath
>> TReplicaTest::HandshakeWithStaleGeneration [GOOD]
>> TReplicaTest::IdempotencyUpdatesAliveSubscriber
>> TReplicaTest::CommitWithStaleGeneration [GOOD]
>> TReplicaTest::Delete
>> TReplicaTest::SubscribeUnknownPath [GOOD]
>> TReplicaTest::SyncVersion
>> TReplicaTest::UpdateWithStaleGeneration [GOOD]
>> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD]
>> TReplicaTest::IdempotencyUpdatesVariant2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xF9BEE90)
TestOneRead(TBasicString>, TBasicString>)+4580 (0xF151154)
NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+130 (0xF157812)
std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846)
NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186)
NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D)
??+0 (0x7F11C9711D90)
__libc_start_main+128 (0x7F11C9711E40)
_start+41 (0xD0AA029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL]
Test command err:
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xF9BEE90)
TestOneRead(TBasicString>, TBasicString>)+4580 (0xF151154)
NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+130 (0xF157AE2)
std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846)
NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186)
NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D)
??+0 (0x7F64D4447D90)
__libc_start_main+128 (0x7F64D4447E40)
_start+41 (0xD0AA029)
>> TReplicaTest::Delete [GOOD]
>> TReplicaTest::SyncVersion [GOOD]
>> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD]
Test command err:
2025-04-04T17:24:13.040198Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72
2025-04-04T17:24:13.040305Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject update from unknown populator: sender# [1:7:2054], owner# 1, generation# 1
2025-04-04T17:24:13.040444Z node 1
:SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-04T17:24:13.040484Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-04T17:24:13.042954Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.043162Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-04-04T17:24:13.044317Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-04-04T17:24:13.044437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-04T17:24:13.044477Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.044571Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.044667Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-04-04T17:24:13.044714Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.310355Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-04T17:24:13.310427Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.310559Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.310600Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject update from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-04-04T17:24:13.310675Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-04T17:24:13.310735Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-04T17:24:13.310809Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.310905Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-04T17:24:13.310951Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-04-04T17:24:13.311012Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-04T17:24:13.311060Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.311110Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.311225Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2025-04-04T17:24:13.311268Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] >> TColumnShardTestSchema::OneColdTier [GOOD] |94.9%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2025-04-04T17:24:13.033655Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:13.033828Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject commit from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-04-04T17:24:13.034915Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:13.034976Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.290554Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-04-04T17:24:13.290621Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 0 2025-04-04T17:24:13.290693Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-04-04T17:24:13.290749Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.290861Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-04-04T17:24:13.290896Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Commit generation: owner# 1, generation# 1 2025-04-04T17:24:13.290964Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-04-04T17:24:13.291000Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject commit from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-04-04T17:24:13.291042Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:7:2054] 2025-04-04T17:24:13.291074Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-04T17:24:13.551268Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:13.551329Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.551483Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.551522Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2025-04-04T17:24:13.557026Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, 
DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:13.557214Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-04T17:24:13.557302Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.557442Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-04T17:24:13.557481Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.557606Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:13.557644Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2025-04-04T17:24:13.557673Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2025-04-04T17:24:13.557788Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2025-04-04T17:24:13.557841Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.557930Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:11:2058] 2025-04-04T17:24:13.557984Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:11:2058], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.558093Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:12:2059] 2025-04-04T17:24:13.558125Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:12:2059], path# path, domainOwnerId# 0, capabilities# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2025-04-04T17:24:13.133313Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-04-04T17:24:13.133407Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-04T17:24:13.133491Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:13.133533Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject handshake from stale populator: sender# [1:7:2054], owner# 1, generation# 1, pending generation# 2 2025-04-04T17:24:13.383583Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-04T17:24:13.383637Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.383730Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-04T17:24:13.383754Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.383857Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.384000Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.384023Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:13.388577Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:13.388742Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:13.388768Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-04T17:24:13.388789Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.388845Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.388867Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:13.388889Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.388943Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.388976Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-04T17:24:13.389030Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:13.389104Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-04-04T17:24:13.389141Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.660947Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:13.661020Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.661138Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.661184Z node 3 :SCHEME_BOARD_REPLICA 
NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:13.661242Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:13.661325Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.661353Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-04T17:24:13.661394Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.661454Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:13.661537Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:13.661578Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2025-04-04T17:24:13.661611Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2025-04-04T17:24:13.661676Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.661714Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:13.661748Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.661811Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.661842Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-04T17:24:13.661873Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2025-04-04T17:24:13.043755Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:13.043824Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.043964Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:13.043996Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, 
LocalPathId: 1], deletion# false 2025-04-04T17:24:13.048472Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:13.048587Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-04T17:24:13.048660Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.048760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:13.048795Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-04T17:24:13.048827Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:13.298621Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-04T17:24:13.298681Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-04T17:24:13.298745Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.576436Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:13.576501Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:13.576631Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 76 2025-04-04T17:24:13.576668Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:13.576766Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2025-04-04T17:24:13.576847Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2025-04-04T17:24:13.576900Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:13.576979Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:7:2054], cookie# 1 >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> TReplicaTest::Unsubscribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; 
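The WaitEmptyAfter/Tiers/TTL lines that open the column shard traces here are flat `key=value;` runs describing the tiering configuration under test; EvictAfter appears to be a threshold in seconds, and an empty value such as `Codec=` means the field is unset. Below is a small parsing sketch assuming nothing beyond the syntax visible in the log (ParseKv is an illustrative helper, not a YDB API):

```cpp
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Split one "key=value;" run from the tiering lines, e.g.
// "Column=timestamp;EvictAfter=143788007.000000s;Name=cold;Codec=".
std::vector<std::pair<std::string, std::string>> ParseKv(const std::string& s) {
    std::vector<std::pair<std::string, std::string>> out;
    size_t pos = 0;
    while (pos < s.size()) {
        size_t semi = s.find(';', pos);
        if (semi == std::string::npos) semi = s.size();
        std::string field = s.substr(pos, semi - pos);
        size_t eq = field.find('=');
        if (eq != std::string::npos)
            out.emplace_back(field.substr(0, eq), field.substr(eq + 1));
        pos = semi + 1;
    }
    return out;
}

int main() {
    const std::string tier =
        "Column=timestamp;EvictAfter=143788007.000000s;Name=cold;Codec=";
    for (const auto& [k, v] : ParseKv(tier))
        std::cout << k << " -> " << (v.empty() ? "<unset>" : v) << "\n";
}
```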
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788007.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143788007.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788007.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123788007.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786807.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123786807.000000s;Name=;Codec=}; 2025-04-04T17:23:28.819557Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:28.909550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:28.934700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:28.935053Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:28.943651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:28.943902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:28.944146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:28.944271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:28.944379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:28.944517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:28.944641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:28.944756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:28.944877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:28.944988Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:28.945092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:28.945197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:28.974292Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:28.974517Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:28.974572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:28.974751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:28.974911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:28.974989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:28.975045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:28.975170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:28.975257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:28.975303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:28.975335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:28.975500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:28.975562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:28.975601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:28.975632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:28.975722Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:28.975781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:28.975822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:28.975857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:28.975926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:28.975964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:28.975999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:28.976051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:28.976091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:28.976128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:28.976557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-04T17:23:28.976661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-04-04T17:23:28.976742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-04T17:23:28.976892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=78; 2025-04-04T17:23:28.977103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:28.977160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:28.977201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:28.977412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
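The trace up to this point shows the tablet's normalizer pipeline at startup: each registered normalizer is initialized (normalizer_init with a seq_id), runs once (each one here reporting "0 chunks found"), is marked finished, and the tablet switches to the next (normalizer_switched). A schematic C++ sketch of that loop follows, with illustrative names; the gaps in the seq_id sequence (3, 5, 7, 12, 14) presumably belong to normalizers that are not applicable to this tablet:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Schematic of the sequential normalizer pipeline seen in the trace above.
// Names and structure are illustrative, not YDB's actual classes.
struct TNormalizerStep {
    int SeqId;
    std::string Name;
    std::function<size_t()> Run; // returns number of chunks needing repair
};

void RunPipeline(const std::vector<TNormalizerStep>& steps) {
    for (const auto& step : steps) {
        std::cout << "normalizer_init;seq_id=" << step.SeqId
                  << ";type=" << step.Name << "\n";
        size_t chunks = step.Run();
        std::cout << step.Name << ";message=" << chunks << " chunks found\n";
        std::cout << "normalizer_finished;description=CLASS_NAME="
                  << step.Name << ";id=" << step.SeqId << "\n";
    }
}

int main() {
    // seq_ids taken from the trace; each normalizer is a no-op here.
    RunPipeline({
        {1, "Granules", [] { return size_t(0); }},
        {2, "Chunks", [] { return size_t(0); }},
        {4, "TablesCleaner", [] { return size_t(0); }},
        {6, "CleanGranuleId", [] { return size_t(0); }},
    });
}
```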
2025-04-04T17:23:28.977475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:28.977522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:28.977669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:28.977709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:28.977740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:28.977966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 763416,"d":3432},{"a":1743787453763483,"name":"processing","f":1743787453283795,"d_finished":178702,"c":32,"l":1743787453763418,"d":179501},{"name":"ProduceResults","f":1743787453280257,"d_finished":7816,"c":38,"l":1743787453763715,"d":7816},{"a":1743787453763717,"name":"Finish","f":1743787453763717,"d_finished":0,"c":0,"l":1743787453764282,"d":565},{"name":"task_result","f":1743787453283812,"d_finished":175465,"c":28,"l":1743787453760377,"d":175465}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-04-04T17:24:13.764716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:13.277522Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-04T17:24:13.764758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:13.764992Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:13.766829Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-04T17:24:13.767091Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: 
{1000000009:max} at tablet 9437184 2025-04-04T17:24:13.767208Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:13.767366Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:13.767435Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:13.767570Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:13.767657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:13.768136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-04-04T17:24:13.768571Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:13.768806Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:13.768984Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:13.769098Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:13.769399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:13.769505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:13.769644Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:13.769702Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-04-04T17:24:13.770150Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787453768070,"name":"_full_task","f":1743787453768070,"d_finished":0,"c":0,"l":1743787453769772,"d":1702},"events":[{"name":"bootstrap","f":1743787453768301,"d_finished":830,"c":1,"l":1743787453769131,"d":830},{"a":1743787453769374,"name":"ack","f":1743787453769374,"d_finished":0,"c":0,"l":1743787453769772,"d":398},{"a":1743787453769354,"name":"processing","f":1743787453769354,"d_finished":0,"c":0,"l":1743787453769772,"d":418},{"name":"ProduceResults","f":1743787453768903,"d_finished":471,"c":2,"l":1743787453769671,"d":471},{"a":1743787453769674,"name":"Finish","f":1743787453769674,"d_finished":0,"c":0,"l":1743787453769772,"d":98}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:13.770230Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:13.770656Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787453768070,"name":"_full_task","f":1743787453768070,"d_finished":0,"c":0,"l":1743787453770299,"d":2229},"events":[{"name":"bootstrap","f":1743787453768301,"d_finished":830,"c":1,"l":1743787453769131,"d":830},{"a":1743787453769374,"name":"ack","f":1743787453769374,"d_finished":0,"c":0,"l":1743787453770299,"d":925},{"a":1743787453769354,"name":"processing","f":1743787453769354,"d_finished":0,"c":0,"l":1743787453770299,"d":945},{"name":"ProduceResults","f":1743787453768903,"d_finished":471,"c":2,"l":1743787453769671,"d":471},{"a":1743787453769674,"name":"Finish","f":1743787453769674,"d_finished":0,"c":0,"l":1743787453770299,"d":625}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-04-04T17:24:13.770750Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:13.767623Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:13.770802Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:13.770918Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::Merge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788007.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143788007.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788007.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123788007.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786807.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123786807.000000s;Name=;Codec=}; 2025-04-04T17:23:29.257257Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:29.345202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:29.369627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:29.369918Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:29.376575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:29.376769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:29.376983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:29.377085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:29.377192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:29.377416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:29.377597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:29.377769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:29.377974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:29.378146Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.378326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:29.378449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:29.404576Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:29.404826Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:29.404910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:29.405090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.405243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:29.405316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:29.405379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:29.405484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:29.405538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:29.405569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:29.405600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:29.405768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.405827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:29.405867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:29.405899Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2025-04-04T17:23:29.405989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:29.406026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:29.406056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:29.406077Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:29.406135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:29.406163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:29.406183Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:29.406216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:29.406272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:29.406304Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:29.406711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-04-04T17:23:29.406817Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T17:23:29.406905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-04T17:23:29.407001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-04T17:23:29.407193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:29.407280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:29.407315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:29.407491Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:29.407552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.407584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.407688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:29.407722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:29.407757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:29.407947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 031246,"d":3909},{"a":1743787454031321,"name":"processing","f":1743787453395232,"d_finished":238653,"c":32,"l":1743787454031249,"d":239588},{"name":"ProduceResults","f":1743787453390276,"d_finished":9533,"c":38,"l":1743787454031562,"d":9533},{"a":1743787454031565,"name":"Finish","f":1743787454031565,"d_finished":0,"c":0,"l":1743787454032256,"d":691},{"name":"task_result","f":1743787453395258,"d_finished":234866,"c":28,"l":1743787454028007,"d":234866}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-04-04T17:24:14.032770Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:13.386371Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-04T17:24:14.032811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:14.033086Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:14.035020Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 
9437184 2025-04-04T17:24:14.035355Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-04T17:24:14.035484Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:14.035667Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:14.035737Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:14.035906Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:14.035995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:14.036481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-04-04T17:24:14.036905Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:14.037163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:14.037349Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:14.037491Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:14.037827Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:14.037948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:14.038082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:14.038130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-04-04T17:24:14.038614Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787454036409,"name":"_full_task","f":1743787454036409,"d_finished":0,"c":0,"l":1743787454038197,"d":1788},"events":[{"name":"bootstrap","f":1743787454036602,"d_finished":921,"c":1,"l":1743787454037523,"d":921},{"a":1743787454037800,"name":"ack","f":1743787454037800,"d_finished":0,"c":0,"l":1743787454038197,"d":397},{"a":1743787454037778,"name":"processing","f":1743787454037778,"d_finished":0,"c":0,"l":1743787454038197,"d":419},{"name":"ProduceResults","f":1743787454037265,"d_finished":511,"c":2,"l":1743787454038110,"d":511},{"a":1743787454038113,"name":"Finish","f":1743787454038113,"d_finished":0,"c":0,"l":1743787454038197,"d":84}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:14.038700Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:14.039132Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787454036409,"name":"_full_task","f":1743787454036409,"d_finished":0,"c":0,"l":1743787454038751,"d":2342},"events":[{"name":"bootstrap","f":1743787454036602,"d_finished":921,"c":1,"l":1743787454037523,"d":921},{"a":1743787454037800,"name":"ack","f":1743787454037800,"d_finished":0,"c":0,"l":1743787454038751,"d":951},{"a":1743787454037778,"name":"processing","f":1743787454037778,"d_finished":0,"c":0,"l":1743787454038751,"d":973},{"name":"ProduceResults","f":1743787454037265,"d_finished":511,"c":2,"l":1743787454038110,"d":511},{"a":1743787454038113,"name":"Finish","f":1743787454038113,"d_finished":0,"c":0,"l":1743787454038751,"d":638}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-04-04T17:24:14.039257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:14.035965Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:14.039311Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:14.039433Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath >> TReplicaTest::Handshake >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::Commit >> TReplicaTest::Handshake [GOOD] >> TReplicaTest::DoubleUnsubscribe >> 
TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2025-04-04T17:24:15.055887Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:15.055970Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.056104Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-04-04T17:24:15.056140Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-04T17:24:15.056262Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.056352Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-04T17:24:15.056388Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.056519Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.056560Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.063304Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:15.063526Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-04-04T17:24:15.063575Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:8:2055], path# path 2025-04-04T17:24:15.063680Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:15.063708Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-04T17:24:15.063736Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.329698Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> TReplicaTest::DoubleDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2025-04-04T17:24:15.083245Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle 
NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:15.083301Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-04T17:24:15.083376Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:15.083404Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-04T17:24:15.083480Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-04-04T17:24:15.083504Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-04T17:24:15.083549Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-04-04T17:24:15.083571Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-04-04T17:24:15.083706Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 103 2025-04-04T17:24:15.083743Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-04T17:24:15.088076Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-04T17:24:15.088218Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2025-04-04T17:24:15.088246Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-04T17:24:15.088283Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-04T17:24:15.088358Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-04T17:24:15.088441Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-04T17:24:15.116154Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-04-04T17:24:15.116205Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 800, generation# 1 
2025-04-04T17:24:15.116260Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:11:2058] 2025-04-04T17:24:15.116279Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 800, generation# 1 2025-04-04T17:24:15.116326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-04-04T17:24:15.116345Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 900, generation# 1 2025-04-04T17:24:15.116385Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:12:2059] 2025-04-04T17:24:15.116414Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 900, generation# 1 2025-04-04T17:24:15.116496Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103 2025-04-04T17:24:15.116526Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-04T17:24:15.116565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-04T17:24:15.116617Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2025-04-04T17:24:15.116633Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.116661Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2025-04-04T17:24:15.116716Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-04T17:24:15.116798Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:13:2060] 2025-04-04T17:24:15.116832Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Subscribe: subscriber# [1:13:2060], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-04-04T17:24:15.117060Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062] 
2025-04-04T17:24:15.117077Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-04-04T17:24:15.117113Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062] 2025-04-04T17:24:15.117129Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-04-04T17:24:15.117162Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-04-04T17:24:15.117177Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1 2025-04-04T17:24:15.117226Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-04-04T17:24:15.117241Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1 2025-04-04T17:24:15.117273Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event size# 103 2025-04-04T17:24:15.117298Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-04T17:24:15.117322Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-04T17:24:15.117354Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2025-04-04T17:24:15.117375Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-04T17:24:15.117413Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-04-04T17:24:15.117460Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:17:2064] 2025-04-04T17:24:15.117478Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Subscribe: subscriber# [1:17:2064], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-04-04T17:24:15.117694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-04-04T17:24:15.117720Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 800, generation# 1 2025-04-04T17:24:15.117775Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:19:2066] 2025-04-04T17:24:15.117804Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 800, generation# 1 2025-04-04T17:24:15.117867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-04-04T17:24:15.117890Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 900, generation# 1 2025-04-04T17:24:15.117929Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:20:2067] 2025-04-04T17:24:15.117948Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 900, generation# 1 2025-04-04T17:24:15.118016Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:19:2066], cookie# 0, event size# 103 2025-04-04T17:24:15.118040Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-04-04T17:24:15.118071Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:18:2065] Upsert description: path# /Root/Te ... ble_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-04-04T17:24:15.514318Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:394:2441] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:397:2444] 2025-04-04T17:24:15.514342Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:24:15.514405Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Subscribe: subscriber# [2:397:2444], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-04T17:24:15.516705Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-04-04T17:24:15.516749Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-04-04T17:24:15.516825Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446] 2025-04-04T17:24:15.516854Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-04-04T17:24:15.516913Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-04-04T17:24:15.516937Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1 2025-04-04T17:24:15.516981Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 
Generation: 1 }: sender# [2:400:2447] 2025-04-04T17:24:15.517004Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1 2025-04-04T17:24:15.517074Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 64 2025-04-04T17:24:15.517104Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-04T17:24:15.517141Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-04-04T17:24:15.517215Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 130 2025-04-04T17:24:15.517241Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2025-04-04T17:24:15.517280Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-04-04T17:24:15.517342Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:401:2448] 2025-04-04T17:24:15.517366Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:24:15.517410Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Subscribe: subscriber# [2:401:2448], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-04T17:24:15.519752Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-04-04T17:24:15.519790Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-04-04T17:24:15.519847Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450] 2025-04-04T17:24:15.519876Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1 2025-04-04T17:24:15.519929Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-04-04T17:24:15.519971Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1 2025-04-04T17:24:15.520017Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-04-04T17:24:15.520042Z node 2 :SCHEME_BOARD_REPLICA NOTICE: 
[2:402:2449] Commit generation: owner# 910, generation# 1 2025-04-04T17:24:15.520109Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64 2025-04-04T17:24:15.520136Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-04T17:24:15.520160Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-04-04T17:24:15.520218Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2025-04-04T17:24:15.520252Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-04-04T17:24:15.520317Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:405:2452] 2025-04-04T17:24:15.520344Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# /Root/Tenant/table_inside 2025-04-04T17:24:15.520387Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Subscribe: subscriber# [2:405:2452], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-04-04T17:24:15.634828Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:15.634895Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 800, generation# 1 2025-04-04T17:24:15.635003Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:15.635053Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 800, generation# 1 2025-04-04T17:24:15.635119Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-04-04T17:24:15.635166Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-04-04T17:24:15.635230Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:8:2055] 2025-04-04T17:24:15.635264Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-04-04T17:24:15.635358Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 118 2025-04-04T17:24:15.635392Z node 3 :SCHEME_BOARD_REPLICA 
NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-04-04T17:24:15.635441Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-04T17:24:15.635533Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 117 2025-04-04T17:24:15.635569Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-04-04T17:24:15.635604Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2025-04-04T17:24:15.635648Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2025-04-04T17:24:15.635700Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-04-04T17:24:15.635773Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-04T17:24:15.635835Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 >> TReplicaTest::Update >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-04-04T17:24:15.321556Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-04-04T17:24:15.321629Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-04-04T17:24:15.321796Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.321923Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:9:2056] 2025-04-04T17:24:15.321956Z node 1 
:SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.322003Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.322083Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:15.322118Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.322290Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.322327Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.329670Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:15.329952Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:15.330009Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-04T17:24:15.330052Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.599164Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-04T17:24:15.599229Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.599369Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-04T17:24:15.599408Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.599504Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.599668Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.599726Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.599795Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:15.599919Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:15.599950Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# 
true 2025-04-04T17:24:15.599983Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.600081Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2025-04-04T17:24:15.600152Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.600224Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.600255Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.600286Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.600364Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.600396Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-04T17:24:15.600462Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:15.600549Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-04-04T17:24:15.600595Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.880081Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:8:2055] 2025-04-04T17:24:15.880147Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-04T17:24:15.880220Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 1, capabilities# 2025-04-04T17:24:15.880360Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:15.880403Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.880496Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:15.880548Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 1, generation# 1 2025-04-04T17:24:15.880652Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2025-04-04T17:24:15.471066Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:15.471152Z node 1 
:SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.722837Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-04T17:24:15.722885Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.722992Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.723032Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.728061Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:15.728203Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-04-04T17:24:15.728283Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.728403Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-04T17:24:15.728444Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-04-04T17:24:15.728482Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-04-04T17:24:15.982040Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:15.982085Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.982169Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-04T17:24:15.982254Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-04T17:24:15.982321Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.982436Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.982471Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.982522Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:15.982664Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:15.982697Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 
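A note on the replica behavior in the DoubleDelete trace around this point: a deletion both removes the stored description and leaves a tombstone for the pathId, so a re-sent update for that same pathId is dropped ("Path was explicitly deleted, ignoring"), while an update under a fresh pathId (a re-created path) is accepted. Below is a minimal, self-contained C++ sketch of that bookkeeping; TReplicaSketch and HandleUpdate are hypothetical names, and none of the actual actor or event machinery is modeled.

// Minimal sketch of the tombstone behavior seen in the trace above.
// Hypothetical names; not the YDB implementation.
#include <compare>
#include <iostream>
#include <map>
#include <set>
#include <string>

struct TPathId {
    unsigned long OwnerId = 0;
    unsigned long LocalPathId = 0;
    auto operator<=>(const TPathId&) const = default;
};

class TReplicaSketch {
    std::map<TPathId, std::string> Descriptions; // pathId -> description
    std::set<TPathId> ExplicitlyDeleted;         // tombstones
public:
    void HandleUpdate(const std::string& path, TPathId pathId, bool deletion) {
        if (deletion) {
            // Drop the record and remember that this pathId was deleted.
            Descriptions.erase(pathId);
            ExplicitlyDeleted.insert(pathId);
            std::cout << "Delete description: path# " << path << "\n";
        } else if (ExplicitlyDeleted.count(pathId)) {
            // A stale update for an already-deleted pathId is ignored.
            std::cout << "Path was explicitly deleted, ignoring: path# " << path << "\n";
        } else {
            Descriptions[pathId] = path;
            std::cout << "Upsert description: path# " << path << "\n";
        }
    }
};

int main() {
    TReplicaSketch replica;
    replica.HandleUpdate("path", {1, 1}, false); // upsert
    replica.HandleUpdate("path", {1, 1}, true);  // delete, tombstone kept
    replica.HandleUpdate("path", {1, 1}, false); // ignored: explicitly deleted
    replica.HandleUpdate("path", {1, 2}, false); // new pathId is accepted
}

The tombstone is keyed by pathId rather than by the path string, which is why the update for [OwnerId: 1, LocalPathId: 2] in the trace goes through while the re-sent one for LocalPathId: 1 does not.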
2025-04-04T17:24:15.982731Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.982824Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-04-04T17:24:15.982857Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:15.982951Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:15.982975Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2025-04-04T17:24:15.700227Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:15.700326Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.700433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:15.700471Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 1, generation# 1 2025-04-04T17:24:15.700520Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-04-04T17:24:15.700552Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-04-04T17:24:15.979385Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-04-04T17:24:15.979456Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-04-04T17:24:15.979617Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-04T17:24:15.979744Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-04T17:24:15.979780Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:15.979909Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:15.979956Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:15.988279Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:15.988535Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:8:2055] 
2025-04-04T17:24:15.988653Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-04-04T17:24:15.988704Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-04-04T17:24:15.988745Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:15.988838Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:8:2055] 2025-04-04T17:24:16.260598Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-04-04T17:24:16.260674Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:16.260815Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:16.260890Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:16.260988Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:16.261116Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-04-04T17:24:16.261215Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-04T17:24:16.261369Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:16.261415Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:16.261467Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:16.261684Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:16.261725Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-04-04T17:24:16.261760Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:16.261840Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-04-04T17:24:16.261904Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-04-04T17:24:16.261960Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, 
LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:16.262065Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:8:2055] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2025-04-04T17:24:16.477327Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-04-04T17:24:16.477395Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:16.477532Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:16.477565Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-04-04T17:24:16.482565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:16.482704Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-04T17:24:16.482797Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-04-04T17:24:16.482913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-04-04T17:24:16.482969Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-04-04T17:24:16.483026Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-04-04T17:24:16.483062Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-04-04T17:24:16.483160Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-04-04T17:24:16.483206Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-04-04T17:24:16.755229Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-04-04T17:24:16.755299Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-04-04T17:24:16.755431Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-04-04T17:24:16.755480Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 
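Stepping back to the AckNotificationsUponPathRecreation trace just above: the subscriber registers with capabilities# AckNotifications: true, observes PathVersion 2 and 3, and after the path is re-created under a new pathId (PathVersion reset to 1) it still answers TEvNotifyAck { Version: 3 }. One plausible reading, and it is only an assumption here, is that acks are cumulative over the versions the subscriber has observed, so a single ack retires every pending notification up to that version. A toy C++ sketch under that assumption, with hypothetical names:

// Toy subscriber that always acks the highest version it has seen.
// Assumption-based sketch, not the YDB scheme board protocol code.
#include <cstdint>
#include <iostream>
#include <string>

struct TSubscriberSketch {
    uint64_t MaxSeenVersion = 0;

    void OnNotify(const std::string& path, uint64_t version) {
        if (version > MaxSeenVersion) {
            MaxSeenVersion = version;
        }
        // The ack carries the cumulative maximum, not the incoming version.
        std::cout << "notify " << path << " v" << version
                  << " -> TEvNotifyAck { Version: " << MaxSeenVersion << " }\n";
    }
};

int main() {
    TSubscriberSketch s;
    s.OnNotify("path", 2); // acks Version: 2
    s.OnNotify("path", 3); // acks Version: 3
    s.OnNotify("path", 1); // recreated path starts at v1, ack stays at 3
}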
2025-04-04T17:24:16.755571Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-04-04T17:24:16.755644Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] |95.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedUpsertRestartAfterPlan [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists >> ColumnBuildTest::BaseCase >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788008.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788008.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786808.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123786808.000000s;Name=;Codec=}; 2025-04-04T17:23:30.755406Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:30.845289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:30.871692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:30.872059Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:30.880367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:30.880612Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:30.880858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:30.880980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:30.881101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:30.881265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:30.881399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:30.881523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:30.881649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:30.881769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.881880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:30.881994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:30.911966Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:30.912194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:30.912275Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:30.912444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.912639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:30.912724Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:30.912771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:30.912874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:30.912958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:30.913005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:30.913039Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:30.913196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.913260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:30.913303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:30.913336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:30.913431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:30.913483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:30.913526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:30.913564Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:30.913641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:30.913681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:30.913713Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:30.913765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
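The TTxUpdateSchema portion of this trace walks a fixed chain of startup normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, then the Restore*/Sync* ones): each is registered up front, runs to completion, logs a chunk count and normalizer_finished, and only then does normalizer_switched hand control to the next. A toy C++ model of that strictly sequential chain, with hypothetical names rather than the YDB API:

// Sequential normalizer chain: register in order, run one at a time.
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TNormalizer {
    std::string Name;
    std::function<size_t()> Run; // returns the number of chunks it touched
};

void RunNormalizers(const std::vector<TNormalizer>& chain) {
    for (const auto& n : chain) {
        std::cout << "normalizer_switched: " << n.Name << "\n";
        size_t found = n.Run(); // each one completes before the next starts
        std::cout << n.Name << ": " << found << " chunks found\n";
        std::cout << "normalizer_finished: " << n.Name << "\n";
    }
}

int main() {
    RunNormalizers({
        {"Granules", [] { return size_t(0); }},
        {"Chunks", [] { return size_t(0); }},
        {"TablesCleaner", [] { return size_t(0); }},
        {"CleanGranuleId", [] { return size_t(0); }},
    });
}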
2025-04-04T17:23:30.913810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:30.913847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:30.914314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=71; 2025-04-04T17:23:30.914418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=56; 2025-04-04T17:23:30.914504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-04T17:23:30.914692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T17:23:30.914899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:30.914963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:30.915003Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:30.915247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:30.915326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.915373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.915551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:30.915599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:30.915633Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:30.915804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
46,"c":4,"l":1743787458632676,"d":2608},{"a":1743787458632713,"name":"processing","f":1743787458266281,"d_finished":128299,"c":32,"l":1743787458632677,"d":128869},{"name":"ProduceResults","f":1743787458261518,"d_finished":7230,"c":38,"l":1743787458632855,"d":7230},{"a":1743787458632857,"name":"Finish","f":1743787458632857,"d_finished":0,"c":0,"l":1743787458633283,"d":426},{"name":"task_result","f":1743787458266308,"d_finished":125659,"c":28,"l":1743787458630478,"d":125659}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-04-04T17:24:18.633654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:18.257822Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-04-04T17:24:18.633686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:18.633884Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:18.635291Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-04T17:24:18.635509Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-04-04T17:24:18.635599Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:18.635722Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:18.635771Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:18.635884Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:18.635947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:18.636313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-04-04T17:24:18.636667Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:18.636855Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:18.636993Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:18.637100Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:18.637307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:18.637390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:18.637493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:18.637532Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-04-04T17:24:18.637922Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787458636265,"name":"_full_task","f":1743787458636265,"d_finished":0,"c":0,"l":1743787458637589,"d":1324},"events":[{"name":"bootstrap","f":1743787458636426,"d_finished":695,"c":1,"l":1743787458637121,"d":695},{"a":1743787458637290,"name":"ack","f":1743787458637290,"d_finished":0,"c":0,"l":1743787458637589,"d":299},{"a":1743787458637272,"name":"processing","f":1743787458637272,"d_finished":0,"c":0,"l":1743787458637589,"d":317},{"name":"ProduceResults","f":1743787458636928,"d_finished":384,"c":2,"l":1743787458637518,"d":384},{"a":1743787458637519,"name":"Finish","f":1743787458637519,"d_finished":0,"c":0,"l":1743787458637589,"d":70}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:18.637986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:18.638302Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787458636265,"name":"_full_task","f":1743787458636265,"d_finished":0,"c":0,"l":1743787458638022,"d":1757},"events":[{"name":"bootstrap","f":1743787458636426,"d_finished":695,"c":1,"l":1743787458637121,"d":695},{"a":1743787458637290,"name":"ack","f":1743787458637290,"d_finished":0,"c":0,"l":1743787458638022,"d":732},{"a":1743787458637272,"name":"processing","f":1743787458637272,"d_finished":0,"c":0,"l":1743787458638022,"d":750},{"name":"ProduceResults","f":1743787458636928,"d_finished":384,"c":2,"l":1743787458637518,"d":384},{"a":1743787458637519,"name":"Finish","f":1743787458637519,"d_finished":0,"c":0,"l":1743787458638022,"d":503}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-04-04T17:24:18.638376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:18.635922Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:18.638420Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:18.638507Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2025-04-04T17:21:36.743796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:36.744433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:36.744506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0010d1/r3tmp/tmp2sqRm0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19524, node 1 TClient is connected to server localhost:27510 2025-04-04T17:21:37.333551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:37.380063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:37.383244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:21:37.383304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:21:37.383353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:21:37.383727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:37.419495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:37.419676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:37.431317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-04T17:21:47.769212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:683:2573], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:47.769363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:47.878206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-04T17:21:48.194559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:826:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:48.194671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:48.195040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:831:2670], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:48.198932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-04T17:21:48.329536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:833:2672], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:21:48.589568Z node 1 :TX_PROXY ERROR: Actor# [1:928:2738] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:49.052911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:21:49.522491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-04T17:21:50.256975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-04T17:21:50.950037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:21:51.455229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:21:52.537405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:21:52.823005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:07.897629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( 
SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:19.814888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715715:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T17:22:22.497515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715732:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715732 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 15 2025-04-04T17:22:22.742406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2917:4257];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:22:22.760890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2917:4257];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:22:22.761303Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037892 2025-04-04T17:22:22.769532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4257];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:22:22.769768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4257];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:22:22.770043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4257];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:22:22.770134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4257];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:22:22.770223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4257];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:22:22.770360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4257];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:22:22.770436Z node ... 
ventHandle, TDelete>, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #20 0x30173701 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #21 0x3017da73 in NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #22 0x30396303 in NKikimr::NSchemeShard::TEvSchemeShard::TEvNotifyTxCompletionResult::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:474:13 #23 0x303958a2 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:523:20 #24 0x303958a2 in NKikimr::NSchemeShard::TEvSchemeShard::TEvNotifyTxCompletionResult::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:529:20 #25 0x30387ce2 in NKikimr::NSchemeShard::TEvSchemeShard::TEvNotifyTxCompletionResult::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:574:24 #26 0x30387946 in NKikimr::Tests::NCommon::THelper::WaitForSchemeOperation(NActors::TActorId, unsigned long) /-S/ydb/core/testlib/common_helper.cpp:51:13 #27 0x30a89ab0 in NKikimr::Tests::NCS::THelperSchemaless::CreateTestOlapTable(NActors::TActorId, TBasicString>, TBasicString>) /-S/ydb/core/testlib/cs_helper.cpp:62:5 #28 0x179bb57f in NKikimr::TLocalHelper::CreateTestOlapTable(TBasicString>, unsigned int, TBasicString>, unsigned int, TBasicString>) /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:75:16 #29 0x179c4524 in NKikimr::NTestSuiteColumnShardTiers::TTestCaseTieringUsage::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:443:17 #30 0x179dace7 in operator() /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:148:1 #31 0x179dace7 in __invoke<(lambda at /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:148:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #32 0x179dace7 in __call<(lambda at /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:148:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #33 0x179dace7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #34 0x179dace7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311:10 #35 0x18291e15 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12 #36 0x18291e15 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10 #37 0x18291e15 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #38 0x18262b18 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x17ad15cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x2b8a8ae6 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:270:10 #2 0x2b8a8ae6 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:294:10 #3 0x2b8a8ae6 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:119:32 #4 0x2b8a8ae6 in 
__allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x2b8a8ae6 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:347:25 #6 0x2b8a8ae6 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2165:51 #7 0x2b89b7d2 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1549:5 #8 0x2b89b7d2 in NKikimr::NOlap::TBlobManager::StartBlobBatch() /-S/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp:380:23 #9 0x2b882110 in TWriteAction /-S/ydb/core/tx/columnshard/blobs_action/bs/write.h:41:30 #10 0x2b882110 in void std::__y1::allocator::construct[abi:fe190000]> const&, std::__y1::shared_ptr&>(NKikimr::NOlap::NBlobOperations::NBlobStorage::TWriteAction*, TBasicString> const&, std::__y1::shared_ptr&) /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:167:24 #11 0x2b87863a in construct > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:319:9 #12 0x2b87863a in __shared_ptr_emplace > &, std::__y1::shared_ptr &, std::__y1::allocator, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #13 0x2b87863a in allocate_shared, const TBasicString > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:857:51 #14 0x2b87863a in make_shared > &, std::__y1::shared_ptr &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:865:10 #15 0x2b87863a in NKikimr::NOlap::NBlobOperations::NBlobStorage::TOperator::DoStartWritingAction() /-S/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp:16:12 #16 0x2b265148 in NKikimr::NOlap::IBlobsStorageOperator::StartWritingAction(NKikimr::NOlap::NBlobOperations::EConsumer) /-S/ydb/core/tx/columnshard/blobs_action/abstract/storage.h:106:23 #17 0x2b2bf347 in NKikimr::NColumnShard::TWriteOperation::Start(NKikimr::NColumnShard::TColumnShard&, std::__y1::shared_ptr const&, NActors::TActorId const&, NKikimr::NOlap::TWritingContext const&) /-S/ydb/core/tx/columnshard/operations/write.cpp:39:53 #18 0x2b437d38 in NKikimr::NColumnShard::TWriteTask::Execute(NKikimr::NColumnShard::TColumnShard*, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:32:21 #19 0x2b438787 in NKikimr::NColumnShard::TWriteTasksQueue::Drain(bool, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/tablet/write_queue.cpp:40:52 #20 0x2b3f6c3f in NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/tx/columnshard/columnshard__write.cpp:597:22 #21 0x2b39b19e in NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&) /-S/ydb/core/tx/columnshard/columnshard_impl.h:442:13 #22 0x190bda0c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:553:23 #23 0x3017ae84 in NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool) /-S/ydb/library/actors/testlib/test_runtime.cpp:1702:33 #24 0x30173701 in NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) /-S/ydb/library/actors/testlib/test_runtime.cpp:1295:45 #25 0x301704a0 in DispatchEvents /-S/ydb/library/actors/testlib/test_runtime.cpp:1091:16 #26 0x301704a0 in NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&) /-S/ydb/library/actors/testlib/test_runtime.cpp:1082:16 #27 0x30a8c3bf in NKikimr::Tests::NCS::THelperSchemaless::SendDataViaActorSystem(TBasicString>, std::__y1::shared_ptr, Ydb::StatusIds_StatusCode 
const&) const /-S/ydb/core/testlib/cs_helper.cpp:101:14 #28 0x179c547c in NKikimr::NTestSuiteColumnShardTiers::TTestCaseTieringUsage::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:465:17 #29 0x179dace7 in operator() /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:148:1 #30 0x179dace7 in __invoke<(lambda at /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:148:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #31 0x179dace7 in __call<(lambda at /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:148:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #32 0x179dace7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #33 0x179dace7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311:10 #34 0x18291e15 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12 #35 0x18291e15 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10 #36 0x18291e15 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #37 0x18262b18 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #38 0x179d9cd5 in NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute() /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:148:1 #39 0x182643e5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #40 0x1828c74c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #41 0x7f49c1b20d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 1091869 byte(s) leaked in 22113 allocation(s). |95.0%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788007.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143788007.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788007.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123788007.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786807.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123786807.000000s;Name=;Codec=}; 2025-04-04T17:23:28.828341Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:28.908437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:28.925098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:28.925419Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:28.932900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:28.933152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:28.933414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:28.933550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:28.933685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:28.933820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:28.933946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:28.934065Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:28.934196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:28.934335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:28.934453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:28.934576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:28.955802Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:28.956027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:28.956082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:28.956265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:28.956442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:28.956537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:28.956593Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:28.956664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:28.956723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:28.956759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:28.956780Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:28.956901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:28.956946Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:28.956975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:28.956995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:28.957065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:28.957103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:28.957132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:28.957154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:28.957221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:28.957247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:28.957269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:28.957301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:28.957331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:28.957352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:28.957698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-04-04T17:23:28.957786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-04-04T17:23:28.957846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-04T17:23:28.957940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-04-04T17:23:28.958095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:28.958139Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:28.958168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:28.958368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:28.958411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:28.958444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:28.958557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:28.958587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:28.958608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:28.958746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
ta.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-04-04T17:24:19.345975Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=157; 2025-04-04T17:24:19.346003Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=17170; 2025-04-04T17:24:19.350573Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4509; 2025-04-04T17:24:19.354633Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3286; 2025-04-04T17:24:19.354709Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=4066; 2025-04-04T17:24:19.354832Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=66; 2025-04-04T17:24:19.354913Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=44; 2025-04-04T17:24:19.355020Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2025-04-04T17:24:19.355101Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=48; 2025-04-04T17:24:19.359463Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4304; 2025-04-04T17:24:19.365392Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5831; 2025-04-04T17:24:19.365500Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=31; 2025-04-04T17:24:19.365561Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-04T17:24:19.365599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-04T17:24:19.365638Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-04T17:24:19.365675Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-04T17:24:19.365734Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2025-04-04T17:24:19.365771Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-04T17:24:19.365841Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-04-04T17:24:19.365879Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-04T17:24:19.365929Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2025-04-04T17:24:19.366002Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=40; 2025-04-04T17:24:19.366189Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=157; 2025-04-04T17:24:19.366217Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=42602; 2025-04-04T17:24:19.366370Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:19.366460Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:19.366499Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:19.366547Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:19.373140Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:19.373244Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:19.373285Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:19.373344Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3; 2025-04-04T17:24:19.373390Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:19.373421Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:19.373456Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:19.373483Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:19.373548Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:19.373986Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:19.374041Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1980:3880];tablet_id=9437184;parent=[1:1942:3849];fline=manager.cpp:82;event=ask_data;request=request_id=91;1={portions_count=11};; 2025-04-04T17:24:19.374670Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:19.375036Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:19.375061Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
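Note on the tiering headers: the WaitEmptyAfter/Tiers/TTL lines at the start of this test's output describe the schema under test, with rows evicted by the timestamp column to a tier named 'cold' once they pass the EvictAfter threshold; the "Tier '/cold' stopped" messages below mark that tier being torn down. As a rough sketch only (the table path and interval are illustrative, and the TTL ... TO EXTERNAL DATA SOURCE form is assumed from YQL tiering syntax rather than taken from this log), the equivalent user-facing configuration would look like:

    ALTER TABLE `/Root/olapTable` SET (
        TTL = Interval("P30D") TO EXTERNAL DATA SOURCE `/Root/cold` ON timestamp
    );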
2025-04-04T17:24:19.375078Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:19.375108Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:19.375174Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:19.375224Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3; 2025-04-04T17:24:19.375271Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:19.375304Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:19.375340Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:19.375371Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:19.375438Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:19.376632Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=11;path_id=1; 2025-04-04T17:24:19.377536Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 |95.0%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788013.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788013.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786813.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-04T17:23:34.563697Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:34.632400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:34.649875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:34.650148Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:34.657288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:34.657458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:34.657642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:34.657718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:34.657792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:34.657897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:34.658019Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:34.658135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:34.658276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:34.658361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:34.658427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:34.658488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:34.680304Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:34.680515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:34.680580Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:34.680744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:34.680909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:34.680976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:34.681011Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:34.681114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:34.681172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:34.681203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:34.681224Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:34.681335Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:34.681376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:34.681416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:34.681441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:34.681507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:34.681541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:34.681572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:34.681592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:34.681646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:34.681671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:34.681699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:34.681736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:34.681765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:34.681784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:34.682097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-04-04T17:23:34.682180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-04T17:23:34.682264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-04-04T17:23:34.682365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-04T17:23:34.682482Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:34.682533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:34.682559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:34.682696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:34.682731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:34.682761Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:34.682868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:34.682900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:34.682920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:34.683055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
me=10; 2025-04-04T17:24:21.047076Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=197; 2025-04-04T17:24:21.047105Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=21589; 2025-04-04T17:24:21.051110Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3940; 2025-04-04T17:24:21.055698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3776; 2025-04-04T17:24:21.055798Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=4593; 2025-04-04T17:24:21.055930Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=74; 2025-04-04T17:24:21.056016Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=48; 2025-04-04T17:24:21.056117Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=65; 2025-04-04T17:24:21.056237Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=75; 2025-04-04T17:24:21.060756Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4452; 2025-04-04T17:24:21.067111Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6250; 2025-04-04T17:24:21.067238Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=30; 2025-04-04T17:24:21.067303Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-04T17:24:21.067338Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-04T17:24:21.067380Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-04-04T17:24:21.067415Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-04T17:24:21.067470Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2025-04-04T17:24:21.067508Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-04T17:24:21.067577Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2025-04-04T17:24:21.067612Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-04T17:24:21.067669Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-04-04T17:24:21.067758Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2025-04-04T17:24:21.067930Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=143; 2025-04-04T17:24:21.067958Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=47961; 2025-04-04T17:24:21.068080Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=10402524;raw_bytes=16084646;count=7;records=160000} at tablet 9437184 2025-04-04T17:24:21.068166Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:21.068208Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:21.068260Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:21.074986Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:21.075204Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:21.075265Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:21.075345Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3; 2025-04-04T17:24:21.075404Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:21.075439Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:21.075476Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:21.075503Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:21.075575Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:21.076283Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:21.076365Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1961:3862];tablet_id=9437184;parent=[1:1923:3831];fline=manager.cpp:82;event=ask_data;request=request_id=104;1={portions_count=18};; 2025-04-04T17:24:21.077165Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:21.077942Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:21.077974Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:24:21.077992Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:21.078024Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:21.078072Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:21.078144Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3; 2025-04-04T17:24:21.078193Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:21.078227Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:21.078288Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:21.078328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:21.078418Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:21.078873Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=18;path_id=1; 2025-04-04T17:24:21.080053Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1923:3831];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> ColumnBuildTest::AlreadyExists [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess |95.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:24:19.712327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:24:19.712450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:24:19.712518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:24:19.712573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:24:19.713614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:24:19.713655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:24:19.713733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:24:19.713801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:24:19.715136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:19.796395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:24:19.796467Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:19.807900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:19.808169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:24:19.808328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:24:19.818050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:24:19.818338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:24:19.819016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.819791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:24:19.824793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.825723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.825777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.825943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-04-04T17:24:19.825984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.826023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:24:19.826114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.831003Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:24:19.931333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:24:19.931519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.931751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:24:19.931924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:24:19.931965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:24:19.934608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:24:19.934745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:24:19.934772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:24:19.936569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.936615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:24:19.936652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:24:19.938608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.938661Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.938697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:24:19.938732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.949794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:24:19.951564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:24:19.951717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:24:19.952363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.952468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:24:19.952526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.952841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:24:19.952910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.953095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:24:19.953199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:24:19.955575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.955612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.955746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.955776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:24:19.955994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.956028Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:24:19.956093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:24:19.956116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.956162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-04-04T17:24:19.956191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.956216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:24:19.956244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.956274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:24:19.956307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:24:19.956354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:24:19.956379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:24:19.956403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:24:19.957739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.957806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.957829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:783:2664] TestWaitNotification: OK eventTxId 105 2025-04-04T17:24:22.277179Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-04-04T17:24:22.277389Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 242us result status StatusSuccess 2025-04-04T17:24:22.277689Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-04T17:24:22.279442Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } 2025-04-04T17:24:22.281430Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-04T17:24:22.281511Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1148:3019], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:24:22.281630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2025-04-04T17:24:22.281682Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2025-04-04T17:24:22.281757Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1148:3019], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: 
StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:24:22.283841Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-04T17:24:22.283941Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-04-04T17:24:22.286491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2025-04-04T17:24:22.286710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-04-04T17:24:22.287007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-04-04T17:24:22.290162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-04-04T17:24:22.290360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-04-04T17:24:22.290547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-04-04T17:24:22.290615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-04-04T17:24:22.290677Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2025-04-04T17:24:22.290774Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ 
IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549
2025-04-04T17:24:22.291361Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1148:3019]
2025-04-04T17:24:22.291598Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 }
BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 }
|95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest
>> StatisticsSaveLoad::Simple
>> ColumnBuildTest::ValidDefaultValue [GOOD]
>> ColumnBuildTest::BaseCase [GOOD]
>> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD]
>> TColumnShardTestSchema::RebootHotTiersTtl [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is
[0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:24:19.712302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:24:19.712431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:24:19.712496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:24:19.712556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:24:19.713600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:24:19.713657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:24:19.713737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:24:19.713800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:24:19.715166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:19.798230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:24:19.798328Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:19.809774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:19.810031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:24:19.810179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:24:19.820269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:24:19.820548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:24:19.821146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.821939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:24:19.827104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.828364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.828435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.828649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:24:19.828705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.828754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-04-04T17:24:19.828887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.835057Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:24:19.969095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:24:19.969323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.969540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:24:19.969777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:24:19.969829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.972362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.972496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:24:19.972685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.972735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:24:19.972771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:24:19.972804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:24:19.974749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.974803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:24:19.974841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:24:19.976471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.976515Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.976548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.976586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.986397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:24:19.988258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:24:19.988440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:24:19.989390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.989514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:24:19.989585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.989866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:24:19.989918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.990086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:24:19.990180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:24:19.992396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.992443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.992599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.992640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:24:19.992923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.992972Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:24:19.993061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:24:19.993096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.993171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:24:19.993212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.993245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:24:19.993285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-04-04T17:24:19.993331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:24:19.993366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:24:19.993435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:24:19.993473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:24:19.993503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:24:19.995555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.995672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.995704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-04T17:24:23.575130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-04T17:24:23.575187Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-04T17:24:23.575236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.575325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-04T17:24:23.578023Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-04T17:24:23.578097Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 
1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-04T17:24:23.578565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-04-04T17:24:23.578662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-04T17:24:23.578808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-04T17:24:23.578842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-04-04T17:24:23.578880Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-04T17:24:23.591229Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1819:3682], Recipient [1:756:2645]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1819:3682] ServerId: [1:1822:3685] } 2025-04-04T17:24:23.591284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:24:23.648739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-04T17:24:23.648847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-04T17:24:23.648897Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-04T17:24:23.648933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-04T17:24:23.650959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-04T17:24:23.651003Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-04T17:24:23.651071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-04T17:24:23.651096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.651127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-04T17:24:23.651170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.651199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-04T17:24:23.651252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:568:2508] message: TxId: 
281474976725761 2025-04-04T17:24:23.651288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.651316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-04T17:24:23.651339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-04T17:24:23.651420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-04T17:24:23.653759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-04-04T17:24:23.653822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-04T17:24:23.653874Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-04T17:24:23.653958Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-04T17:24:23.655672Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-04T17:24:23.655757Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-04T17:24:23.655802Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T17:24:23.657415Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 
2025-04-04T17:24:23.657489Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}
2025-04-04T17:24:23.657563Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1
2025-04-04T17:24:23.657688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult
2025-04-04T17:24:23.657724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1168:3039]
TestWaitNotification: OK eventTxId 106
2025-04-04T17:24:23.659982Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106
2025-04-04T17:24:23.660232Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 }
BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 }
>> StatisticsSaveLoad::Delete
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:24:19.712287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:24:19.712444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:24:19.712496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:24:19.712545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:24:19.713590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:24:19.713622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:24:19.713676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:24:19.713761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:24:19.715110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:19.779678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:24:19.779741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:19.789499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:19.789720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:24:19.789964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:24:19.798951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:24:19.799211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:24:19.801906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.802907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:24:19.811719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.819631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.819712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.819890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:24:19.819946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.820047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:24:19.820172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.825862Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:24:19.925984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:24:19.927603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.928715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:24:19.930819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:24:19.930908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:24:19.934910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:24:19.935009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:24:19.935041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:24:19.937187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.937246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:24:19.937281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:24:19.939123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.939191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.939230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.939272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.943126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:24:19.945367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:24:19.945565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-04-04T17:24:19.946639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.946788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:24:19.946869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.947213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:24:19.947268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.947429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:24:19.947514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:24:19.949854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.949906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.950080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.950114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:24:19.950444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.950490Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:24:19.950583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:24:19.950618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.950680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:24:19.950721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.950756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:24:19.950800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.950850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:24:19.950887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:24:19.950951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:24:19.950986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:24:19.951017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-04-04T17:24:19.952882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.953056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.953102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... , IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-04-04T17:24:23.719524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-04T17:24:23.719586Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-04-04T17:24:23.719651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.719780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-04-04T17:24:23.723703Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-04T17:24:23.723805Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-04T17:24:23.724568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 
cookie: 0:281474976725761 msg type: 269090816 2025-04-04T17:24:23.724692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-04-04T17:24:23.724948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-04T17:24:23.724994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-04-04T17:24:23.725032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-04-04T17:24:23.737772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1823:3686], Recipient [1:754:2643]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1823:3686] ServerId: [1:1826:3689] } 2025-04-04T17:24:23.737848Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:24:23.798825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-04-04T17:24:23.798984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-04-04T17:24:23.799055Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-04-04T17:24:23.799116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-04-04T17:24:23.802293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-04-04T17:24:23.802366Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-04-04T17:24:23.802464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-04T17:24:23.802501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.802553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-04-04T17:24:23.802587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.802624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-04-04T17:24:23.802723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:568:2508] message: TxId: 281474976725761 2025-04-04T17:24:23.802792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-04-04T17:24:23.802833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-04-04T17:24:23.802869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-04-04T17:24:23.802974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-04-04T17:24:23.807465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 
281474976725761 2025-04-04T17:24:23.807568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-04-04T17:24:23.807651Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-04-04T17:24:23.807763Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-04T17:24:23.810649Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-04T17:24:23.810758Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-04T17:24:23.810826Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-04-04T17:24:23.813364Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-04-04T17:24:23.813479Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1148:3019], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, 
SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-04-04T17:24:23.813535Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-04-04T17:24:23.813740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-04-04T17:24:23.813800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1168:3039] TestWaitNotification: OK eventTxId 106 2025-04-04T17:24:23.817221Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-04-04T17:24:23.817620Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788009.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788009.000000s;Name=;Codec=}; 
WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786809.000000s;Name=;Codec=}; 2025-04-04T17:23:29.909626Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:29.978520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:29.994210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:29.994530Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:30.000794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:30.000967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:30.001134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:30.001204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:30.001285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:30.001376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:30.001447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:30.001517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:30.001591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:30.001677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.001759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:30.001823Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:30.020136Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:30.020311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:30.020366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:30.020493Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.020616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:30.020688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:30.020727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:30.020791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:30.020842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:30.020880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:30.020906Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:30.021030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.021072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:30.021100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:30.021128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:30.021208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:30.021245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:30.021274Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:30.021294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:30.021349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:30.021375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:30.021398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:30.021431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:30.021458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:30.021477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:30.021770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-04-04T17:23:30.021846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-04-04T17:23:30.021923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T17:23:30.022009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-04-04T17:23:30.022187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:30.022260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:30.022292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:30.022447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:30.022488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.022516Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.022611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... es;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=529; 2025-04-04T17:24:23.547628Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=44698; 2025-04-04T17:24:23.561561Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13852; 2025-04-04T17:24:23.571776Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=9321; 2025-04-04T17:24:23.571878Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=10228; 2025-04-04T17:24:23.572039Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=100; 2025-04-04T17:24:23.572153Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2025-04-04T17:24:23.572265Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=69; 2025-04-04T17:24:23.572343Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=48; 2025-04-04T17:24:23.579805Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7401; 2025-04-04T17:24:23.589588Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9685; 2025-04-04T17:24:23.589717Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=39; 2025-04-04T17:24:23.589782Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-04T17:24:23.589822Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-04T17:24:23.589863Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-04T17:24:23.589904Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 
2025-04-04T17:24:23.589971Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-04-04T17:24:23.590014Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-04T17:24:23.590091Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-04-04T17:24:23.590131Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-04T17:24:23.590184Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2025-04-04T17:24:23.590291Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=68; 2025-04-04T17:24:23.590524Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=198; 2025-04-04T17:24:23.590557Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=93961; 2025-04-04T17:24:23.590675Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:23.590774Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:23.590815Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:23.590863Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:23.605289Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:23.605421Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:23.605482Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:23.605557Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:23.605609Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-04T17:24:23.605642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:23.605679Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.605709Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.605786Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:23.606312Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:23.606849Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2591:4493];tablet_id=9437184;parent=[1:2541:4450];fline=manager.cpp:82;event=ask_data;request=request_id=116;1={portions_count=22};; 2025-04-04T17:24:23.607372Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:23.607704Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:23.607741Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:24:23.607766Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:23.607808Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:23.607871Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:23.607931Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:23.608011Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-04T17:24:23.608063Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:23.608115Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.608159Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.608254Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:23.609246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=22;path_id=1; 2025-04-04T17:24:23.610050Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2541:4450];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788009.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788009.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786809.000000s;Name=;Codec=}; 2025-04-04T17:23:30.077140Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:30.163500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:30.189312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:30.189644Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:30.197786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:30.198006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:30.198228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:30.198372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:30.198486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:30.198629Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:30.198749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:30.198858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:30.198977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:30.199107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.199209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:30.199308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:30.228214Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:30.228399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:30.228460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:30.228632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.228799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:30.228876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:30.228922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:30.229019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:30.229090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:30.229130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:30.229161Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:30.229307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.229358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:30.229395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:30.229426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:30.229509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:30.229558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:30.229595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:30.229623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:30.229696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:30.229731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:30.229758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:30.229801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:30.229844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:30.229876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:30.230262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-04T17:23:30.230363Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=56; 2025-04-04T17:23:30.230440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-04T17:23:30.230527Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-04T17:23:30.230731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:30.230789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:30.230823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:30.231032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:30.231106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.231148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.231299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... ;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=282; 2025-04-04T17:24:23.667458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=37427; 2025-04-04T17:24:23.674573Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7038; 2025-04-04T17:24:23.681718Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6203; 2025-04-04T17:24:23.681818Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7156; 2025-04-04T17:24:23.681954Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=75; 2025-04-04T17:24:23.682044Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=49; 2025-04-04T17:24:23.682157Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=74; 2025-04-04T17:24:23.682258Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=50; 2025-04-04T17:24:23.691513Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9175; 
2025-04-04T17:24:23.708272Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16623; 2025-04-04T17:24:23.708431Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-04-04T17:24:23.708520Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-04-04T17:24:23.708575Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-04T17:24:23.708637Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-04-04T17:24:23.708694Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-04T17:24:23.708787Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2025-04-04T17:24:23.708851Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=11; 2025-04-04T17:24:23.708952Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2025-04-04T17:24:23.709008Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-04-04T17:24:23.709091Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2025-04-04T17:24:23.709198Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=58; 2025-04-04T17:24:23.709538Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=292; 2025-04-04T17:24:23.709589Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=85274; 2025-04-04T17:24:23.709770Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:23.709891Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:23.709950Z node 1 :TX_COLUMNSHARD 
INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:23.710023Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:23.730424Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:23.730600Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:23.730671Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:23.730751Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:23.730820Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-04T17:24:23.730869Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:23.730923Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.730966Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.731073Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:23.731901Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:23.731999Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2592:4494];tablet_id=9437184;parent=[1:2542:4451];fline=manager.cpp:82;event=ask_data;request=request_id=116;1={portions_count=22};; 2025-04-04T17:24:23.733036Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:23.733504Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:23.733543Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:24:23.733572Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:23.733619Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:23.733685Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:23.733748Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:23.733813Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-04-04T17:24:23.733865Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:23.733922Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.733968Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:23.734068Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:23.735571Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=22;path_id=1; 2025-04-04T17:24:23.736620Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2542:4451];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest 
>> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:24:19.712301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:24:19.712429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:24:19.712494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:24:19.712535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:24:19.713612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:24:19.713658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:24:19.713722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:24:19.713784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:24:19.715104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:19.795960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:24:19.796020Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:19.807900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:19.808169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:24:19.808331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:24:19.818005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:24:19.818338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:24:19.818993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.819862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:24:19.825138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.826226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.826320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.826507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:24:19.826565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.826608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:24:19.826728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.832705Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:24:19.927079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:24:19.927834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.928671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:24:19.930807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:24:19.930887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:24:19.934639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.934681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:24:19.934737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:24:19.934763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:24:19.936422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.936471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:24:19.936507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:24:19.937872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.937907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.937936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.937971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
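Note on the "Change state for txid" records in this report: the numeric codes trace the suboperation through its lifecycle, and each transition immediately follows the ProgressState line of the stage that completed it: 2 -> 3 after TCreateParts ("no shards to create, do next state"), 3 -> 128 after NSubDomainState::TConfigureParts, and, once the coordinator's plan step arrives below, 128 -> 240 when TPropose handles TEvOperationPlan, after which TDone reports parts 1/1. A hedged C++ sketch of that mapping (codes copied from the log; the names are inferred, not NKikimr's actual enumerators):

    // State codes observed in this report for operation 1:0.
    enum class ETxState : unsigned {
        CreateParts    = 2,    // TCreateParts ProgressState
        ConfigureParts = 3,    // NSubDomainState::TConfigureParts ProgressState
        Propose        = 128,  // NSubDomainState::TPropose, waiting for TEvOperationPlan
        Done           = 240,  // TDone ProgressState, operation parts 1/1
    };

Further down in this report the datashard receives a run of TEvLocalMKQL requests whose escaped Text fields are one and the same MKQL program repeated with key literals 27 through 50; unescaped, each reads:

    (
      (let key '('('key (Uint64 '27))))   ; the key literal varies per request
      (let select '('key))
      (return (AsList
        (SetResult 'Result (SelectRow '__user__Table key select))
      ))
    )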
2025-04-04T17:24:19.941654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:24:19.943287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:24:19.943445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:24:19.944156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:24:19.944247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:24:19.944292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.945667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:24:19.945714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:24:19.945855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:24:19.945946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:24:19.947727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:24:19.947764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:24:19.947879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:24:19.947904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:24:19.948104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:24:19.948134Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:24:19.948198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:24:19.948221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.948270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:24:19.948310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.948334Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:24:19.948373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:24:19.948405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:24:19.948430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:24:19.948473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:24:19.948501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:24:19.948520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:24:19.949787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.949868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:24:19.949894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.672488Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3923], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.677837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3924], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.683046Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3925], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.688195Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3926], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.693716Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3927], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.699007Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 268830210, Sender [1:2065:3928], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.704206Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3929], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.709230Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3930], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.714169Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3931], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.719386Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3932], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.724840Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2070:3933], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.730309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2071:3934], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.735379Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2072:3935], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.740886Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2073:3936], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.746107Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2074:3937], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow 
\'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.751491Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2075:3938], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.756961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2076:3939], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.762334Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2077:3940], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.767845Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2078:3941], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.773448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2079:3942], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.778681Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2080:3943], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.783964Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2081:3944], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-04-04T17:24:24.789323Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2082:3945], Recipient [1:756:2645]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } >> TColumnShardTestSchema::ColdTiers [GOOD] >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] |95.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BsControllerConfig::MergeBoxes [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788008.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788008.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788008.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788008.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786808.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788008.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786808.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786808.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786808.000000s;Name=;Codec=}; 2025-04-04T17:23:28.988134Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:29.079832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:29.106410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:29.106752Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:29.115641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:29.115894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:29.116142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:29.116260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:29.116383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:29.116527Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:29.116633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:29.116740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:29.116845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:29.116947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.117055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:29.117156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:29.147055Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:29.147324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:29.147381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:29.147562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.147734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:29.147823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:29.147870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:29.147966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:29.148043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:29.148098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:29.148135Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:29.148322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.148386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:29.148427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:29.148460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:29.148555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:29.148611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:29.148653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:29.148688Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:29.148764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:29.148802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:29.148830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:29.148877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:29.148916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:29.148956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:29.149402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-04-04T17:23:29.149513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-04T17:23:29.149621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-04T17:23:29.149724Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-04-04T17:23:29.149905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:29.150022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:29.150069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:29.150318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:29.150391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.150436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.150609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:29.150658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 743787465760423,"d":7565},{"a":1743787465760425,"name":"Finish","f":1743787465760425,"d_finished":0,"c":0,"l":1743787465761013,"d":588},{"name":"task_result","f":1743787465409415,"d_finished":136282,"c":28,"l":1743787465756691,"d":136282}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1384:3389]->[1:1383:3388] 2025-04-04T17:24:25.761514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1384:3389];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:25.403610Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203584;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203584;selected_rows=0; 2025-04-04T17:24:25.761552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1384:3389];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:25.761816Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1384:3389];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:25.763555Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-04T17:24:25.763754Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2025-04-04T17:24:25.763843Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:25.763971Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:25.764034Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:25.764160Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:25.764224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:25.764580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1392:3397];trace_detailed=; 2025-04-04T17:24:25.764925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:25.765134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:25.765274Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.765383Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.765632Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:25.765720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.765820Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.765856Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1392:3397] finished for tablet 9437184 2025-04-04T17:24:25.766182Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1391:3396];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787465764531,"name":"_full_task","f":1743787465764531,"d_finished":0,"c":0,"l":1743787465765906,"d":1375},"events":[{"name":"bootstrap","f":1743787465764717,"d_finished":693,"c":1,"l":1743787465765410,"d":693},{"a":1743787465765612,"name":"ack","f":1743787465765612,"d_finished":0,"c":0,"l":1743787465765906,"d":294},{"a":1743787465765596,"name":"processing","f":1743787465765596,"d_finished":0,"c":0,"l":1743787465765906,"d":310},{"name":"ProduceResults","f":1743787465765208,"d_finished":388,"c":2,"l":1743787465765842,"d":388},{"a":1743787465765844,"name":"Finish","f":1743787465765844,"d_finished":0,"c":0,"l":1743787465765906,"d":62}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.766257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1391:3396];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:25.766536Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1391:3396];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787465764531,"name":"_full_task","f":1743787465764531,"d_finished":0,"c":0,"l":1743787465766290,"d":1759},"events":[{"name":"bootstrap","f":1743787465764717,"d_finished":693,"c":1,"l":1743787465765410,"d":693},{"a":1743787465765612,"name":"ack","f":1743787465765612,"d_finished":0,"c":0,"l":1743787465766290,"d":678},{"a":1743787465765596,"name":"processing","f":1743787465765596,"d_finished":0,"c":0,"l":1743787465766290,"d":694},{"name":"ProduceResults","f":1743787465765208,"d_finished":388,"c":2,"l":1743787465765842,"d":388},{"a":1743787465765844,"name":"Finish","f":1743787465765844,"d_finished":0,"c":0,"l":1743787465766290,"d":446}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1392:3397]->[1:1391:3396] 2025-04-04T17:24:25.766624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:25.764201Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:25.766658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:25.766740Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788008.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788008.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788008.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788008.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786808.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788008.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788008.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786808.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786808.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786808.000000s;Name=;Codec=}; 2025-04-04T17:23:28.984804Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:29.056034Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:29.083716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:29.084090Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:29.093110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:29.093383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:29.093640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:29.093766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:29.093900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:29.094067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:29.094189Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:29.094327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:29.094451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:29.094569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.094687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:29.094803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:29.125862Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:29.126102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:29.126184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:29.126401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.126592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:29.126679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:29.126724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:29.126824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:29.126910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:29.126971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:29.127006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:29.127193Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.127261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:29.127304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:29.127336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:29.127458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:29.127526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:29.127580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:29.127612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:29.127699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:29.127740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:29.127792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:29.127849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:29.127891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:29.127926Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:29.128401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=70; 2025-04-04T17:23:29.128520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=69; 2025-04-04T17:23:29.128644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=54; 2025-04-04T17:23:29.128747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T17:23:29.128948Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:29.129010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:29.129047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:29.129290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:29.129352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.129394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.129553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:29.129599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 743787465788588,"d":8528},{"a":1743787465788589,"name":"Finish","f":1743787465788589,"d_finished":0,"c":0,"l":1743787465789078,"d":489},{"name":"task_result","f":1743787465415437,"d_finished":136535,"c":28,"l":1743787465784540,"d":136535}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1400:3405]->[1:1399:3404] 2025-04-04T17:24:25.789457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1400:3405];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:25.405332Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203584;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203584;selected_rows=0; 2025-04-04T17:24:25.789493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1400:3405];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:25.789704Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1400:3405];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:25.791260Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 
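Note on the stats={...} payloads in the scan records above: they are compact JSON timing profiles in which timestamps are microseconds since the Unix epoch (1743787465764531 us corresponds to 2025-04-04T17:24:25Z, matching the log clock), and in every record d = l - f. One "events" entry reformatted for readability, with a hedged reading of the short keys (inferred from the arithmetic, not from source):

    {
      "a": 1743787465765612,   // stage activation timestamp, where present
      "name": "ack",
      "f": 1743787465765612,   // first time the stage was observed (us)
      "d_finished": 0,         // accumulated time spent in finished work
      "c": 0,                  // count of completed invocations
      "l": 1743787465765906,   // last time the stage was observed (us)
      "d": 294                 // total span, l - f
    }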
2025-04-04T17:24:25.791481Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2025-04-04T17:24:25.791581Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:25.791731Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:25.791788Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:25.791921Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:25.792001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:25.792401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1408:3413];trace_detailed=; 2025-04-04T17:24:25.792741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:25.792972Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:25.793133Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.793258Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.793519Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:25.793608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.793711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.793747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1408:3413] finished for tablet 9437184 2025-04-04T17:24:25.794153Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1407:3412];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787465792349,"name":"_full_task","f":1743787465792349,"d_finished":0,"c":0,"l":1743787465793805,"d":1456},"events":[{"name":"bootstrap","f":1743787465792507,"d_finished":779,"c":1,"l":1743787465793286,"d":779},{"a":1743787465793501,"name":"ack","f":1743787465793501,"d_finished":0,"c":0,"l":1743787465793805,"d":304},{"a":1743787465793487,"name":"processing","f":1743787465793487,"d_finished":0,"c":0,"l":1743787465793805,"d":318},{"name":"ProduceResults","f":1743787465793065,"d_finished":413,"c":2,"l":1743787465793734,"d":413},{"a":1743787465793736,"name":"Finish","f":1743787465793736,"d_finished":0,"c":0,"l":1743787465793805,"d":69}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:25.794214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1407:3412];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:25.794564Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1407:3412];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787465792349,"name":"_full_task","f":1743787465792349,"d_finished":0,"c":0,"l":1743787465794268,"d":1919},"events":[{"name":"bootstrap","f":1743787465792507,"d_finished":779,"c":1,"l":1743787465793286,"d":779},{"a":1743787465793501,"name":"ack","f":1743787465793501,"d_finished":0,"c":0,"l":1743787465794268,"d":767},{"a":1743787465793487,"name":"processing","f":1743787465793487,"d_finished":0,"c":0,"l":1743787465794268,"d":781},{"name":"ProduceResults","f":1743787465793065,"d_finished":413,"c":2,"l":1743787465793734,"d":413},{"a":1743787465793736,"name":"Finish","f":1743787465793736,"d_finished":0,"c":0,"l":1743787465794268,"d":532}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1408:3413]->[1:1407:3412] 2025-04-04T17:24:25.794638Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:25.791975Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:25.794679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:25.794774Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1408:3413];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10401568 160000/10401568 160000/10401568 80000/5203584 0/0 >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is 
[0:0:0] sender: [1:11014:2156] recipient: [1:10834:2166] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10834:2166] Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11017:2156] recipient: [1:10834:2166] 2025-04-04T17:23:09.085450Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:09.089481Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:09.089749Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T17:23:09.091327Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:09.091670Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T17:23:09.092153Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:09.092187Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T17:23:09.092497Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-04-04T17:23:09.100293Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T17:23:09.100447Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T17:23:09.100589Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T17:23:09.100689Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:09.100774Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:23:09.100853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11042:2156] recipient: [1:110:2157] 2025-04-04T17:23:09.113944Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T17:23:09.114101Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:23:09.124991Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:23:09.125126Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:23:09.125225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:23:09.125299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 
2025-04-04T17:23:09.125400Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T17:23:09.125450Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:23:09.125490Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:23:09.125551Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:23:09.136395Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:23:09.136574Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:23:09.147400Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:23:09.147542Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-04-04T17:23:09.148707Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T17:23:09.148758Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T17:23:09.148944Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T17:23:09.148987Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-04-04T17:23:09.164035Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 
1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... 9} Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2025-04-04T17:24:16.644357Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2025-04-04T17:24:16.644385Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2025-04-04T17:24:16.644411Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2025-04-04T17:24:16.644441Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2025-04-04T17:24:16.644468Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-04-04T17:24:16.644498Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2025-04-04T17:24:16.644524Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2025-04-04T17:24:16.644551Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-04-04T17:24:16.644582Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2025-04-04T17:24:16.644609Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2025-04-04T17:24:16.644636Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-04-04T17:24:16.644666Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2025-04-04T17:24:16.644693Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2025-04-04T17:24:16.644736Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-04-04T17:24:16.644781Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new 
pdisk PDiskId# 280:1002 Path# /dev/disk3 2025-04-04T17:24:16.644813Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2025-04-04T17:24:16.644842Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-04-04T17:24:16.644867Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2025-04-04T17:24:16.644894Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2025-04-04T17:24:16.644921Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-04-04T17:24:16.644975Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2025-04-04T17:24:16.645003Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2025-04-04T17:24:16.645030Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-04-04T17:24:16.645057Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2025-04-04T17:24:16.645084Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2025-04-04T17:24:16.645113Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-04-04T17:24:16.645139Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2025-04-04T17:24:16.645167Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2025-04-04T17:24:16.645193Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-04-04T17:24:16.645222Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2025-04-04T17:24:16.645248Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2025-04-04T17:24:16.645275Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2025-04-04T17:24:16.645303Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2025-04-04T17:24:16.645330Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2025-04-04T17:24:16.645356Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-04-04T17:24:16.645385Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2025-04-04T17:24:16.645411Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2025-04-04T17:24:16.645437Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-04-04T17:24:16.645462Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new 
pdisk PDiskId# 288:1002 Path# /dev/disk3 2025-04-04T17:24:16.645501Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2025-04-04T17:24:16.645534Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2025-04-04T17:24:16.645560Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2025-04-04T17:24:16.645587Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2025-04-04T17:24:16.645613Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-04-04T17:24:16.645640Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2025-04-04T17:24:16.645667Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2025-04-04T17:24:16.645693Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-04-04T17:24:16.645721Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2025-04-04T17:24:16.645746Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2025-04-04T17:24:16.645774Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2025-04-04T17:24:16.645803Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-04-04T17:24:16.645830Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2025-04-04T17:24:16.645859Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2025-04-04T17:24:16.645887Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2025-04-04T17:24:16.645915Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2025-04-04T17:24:16.645941Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-04-04T17:24:16.645967Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-04-04T17:24:16.645993Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2025-04-04T17:24:16.646020Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2025-04-04T17:24:16.646048Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-04-04T17:24:16.646074Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2025-04-04T17:24:16.646101Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-04-04T17:24:16.646127Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new 
pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-04-04T17:24:16.646156Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-04-04T17:24:16.646181Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-04-04T17:24:16.646209Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-04-04T17:24:16.646235Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-04-04T17:24:16.646287Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-04-04T17:24:16.646315Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-04-04T17:24:16.646342Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-04-04T17:24:16.646367Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-04-04T17:24:16.646395Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-04-04T17:24:16.646421Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-04-04T17:24:16.646446Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-04-04T17:24:16.646471Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-04-04T17:24:16.842279Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.199906s 2025-04-04T17:24:16.842421Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.200089s 2025-04-04T17:24:16.868127Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-04-04T17:24:16.886347Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } |95.1%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardWrite::WriteImmediateBadRequest >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> DataShardWrite::UpsertImmediate >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> KikimrIcGateway::TestLoadTableMetadata >> KikimrIcGateway::TestListPath >> KikimrIcGateway::TestCreateSameExternalTable >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KikimrIcGateway::TestCreateExternalTable |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> TFlatTest::SelectRangeReverse >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> TObjectStorageListingTest::Split >> TLocksTest::SetLockFail >> TFlatTest::CrossRW >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> TFlatTest::CopyCopiedTableAndRead >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788010.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788010.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788010.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788010.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786810.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788010.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786810.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786810.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786810.000000s;Name=;Codec=}; 2025-04-04T17:23:30.339231Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:30.433706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:30.460261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:30.460575Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:30.469001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:30.469221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:30.469462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:30.469583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:30.469715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:30.469859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:30.469972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:30.470084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:30.470202Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:30.470335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.470443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:30.470549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:30.500121Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:30.500358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:30.500423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:30.500613Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.500769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:30.500842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:30.500894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:30.501004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:30.501081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:30.501124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:30.501155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:30.501328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.501400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:30.501443Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:30.501473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:30.501564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:30.501620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:30.501660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:30.501692Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:30.501765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:30.501801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:30.501830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:30.501877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:30.501922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:30.501953Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:30.502376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-04T17:23:30.502482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-04-04T17:23:30.502597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-04-04T17:23:30.502677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-04T17:23:30.502868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:30.502942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:30.502982Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:30.503204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:30.503256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.503299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... "name":"Finish","f":1743787470873320,"d_finished":0,"c":0,"l":1743787470873998,"d":678},{"name":"task_result","f":1743787470321171,"d_finished":226860,"c":28,"l":1743787470867564,"d":226860}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1962:3967]->[1:1961:3966] 2025-04-04T17:24:30.874596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:30.309305Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203544;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203544;selected_rows=0; 2025-04-04T17:24:30.874639Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:30.874918Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:30.877078Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-04T17:24:30.877421Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-04T17:24:30.877548Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:30.877725Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:30.877799Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:30.877964Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:30.878062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:30.878608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1970:3975];trace_detailed=; 2025-04-04T17:24:30.879052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:30.879348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:30.879548Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.879707Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.880161Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:30.880289Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.880439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.880491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1970:3975] finished for tablet 9437184 2025-04-04T17:24:30.881021Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1969:3974];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787470878536,"name":"_full_task","f":1743787470878536,"d_finished":0,"c":0,"l":1743787470880568,"d":2032},"events":[{"name":"bootstrap","f":1743787470878748,"d_finished":999,"c":1,"l":1743787470879747,"d":999},{"a":1743787470880130,"name":"ack","f":1743787470880130,"d_finished":0,"c":0,"l":1743787470880568,"d":438},{"a":1743787470880107,"name":"processing","f":1743787470880107,"d_finished":0,"c":0,"l":1743787470880568,"d":461},{"name":"ProduceResults","f":1743787470879455,"d_finished":566,"c":2,"l":1743787470880471,"d":566},{"a":1743787470880474,"name":"Finish","f":1743787470880474,"d_finished":0,"c":0,"l":1743787470880568,"d":94}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.881106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1969:3974];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:30.881589Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1969:3974];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787470878536,"name":"_full_task","f":1743787470878536,"d_finished":0,"c":0,"l":1743787470881165,"d":2629},"events":[{"name":"bootstrap","f":1743787470878748,"d_finished":999,"c":1,"l":1743787470879747,"d":999},{"a":1743787470880130,"name":"ack","f":1743787470880130,"d_finished":0,"c":0,"l":1743787470881165,"d":1035},{"a":1743787470880107,"name":"processing","f":1743787470880107,"d_finished":0,"c":0,"l":1743787470881165,"d":1058},{"name":"ProduceResults","f":1743787470879455,"d_finished":566,"c":2,"l":1743787470880471,"d":566},{"a":1743787470880474,"name":"Finish","f":1743787470880474,"d_finished":0,"c":0,"l":1743787470881165,"d":691}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1970:3975]->[1:1969:3974] 2025-04-04T17:24:30.881697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:30.878028Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:30.881748Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:30.881877Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788011.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788011.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788011.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788011.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788011.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788011.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786811.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788011.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788011.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786811.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786811.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786811.000000s;Name=;Codec=}; 2025-04-04T17:23:31.362518Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:31.430376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:31.447248Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:31.447538Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:31.454365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:31.454574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:31.454781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:31.454851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:31.454931Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:31.455018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:31.455105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:31.455170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:31.455260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:31.455326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:31.455383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:31.455441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:31.474686Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:31.474904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:31.474968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:31.475151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:31.475301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:31.475370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:31.475416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:31.475514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:31.475581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:31.475628Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:31.475662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:31.475806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:31.475863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:31.475899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:31.475929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:31.476017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:31.476063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:31.476103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:31.476132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:31.476201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:31.476237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:31.476266Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:31.476312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:31.476352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:31.476383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:31.476750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-04-04T17:23:31.476856Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-04T17:23:31.476956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-04T17:23:31.477031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-04T17:23:31.477191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:31.477258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:31.477295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:31.477482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:31.477544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:31.477581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 5},{"a":1743787471041311,"name":"Finish","f":1743787471041311,"d_finished":0,"c":0,"l":1743787471041928,"d":617},{"name":"task_result","f":1743787470569316,"d_finished":172968,"c":28,"l":1743787471032098,"d":172968}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-04T17:24:31.042474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:30.559387Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203544;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203544;selected_rows=0; 2025-04-04T17:24:31.042513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:31.042792Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:31.044889Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 
2025-04-04T17:24:31.045248Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-04T17:24:31.045377Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:31.045562Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:31.045634Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:31.045795Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:31.045887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:31.046411Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1982:3987];trace_detailed=; 2025-04-04T17:24:31.046850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:31.047113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:31.047322Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:31.047464Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:31.047914Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:31.048040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:31.048179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:31.048230Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1982:3987] finished for tablet 9437184 2025-04-04T17:24:31.048682Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1981:3986];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787471046336,"name":"_full_task","f":1743787471046336,"d_finished":0,"c":0,"l":1743787471048299,"d":1963},"events":[{"name":"bootstrap","f":1743787471046550,"d_finished":949,"c":1,"l":1743787471047499,"d":949},{"a":1743787471047885,"name":"ack","f":1743787471047885,"d_finished":0,"c":0,"l":1743787471048299,"d":414},{"a":1743787471047863,"name":"processing","f":1743787471047863,"d_finished":0,"c":0,"l":1743787471048299,"d":436},{"name":"ProduceResults","f":1743787471047239,"d_finished":521,"c":2,"l":1743787471048211,"d":521},{"a":1743787471048214,"name":"Finish","f":1743787471048214,"d_finished":0,"c":0,"l":1743787471048299,"d":85}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:31.048775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1981:3986];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:31.049231Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1981:3986];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787471046336,"name":"_full_task","f":1743787471046336,"d_finished":0,"c":0,"l":1743787471048826,"d":2490},"events":[{"name":"bootstrap","f":1743787471046550,"d_finished":949,"c":1,"l":1743787471047499,"d":949},{"a":1743787471047885,"name":"ack","f":1743787471047885,"d_finished":0,"c":0,"l":1743787471048826,"d":941},{"a":1743787471047863,"name":"processing","f":1743787471047863,"d_finished":0,"c":0,"l":1743787471048826,"d":963},{"name":"ProduceResults","f":1743787471047239,"d_finished":521,"c":2,"l":1743787471048211,"d":521},{"a":1743787471048214,"name":"Finish","f":1743787471048214,"d_finished":0,"c":0,"l":1743787471048826,"d":612}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1982:3987]->[1:1981:3986] 2025-04-04T17:24:31.049336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:31.045855Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:31.049390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:31.049517Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788010.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788010.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788010.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788010.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786810.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788010.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788010.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786810.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786810.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786810.000000s;Name=;Codec=}; 2025-04-04T17:23:30.355865Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:30.440128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:30.455545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:30.455808Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:30.462199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:30.462436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:30.462656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:30.462775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:30.462897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:30.463027Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:30.463153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:30.463262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:30.463379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:30.463486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.463586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:30.463691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:30.482498Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:30.482664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:30.482703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:30.482822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.482934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:30.482991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:30.483024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:30.483109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:30.483175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:30.483207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:30.483227Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:30.483326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.483361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:30.483390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:30.483409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:30.483486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:30.483523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:30.483552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:30.483571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:30.483615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:30.483638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:30.483658Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:30.483685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:30.483708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:30.483730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:30.483994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-04-04T17:23:30.484062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-04-04T17:23:30.484135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-04-04T17:23:30.484201Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-04T17:23:30.484326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:30.484367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:30.484389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:30.484529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:30.484568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.484594Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... e":"Finish","f":1743787470922348,"d_finished":0,"c":0,"l":1743787470923060,"d":712},{"name":"task_result","f":1743787470329471,"d_finished":247858,"c":28,"l":1743787470916151,"d":247858}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1962:3967]->[1:1961:3966] 2025-04-04T17:24:30.923682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:30.319964Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203544;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203544;selected_rows=0; 2025-04-04T17:24:30.923737Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:30.924034Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1962:3967];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:30.926395Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-04T17:24:30.926801Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-04T17:24:30.926939Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:30.927132Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:30.927234Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:30.927407Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:30.927510Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:30.928076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1970:3975];trace_detailed=; 2025-04-04T17:24:30.928650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:30.928943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:30.929156Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.929320Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.929772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:30.929914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.930067Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.930123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1970:3975] finished for tablet 9437184 2025-04-04T17:24:30.930687Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1969:3974];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743787470927996,"name":"_full_task","f":1743787470927996,"d_finished":0,"c":0,"l":1743787470930201,"d":2205},"events":[{"name":"bootstrap","f":1743787470928326,"d_finished":1046,"c":1,"l":1743787470929372,"d":1046},{"a":1743787470929739,"name":"ack","f":1743787470929739,"d_finished":0,"c":0,"l":1743787470930201,"d":462},{"a":1743787470929715,"name":"processing","f":1743787470929715,"d_finished":0,"c":0,"l":1743787470930201,"d":486},{"name":"ProduceResults","f":1743787470929063,"d_finished":600,"c":2,"l":1743787470930102,"d":600},{"a":1743787470930105,"name":"Finish","f":1743787470930105,"d_finished":0,"c":0,"l":1743787470930201,"d":96}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:30.930786Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1969:3974];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:30.931306Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1969:3974];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743787470927996,"name":"_full_task","f":1743787470927996,"d_finished":0,"c":0,"l":1743787470930843,"d":2847},"events":[{"name":"bootstrap","f":1743787470928326,"d_finished":1046,"c":1,"l":1743787470929372,"d":1046},{"a":1743787470929739,"name":"ack","f":1743787470929739,"d_finished":0,"c":0,"l":1743787470930843,"d":1104},{"a":1743787470929715,"name":"processing","f":1743787470929715,"d_finished":0,"c":0,"l":1743787470930843,"d":1128},{"name":"ProduceResults","f":1743787470929063,"d_finished":600,"c":2,"l":1743787470930102,"d":600},{"a":1743787470930105,"name":"Finish","f":1743787470930105,"d_finished":0,"c":0,"l":1743787470930843,"d":738}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1970:3975]->[1:1969:3974] 2025-04-04T17:24:30.931431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:30.927474Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:30.931488Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:30.931616Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> TColumnShardTestSchema::HotTiers [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> 
TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788011.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788011.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788011.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788011.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788011.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788011.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786811.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788011.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788011.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786811.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786811.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786811.000000s;Name=;Codec=}; 2025-04-04T17:23:31.667165Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:31.752492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:31.776353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:31.776672Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:31.784433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:31.784667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:31.784892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:31.785008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:31.785134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:31.785289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:31.785395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:31.785501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:31.785603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:31.785709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:31.785812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:31.785907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:31.813669Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:31.813873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:31.813937Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:31.814103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:31.814322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:31.814397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:31.814438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:31.814520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-04-04T17:23:31.814593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:31.814644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:31.814673Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:31.814830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:31.814883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:31.814919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:31.814950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:31.815037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:31.815105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:31.815146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:31.815179Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:31.815248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:31.815283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:31.815311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:31.815356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:31.815393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:31.815426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:31.815790Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-04-04T17:23:31.815878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-04T17:23:31.815977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2025-04-04T17:23:31.816054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-04-04T17:23:31.816210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:31.816264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:31.816297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:31.816495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:31.816551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:31.816587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 4},{"a":1743787472145282,"name":"Finish","f":1743787472145282,"d_finished":0,"c":0,"l":1743787472145916,"d":634},{"name":"task_result","f":1743787471622838,"d_finished":221618,"c":28,"l":1743787472139664,"d":221618}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-04T17:24:32.146437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:31.614567Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203544;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203544;selected_rows=0; 2025-04-04T17:24:32.146483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:32.146751Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:32.148945Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-04T17:24:32.149343Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-04T17:24:32.149478Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:32.149669Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:32.149745Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:32.149908Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:32.150002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:32.150534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1982:3987];trace_detailed=; 2025-04-04T17:24:32.150962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:32.151233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:32.151419Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:32.151568Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:32.152021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:32.152146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:32.152276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:32.152322Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1982:3987] finished for tablet 9437184 2025-04-04T17:24:32.152779Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1981:3986];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787472150463,"name":"_full_task","f":1743787472150463,"d_finished":0,"c":0,"l":1743787472152393,"d":1930},"events":[{"name":"bootstrap","f":1743787472150675,"d_finished":922,"c":1,"l":1743787472151597,"d":922},{"a":1743787472151993,"name":"ack","f":1743787472151993,"d_finished":0,"c":0,"l":1743787472152393,"d":400},{"a":1743787472151968,"name":"processing","f":1743787472151968,"d_finished":0,"c":0,"l":1743787472152393,"d":425},{"name":"ProduceResults","f":1743787472151332,"d_finished":513,"c":2,"l":1743787472152303,"d":513},{"a":1743787472152306,"name":"Finish","f":1743787472152306,"d_finished":0,"c":0,"l":1743787472152393,"d":87}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:32.152864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1981:3986];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:32.153305Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1981:3986];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787472150463,"name":"_full_task","f":1743787472150463,"d_finished":0,"c":0,"l":1743787472152911,"d":2448},"events":[{"name":"bootstrap","f":1743787472150675,"d_finished":922,"c":1,"l":1743787472151597,"d":922},{"a":1743787472151993,"name":"ack","f":1743787472151993,"d_finished":0,"c":0,"l":1743787472152911,"d":918},{"a":1743787472151968,"name":"processing","f":1743787472151968,"d_finished":0,"c":0,"l":1743787472152911,"d":943},{"name":"ProduceResults","f":1743787472151332,"d_finished":513,"c":2,"l":1743787472152303,"d":513},{"a":1743787472152306,"name":"Finish","f":1743787472152306,"d_finished":0,"c":0,"l":1743787472152911,"d":605}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1982:3987]->[1:1981:3986] 2025-04-04T17:24:32.153409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:32.149970Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:32.153455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:32.153577Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> 
TLocksFatTest::PointSetBreak >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> TLocksTest::Range_GoodLock0 >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> TFlatTest::ShardFreezeRejectBadProtobuf >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> StatisticsSaveLoad::Simple [GOOD] >> StatisticsSaveLoad::Delete [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestDropResourcePool >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2025-04-04T17:24:25.887913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:25.888162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:25.888247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b73/r3tmp/tmplaGinw/pdisk_1.dat 2025-04-04T17:24:26.276975Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20896, node 1 2025-04-04T17:24:26.812760Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:26.812823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:26.812864Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:26.813512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:26.819836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:26.913390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:26.915413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:26.930558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22487 2025-04-04T17:24:27.467768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.286570Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:24:30.320811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.320930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.350367Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:24:30.352703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:30.579276Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.582080Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.582842Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.583056Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.583190Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.583492Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.583624Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.583761Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.583865Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:30.767319Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.767454Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.781417Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:30.941871Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:30.992400Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:24:30.992525Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:24:31.030040Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:24:31.031635Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:24:31.031895Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:24:31.031962Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:24:31.032016Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:24:31.032082Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:24:31.032143Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:24:31.032201Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:24:31.032645Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:24:31.060320Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:31.060446Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:31.066442Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:24:31.074602Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:24:31.075636Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:24:31.080470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:24:31.102708Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:24:31.102789Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:24:31.102903Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:24:31.125143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:24:31.141232Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:24:31.141503Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:24:31.372266Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:24:31.517357Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:24:31.606726Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:24:32.447721Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:24:32.448145Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:24:32.469566Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:24:32.476753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2254:3083], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:32.476883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2270:3088], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:32.476994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:32.493448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-04-04T17:24:32.540021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2274:3091], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:24:32.856968Z node 1 :TX_PROXY ERROR: Actor# [1:2364:3120] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:33.321671Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2386:3132]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:24:33.322029Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:24:33.322177Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2388:3134] 2025-04-04T17:24:33.323534Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2388:3134] 2025-04-04T17:24:33.324304Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2389:2842] 2025-04-04T17:24:33.324588Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2388:3134], server id = [2:2389:2842], tablet id = 72075186224037894, status = OK 2025-04-04T17:24:33.324792Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2389:2842], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:24:33.324867Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:24:33.325128Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:24:33.325205Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2386:3132], StatRequests.size() = 1 2025-04-04T17:24:33.732329Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NWU3ODAzYzEtMzYyYjQ4YmMtZTYwNzJkNGYtYTUzNTk0YjA=, TxId: 2025-04-04T17:24:33.732429Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NWU3ODAzYzEtMzYyYjQ4YmMtZTYwNzJkNGYtYTUzNTk0YjA=, TxId: 2025-04-04T17:24:33.733608Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:24:33.736689Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-04T17:24:33.772270Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2417:3155]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:24:33.772503Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:24:33.772558Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2417:3155], StatRequests.size() = 1 2025-04-04T17:24:33.973267Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=OGQ2ZGYxN2UtOTYxY2JmZGMtOWU0NjYxODctNjMxNWE1NmQ=, TxId: 01jr0xaf0h4nx8459501kwayfg 2025-04-04T17:24:33.974463Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=OGQ2ZGYxN2UtOTYxY2JmZGMtOWU0NjYxODctNjMxNWE1NmQ=, TxId: 01jr0xaf0h4nx8459501kwayfg 2025-04-04T17:24:33.978511Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:24:33.981592Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-04T17:24:34.001958Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NTYzYzFlODAtNGM5NmEzODctYjRmYzc4NzgtNzg3OWFjZjM=, TxId: 01jr0xaf1yc8ntxsqx9gxd75z5 2025-04-04T17:24:34.002087Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NTYzYzFlODAtNGM5NmEzODctYjRmYzc4NzgtNzg3OWFjZjM=, TxId: 01jr0xaf1yc8ntxsqx9gxd75z5 >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::DeleteImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: 2025-04-04T17:24:26.789997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:26.790203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:26.790290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b6c/r3tmp/tmpbM8wiT/pdisk_1.dat 2025-04-04T17:24:27.064335Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10791, node 1 2025-04-04T17:24:27.287932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:27.287980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:27.288013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:27.288359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:27.290115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:27.370848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:27.370994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:27.384544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21595 2025-04-04T17:24:27.837688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.246812Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:24:31.273177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:31.273263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:31.300097Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:24:31.301601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:31.515951Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.516461Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.516859Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.516982Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.517037Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.517232Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.517314Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.517383Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.517438Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.689317Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:31.689491Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:31.702926Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:31.835015Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:31.873445Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:24:31.873537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:24:31.900668Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:24:31.900881Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:24:31.901087Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:24:31.901144Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:24:31.901194Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:24:31.901247Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:24:31.901296Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:24:31.901349Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:24:31.902480Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:24:31.929855Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:31.929974Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:31.936286Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:24:31.942079Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:24:31.942833Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:24:31.948055Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:24:31.968465Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:24:31.968531Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:24:31.968609Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:24:31.982502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:24:31.988986Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:24:31.989130Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:24:32.202598Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:24:32.367504Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:24:32.479421Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:24:33.294205Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:24:33.294847Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:24:33.320017Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:24:33.325146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2257:3083], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.325262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2270:3088], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.325782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.334218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-04-04T17:24:33.390464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2274:3091], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:24:33.639509Z node 1 :TX_PROXY ERROR: Actor# [1:2362:3120] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:33.814668Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2384:3132]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:24:33.814890Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:24:33.814976Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2386:3134] 2025-04-04T17:24:33.815038Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2386:3134] 2025-04-04T17:24:33.815450Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2387:2840] 2025-04-04T17:24:33.815650Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2386:3134], server id = [2:2387:2840], tablet id = 72075186224037894, status = OK 2025-04-04T17:24:33.815781Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2387:2840], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:24:33.815835Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:24:33.815996Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:24:33.816051Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2384:3132], StatRequests.size() = 1 2025-04-04T17:24:33.967353Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NGQwOTJhMTYtNjBhNDUxNWUtY2Y1MTBlOGEtZTVhYmMwZWQ=, TxId: 2025-04-04T17:24:33.967417Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NGQwOTJhMTYtNjBhNDUxNWUtY2Y1MTBlOGEtZTVhYmMwZWQ=, TxId: 2025-04-04T17:24:33.968331Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:24:33.970587Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:24:33.982275Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2415:3155]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:24:33.982463Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:24:33.982496Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2415:3155], StatRequests.size() = 1 2025-04-04T17:24:34.099147Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MmY4YWU2NjItYzBmZGNkZWMtZGQ2YmY0Y2MtOWM1OTJhYTE=, TxId: 2025-04-04T17:24:34.099231Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MmY4YWU2NjItYzBmZGNkZWMtZGQ2YmY0Y2MtOWM1OTJhYTE=, TxId: 2025-04-04T17:24:34.100124Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:24:34.102655Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-04-04T17:24:34.118224Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2447:3171]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:24:34.118441Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:24:34.118476Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2447:3171], StatRequests.size() = 1 2025-04-04T17:24:34.224283Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YTg0MWU2YWItM2ZlODY5ZjMtNjUxODBjOWUtODUyNTY4N2Q=, TxId: 01jr0xaf8vaj8vhr7cx4gef613 2025-04-04T17:24:34.224454Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=YTg0MWU2YWItM2ZlODY5ZjMtNjUxODBjOWUtODUyNTY4N2Q=, TxId: 01jr0xaf8vaj8vhr7cx4gef613 >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2025-04-04T17:24:29.241564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510154102747029:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:29.241732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea4/r3tmp/tmpBCJGGN/pdisk_1.dat 2025-04-04T17:24:29.560944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:29.629339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:29.629444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:29.631399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4061 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:29.821702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:29.852897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:32.095336Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510167385538336:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:32.095502Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea4/r3tmp/tmpqZ64gN/pdisk_1.dat 2025-04-04T17:24:32.209010Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:32.234725Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:32.234858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:32.236954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11563 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:32.415946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:32.432267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::GetTabletCounters [GOOD] >> TObjectStorageListingTest::SuffixColumns [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] >> TFlatTest::AutoSplitMergeQueue >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> StatisticsSaveLoad::ForbidAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788015.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788015.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788015.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788015.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788015.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788015.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786815.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788015.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788015.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786815.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786815.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786815.000000s;Name=;Codec=}; 2025-04-04T17:23:36.037053Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:36.148259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:36.181187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:36.181621Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:36.191831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:36.192148Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:36.192483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:36.192693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:36.192903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:36.193118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:36.193292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:36.193470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:36.193661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:36.193833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:36.194010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:36.194180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:36.232142Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:36.232428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:36.232539Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:36.232802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:36.233030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:36.233135Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:36.233206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:36.233349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:36.233447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:36.233519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:36.233568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:36.233788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:36.233879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:36.233943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:36.233990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:36.234127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:36.234214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:36.234309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:36.234356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:36.234472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:36.234534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:36.234580Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:36.234662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-04T17:23:36.234723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:36.234767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:36.235310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2025-04-04T17:23:36.235451Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=67; 2025-04-04T17:23:36.235570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-04-04T17:23:36.235701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-04T17:23:36.235930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:36.236013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:36.236063Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:36.236355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:36.236446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:36.236502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... 
5},{"a":1743787476615320,"name":"Finish","f":1743787476615320,"d_finished":0,"c":0,"l":1743787476615762,"d":442},{"name":"task_result","f":1743787476252522,"d_finished":128462,"c":28,"l":1743787476610908,"d":128462}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-04T17:24:36.616084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:36.246651Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203544;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203544;selected_rows=0; 2025-04-04T17:24:36.616113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:36.616312Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:36.617926Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-04T17:24:36.618190Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-04T17:24:36.618312Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:36.618450Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:36.618511Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:36.618639Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:36.618727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:36.619102Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1982:3987];trace_detailed=; 2025-04-04T17:24:36.619469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:36.619684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:36.619851Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:36.619975Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:36.620316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:36.620403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:36.620507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:36.620540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1982:3987] finished for tablet 9437184 2025-04-04T17:24:36.620961Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1981:3986];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787476619049,"name":"_full_task","f":1743787476619049,"d_finished":0,"c":0,"l":1743787476620589,"d":1540},"events":[{"name":"bootstrap","f":1743787476619242,"d_finished":756,"c":1,"l":1743787476619998,"d":756},{"a":1743787476620295,"name":"ack","f":1743787476620295,"d_finished":0,"c":0,"l":1743787476620589,"d":294},{"a":1743787476620281,"name":"processing","f":1743787476620281,"d_finished":0,"c":0,"l":1743787476620589,"d":308},{"name":"ProduceResults","f":1743787476619777,"d_finished":405,"c":2,"l":1743787476620525,"d":405},{"a":1743787476620527,"name":"Finish","f":1743787476620527,"d_finished":0,"c":0,"l":1743787476620589,"d":62}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:36.621034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1981:3986];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:36.621357Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1981:3986];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787476619049,"name":"_full_task","f":1743787476619049,"d_finished":0,"c":0,"l":1743787476621067,"d":2018},"events":[{"name":"bootstrap","f":1743787476619242,"d_finished":756,"c":1,"l":1743787476619998,"d":756},{"a":1743787476620295,"name":"ack","f":1743787476620295,"d_finished":0,"c":0,"l":1743787476621067,"d":772},{"a":1743787476620281,"name":"processing","f":1743787476620281,"d_finished":0,"c":0,"l":1743787476621067,"d":786},{"name":"ProduceResults","f":1743787476619777,"d_finished":405,"c":2,"l":1743787476620525,"d":405},{"a":1743787476620527,"name":"Finish","f":1743787476620527,"d_finished":0,"c":0,"l":1743787476621067,"d":540}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1982:3987]->[1:1981:3986] 2025-04-04T17:24:36.621427Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:36.618696Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:36.621463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:36.621553Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> TColumnShardTestSchema::HotTiersTtl [GOOD] >> KikimrIcGateway::TestALterResourcePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-04-04T17:24:30.399981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510158908369674:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:30.400223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea2/r3tmp/tmpmSeIpb/pdisk_1.dat 2025-04-04T17:24:30.692478Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5424, node 1 2025-04-04T17:24:30.772397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.772443Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:30.772456Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:30.772462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:30.772546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.772614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:30.774321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TClient is connected to server localhost:5005 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:31.095462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:31.124970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743787471272 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743787471272 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... 
(TRUNCATED) 2025-04-04T17:24:33.199397Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510170195733630:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:33.199476Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea2/r3tmp/tmpKg4pEG/pdisk_1.dat 2025-04-04T17:24:33.278122Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24923, node 2 2025-04-04T17:24:33.325973Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:33.325995Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:33.326000Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:33.326084Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:33.332193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:33.332271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:33.333823Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32501 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:33.516197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:33.534779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:33.896008Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7489510170195734962:2483], Recipient [2:7489510170195734309:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-04-04T17:24:33.896052Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-04-04T17:24:33.896232Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:24:33.896394Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-04-04T17:24:33.896431Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-04-04T17:24:33.896452Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-04-04T17:24:33.896470Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-04-04T17:24:33.896487Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-04-04T17:24:33.896538Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-04-04T17:24:33.904545Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7489510170195734966:2484], Recipient [2:7489510170195734309:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-04-04T17:24:33.904578Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-04-04T17:24:33.904739Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-04-04T17:24:33.904945Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-04-04T17:24:33.904994Z node 2 :TX_DATASHARD TRACE: 
72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-04-04T17:24:33.905077Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: 2025-04-04T17:21:13.956266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:13.956798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:13.956859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00133f/r3tmp/tmpcDwKw3/pdisk_1.dat 2025-04-04T17:21:14.360388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.403021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:14.441201Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:21:14.442159Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:21:14.442584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:14.442732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:14.454376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:14.628296Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:21:14.628365Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:21:14.628522Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:644:2552] 2025-04-04T17:21:14.759064Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:21:14.759187Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:21:14.759857Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:21:14.759958Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:21:14.760296Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:21:14.760603Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:21:14.760740Z node 1 :TX_PROXY DEBUG: Actor# 
[1:644:2552] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:21:14.762626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.763153Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:21:14.763752Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:21:14.763849Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:21:14.796716Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:660:2567], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:21:14.797921Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:660:2567], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:21:14.798404Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:668:2573] 2025-04-04T17:21:14.798733Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:14.846236Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:660:2567], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:21:14.847048Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:14.847164Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:21:14.848980Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:21:14.849074Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:21:14.849134Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:21:14.849530Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:21:14.849679Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:21:14.849776Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:685:2573] in generation 1 2025-04-04T17:21:14.860473Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:21:14.884629Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:21:14.884810Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:21:14.884910Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2583] 2025-04-04T17:21:14.884944Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:21:14.884973Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:21:14.885000Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:14.885164Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:668:2573], Recipient [1:668:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:21:14.885199Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:21:14.885464Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:21:14.885538Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:21:14.885598Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:14.885640Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:14.885701Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:21:14.885730Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:21:14.885765Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:21:14.885790Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:21:14.885823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:14.885925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:676:2577], Recipient [1:668:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:21:14.885954Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:21:14.885983Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2572], serverId# [1:676:2577], sessionId# [0:0:0] 2025-04-04T17:21:14.886301Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:676:2577] 2025-04-04T17:21:14.886332Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:21:14.886402Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:21:14.886563Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:21:14.886645Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:21:14.886717Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:21:14.886759Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:21:14.886790Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:21:14.886814Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:21:14.886859Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:21:14.887092Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:21:14.887124Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:21:14.887168Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:21:14.887192Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:21:14.887232Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:21:14.887254Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:21:14.887278Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:21:14.887300Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:21:14.887320Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:21:14.888388Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:688:2584], Recipient [1:668:2573]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:21:14.888434Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21:14.899182Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Compl ... nnected at leader tablet# 72075186224037888, clientId# [26:972:2788], serverId# [26:973:2789], sessionId# [0:0:0] 2025-04-04T17:24:35.057399Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [26:972:2788] 2025-04-04T17:24:35.057499Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [26:972:2788] 2025-04-04T17:24:35.057577Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:972:2788] 2025-04-04T17:24:35.057691Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [26:972:2788] 2025-04-04T17:24:35.057762Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [26:972:2788] 2025-04-04T17:24:35.057983Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [26:971:2787], Recipient [26:696:2584]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-04T17:24:35.058115Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2025-04-04T17:24:35.058261Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:24:35.058404Z node 26 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2025-04-04T17:24:35.058561Z node 26 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [26:971:2787], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2025-04-04T17:24:35.058706Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:24:35.058825Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:24:35.059090Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 
1, edge 9223372036854775807/0, generation 0 2025-04-04T17:24:35.059184Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:21} starting compaction 2025-04-04T17:24:35.059567Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} starting Scan{1 on 1001, Compact{72075186224037888.1.21, eph 1}} 2025-04-04T17:24:35.059718Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} started compaction 1 2025-04-04T17:24:35.059793Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR 2025-04-04T17:24:35.160339Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 21, product {tx status + 1 parts epoch 2} done 2025-04-04T17:24:35.160783Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2025-04-04T17:24:35.160957Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2025-04-04T17:24:35.161042Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2025-04-04T17:24:35.161535Z node 26 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.535484Z 2025-04-04T17:24:35.161726Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2025-04-04T17:24:35.161856Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:24:35.161971Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-04-04T17:24:35.162082Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [26:971:2787]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:24:35.162741Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2025-04-04T17:24:35.162884Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ========= Starting an immediate read ========= 2025-04-04T17:24:35.338697Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0xag736aqx22g0frhpxt2k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=NjdmOTU0ZmItYjk2ODEyZmUtODFiZWE4MmYtYTFmNWY3OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:24:35.340512Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send [26:909:2733] 2025-04-04T17:24:35.340626Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:909:2733] 2025-04-04T17:24:35.341009Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [26:997:2795], Recipient [26:696:2584]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-04-04T17:24:35.341266Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-04T17:24:35.341401Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:24:35.341512Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:24:35.341620Z node 26 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1536/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:24:35.341716Z node 26 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1536/18446744073709551615 2025-04-04T17:24:35.341852Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-04T17:24:35.342088Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:24:35.342206Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:24:35.342337Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:35.342417Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:24:35.342494Z node 26 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-04T17:24:35.342601Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:24:35.342642Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:35.342673Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:24:35.342709Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:24:35.342917Z node 26 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-04-04T17:24:35.343183Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-04-04T17:24:35.343228Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:24:35.343312Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:24:35.343386Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 
72075186224037888 on unit CompletedOperations 2025-04-04T17:24:35.343431Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:24:35.343451Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:24:35.343495Z node 26 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-04T17:24:35.343579Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:24:35.343724Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:24:35.343871Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:24:35.406916Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} commited cookie 8 for step 20 2025-04-04T17:24:35.428157Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:22} commited cookie 8 for step 21 2025-04-04T17:24:35.449174Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-04T17:24:35.449395Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:24:35.449754Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{12, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:24:35.449917Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:24:35.450671Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} commited cookie 1 for step 18 2025-04-04T17:24:35.450964Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [26:547:2483] 2025-04-04T17:24:35.451104Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [26:547:2483] 2025-04-04T17:24:35.476143Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:23} commited cookie 8 for step 22 2025-04-04T17:24:35.517968Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:24} commited cookie 8 for step 23 2025-04-04T17:24:35.539214Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:25} commited cookie 8 for step 24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-04-04T17:24:30.569025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510155658258558:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:30.569097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea1/r3tmp/tmp3krW0y/pdisk_1.dat 2025-04-04T17:24:30.805588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.805688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.807291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:30.832898Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21916 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:31.053839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:31.074217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:33.515841Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510170345983015:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:33.515969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea1/r3tmp/tmptCZYQw/pdisk_1.dat 2025-04-04T17:24:33.613448Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:33.657751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:33.657882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:33.659642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17126 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:33.784891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:33.805171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787473911 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) >> TFlatTest::WriteSplitByPartialKeyAndRead >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> TColumnShardTestSchema::RebootColdTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: 2025-04-04T17:24:25.932583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:25.932818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:25.932898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bc2/r3tmp/tmpKzkj2H/pdisk_1.dat 2025-04-04T17:24:26.276992Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24656, node 1 2025-04-04T17:24:26.811928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:26.811987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:26.812019Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:26.812545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:26.819665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:26.913411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:26.915455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:26.930582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8012 2025-04-04T17:24:27.467753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.943663Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:24:30.980435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.980554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:31.009424Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:24:31.012136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:31.252284Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.252923Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.253438Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.253609Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.253703Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.253934Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.254050Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.254155Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.254263Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:31.423899Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:31.424019Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:31.437586Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:31.592580Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:31.641749Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:24:31.641858Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:24:31.677263Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:24:31.677420Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:24:31.677577Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:24:31.677631Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:24:31.677672Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:24:31.677710Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:24:31.677750Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:24:31.677786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:24:31.678784Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:24:31.708025Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:31.708155Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:31.714745Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:24:31.719610Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:24:31.720270Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:24:31.724358Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:24:31.741067Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:24:31.741117Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:24:31.741173Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:24:31.754562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:24:31.764170Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:24:31.764354Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:24:31.978441Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:24:32.144560Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:24:32.236471Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:24:33.316880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.317186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.403302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:24:33.919565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.919702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.920895Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2546:3121]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:24:33.921075Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:24:33.921154Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2548:3123] 2025-04-04T17:24:33.921240Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2548:3123] 2025-04-04T17:24:33.921858Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2549:2995] 2025-04-04T17:24:33.922157Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2548:3123], server id = [2:2549:2995], tablet id = 72075186224037894, status = OK 2025-04-04T17:24:33.922377Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2549:2995], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:24:33.922451Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:24:33.922656Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:24:33.922735Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2546:3121], StatRequests.size() = 1 2025-04-04T17:24:33.942974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2553:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.943131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.943495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:33.949980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:24:34.141166Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:24:34.141233Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:24:34.226500Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2548:3123], schemeshard count = 1 2025-04-04T17:24:34.702324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2560:3134], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:24:34.871094Z node 1 :TX_PROXY ERROR: Actor# [1:2684:3208] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:34.881408Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2707:3224]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:24:34.881610Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:24:34.881653Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2707:3224], StatRequests.size() = 1 2025-04-04T17:24:34.949825Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0xaezg3sf96cyt36338zw0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTI2ZjA0YzUtMWU2NGFiNjgtZWFiMThjMzItMTJmOGI2MTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:24:35.099472Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2784:3256], for# user@builtin, access# DescribeSchema 2025-04-04T17:24:35.099549Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2784:3256], for# user@builtin, access# DescribeSchema 2025-04-04T17:24:35.111973Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2774:3252], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:24:35.113727Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzMxM2ExY2EtNzhhMmI1YzItMmRmOTFmYjUtZjA3NmQ2NzY=, ActorId: [1:2765:3244], ActorState: ExecuteState, TraceId: 01jr0xag4baekm62xxcc0gmak3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TLocksFatTest::PointSetNotBreak >> TLocksTest::Range_Pinhole ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 16709, MsgBus: 31133 2025-04-04T17:24:28.036959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510149948932797:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:28.037092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00130b/r3tmp/tmp7jU6CJ/pdisk_1.dat 2025-04-04T17:24:28.372829Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16709, node 1 2025-04-04T17:24:28.438397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:28.438513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:28.440144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:28.547668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:28.547696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:28.547701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:28.547844Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31133 TClient is connected to server localhost:31133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:29.217951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:29.249170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T17:24:29.271714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:29.287966Z node 1 :TX_PROXY ERROR: Actor# [1:7489510154243900789:2342] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:29.288088Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 24273, MsgBus: 29283 2025-04-04T17:24:30.860620Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510157804575844:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:30.860727Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00130b/r3tmp/tmpL44l74/pdisk_1.dat 2025-04-04T17:24:30.960283Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24273, node 2 2025-04-04T17:24:30.996544Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.996805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.999567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:31.020182Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:31.020204Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:31.020211Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:31.020320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29283 TClient is connected to server localhost:29283 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:31.392808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:31.409678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T17:24:31.424532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27584, MsgBus: 10925 2025-04-04T17:24:34.374809Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510176233313644:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:34.374869Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00130b/r3tmp/tmpVfLaVE/pdisk_1.dat 2025-04-04T17:24:34.469997Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27584, node 3 2025-04-04T17:24:34.516010Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:34.516111Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:34.517583Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:34.520828Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:34.520853Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:34.520863Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:34.520977Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10925 TClient is connected to server localhost:10925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:34.869333Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:34.884053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788036.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788036.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788036.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788036.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788036.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788036.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143788036.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788036.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786836.000000s;Name=;Codec=}; 2025-04-04T17:23:56.852028Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:56.925775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:56.943738Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:56.944051Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:56.951893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:56.952109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:56.952364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:56.952499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:56.952626Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:56.952721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:56.952802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:56.952892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:56.953022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:56.953136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.953238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:56.953353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:56.979963Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:56.980192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:56.980254Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:56.980415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:56.980576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:56.980662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:56.980705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:56.980808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:56.980873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-04T17:23:56.980915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:56.980945Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:56.981090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:56.981142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:56.981176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:56.981202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:56.981283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:56.981331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:56.981371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:56.981406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:56.981478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:56.981513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:56.981542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:56.981584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:56.981623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:56.981654Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:56.982032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T17:23:56.982129Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-04-04T17:23:56.982208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-04T17:23:56.982312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=61; 2025-04-04T17:23:56.982508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:56.982563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:56.982598Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:56.982778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:56.982829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.982867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.983023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
77585947,"d":8668},{"a":1743787477585948,"name":"Finish","f":1743787477585948,"d_finished":0,"c":0,"l":1743787477586397,"d":449},{"name":"task_result","f":1743787477166356,"d_finished":163569,"c":28,"l":1743787477581462,"d":163569}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1995:4004]->[1:1994:4003] 2025-04-04T17:24:37.586758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1995:4004];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:37.155494Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203352;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203352;selected_rows=0; 2025-04-04T17:24:37.586785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1995:4004];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:37.586968Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1995:4004];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:37.588419Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-04T17:24:37.588659Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2025-04-04T17:24:37.588757Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:37.588887Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:37.588937Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:37.589053Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:37.589115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:37.589483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:2011:4020];trace_detailed=; 2025-04-04T17:24:37.589791Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:37.590020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:37.590200Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:37.590372Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:37.590799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:37.590925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:37.591075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:37.591128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2011:4020] finished for tablet 9437184 2025-04-04T17:24:37.591605Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:2010:4019];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787477589435,"name":"_full_task","f":1743787477589435,"d_finished":0,"c":0,"l":1743787477591213,"d":1778},"events":[{"name":"bootstrap","f":1743787477589584,"d_finished":822,"c":1,"l":1743787477590406,"d":822},{"a":1743787477590770,"name":"ack","f":1743787477590770,"d_finished":0,"c":0,"l":1743787477591213,"d":443},{"a":1743787477590746,"name":"processing","f":1743787477590746,"d_finished":0,"c":0,"l":1743787477591213,"d":467},{"name":"ProduceResults","f":1743787477590120,"d_finished":555,"c":2,"l":1743787477591105,"d":555},{"a":1743787477591111,"name":"Finish","f":1743787477591111,"d_finished":0,"c":0,"l":1743787477591213,"d":102}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:37.591690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:2010:4019];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:37.592112Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:2010:4019];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787477589435,"name":"_full_task","f":1743787477589435,"d_finished":0,"c":0,"l":1743787477591742,"d":2307},"events":[{"name":"bootstrap","f":1743787477589584,"d_finished":822,"c":1,"l":1743787477590406,"d":822},{"a":1743787477590770,"name":"ack","f":1743787477590770,"d_finished":0,"c":0,"l":1743787477591742,"d":972},{"a":1743787477590746,"name":"processing","f":1743787477590746,"d_finished":0,"c":0,"l":1743787477591742,"d":996},{"name":"ProduceResults","f":1743787477590120,"d_finished":555,"c":2,"l":1743787477591105,"d":555},{"a":1743787477591111,"name":"Finish","f":1743787477591111,"d_finished":0,"c":0,"l":1743787477591742,"d":631}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2011:4020]->[1:2010:4019] 2025-04-04T17:24:37.592192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:37.589091Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:37.592229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:37.592320Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2011:4020];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 |95.2%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 62076, MsgBus: 14731 2025-04-04T17:24:28.321651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510146308722084:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:28.321747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f9/r3tmp/tmpM7j4MA/pdisk_1.dat 2025-04-04T17:24:28.553423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:28.553541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:28.555569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62076, node 1 2025-04-04T17:24:28.572920Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:24:28.574135Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:24:28.593071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:28.604769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:28.604790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:28.604797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:28.604924Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14731 TClient is connected to server localhost:14731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:29.213317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:29.248601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-04-04T17:24:29.271442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64296, MsgBus: 29707 2025-04-04T17:24:31.094363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510160622504944:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:31.094459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f9/r3tmp/tmpAPqrZd/pdisk_1.dat 2025-04-04T17:24:31.204191Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64296, node 2 2025-04-04T17:24:31.232568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:31.232641Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:31.236288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:31.264164Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:31.264194Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:31.264199Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:31.264299Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29707 TClient is connected to server localhost:29707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:31.706425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:31.718838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2041, MsgBus: 20555 2025-04-04T17:24:34.499214Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510174731666076:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:34.499296Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0012f9/r3tmp/tmpk8zxQF/pdisk_1.dat 2025-04-04T17:24:34.598924Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2041, node 3 2025-04-04T17:24:34.632714Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:34.632817Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:34.636771Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:34.657955Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:34.657975Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:34.657982Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:34.658095Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20555 TClient is connected to server localhost:20555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:35.004740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:35.026396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:24:35.042952Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788014.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788014.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788014.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788014.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788014.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788014.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786814.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788014.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788014.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786814.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786814.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786814.000000s;Name=;Codec=}; 2025-04-04T17:23:34.450781Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:34.562998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:34.597255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:34.597761Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:34.608261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:34.608577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:34.608896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:34.609089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:34.609303Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:34.609527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:34.609706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:34.609890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:34.610083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:34.610287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:34.610467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:34.610646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:34.649624Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:34.649929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:34.650047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:34.650315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:34.650533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:34.650635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:34.650704Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:34.650840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:34.650967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-04-04T17:23:34.651041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:34.651106Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:34.651324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:34.651424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:34.651489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:34.651546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:34.651693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:34.651780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:34.651844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:34.651890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:34.652017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:34.652080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:34.652121Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:34.652190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:34.652247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:34.652304Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:34.652839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2025-04-04T17:23:34.652977Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=63; 2025-04-04T17:23:34.653120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-04-04T17:23:34.653238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-04-04T17:23:34.653472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:34.653554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:34.653603Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:34.653904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:34.653987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:34.654040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:34.654275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:34.654350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
_data.cpp:29;EXECUTE:finishLoadingTime=320; 2025-04-04T17:24:37.884190Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=30301; 2025-04-04T17:24:37.890979Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6703; 2025-04-04T17:24:37.898742Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6711; 2025-04-04T17:24:37.898857Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7779; 2025-04-04T17:24:37.899045Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=115; 2025-04-04T17:24:37.899192Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=75; 2025-04-04T17:24:37.899326Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=91; 2025-04-04T17:24:37.899461Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=90; 2025-04-04T17:24:37.908346Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8803; 2025-04-04T17:24:37.919822Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11354; 2025-04-04T17:24:37.919971Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=48; 2025-04-04T17:24:37.920054Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-04-04T17:24:37.920103Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-04T17:24:37.920149Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-04T17:24:37.920194Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-04T17:24:37.920276Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-04-04T17:24:37.920323Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-04T17:24:37.920413Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-04-04T17:24:37.920456Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-04T17:24:37.920537Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=43; 2025-04-04T17:24:37.920639Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-04-04T17:24:37.920997Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=318; 2025-04-04T17:24:37.921042Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=74110; 2025-04-04T17:24:37.921195Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:37.921304Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:37.921359Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:37.921428Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:37.939563Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:37.939712Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:37.939776Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:37.939856Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=4; 2025-04-04T17:24:37.939922Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:37.939969Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:37.940021Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:37.940058Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:37.940152Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:37.940569Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:37.940648Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2199:4072];tablet_id=9437184;parent=[1:2159:4039];fline=manager.cpp:82;event=ask_data;request=request_id=123;1={portions_count=18};; 2025-04-04T17:24:37.941617Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:37.941707Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:37.941735Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-04T17:24:37.941759Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:37.941802Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:37.941864Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:37.941961Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=4; 2025-04-04T17:24:37.942030Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:37.942075Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:37.942127Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:37.942166Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:37.942278Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:37.943525Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=18;path_id=1; 2025-04-04T17:24:37.944493Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> KikimrIcGateway::TestDropResourcePool [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> TLocksTest::GoodLock >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TLocksTest::SetEraseSet [GOOD] >> TLocksTest::BrokenLockErase >> TLocksTest::GoodSameKeyLock >> TFlatTest::WriteSplitKillRead >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> TFlatTest::SelectRangeSkipNullKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 1284, MsgBus: 30158 2025-04-04T17:24:28.036884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510149572456556:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:28.036997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001328/r3tmp/tmpWoUK6X/pdisk_1.dat 2025-04-04T17:24:28.368810Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:28.429992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 1284, node 1 2025-04-04T17:24:28.430792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:28.432428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:28.547667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:28.547697Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2025-04-04T17:24:28.547706Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:28.547834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30158 TClient is connected to server localhost:30158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:29.170929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:30.544589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510158162391857:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:30.544675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:30.921758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.030884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.056462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.083968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.140422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510162457359468:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.140529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.140590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510162457359473:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.144576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-04T17:24:31.154765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510162457359475:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-04T17:24:31.211335Z node 1 :TX_PROXY ERROR: Actor# [1:7489510162457359527:2569] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26247, MsgBus: 64768 2025-04-04T17:24:32.037821Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510166173786855:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:32.037921Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001328/r3tmp/tmpNNnYbq/pdisk_1.dat 2025-04-04T17:24:32.158400Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:32.170715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:32.170791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:32.172243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26247, node 2 2025-04-04T17:24:32.211390Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:32.211417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:32.211425Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:32.211520Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64768 TClient is connected to server localhost:64768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:32.563452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:34.745246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510174763722147:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:34.745326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:34.775793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:34.803635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:24:34.832883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:34.893570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:34.929397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510174763722458:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:34.929486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:34.929555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510174763722463:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:34.932763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-04T17:24:34.941169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510174763722465:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-04T17:24:35.036935Z node 2 :TX_PROXY ERROR: Actor# [2:7489510179058689815:2563] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:35.196448Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
: Info: Success, code: 4 Trying to start YDB, gRPC: 5975, MsgBus: 5722 2025-04-04T17:24:35.745191Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510177577157155:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:35.745293Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001328/r3tmp/tmpx6SPJP/pdisk_1.dat 2025-04-04T17:24:35.834618Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5975, node 3 2025-04-04T17:24:35.868291Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:35.868320Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:35.868327Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:35.868424Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:35.869011Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:35.869082Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:35.870379Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5722 TClient is connected to server localhost:5722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:36.209841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:36.229203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 >> TFlatTest::WriteMergeAndRead >> KqpYql::EvaluateExpr2 >> KqpNewEngine::Update+UseSink >> TLocksTest::UpdateLockedKey >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-04-04T17:24:30.387495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510157113594730:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:30.388099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea3/r3tmp/tmpHGovWd/pdisk_1.dat 2025-04-04T17:24:30.650038Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:30.742703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.742816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.744466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6671 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:30.929016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:30.957385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:31.064576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:31.101531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:31.137204Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2025-04-04T17:24:31.137318Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562911:2494] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-04T17:24:31.137410Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562911:2494] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-04T17:24:31.137469Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562911:2494] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-04T17:24:31.140595Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 2025-04-04T17:24:31.140674Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562933:2501] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-04T17:24:31.140765Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562933:2501] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-04T17:24:31.140795Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562933:2501] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-04-04T17:24:31.144190Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2025-04-04T17:24:31.144286Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562940:2505] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-04T17:24:31.144365Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562940:2505] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-04T17:24:31.144411Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562940:2505] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-04-04T17:24:31.147118Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 2025-04-04T17:24:31.147213Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562946:2508] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-04-04T17:24:31.147274Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562946:2508] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-04-04T17:24:31.147330Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161408562946:2508] 
txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-04-04T17:24:33.230692Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510170883625471:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:33.230797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea3/r3tmp/tmpphkATw/pdisk_1.dat 2025-04-04T17:24:33.346268Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:33.375825Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:33.375938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:33.377627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30157 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:33.551476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:33.574783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:33.646330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:33.693063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:36.587378Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510181557360896:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:36.587415Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea3/r3tmp/tmpuypZ1J/pdisk_1.dat 2025-04-04T17:24:36.683954Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:36.726803Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:36.726901Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:36.728492Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6895 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:36.899936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:36.919741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:36.989713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:37.034438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TFlatTest::SplitInvalidPath >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-04-04T17:24:34.592899Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510175025440688:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:34.593076Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9e/r3tmp/tmpHse64t/pdisk_1.dat 2025-04-04T17:24:34.913524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:34.963936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:34.964093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:34.966405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31119 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:35.158175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:35.170126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:35.269238Z node 1 :TX_PROXY ERROR: Actor# [1:7489510179320408676:2361] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-04T17:24:35.271453Z node 1 :TX_PROXY ERROR: Actor# [1:7489510179320408689:2367] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-04-04T17:24:35.273373Z node 1 :TX_PROXY ERROR: Actor# [1:7489510179320408695:2372] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-04T17:24:35.275281Z node 1 :TX_PROXY ERROR: Actor# [1:7489510179320408701:2377] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-04-04T17:24:37.575515Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510186565226659:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:37.575588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9e/r3tmp/tmp687m9a/pdisk_1.dat 2025-04-04T17:24:37.688502Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:37.709209Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:37.709324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:37.710736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22949 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:37.853350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:37.871423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> KqpNewEngine::MultiSelect >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-04-04T17:24:35.699356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510180441654203:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:35.699458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9c/r3tmp/tmpKv7eF8/pdisk_1.dat 2025-04-04T17:24:35.975146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:36.068824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:36.068900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:36.070696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2057 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:36.186439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:36.223944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:36.382406Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:24:36.385557Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:24:36.405448Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:24:36.408540Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743787476333 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) Copy TableOld to Table 2025-04-04T17:24:36.479029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:24:36.479327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:24:36.479677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-04T17:24:36.479715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-04T17:24:36.479723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:36.479758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-04T17:24:36.479787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-04T17:24:36.479878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-04T17:24:36.479954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:24:36.480403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:24:36.480438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-04T17:24:36.480846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-04T17:24:36.480942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-04-04T17:24:36.481073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:24:36.481088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:24:36.481162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-04T17:24:36.481211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:24:36.481242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489510180441654726:2243], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-04-04T17:24:36.481255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489510180441654726:2243], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-04-04T17:24:36.481295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:24:36.481349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 waiting... 
2025-04-04T17:24:36.481679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:36.481782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:36.482936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:36.483001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:36.483008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-04T17:24:36.483025Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-04T17:24:36.483035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T17:24:36.483158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:36.483235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:36.483245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-04T17:24:36.483251Z node 1 :FLAT_TX_SCHEME ... 
25-04-04T17:24:39.855763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715784:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.855816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715784:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.855837Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715784:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:24:39.856078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-04T17:24:39.856177Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-04-04T17:24:39.856195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-04-04T17:24:39.856211Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-04-04T17:24:39.856218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-04-04T17:24:39.856231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715784, ready parts: 1/1, is published: true 2025-04-04T17:24:39.856265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7489510194867585160:2687] message: TxId: 281474976715784 2025-04-04T17:24:39.856280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-04-04T17:24:39.856292Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715784:0 2025-04-04T17:24:39.856300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715784:0 2025-04-04T17:24:39.856368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-04T17:24:39.857021Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037891 state PreOffline 2025-04-04T17:24:39.857059Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-04-04T17:24:39.857194Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:286:0] [72075186224037889:1:23:1:12288:234:0] ] return ack processed 2025-04-04T17:24:39.857243Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:24:39.857310Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-04T17:24:39.858912Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2025-04-04T17:24:39.858954Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-04-04T17:24:39.859203Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:24:39.859350Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 Check that tablet 2025-04-04T17:24:39.859450Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# 
[2:7489510194867585185:2689], serverId# [2:7489510194867585189:3450], sessionId# [0:0:0] 72075186224037888 was deleted 2025-04-04T17:24:39.859643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510190572616176 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-04T17:24:39.859693Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:39.859866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510194867583749 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-04-04T17:24:39.859892Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:39.860038Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-04-04T17:24:39.860139Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:24:39.860203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:39.860256Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-04-04T17:24:39.860275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 Check that tablet 72075186224037889 was deleted 2025-04-04T17:24:39.860686Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-04-04T17:24:39.861458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:24:39.861676Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-04T17:24:39.861687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:24:39.861720Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-04T17:24:39.861739Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-04T17:24:39.861752Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-04T17:24:39.861778Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7489510194867585031:3313], serverId# [2:7489510194867585032:3314], sessionId# [0:0:0] 2025-04-04T17:24:39.861842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T17:24:39.861962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-04T17:24:39.862049Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:24:39.862058Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:24:39.862070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:24:39.862104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:24:39.862133Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-04T17:24:39.862408Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-04T17:24:39.862438Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-04T17:24:39.862930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:24:39.862955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:24:39.862989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T17:24:39.863003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T17:24:39.863029Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:24:39.863767Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T17:24:39.863847Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-04T17:24:39.865315Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:24:39.865653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510194867583744 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-04-04T17:24:39.865727Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:39.866052Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-04T17:24:39.866060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:39.867590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:24:39.867653Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-04T17:24:39.867775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-04T17:24:39.867908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:24:39.867928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 
72057594046644480 2025-04-04T17:24:39.867960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:24:39.868048Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T17:24:39.868134Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-04T17:24:39.868448Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-04T17:24:39.868721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:24:39.868747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:24:39.868783Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:24:40.161674Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-04-04T17:24:40.162259Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TPQTest::TestUserInfoCompatibility >> TColumnShardTestSchema::HotTiersWithStat [GOOD] >> KqpScripting::ScanQuery >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> KqpNewEngine::JoinWithParams >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> TColumnShardTestSchema::RebootOneColdTier [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788025.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788025.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788025.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788025.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788025.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788025.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786825.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788025.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788025.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786825.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786825.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786825.000000s;Name=;Codec=}; 2025-04-04T17:23:45.393989Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:45.470768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:45.486714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:45.486990Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:45.493813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:45.494013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:45.494195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:45.494294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:45.494382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:45.494485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:45.494552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:45.494619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:45.494688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:45.494754Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:45.494836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:45.494900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:45.513133Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:45.513311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:45.513396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:45.513534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:45.513675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:45.513740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:45.513784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:45.513847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:45.513903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:45.513938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:45.513964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:45.514083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:45.514126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:45.514156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:45.514176Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:45.514273Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:45.514319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:45.514350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:45.514377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:45.514438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:45.514466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:45.514487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:45.514522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:45.514552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:45.514576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:45.514932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-04T17:23:45.515003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-04-04T17:23:45.515109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-04T17:23:45.515176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-04-04T17:23:45.515345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:45.515392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:45.515419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:45.515565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-04T17:23:45.515600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:45.515629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... "a":1743787483554781,"name":"Finish","f":1743787483554781,"d_finished":0,"c":0,"l":1743787483555295,"d":514},{"name":"task_result","f":1743787483101177,"d_finished":170994,"c":28,"l":1743787483550519,"d":170994}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1974:3979]->[1:1973:3978] 2025-04-04T17:24:43.555695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:43.092558Z;index_granules=0;index_portions=4;index_batches=627;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203544;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203544;selected_rows=0; 2025-04-04T17:24:43.555724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:43.555940Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1974:3979];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:43.557609Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2025-04-04T17:24:43.557867Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2025-04-04T17:24:43.557966Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:43.558098Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:43.558154Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:43.558298Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:43.558369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:43.558900Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1982:3987];trace_detailed=; 2025-04-04T17:24:43.559375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:43.559692Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:43.559903Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:43.560026Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:43.560396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:43.560492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:43.560608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:43.560646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1982:3987] finished for tablet 9437184 2025-04-04T17:24:43.561010Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1981:3986];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743787483558816,"name":"_full_task","f":1743787483558816,"d_finished":0,"c":0,"l":1743787483560697,"d":1881},"events":[{"name":"bootstrap","f":1743787483559044,"d_finished":1008,"c":1,"l":1743787483560052,"d":1008},{"a":1743787483560376,"name":"ack","f":1743787483560376,"d_finished":0,"c":0,"l":1743787483560697,"d":321},{"a":1743787483560361,"name":"processing","f":1743787483560361,"d_finished":0,"c":0,"l":1743787483560697,"d":336},{"name":"ProduceResults","f":1743787483559812,"d_finished":450,"c":2,"l":1743787483560633,"d":450},{"a":1743787483560635,"name":"Finish","f":1743787483560635,"d_finished":0,"c":0,"l":1743787483560697,"d":62}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:43.561073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1981:3986];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:43.561402Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1981:3986];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743787483558816,"name":"_full_task","f":1743787483558816,"d_finished":0,"c":0,"l":1743787483561113,"d":2297},"events":[{"name":"bootstrap","f":1743787483559044,"d_finished":1008,"c":1,"l":1743787483560052,"d":1008},{"a":1743787483560376,"name":"ack","f":1743787483560376,"d_finished":0,"c":0,"l":1743787483561113,"d":737},{"a":1743787483560361,"name":"processing","f":1743787483560361,"d_finished":0,"c":0,"l":1743787483561113,"d":752},{"name":"ProduceResults","f":1743787483559812,"d_finished":450,"c":2,"l":1743787483560633,"d":450},{"a":1743787483560635,"name":"Finish","f":1743787483560635,"d_finished":0,"c":0,"l":1743787483561113,"d":478}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1982:3987]->[1:1981:3986] 2025-04-04T17:24:43.561475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:43.558344Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:43.561513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:43.561624Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1982:3987];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788012.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788012.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788012.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788012.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788012.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788012.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143788012.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788012.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786812.000000s;Name=;Codec=}; 2025-04-04T17:23:32.324005Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:32.388645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:32.404602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:32.404868Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:32.411002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:32.411201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:32.411395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:32.411480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:32.411573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:32.411688Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:32.411786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:32.411858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:32.411957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:32.412026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.412090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:32.412150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:32.433316Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:32.433472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:32.433533Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:32.433762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:32.433901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:32.433949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:32.433980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:32.434040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:32.434120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:32.434159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:32.434181Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:32.434353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:32.434404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:32.434433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:32.434457Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:32.434517Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:32.434554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:32.434585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:32.434604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:32.434667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:32.434695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:32.434725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:32.434762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:32.434789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:32.434820Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:32.435134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-04T17:23:32.435223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-04-04T17:23:32.435288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-04-04T17:23:32.508581Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=144; 2025-04-04T17:23:32.508934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:32.509048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:32.509102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:32.509380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:32.509454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.509511Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.509724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSche ... ame":"Finish","f":1743787483946659,"d_finished":0,"c":0,"l":1743787483947362,"d":703},{"name":"task_result","f":1743787482330003,"d_finished":617062,"c":28,"l":1743787483743797,"d":617062}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1999:4008]->[1:1998:4007] 2025-04-04T17:24:43.947921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:42.155562Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203352;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203352;selected_rows=0; 2025-04-04T17:24:43.947963Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:43.948243Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1999:4008];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:43.950857Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-04-04T17:24:43.951215Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} 
readable: {1000000011:max} at tablet 9437184 2025-04-04T17:24:43.951334Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-04-04T17:24:43.951508Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:32;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:43.951575Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-04-04T17:24:43.951736Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:43;event=program_parsed;result={"processors":[{"processor":{"internal":{},"type":"Projection","input":"1"},"fetch":"1"}]}; 2025-04-04T17:24:43.951827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:127;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-04-04T17:24:43.952471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:2015:4024];trace_detailed=; 2025-04-04T17:24:43.952927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:85;ff_first=(column_ids=1;column_names=timestamp;);; 2025-04-04T17:24:43.953182Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:100;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-04-04T17:24:43.953362Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:44.047265Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:44.047850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:24:44.048024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:44.048213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:44.048282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2015:4024] finished for tablet 9437184 2025-04-04T17:24:44.048765Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:2014:4023];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.095},{"events":["l_ack","l_processing","l_Finish"],"t":0.096}],"full":{"a":1743787483952296,"name":"_full_task","f":1743787483952296,"d_finished":0,"c":0,"l":1743787484048371,"d":96075},"events":[{"name":"bootstrap","f":1743787483952620,"d_finished":94706,"c":1,"l":1743787484047326,"d":94706},{"a":1743787484047823,"name":"ack","f":1743787484047823,"d_finished":0,"c":0,"l":1743787484048371,"d":548},{"a":1743787484047802,"name":"processing","f":1743787484047802,"d_finished":0,"c":0,"l":1743787484048371,"d":569},{"name":"ProduceResults","f":1743787483953279,"d_finished":94415,"c":2,"l":1743787484048257,"d":94415},{"a":1743787484048262,"name":"Finish","f":1743787484048262,"d_finished":0,"c":0,"l":1743787484048371,"d":109}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:24:44.048838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:2014:4023];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:24:44.049193Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:2014:4023];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.095},{"events":["l_ack","l_processing","l_Finish"],"t":0.096}],"full":{"a":1743787483952296,"name":"_full_task","f":1743787483952296,"d_finished":0,"c":0,"l":1743787484048880,"d":96584},"events":[{"name":"bootstrap","f":1743787483952620,"d_finished":94706,"c":1,"l":1743787484047326,"d":94706},{"a":1743787484047823,"name":"ack","f":1743787484047823,"d_finished":0,"c":0,"l":1743787484048880,"d":1057},{"a":1743787484047802,"name":"processing","f":1743787484047802,"d_finished":0,"c":0,"l":1743787484048880,"d":1078},{"name":"ProduceResults","f":1743787483953279,"d_finished":94415,"c":2,"l":1743787484048257,"d":94415},{"a":1743787484048262,"name":"Finish","f":1743787484048262,"d_finished":0,"c":0,"l":1743787484048880,"d":618}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2015:4024]->[1:2014:4023] 2025-04-04T17:24:44.049269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:43.951794Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-04-04T17:24:44.049307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:24:44.049402Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2015:4024];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788034.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143788034.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788034.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123788034.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786834.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123786834.000000s;Name=;Codec=}; 2025-04-04T17:23:56.085740Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:56.166223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:56.183368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:56.183608Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:56.189643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:56.189817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:56.189978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:56.190046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:56.190112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:56.190199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:56.190306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:56.190377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:56.190446Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:56.190510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.190568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:56.190628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:56.209298Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:56.209480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:56.209537Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:56.209690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:56.209839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:56.209895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:56.209924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:56.209985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:56.210034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:56.210067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:56.210093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:56.210201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:56.210256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:56.210292Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:56.210309Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:56.210367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:56.210401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:56.210439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:56.210462Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:56.210508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:56.210532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:56.210555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:56.210597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:56.210625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:56.210647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:56.210915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-04-04T17:23:56.210978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-04-04T17:23:56.211031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-04-04T17:23:56.211106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-04-04T17:23:56.211253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:56.211295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:56.211318Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:56.211468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:56.211518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.211546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.211656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:56.211683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:56.211703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:56.211822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... :29;PRECHARGE:finishLoadingTime=14; 2025-04-04T17:24:44.388278Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=294; 2025-04-04T17:24:44.388326Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=25235; 2025-04-04T17:24:44.395902Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7485; 2025-04-04T17:24:44.404225Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7121; 2025-04-04T17:24:44.404355Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8342; 2025-04-04T17:24:44.404537Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=100; 2025-04-04T17:24:44.404692Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=94; 2025-04-04T17:24:44.404852Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=100; 2025-04-04T17:24:44.404985Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=83; 2025-04-04T17:24:44.413715Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8641; 2025-04-04T17:24:44.425318Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11461; 2025-04-04T17:24:44.425469Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=42; 2025-04-04T17:24:44.425558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=32; 2025-04-04T17:24:44.425614Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-04T17:24:44.425668Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-04T17:24:44.425723Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-04T17:24:44.425808Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-04-04T17:24:44.425867Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=11; 2025-04-04T17:24:44.425964Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2025-04-04T17:24:44.426019Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-04T17:24:44.426095Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2025-04-04T17:24:44.426195Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-04-04T17:24:44.426527Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=280; 2025-04-04T17:24:44.426582Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=69590; 2025-04-04T17:24:44.426755Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive 
{blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:44.426875Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:44.426938Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:44.427009Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:44.437970Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:44.438142Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:44.438216Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:44.438320Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3; 2025-04-04T17:24:44.438395Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:44.438443Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:44.438497Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:44.438541Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:44.438643Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:44.439370Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:44.439470Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1980:3880];tablet_id=9437184;parent=[1:1942:3849];fline=manager.cpp:82;event=ask_data;request=request_id=91;1={portions_count=11};; 2025-04-04T17:24:44.440744Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:44.440907Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:44.440942Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:24:44.440971Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:44.441020Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:44.441085Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:44.441147Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3; 2025-04-04T17:24:44.441214Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:44.441261Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:44.441310Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:44.441355Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:44.441445Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:44.442517Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=11;path_id=1; 2025-04-04T17:24:44.443850Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788034.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143788034.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788034.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123788034.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786834.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123786834.000000s;Name=;Codec=}; 2025-04-04T17:23:56.609135Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:56.696377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:56.723688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:56.724000Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:56.732101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:56.732324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:56.732564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:56.732687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:56.732829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:56.732973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:56.733096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:56.733207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:56.733318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:56.733431Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.733551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:56.733654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:56.762734Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:56.762942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:56.763007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:56.763184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:56.763346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:56.763411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:56.763475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:56.763582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:56.763644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:56.763686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:56.763720Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:56.763876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:56.763940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:56.763980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:56.764010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2025-04-04T17:23:56.764106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:56.764158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:56.764232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:56.764270Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:56.764351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:56.764390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:56.764419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:56.764463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:56.764503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:56.764535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:56.764941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-04-04T17:23:56.765046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-04T17:23:56.765128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-04T17:23:56.765240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-04-04T17:23:56.765408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:56.765457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:56.765494Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:56.765716Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:56.765772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.765810Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:56.765964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:56.766005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:56.766032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:56.766312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... pp:29;PRECHARGE:finishLoadingTime=15; 2025-04-04T17:24:44.580741Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=337; 2025-04-04T17:24:44.580788Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=31851; 2025-04-04T17:24:44.588514Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7649; 2025-04-04T17:24:44.597169Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7582; 2025-04-04T17:24:44.597276Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8670; 2025-04-04T17:24:44.597447Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=96; 2025-04-04T17:24:44.597578Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=74; 2025-04-04T17:24:44.597722Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=89; 2025-04-04T17:24:44.597849Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=72; 2025-04-04T17:24:44.607926Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10003; 2025-04-04T17:24:44.620869Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=12832; 2025-04-04T17:24:44.621005Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=41; 2025-04-04T17:24:44.621092Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-04-04T17:24:44.621144Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-04T17:24:44.621190Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-04T17:24:44.621237Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-04T17:24:44.621312Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-04-04T17:24:44.621375Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-04T17:24:44.621480Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2025-04-04T17:24:44.621525Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-04T17:24:44.621595Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-04-04T17:24:44.621706Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2025-04-04T17:24:44.621977Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=222; 2025-04-04T17:24:44.622020Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=81472; 2025-04-04T17:24:44.622179Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:44.622301Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:44.622373Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:44.622446Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:44.632937Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:44.633091Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:44.633159Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:44.633241Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3; 2025-04-04T17:24:44.633309Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:44.633355Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:44.633408Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:44.633448Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:44.633548Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:44.634156Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:44.634275Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1980:3880];tablet_id=9437184;parent=[1:1942:3849];fline=manager.cpp:82;event=ask_data;request=request_id=91;1={portions_count=11};; 2025-04-04T17:24:44.635287Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:44.635688Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:44.635723Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:24:44.635774Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-04-04T17:24:44.635824Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0;
2025-04-04T17:24:44.635885Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0;
2025-04-04T17:24:44.635948Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=3;
2025-04-04T17:24:44.636006Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003;
2025-04-04T17:24:44.636052Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-04T17:24:44.636103Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes;
2025-04-04T17:24:44.636141Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes;
2025-04-04T17:24:44.636235Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes;
2025-04-04T17:24:44.637350Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=11;path_id=1;
2025-04-04T17:24:44.638701Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1942:3849];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
160000/10402332 160000/10402332 80000/5203504 0/0
>> KqpYql::EvaluateExpr2 [GOOD]
>> KqpYql::EvaluateExpr3
>> KqpNewEngine::Update+UseSink [GOOD]
>> KqpNewEngine::Update-UseSink
>> KqpNewEngine::KeyColumnOrder
>> TFlatTest::WriteSplitWriteSplit [GOOD]
>> TLocksFatTest::LocksLimit [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >>
TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788009.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788009.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788009.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786809.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788009.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788009.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786809.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786809.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786809.000000s;Name=;Codec=}; 2025-04-04T17:23:30.068338Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:30.157589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:30.175827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:30.176085Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:30.183396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:30.183626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:30.183869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:30.183983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:30.184128Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:30.184268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:30.184372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:30.184483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:30.184602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:30.184709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.184816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:30.184914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:30.213818Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:30.214026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:30.214115Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:30.214341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.214512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:30.214584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:30.214647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:30.214750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:30.214817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:30.214863Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:30.214900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:30.215059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.215143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:30.215190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:30.215221Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:30.215310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:30.215364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:30.215412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:30.215448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:30.215528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:30.215569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:30.215603Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:30.215655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:30.215703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:30.215735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:30.216164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-04-04T17:23:30.216277Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=61; 2025-04-04T17:23:30.216377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-04T17:23:30.216488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-04-04T17:23:30.216671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:30.216730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:30.216767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:30.217008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:30.217067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.217105Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... pp:29;EXECUTE:finishLoadingTime=564; 2025-04-04T17:24:45.220445Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=60632; 2025-04-04T17:24:45.233924Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13352; 2025-04-04T17:24:45.247741Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12690; 2025-04-04T17:24:45.247864Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13831; 2025-04-04T17:24:45.248069Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=132; 2025-04-04T17:24:45.248204Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=77; 2025-04-04T17:24:45.248349Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=98; 2025-04-04T17:24:45.248487Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=95; 2025-04-04T17:24:45.263118Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14545; 2025-04-04T17:24:45.281898Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18623; 2025-04-04T17:24:45.282056Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-04-04T17:24:45.282142Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-04-04T17:24:45.282201Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-04T17:24:45.282274Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=31; 2025-04-04T17:24:45.282325Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-04T17:24:45.282427Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=58; 2025-04-04T17:24:45.282494Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-04-04T17:24:45.282610Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=76; 2025-04-04T17:24:45.282669Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-04T17:24:45.282749Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 2025-04-04T17:24:45.282853Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=60; 2025-04-04T17:24:45.283275Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=375; 2025-04-04T17:24:45.283332Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=131803; 2025-04-04T17:24:45.283502Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:45.283621Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:45.283685Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:45.283758Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:45.305891Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:45.306085Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:45.306161Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:45.306260Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7; 2025-04-04T17:24:45.306333Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:45.306380Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:45.306434Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:45.306478Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:45.306594Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:45.307105Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:45.307229Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2641:4515];tablet_id=9437184;parent=[1:2599:4480];fline=manager.cpp:82;event=ask_data;request=request_id=150;1={portions_count=29};; 2025-04-04T17:24:45.307884Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:45.308174Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:45.308209Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:24:45.308237Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-04-04T17:24:45.308282Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0;
2025-04-04T17:24:45.308343Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0;
2025-04-04T17:24:45.308397Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7;
2025-04-04T17:24:45.308455Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003;
2025-04-04T17:24:45.308502Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-04-04T17:24:45.308551Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes;
2025-04-04T17:24:45.308591Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes;
2025-04-04T17:24:45.308701Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes;
2025-04-04T17:24:45.309614Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=29;path_id=1;
2025-04-04T17:24:45.311206Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0
>> KqpYql::UuidPrimaryKey
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower
[GOOD] Test command err: 2025-04-04T17:24:38.458966Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510192079268558:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:38.459234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9b/r3tmp/tmpCYuAJW/pdisk_1.dat 2025-04-04T17:24:38.759517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:38.801084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:38.801193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:38.803027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30687 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:38.997659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:39.020289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743787479126 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... 
(TRUNCATED) 2025-04-04T17:24:39.200116Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:39.201707Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:39.201749Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:39.265644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:24:39.265821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.266061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:24:39.266083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:39.266660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:24:39.266742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:24:39.267582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:24:39.267655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-04-04T17:24:39.267803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.267862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-04T17:24:39.268179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:39.268305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 
2025-04-04T17:24:39.268669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-04-04T17:24:39.268771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-04T17:24:39.268859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2025-04-04T17:24:39.268894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-04-04T17:24:39.268906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-04-04T17:24:39.269461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:24:39.269488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710668, ready parts: 0/1, is published: true 2025-04-04T17:24:39.269507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710668, at schemeshard: 72057594046644480 2025-04-04T17:24:39.271132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-04-04T17:24:39.271157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-04-04T17:24:39.271277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-04-04T17:24:39.271306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-04T17:24:39.271383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-04-04T17:24:39.271608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-04T17:24:39.271644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-04-04T17:24:39.271689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-04T17:24:39.271703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-04T17:24:39.271748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 
Origin: 72057594037968897 2025-04-04T17:24:39.271801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710668:0 2 -> 3 2025-04-04T17:24:39.272056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.272456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.272539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.272564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.272616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037889 splitOp: 281474976710668:0 alterVersion: 1 at tablet: 72057594046644480 2025-04-04T17:24:39.272756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037890 ... 72075186224037891 state Offline 2025-04-04T17:24:42.818819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:42.818920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510207176177611 RawX2: 4503608217307391 } TabletId: 72075186224037889 State: 4 2025-04-04T17:24:42.818963Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:42.819127Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-04T17:24:42.819734Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7489510208579845836:2316], serverId# [3:7489510208579845838:2494], sessionId# [0:0:0] 2025-04-04T17:24:42.820747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:42.820973Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-04T17:24:42.821242Z node 3 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:24:42.821548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510208579845340 RawX2: 4503612512274679 } TabletId: 72075186224037888 State: 4 2025-04-04T17:24:42.821583Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:42.821792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:24:42.822144Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-04-04T17:24:42.822213Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T17:24:42.822183Z node 2 :TX_DATASHARD 
INFO: OnTabletDead: 72075186224037890 2025-04-04T17:24:42.822265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-04T17:24:42.822441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T17:24:42.822581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:24:42.823194Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-04T17:24:42.823232Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T17:24:42.823237Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-04T17:24:42.823597Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-04T17:24:42.823635Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-04-04T17:24:42.823682Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-04-04T17:24:42.824030Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-04T17:24:42.824090Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7489510207176178347:3137], serverId# [2:7489510207176178348:3138], sessionId# [0:0:0] 2025-04-04T17:24:42.823502Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T17:24:42.823615Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-04T17:24:42.824463Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-04T17:24:42.824507Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T17:24:42.824854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:42.824947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:24:42.825087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:42.825122Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T17:24:42.825178Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-04T17:24:42.825557Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-04-04T17:24:42.825618Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:24:42.826117Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-04T17:24:42.826218Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7489510207176177822:2720], serverId# [2:7489510207176177823:2721], sessionId# [0:0:0] 2025-04-04T17:24:42.826303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046644480:3 2025-04-04T17:24:42.826326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:24:42.826361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T17:24:42.826377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T17:24:42.826418Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:24:42.826471Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-04T17:24:42.826495Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-04T17:24:42.826514Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-04-04T17:24:42.826802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510208579845562 RawX2: 4503612512274685 } TabletId: 72075186224037892 State: 4 2025-04-04T17:24:42.826833Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:42.826924Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-04T17:24:42.827516Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:24:42.827975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:24:42.827997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:24:42.828705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:42.828809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-04T17:24:42.828817Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-04T17:24:42.828817Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-04T17:24:42.828848Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-04T17:24:42.828947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:24:42.828969Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-04T17:24:42.829266Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-04T17:24:42.829526Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-04T17:24:42.829552Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-04-04T17:24:42.829887Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-04T17:24:42.829943Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-04T17:24:42.831779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 
2025-04-04T17:24:42.831801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888
2025-04-04T17:24:42.831881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480
2025-04-04T17:24:42.832023Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892
2025-04-04T17:24:42.832024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1
2025-04-04T17:24:42.832087Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop
2025-04-04T17:24:42.832153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480
2025-04-04T17:24:42.832155Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892
2025-04-04T17:24:42.832174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480
2025-04-04T17:24:42.832216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1
2025-04-04T17:24:42.832512Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found
2025-04-04T17:24:42.832949Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-04-04T17:24:42.832978Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-04-04T17:24:42.832978Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892
2025-04-04T17:24:42.833099Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892
2025-04-04T17:24:42.835022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5
2025-04-04T17:24:42.835060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892
2025-04-04T17:24:42.835115Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480
>> KqpRanges::UpdateMulti
>> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD]
>> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD]
>> DataShardWrite::InsertImmediate
>> KqpNewEngine::MultiSelect [GOOD]
>> KqpNewEngine::MultiOutput
>> KqpNewEngine::DuplicatedResults
>> KikimrIcGateway::TestSecretsExistingValidation [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD]
Test command err:
2025-04-04T17:24:40.520099Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510200046177718:2264];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:24:40.520172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e95/r3tmp/tmphLihZ4/pdisk_1.dat
2025-04-04T17:24:40.882608Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:24:40.930185Z node 1
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:40.931819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:40.934084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3238 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:41.195557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.210286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.232897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:41.393831Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:24:41.401788Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:24:41.429329Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:24:41.433969Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 2025-04-04T17:24:41.458472Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3196 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743787481338 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-04-04T17:24:41.576623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:24:41.576855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:24:41.577097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:41.577147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:24:41.577175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-04T17:24:41.577372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-04T17:24:41.577461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:24:41.578526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:24:41.578589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-04-04T17:24:41.578752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2025-04-04T17:24:41.578795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-04-04T17:24:41.579106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:41.579288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:41.579362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 
TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:41.579748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-04T17:24:41.579846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-04T17:24:41.579873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-04-04T17:24:41.579917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2025-04-04T17:24:41.579940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-04-04T17:24:41.579966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2025-04-04T17:24:41.579973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 waiting... 2025-04-04T17:24:41.582858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-04T17:24:41.582897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-04-04T17:24:41.583019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-04T17:24:41.583047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-04-04T17:24:41.583098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-04-04T17:24:41.583361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 4 TabletID: 72075186224037891 Origin: 72057594037968897 2025-04-04T17:24:41.583389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2025-04-04T17:24:41.583452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 7205759404 ... 
9 2025-04-04T17:24:44.345447Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-04-04T17:24:44.345457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-04-04T17:24:44.345476Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-04-04T17:24:44.345485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-04-04T17:24:44.345497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715694, ready parts: 1/1, is published: true 2025-04-04T17:24:44.345533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7489510215370854789:2423] message: TxId: 281474976715694 2025-04-04T17:24:44.345552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-04-04T17:24:44.345567Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715694:0 2025-04-04T17:24:44.345575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715694:0 2025-04-04T17:24:44.345688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-04T17:24:44.347995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510211075886791 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-04-04T17:24:44.348038Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:44.348516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:44.353954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-04T17:24:44.354209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-04-04T17:24:44.354720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-04T17:24:44.354748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-04T17:24:44.355472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510215370854426 RawX2: 4503608217307455 } TabletId: 72075186224037891 State: 4 2025-04-04T17:24:44.355520Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:44.355682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510215370854432 RawX2: 4503608217307456 } TabletId: 72075186224037892 State: 4 2025-04-04T17:24:44.355729Z node 2 
:FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:44.355733Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-04T17:24:44.355825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510215370854425 RawX2: 4503608217307454 } TabletId: 72075186224037890 State: 4 2025-04-04T17:24:44.355838Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:44.355932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510215370854638 RawX2: 4503608217307483 } TabletId: 72075186224037894 State: 4 2025-04-04T17:24:44.355947Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:44.356096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:44.356153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:44.356189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:44.356225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:44.357014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510211075886793 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-04T17:24:44.357051Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:44.357171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510215370854636 RawX2: 4503608217307482 } TabletId: 72075186224037893 State: 4 2025-04-04T17:24:44.357194Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:44.357452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:44.357540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T17:24:44.357704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-04-04T17:24:44.357815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-04T17:24:44.357902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-04T17:24:44.358029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:24:44.358125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:24:44.358202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-04T17:24:44.358314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:44.358392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:44.358428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T17:24:44.358440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T17:24:44.358917Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-04-04T17:24:44.358936Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-04T17:24:44.359047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-04T17:24:44.359058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-04T17:24:44.359069Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-04-04T17:24:44.359078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:24:44.359085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:24:44.359086Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-04T17:24:44.359101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-04T17:24:44.359109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-04T17:24:44.359225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:24:44.359358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:24:44.360202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:24:44.360214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:24:44.360403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-04-04T17:24:44.360528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:24:44.360610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:24:44.360617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:24:44.360641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:24:44.360904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-04-04T17:24:44.360914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-04-04T17:24:44.360936Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:24:44.361338Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-04-04T17:24:44.363981Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found >> TFlatTest::SplitThenMerge [GOOD] >> TFlatTest::WriteSplitAndRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-04-04T17:24:34.182368Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510174599219918:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:34.182597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9f/r3tmp/tmpYGFGFy/pdisk_1.dat 2025-04-04T17:24:34.440157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:34.521336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:34.521453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:34.523460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24078 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:34.669960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:34.694954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:34.821592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:34.868279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:39.182302Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510174599219918:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:39.182365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:39.988202Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510197067981800:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:39.988260Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9f/r3tmp/tmpc75G2V/pdisk_1.dat 2025-04-04T17:24:40.094701Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:40.122677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:40.122781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:40.124621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24504 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:24:40.316747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:40.336334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.405835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.486929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.170569Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510213738028039:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:43.170751Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9f/r3tmp/tmpzZCjNc/pdisk_1.dat 2025-04-04T17:24:43.255837Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:43.301578Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:43.301677Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:43.307159Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19164 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:43.469558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:24:43.483168Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:24:43.487154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.558224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.632078Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> KqpScanArrowInChanels::AggregateNoColumn >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143788012.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788012.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788012.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788012.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788012.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788012.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788012.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786812.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788012.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788012.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786812.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786812.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786812.000000s;Name=;Codec=}; 2025-04-04T17:23:32.479003Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:32.569862Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:32.595427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:32.595782Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:32.604157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:32.604438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:32.604689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:32.604825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:32.604956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:32.605112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:32.605218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:32.605320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:32.605438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:32.605549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.605663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:32.605766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:32.635569Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:32.635790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:32.635873Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:32.636062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:32.636226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:32.636295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:32.636339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:32.636434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:32.636523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:32.636589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:32.636624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:32.636790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:32.636855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:32.636897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:32.636927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:32.637030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:32.637084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:32.637129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:32.637160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:32.637242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:32.637282Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:32.637314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:32.637360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:32.637398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:32.637434Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:32.637829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T17:23:32.637930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-04-04T17:23:32.638056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-04-04T17:23:32.638137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-04T17:23:32.638325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:32.638385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:32.638426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:32.638663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:32.638725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:32.638767Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... 
ta.cpp:29;EXECUTE:finishLoadingTime=526; 2025-04-04T17:24:47.066025Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=58267; 2025-04-04T17:24:47.079387Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=13261; 2025-04-04T17:24:47.089984Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=9452; 2025-04-04T17:24:47.090101Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=10599; 2025-04-04T17:24:47.090292Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=125; 2025-04-04T17:24:47.090413Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2025-04-04T17:24:47.090538Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=84; 2025-04-04T17:24:47.090680Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=95; 2025-04-04T17:24:47.098384Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7631; 2025-04-04T17:24:47.108467Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9984; 2025-04-04T17:24:47.108603Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-04-04T17:24:47.108677Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=29; 2025-04-04T17:24:47.108719Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-04T17:24:47.108761Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-04T17:24:47.108802Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-04T17:24:47.108867Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2025-04-04T17:24:47.108907Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-04T17:24:47.108989Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-04-04T17:24:47.109044Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-04-04T17:24:47.109100Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2025-04-04T17:24:47.109179Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=45; 2025-04-04T17:24:47.109494Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=284; 2025-04-04T17:24:47.109535Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=109994; 2025-04-04T17:24:47.109668Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:47.109768Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:47.109814Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:47.109875Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:47.126632Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:47.126812Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:47.126881Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:47.126960Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:47.127034Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:47.127080Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:47.127134Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:47.127175Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:47.127300Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:47.127883Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:47.127978Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2612:4486];tablet_id=9437184;parent=[1:2572:4453];fline=manager.cpp:82;event=ask_data;request=request_id=146;1={portions_count=29};; 2025-04-04T17:24:47.129062Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:47.129443Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:47.129478Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-04T17:24:47.129505Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:47.129552Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:47.129619Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:47.129681Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:47.129746Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:47.129792Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:47.129847Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:47.129893Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:47.129995Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:47.131841Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=29;path_id=1; 2025-04-04T17:24:47.133450Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> HttpRequest::AnalyzeServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 13518, MsgBus: 17752 2025-04-04T17:24:28.036918Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510150381707528:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:28.037009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00131f/r3tmp/tmpMxbN7B/pdisk_1.dat 2025-04-04T17:24:28.411119Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13518, node 1 2025-04-04T17:24:28.452532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:28.452690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:28.454895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:28.547579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:28.547604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:28.547611Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:28.547720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17752 TClient is connected to server localhost:17752 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:29.226056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:30.532162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510158971642831:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:30.532288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:30.921525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.032561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.061863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.091737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.136207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510163266610443:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.136321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.136824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510163266610448:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.141484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-04-04T17:24:31.151908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510163266610450:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-04-04T17:24:31.241888Z node 1 :TX_PROXY ERROR: Actor# [1:7489510163266610503:2571] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64218, MsgBus: 19746 2025-04-04T17:24:32.198545Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510165813644727:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:32.198602Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00131f/r3tmp/tmpFkXamw/pdisk_1.dat 2025-04-04T17:24:32.294423Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64218, node 2 2025-04-04T17:24:32.324885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:32.325018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:32.327018Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:32.346587Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:32.346623Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:32.346630Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:32.346745Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19746 TClient is connected to server localhost:19746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:32.730167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:32.743933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:32.800359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:32.986329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:33.051066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:35.045532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510178698548381:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:35.045632Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:35.089744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:35.117829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:35.146281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:35.174547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:35.215069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:35.247896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497671 ... sues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:35.288791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:35.288870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510178698548895:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:35.292441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:35.302010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510178698548897:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:24:35.391962Z node 2 :TX_PROXY ERROR: Actor# [2:7489510178698548951:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:36.176868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-04T17:24:36.730832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:24:37.198897Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510165813644727:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:37.198976Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:37.230999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-04-04T17:24:37.674335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-04-04T17:24:38.181212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-04T17:24:38.664802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-04T17:24:39.225159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:24:39.258402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:24:41.410423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715720:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63304, MsgBus: 31035 2025-04-04T17:24:42.290871Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510209361584009:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:42.290957Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00131f/r3tmp/tmpz2Z9qU/pdisk_1.dat 2025-04-04T17:24:42.403361Z node 3 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:24:42.423065Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:42.423192Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 63304, node 3 2025-04-04T17:24:42.425084Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:42.457410Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:42.457428Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:42.457433Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:42.457542Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31035 TClient is connected to server localhost:31035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:42.935514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.953207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.027157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.202511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.278165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.764646Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510222246487669:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:45.764746Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:45.795321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.827219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.859244Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.890687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.922950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.989533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:46.064331Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510226541455483:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:46.064430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:46.064453Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510226541455488:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:46.067523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:46.076156Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489510226541455490:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:24:46.135247Z node 3 :TX_PROXY ERROR: Actor# [3:7489510226541455542:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:47.291413Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489510209361584009:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:47.291497Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-04-04T17:24:42.111522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510209328671402:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:42.111589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e92/r3tmp/tmpWd1tgu/pdisk_1.dat 2025-04-04T17:24:42.452905Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:42.488203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:42.488329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:42.490009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9404 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:42.709877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:24:42.740580Z node 1 :TX_PROXY ERROR: Actor# [1:7489510209328671990:2297] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-04-04T17:24:44.964650Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510217960170171:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:44.964693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e92/r3tmp/tmpYBnip7/pdisk_1.dat 2025-04-04T17:24:45.071701Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:45.120795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:45.120894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:45.122581Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20853 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:45.259917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.267224Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.274833Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:24:45.285011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:45.449961Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:24:45.458004Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:24:45.482508Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:24:45.485875Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787485412 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-04-04T17:24:45.509799Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:45.512149Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.512391Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:45.513680Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.513868Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:45.514482Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-04T17:24:45.515276Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.515440Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:45.515870Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-04T17:24:45.516563Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.516696Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:45.517104Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-04T17:24:45.517730Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.517847Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:45.518225Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-04T17:24:45.518891Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.519004Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-04-04T17:24:45.519402Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-04T17:24:45.520005Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.520116Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:45.520531Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-04T17:24:45.521177Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.521279Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:45.521672Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-04T17:24:45.522566Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.522676Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:45.523052Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-04T17:24:45.523691Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.523790Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:45.524142Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-04T17:24:45.524732Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.524832Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:45.525170Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-04-04T17:24:45.525793Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-04-04T17:24:45.525895Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:45.526581Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-04-04T17:24:45.527260Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:45.527282Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:45.527472Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04 ... 
80 2025-04-04T17:24:45.940264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7489510222255138720 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-04-04T17:24:45.940307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-04T17:24:45.940326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-04-04T17:24:45.940479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.940573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510222255138092 RawX2: 4503608217307386 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-04-04T17:24:45.940590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715693, tablet: 72075186224037889, partId: 0 2025-04-04T17:24:45.940668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510222255138092 RawX2: 4503608217307386 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-04-04T17:24:45.940686Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-04-04T17:24:45.940726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7489510222255138092 RawX2: 4503608217307386 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-04-04T17:24:45.940744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-04-04T17:24:45.940761Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.940774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-04-04T17:24:45.940786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-04-04T17:24:45.940799Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715693:0 129 -> 240 2025-04-04T17:24:45.940976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.941049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.941085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.941162Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2025-04-04T17:24:45.941172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.941204Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-04-04T17:24:45.941205Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:24:45.941306Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2025-04-04T17:24:45.941321Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-04-04T17:24:45.941853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:24:45.941983Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-04-04T17:24:45.941996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-04-04T17:24:45.942031Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-04-04T17:24:45.942053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-04-04T17:24:45.942068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2025-04-04T17:24:45.942117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7489510222255138948:2427] message: TxId: 281474976715693 2025-04-04T17:24:45.942132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-04-04T17:24:45.942149Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715693:0 2025-04-04T17:24:45.942159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715693:0 2025-04-04T17:24:45.942275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:45.942362Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:24:45.942421Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-04-04T17:24:45.943608Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:24:45.943656Z node 2 :TX_DATASHARD INFO: 72075186224037894 Initiating switch from PreOffline to Offline state 2025-04-04T17:24:45.944914Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 
2025-04-04T17:24:45.944993Z node 2 :TX_DATASHARD INFO: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-04T17:24:45.945283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510222255138092 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 2025-04-04T17:24:45.945324Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:45.945568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510222255138720 RawX2: 4503608217307478 } TabletId: 72075186224037894 State: 4 2025-04-04T17:24:45.945600Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:45.945944Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-04T17:24:45.945951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.946144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.946146Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037894 state Offline TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-04T17:24:45.947472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:24:45.947667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:24:45.947673Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-04T17:24:45.947701Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-04-04T17:24:45.947822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-04-04T17:24:45.947939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:24:45.948068Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:24:45.948088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:24:45.948110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:24:45.948157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:24:45.948172Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-04T17:24:45.948815Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-04T17:24:45.948836Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-04-04T17:24:45.948911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:24:45.948924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:24:45.948952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-04-04T17:24:45.948963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-04-04T17:24:45.948983Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:24:45.949502Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2025-04-04T17:24:45.949557Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-04-04T17:24:40.824869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510199789945700:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:40.826957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e94/r3tmp/tmp1kBxlE/pdisk_1.dat 2025-04-04T17:24:41.156345Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:41.222564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:41.222724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:41.224401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14975 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:24:41.469478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.490182Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.506147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.682688Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:24:41.686588Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:24:41.711124Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:24:41.716150Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 2025-04-04T17:24:41.737243Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.738946Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:41.739024Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:41.740716Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3196 2180 6413)b }, ecr=1.000 2025-04-04T17:24:41.741386Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:24:41.741418Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-04-04T17:24:41.742835Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.745036Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:41.745110Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:41.747783Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.749380Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:24:41.749413Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld 2025-04-04T17:24:41.752517Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.752851Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4019 2983 5183)b }, ecr=1.000 2025-04-04T17:24:41.753799Z node 1 :TX_DATASHARD DEBUG: 
CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-04-04T17:24:41.753813Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-04-04T17:24:41.753894Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-04-04T17:24:41.753921Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743787481625 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-04-04T17:24:41.764002Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.766275Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.766477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.767968Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.768167Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.768803Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-04T17:24:41.769716Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.769857Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.770344Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-04T17:24:41.771090Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.771228Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.771732Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-04T17:24:41.772450Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.774383Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.775088Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-04T17:24:41.776160Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.776286Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.776901Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-04T17:24:41.777847Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.778049Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.778624Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 
2025-04-04T17:24:41.779573Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.779693Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.780263Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-04T17:24:41.781269Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.781569Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.782090Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-04T17:24:41.783076Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.783211Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.783767Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-04T17:24:41.784688Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.784833Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.785368Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-04T17:24:41.786522Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.786664Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.787266Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-04T17:24:41.788176Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.788302Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.788795Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-04-04T17:24:41.789828Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.790649Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-04-04T17:24:41.791149Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-04-04T17:24:41.792213Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-04-04T17:24:41.792399Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:41.792940Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 7207518622403788 ... 
ard: 72057594046644480, message: Source { RawX1: 7489510220668512860 RawX2: 4503608217307458 } TabletId: 72075186224037891 State: 4 2025-04-04T17:24:45.205507Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:45.205662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510220668512861 RawX2: 4503608217307459 } TabletId: 72075186224037892 State: 4 2025-04-04T17:24:45.205679Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:45.205757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510220668512861 RawX2: 4503608217307459 } TabletId: 72075186224037892 State: 4 2025-04-04T17:24:45.205769Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:45.206033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510220668512859 RawX2: 4503608217307457 } TabletId: 72075186224037890 State: 4 2025-04-04T17:24:45.206061Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:45.206133Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:24:45.206143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510220668512859 RawX2: 4503608217307457 } TabletId: 72075186224037890 State: 4 2025-04-04T17:24:45.206158Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 TClient::Ls response: 2025-04-04T17:24:45.206425Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-04-04T17:24:45.206429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.206479Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-04T17:24:45.206483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.206512Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-04T17:24:45.206517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.207136Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7489510216373545210 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-04-04T17:24:45.207172Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged 
DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-04-04T17:24:45.207298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.207324Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-04T17:24:45.207361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.207383Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-04-04T17:24:45.207556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-04T17:24:45.207804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-04T17:24:45.208096Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-04-04T17:24:45.208667Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-04T17:24:45.208748Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-04T17:24:45.209271Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-04-04T17:24:45.209510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-04-04T17:24:45.209686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:24:45.209816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-04T17:24:45.209904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:45.209977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-04-04T17:24:45.210325Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-04T17:24:45.210366Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-04T17:24:45.210411Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7489510220668512991:2617], serverId# [2:7489510220668512994:2620], sessionId# [0:0:0] 2025-04-04T17:24:45.210711Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-04T17:24:45.210776Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-04-04T17:24:45.211014Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-04-04T17:24:45.211030Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-04-04T17:24:45.212171Z node 2 :TX_DATASHARD INFO: OnTabletStop: 
72075186224037890 reason = ReasonStop 2025-04-04T17:24:45.212211Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7489510220668512989:2615], serverId# [2:7489510220668512992:2618], sessionId# [0:0:0] 2025-04-04T17:24:45.212413Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T17:24:45.212464Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-04-04T17:24:45.212610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:24:45.212660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-04T17:24:45.212674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-04T17:24:45.212723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-04-04T17:24:45.212729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-04-04T17:24:45.212746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-04T17:24:45.212754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-04-04T17:24:45.212771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-04-04T17:24:45.212821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:24:45.212948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:24:45.213640Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-04-04T17:24:45.213885Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-04-04T17:24:45.213951Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-04-04T17:24:45.214153Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-04-04T17:24:45.214292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:24:45.214662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:24:45.214678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:24:45.214789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:24:45.214983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:24:45.215172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-04-04T17:24:45.215207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:24:45.215240Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:24:45.215349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:24:45.215460Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-04-04T17:24:45.215491Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7489510216373545331:2391], serverId# [2:7489510216373545332:2392], sessionId# [0:0:0] 2025-04-04T17:24:45.215620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:24:45.215638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:24:45.215761Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-04-04T17:24:45.215803Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-04-04T17:24:45.215973Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-04-04T17:24:45.216000Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> KqpNewEngine::JoinWithParams [GOOD] >> KqpNewEngine::JoinPure >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: 2025-04-04T17:21:13.746822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:21:13.747559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:21:13.747625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001347/r3tmp/tmpHCM7qO/pdisk_1.dat 2025-04-04T17:21:14.235376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.283297Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:14.331113Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:21:14.331842Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:21:14.334077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:14.334920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:14.348120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:14.528548Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:21:14.528640Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:21:14.530787Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:644:2552] 2025-04-04T17:21:14.673553Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:21:14.673673Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:21:14.677368Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:21:14.677485Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:21:14.677978Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:21:14.678311Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-04-04T17:21:14.678449Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:21:14.681532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:21:14.682001Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:21:14.682675Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:21:14.682765Z node 1 :TX_PROXY DEBUG: Actor# [1:644:2552] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:21:14.733919Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:660:2567], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:21:14.735081Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:660:2567], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:21:14.735545Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:668:2573] 2025-04-04T17:21:14.735862Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:21:14.789173Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:660:2567], Recipient [1:668:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:21:14.790054Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:21:14.790197Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:21:14.792205Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:21:14.792299Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:21:14.792364Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:21:14.793972Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:21:14.794143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:21:14.794289Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:685:2573] in generation 1 2025-04-04T17:21:14.805175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:21:14.839269Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:21:14.840662Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:21:14.840870Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:687:2583] 2025-04-04T17:21:14.840919Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:21:14.840961Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:21:14.840996Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:21:14.841220Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:668:2573], Recipient [1:668:2573]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:21:14.841985Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:21:14.843090Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:21:14.843208Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:21:14.843299Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:21:14.843355Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:21:14.843421Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:21:14.843462Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:21:14.843502Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:21:14.843535Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:21:14.843589Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:21:14.843742Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:676:2577], Recipient [1:668:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:21:14.843780Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:21:14.843829Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:667:2572], serverId# [1:676:2577], sessionId# [0:0:0] 2025-04-04T17:21:14.845241Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:676:2577] 2025-04-04T17:21:14.845296Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:21:14.845405Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:21:14.845799Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:21:14.845895Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:21:14.846013Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:21:14.846145Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:21:14.846187Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:21:14.846276Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:21:14.846316Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:21:14.846668Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:21:14.846720Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:21:14.846769Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:21:14.846821Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:21:14.846919Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:21:14.846961Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:21:14.846997Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:21:14.847043Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:21:14.847074Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:21:14.848768Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:688:2584], Recipient [1:668:2573]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:21:14.848828Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:21 ... p: 1537 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-04-04T17:24:47.541959Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-04T17:24:47.542010Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:24:47.542059Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-04T17:24:47.542119Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit CheckRead 2025-04-04T17:24:47.542190Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-04T17:24:47.542222Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit CheckRead 2025-04-04T17:24:47.542279Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:47.542312Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:24:47.542361Z node 28 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037889 2025-04-04T17:24:47.542404Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-04T17:24:47.542432Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:47.542458Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2025-04-04T17:24:47.542485Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2025-04-04T17:24:47.542599Z node 28 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1537 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-04-04T17:24:47.542813Z node 28 :TX_DATASHARD 
TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1537/18446744073709551615 2025-04-04T17:24:47.542866Z node 28 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[28:1101:2883], 1} after executionsCount# 1 2025-04-04T17:24:47.542908Z node 28 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[28:1101:2883], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:47.542977Z node 28 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[28:1101:2883], 1} finished in read 2025-04-04T17:24:47.543028Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-04T17:24:47.543057Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T17:24:47.543086Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:24:47.543116Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:24:47.543167Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-04-04T17:24:47.543209Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:24:47.543236Z node 28 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037889 has finished 2025-04-04T17:24:47.543266Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T17:24:47.543354Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:24:47.543411Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:24:47.543451Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T17:24:47.544477Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] ::Bootstrap [28:1104:2886] 2025-04-04T17:24:47.544600Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] lookup [28:1104:2886] 2025-04-04T17:24:47.544712Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037889] send [28:954:2766] 2025-04-04T17:24:47.544746Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037889] push event to server [28:954:2766] 2025-04-04T17:24:47.544912Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] queue send [28:1104:2886] 2025-04-04T17:24:47.545065Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [28:1101:2883], Recipient [28:704:2589]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-04T17:24:47.545112Z node 28 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } 2025-04-04T17:24:47.545210Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] forward result local node, try to connect [28:1104:2886] 2025-04-04T17:24:47.545343Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890]::SendEvent [28:1104:2886] 2025-04-04T17:24:47.545670Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [28:1105:2887], Recipient [28:1057:2855]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:47.545708Z node 28 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:47.545751Z node 28 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [28:1104:2886], serverId# [28:1105:2887], sessionId# [0:0:0] 2025-04-04T17:24:47.545795Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] connected with status OK role: Leader [28:1104:2886] 2025-04-04T17:24:47.545835Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] send queued [28:1104:2886] 2025-04-04T17:24:47.545865Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] push event to server [28:1104:2886] 2025-04-04T17:24:47.546066Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [28:1101:2883], Recipient [28:1057:2855]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1537 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-04-04T17:24:47.546156Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-04T17:24:47.546210Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:24:47.546282Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-04T17:24:47.546350Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-04-04T17:24:47.546418Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:24:47.546449Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-04-04T17:24:47.546478Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:47.546511Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-04T17:24:47.546571Z node 28 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-04-04T17:24:47.546612Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:24:47.546639Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:47.546667Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-04-04T17:24:47.546694Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-04T17:24:47.546814Z node 28 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1537 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-04-04T17:24:47.547053Z node 28 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v1537/18446744073709551615 2025-04-04T17:24:47.547104Z node 28 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[28:1101:2883], 2} after executionsCount# 1 2025-04-04T17:24:47.547148Z node 28 :TX_DATASHARD TRACE: 72075186224037890 read 
iterator# {[28:1101:2883], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:47.547234Z node 28 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[28:1101:2883], 2} finished in read 2025-04-04T17:24:47.547287Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:24:47.547314Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-04-04T17:24:47.547344Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T17:24:47.547374Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-04-04T17:24:47.547422Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:24:47.547447Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T17:24:47.547474Z node 28 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-04-04T17:24:47.547506Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-04T17:24:47.547595Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:24:47.547649Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:24:47.547690Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-04T17:24:47.548405Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] send [28:1104:2886] 2025-04-04T17:24:47.548447Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] push event to server [28:1104:2886] 2025-04-04T17:24:47.548557Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [28:1101:2883], Recipient [28:1057:2855]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2025-04-04T17:24:47.548601Z node 28 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 2 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } } >> KqpReturning::ReturningWorksIndexedUpsert+QueryService >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> KqpYql::EvaluateExpr3 [GOOD] |95.3%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNotNullColumns::InsertNotNullPk |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] >> KqpScripting::UnsafeTimestampCast >> KqpNewEngine::Update-UseSink [GOOD] >> KqpNewEngine::UpdateFromParams >> KqpReturning::ReturningWorks+QueryService >> KqpNewEngine::BlindWrite >> KqpNewEngine::KeyColumnOrder [GOOD] >> KqpNewEngine::KeyColumnOrder2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD] Test command err: Trying to start YDB, gRPC: 16338, MsgBus: 5716 2025-04-04T17:24:41.008452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510202524478903:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:41.008629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019a5/r3tmp/tmpq2cUA1/pdisk_1.dat 2025-04-04T17:24:41.340962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:41.341093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:41.341384Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:41.345873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16338, node 1 2025-04-04T17:24:41.424779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:41.424807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:41.424828Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:41.425193Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5716 TClient is connected to server localhost:5716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:41.931356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:24:41.956466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:24:42.103242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.230373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.288835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.944393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510211114415274:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:43.944495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.231364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.262003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.293918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.322787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.353291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.383102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.422445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510215409383084:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.422584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.422981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510215409383089:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.427702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:44.437229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510215409383092:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:44.495482Z node 1 :TX_PROXY ERROR: Actor# [1:7489510215409383145:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30105, MsgBus: 11021 2025-04-04T17:24:46.347049Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510224423908367:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:46.347091Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019a5/r3tmp/tmp0AyMSy/pdisk_1.dat 2025-04-04T17:24:46.472989Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:46.475600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:46.475671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:46.477224Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30105, node 2 2025-04-04T17:24:46.522678Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:46.522710Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:46.522718Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:46.522836Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11021 TClient is connected to server localhost:11021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:46.896161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.911005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:46.991252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.176210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:47.246726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.995373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510233013844728:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:48.995481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:49.033363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:49.065239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:49.108031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:49.150511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:49.227723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:49.275649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:49.329779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510237308812538:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:49.329859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:49.330310Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510237308812543:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:49.334422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:49.343686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510237308812545:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:49.445063Z node 2 :TX_PROXY ERROR: Actor# [2:7489510237308812603:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> KqpYql::UuidPrimaryKey [GOOD] >> KqpNewEngine::MultiOutput [GOOD] >> KqpNewEngine::MultiStatement >> KqpRanges::UpdateMulti [GOOD] >> KqpRanges::UpdateWhereInBigLiteralList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 6110269656412695340 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2025-04-04T17:23:11.824471Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-04-04T17:23:11.991629Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 
SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2025-04-04T17:23:12.779629Z 1 00h01m30.111024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 
0.999927} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# 
[1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999878} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999878} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999902} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999902} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Stop node 3 2025-04-04T17:23:14.492163Z 1 00h02m00.161536s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2025-04-04T17:23:14.661675Z 1 00h02m10.211024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-04-04T17:23:14.663676Z 1 00h02m10.211024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3226748546127285771] Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] 2025-04-04T17:23:16.096699Z 1 00h03m50.211024s :BS_PROXY ERROR: Group# 2181038080 StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult: TEvPutResult {Id# [1:1:62:0:0:354994:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4)." ApproximateFreeSpaceShare# 0} Step = 63 SEND TEvPut with key [1:1:63:0:0:2 ... 
Id# [1:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Stop node 3 2025-04-04T17:24:26.680452Z 1 00h28m01.167168s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 1 2025-04-04T17:24:26.973254Z 1 00h28m11.167680s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 964 SEND TEvPut 
with key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 0 2025-04-04T17:24:28.052469Z 9 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:127483:350] ServerId# [1:128519:173] TabletId# 72057594037932033 PipeClientId# [9:127483:350] 2025-04-04T17:24:28.052631Z 8 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:158117:16] ServerId# [1:158127:4100] TabletId# 72057594037932033 PipeClientId# [8:158117:16] 2025-04-04T17:24:28.052746Z 7 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:157061:16] ServerId# [1:157068:3973] TabletId# 72057594037932033 PipeClientId# [7:157061:16] 2025-04-04T17:24:28.052864Z 6 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:134128:16] ServerId# [1:134135:1012] TabletId# 72057594037932033 PipeClientId# [6:134128:16] 2025-04-04T17:24:28.052959Z 5 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:154154:16] ServerId# [1:154162:3594] TabletId# 72057594037932033 PipeClientId# [5:154154:16] 2025-04-04T17:24:28.053070Z 4 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:163086:16] ServerId# [1:163096:4701] TabletId# 72057594037932033 PipeClientId# [4:163086:16] 2025-04-04T17:24:28.053245Z 3 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:153068:16] ServerId# [1:153078:3471] TabletId# 72057594037932033 PipeClientId# [3:153068:16] 2025-04-04T17:24:28.053369Z 2 00h28m41.212048s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:162123:16] ServerId# [1:162130:4592] TabletId# 72057594037932033 PipeClientId# [2:162123:16] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999768} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 7 2025-04-04T17:24:29.940306Z 1 00h29m21.214096s :PIPE_SERVER ERROR: 
[72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Starting nodes Start compaction 1 Start checking >> TLocksFatTest::PointSetRemove [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 21658, MsgBus: 7548 2025-04-04T17:24:47.290500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510228986858719:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:47.290964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00199c/r3tmp/tmpSXSTuj/pdisk_1.dat 2025-04-04T17:24:47.621011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:47.625034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:47.625134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:47.628090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21658, node 1 2025-04-04T17:24:47.692279Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:47.692298Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:47.692308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:47.692407Z node 
1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7548 TClient is connected to server localhost:7548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:48.134790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.148454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:50.171578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510241871761135:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.171734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.428488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:24:50.546276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510241871761241:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.546378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.546472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510241871761246:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.550590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:24:50.560853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510241871761248:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:24:50.626335Z node 1 :TX_PROXY ERROR: Actor# [1:7489510241871761299:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:51.577633Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510246166728783:2397], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2025-04-04T17:24:51.578164Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTVlZDlmOGMtZjI1MmU0NTUtMmViMzA1ZDktNDBjMDlhM2E=, ActorId: [1:7489510241871761117:2328], ActorState: ExecuteState, TraceId: 01jr0xb07k3c229d774ykfnwkz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T17:24:52.285273Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510228986858719:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:52.285333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQBigMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err: 2025-04-04T17:24:30.236106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:30.236842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:30.236905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016b7/r3tmp/tmpX62aKc/pdisk_1.dat 2025-04-04T17:24:30.723590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.771648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:30.813007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.813189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.825090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:30.907565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.953153Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:24:30.954332Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:24:30.954867Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:24:30.955138Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:30.966714Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:24:31.004782Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:31.004926Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:24:31.006764Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:24:31.006871Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:24:31.006957Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:24:31.007393Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:24:31.007546Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:24:31.007670Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:24:31.018548Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:24:31.049892Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:24:31.050117Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:24:31.050270Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:24:31.050313Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:24:31.050349Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:24:31.050388Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:31.050633Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.050705Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.051027Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:24:31.051128Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:24:31.051317Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:31.051375Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:24:31.051431Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:24:31.051476Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:24:31.051515Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:24:31.051559Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:24:31.051604Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:24:31.051703Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.051743Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.051790Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:24:31.052200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:24:31.052252Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:24:31.052364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:31.052638Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:24:31.052705Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:24:31.052798Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:24:31.052856Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:24:31.052903Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:24:31.052941Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:24:31.052975Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:31.053266Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:24:31.053308Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:24:31.053346Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:24:31.053394Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:31.053456Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:24:31.053506Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:24:31.053542Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:24:31.053573Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:31.053599Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:24:31.055228Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:24:31.055297Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:24:31.066006Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:31.066096Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:31.066131Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:31.066202Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:24:31.066322Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:24:31.206367Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.206439Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.206545Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:24:31.206961Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:24:31.207010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:24:31.207133Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:31.207212Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:24:31.207266Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:24:31.207305Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:24:31.211982Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:24:31.212056Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:31.212820Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.212862Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.212907Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:3 ... 1162Z node 6 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-04-04T17:24:53.271213Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-04-04T17:24:53.271267Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2025-04-04T17:24:53.271314Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-04T17:24:53.271358Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-04T17:24:53.271388Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:53.271416Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:24:53.271490Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2025-04-04T17:24:53.271564Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2025-04-04T17:24:53.271609Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2025-04-04T17:24:53.271661Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-04T17:24:53.271688Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:53.271713Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-04T17:24:53.271751Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2025-04-04T17:24:53.271801Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-04T17:24:53.271822Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-04T17:24:53.271842Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-04T17:24:53.271863Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit 
StoreAndSendWriteOutRS 2025-04-04T17:24:53.271892Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-04T17:24:53.271914Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-04T17:24:53.271935Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-04T17:24:53.271958Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-04T17:24:53.271982Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-04T17:24:53.272005Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-04T17:24:53.272042Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-04T17:24:53.272070Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-04T17:24:53.272097Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-04T17:24:53.272119Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-04T17:24:53.272150Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2025-04-04T17:24:53.272181Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2025-04-04T17:24:53.272211Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2025-04-04T17:24:53.272377Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2025-04-04T17:24:53.272453Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:24:53.272539Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:24:53.272574Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2025-04-04T17:24:53.272624Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2025-04-04T17:24:53.272681Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-04T17:24:53.272920Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2025-04-04T17:24:53.272965Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2025-04-04T17:24:53.273009Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:24:53.273071Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:24:53.273112Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-04-04T17:24:53.273138Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:24:53.273168Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2025-04-04T17:24:53.273214Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:24:53.273259Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:24:53.273307Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:24:53.273349Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:24:53.284828Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-04-04T17:24:53.284989Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:24:53.285073Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2025-04-04T17:24:53.285161Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:592:2517] 2025-04-04T17:24:53.285246Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:53.287019Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:766:2636], Recipient [6:665:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:53.287099Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:53.287163Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:765:2635], serverId# [6:766:2636], sessionId# [0:0:0] 2025-04-04T17:24:53.287379Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:764:2634], Recipient [6:665:2569]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-04T17:24:53.288517Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:769:2639], Recipient [6:665:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:53.288608Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:53.288665Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:768:2638], serverId# [6:769:2639], sessionId# [0:0:0] 2025-04-04T17:24:53.288927Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:767:2637], Recipient [6:665:2569]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-04T17:24:53.289015Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:24:53.289160Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:24:53.289232Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-04-04T17:24:53.289306Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-04-04T17:24:53.289418Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:24:53.289472Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:24:53.289517Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:53.289561Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-04-04T17:24:53.289618Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-04-04T17:24:53.289671Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:24:53.289698Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:53.289722Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:24:53.289777Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:24:53.289893Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-04T17:24:53.290181Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:767:2637], 1000} after executionsCount# 1 2025-04-04T17:24:53.290287Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:767:2637], 1000} sends rowCount# 3, bytes# 96, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:53.290382Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:767:2637], 1000} finished in read 2025-04-04T17:24:53.290450Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:24:53.290480Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:24:53.290506Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:24:53.290533Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:24:53.290579Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-04-04T17:24:53.290603Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:24:53.290649Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-04-04T17:24:53.290714Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:24:53.290838Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpScripting::StreamExecuteYqlScriptSeveralQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2025-04-04T17:24:38.728339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510191518540490:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:38.728519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e99/r3tmp/tmpT0caqe/pdisk_1.dat 2025-04-04T17:24:39.020046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:39.037732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:39.037834Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:39.040474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4113 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:39.296476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:39.322492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:39.445775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:39.497788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:43.730169Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510191518540490:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:43.730264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:45.232649Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510219264086689:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:45.236014Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e99/r3tmp/tmp07CAdD/pdisk_1.dat 2025-04-04T17:24:45.375048Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:45.391918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:45.392011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:45.395601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24683 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:45.562054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:45.580750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:45.650860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.702189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e99/r3tmp/tmptYpkS4/pdisk_1.dat 2025-04-04T17:24:49.122093Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:49.232272Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:49.252498Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:49.252601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:49.254739Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2863 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:49.436299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:49.463994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.545798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.618722Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpNotNullColumns::InsertNotNullPk [GOOD] >> KqpNotNullColumns::InsertNotNull >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> KqpScripting::ScanQueryDisable [GOOD] >> TLocksTest::Range_Pinhole [GOOD] >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> KqpNewEngine::InShardsWrite >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> KqpSqlIn::SimpleKey >> KqpScripting::ExecuteYqlScriptScanScalar >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 8459, MsgBus: 63223 2025-04-04T17:24:44.139585Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510217114287148:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:44.141160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019a0/r3tmp/tmpUuaInG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8459, node 1 2025-04-04T17:24:44.483250Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:24:44.483770Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:24:44.485687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:44.519241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:44.519401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:44.521682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:44.528120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:44.528139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:44.528144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:44.528255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63223 TClient is connected to server localhost:63223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:24:45.014431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.049981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.195122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:45.341752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.420894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.123270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510229999190680:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:47.123395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:47.418102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.452925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.484908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.515083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.543401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.574187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.650144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510229999191199:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:47.650228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:47.650272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510229999191204:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:47.653445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:47.663054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510229999191206:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:47.760288Z node 1 :TX_PROXY ERROR: Actor# [1:7489510229999191261:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:49.014633Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787489031, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 4094, MsgBus: 31988 2025-04-04T17:24:49.751780Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510239671827710:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:49.753026Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019a0/r3tmp/tmp7QILXU/pdisk_1.dat 2025-04-04T17:24:49.845986Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4094, node 2 2025-04-04T17:24:49.890072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:49.890686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:49.893582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:49.938268Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:49.938290Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:49.938297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:49.938408Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31988 TClient is connected to server localhost:31988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:50.340410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:50.364796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.435669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.623326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.702297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:52.954646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510252556731354:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.954726Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.989444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.020212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.048510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.076399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.104590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.176608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.219532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510256851699164:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:53.219601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:53.219775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510256851699169:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:53.222886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:53.235656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510256851699171:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:24:53.296988Z node 2 :TX_PROXY ERROR: Actor# [2:7489510256851699224:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:54.748216Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787494778, txId: 281474976715671] shutting down 2025-04-04T17:24:54.751984Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510239671827710:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:54.752032Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788030.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788030.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788030.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788030.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788030.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788030.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786830.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788030.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788030.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786830.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786830.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786830.000000s;Name=;Codec=}; 2025-04-04T17:23:51.064093Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:51.153573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:51.181862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:51.182207Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:51.191079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:51.191339Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:51.191600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:51.191737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:51.191889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:51.192045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:51.192167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:51.192288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:51.192416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:51.192532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:51.192657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:51.192772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:51.224808Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:51.225014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:51.225072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:51.225244Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:51.225398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:51.225494Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:51.225541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:51.225661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:51.225741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:51.225786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:51.225817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:51.225982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:51.226045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:51.226087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:51.226118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:51.226216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:51.226303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:51.226352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:51.226383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:51.226466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:51.226504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:51.226536Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:51.226590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-04T17:23:51.226637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:51.226671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:51.227091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-04-04T17:23:51.227236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=68; 2025-04-04T17:23:51.227336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-04T17:23:51.227428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2025-04-04T17:23:51.227613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:51.227668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:51.227708Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:51.227925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:51.227989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:51.228031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:51.228189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:51.228238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
data.cpp:29;EXECUTE:finishLoadingTime=349; 2025-04-04T17:24:55.551646Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=32582; 2025-04-04T17:24:55.559520Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7784; 2025-04-04T17:24:55.567723Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7194; 2025-04-04T17:24:55.567845Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8216; 2025-04-04T17:24:55.568022Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=104; 2025-04-04T17:24:55.568140Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2025-04-04T17:24:55.568303Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=113; 2025-04-04T17:24:55.568439Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=82; 2025-04-04T17:24:55.577037Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8522; 2025-04-04T17:24:55.588257Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11102; 2025-04-04T17:24:55.588396Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=41; 2025-04-04T17:24:55.588476Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-04-04T17:24:55.588523Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-04-04T17:24:55.588571Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-04T17:24:55.588618Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-04T17:24:55.588713Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=56; 2025-04-04T17:24:55.588770Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-04T17:24:55.588888Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=76; 2025-04-04T17:24:55.588942Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-04T17:24:55.589011Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-04T17:24:55.589104Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=50; 2025-04-04T17:24:55.589464Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=319; 2025-04-04T17:24:55.589510Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=78441; 2025-04-04T17:24:55.589663Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:55.589774Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:55.589831Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:55.589897Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:55.609179Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:55.609341Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:55.609405Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:55.609501Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=4; 2025-04-04T17:24:55.609570Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:55.609615Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:55.609665Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:55.609705Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:55.609802Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:55.610350Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:55.610460Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2199:4072];tablet_id=9437184;parent=[1:2159:4039];fline=manager.cpp:82;event=ask_data;request=request_id=123;1={portions_count=18};; 2025-04-04T17:24:55.611120Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:55.611815Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:55.611855Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-04T17:24:55.611882Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:55.611930Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:55.611992Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:55.612058Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=4; 2025-04-04T17:24:55.612125Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:55.612172Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:55.612219Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:55.612260Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:55.612354Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:55.613994Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=18;path_id=1; 2025-04-04T17:24:55.615014Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2159:4039];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UpsertEmptyInput >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::LocksMultiShard >> KqpNewEngine::BlindWrite [GOOD] >> KqpNewEngine::BlindWriteParameters >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::MultiStatementMixPure >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink >> KqpAgg::AggWithLookup >> KqpReturning::ReturningWorksIndexedUpsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedUpsert-QueryService >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> KqpReturning::ReturningWorks+QueryService [GOOD] >> KqpReturning::ReturningWorks-QueryService >> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD] >> KqpNewEngine::EmptyMapWithBroadcast >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertFromSelect >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-04-04T17:24:30.954338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:30.954772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:30.954843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016cc/r3tmp/tmpdTl4jU/pdisk_1.dat 2025-04-04T17:24:31.283375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.324661Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:31.361699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:31.361811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:31.372986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:31.455191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.494109Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:24:31.495208Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:24:31.495716Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:24:31.495984Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:31.506860Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:24:31.542990Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:31.543143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:24:31.544956Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:24:31.545059Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:24:31.545126Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:24:31.545542Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:24:31.545692Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:24:31.545798Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:24:31.556691Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:24:31.591217Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:24:31.591438Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:24:31.591578Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:24:31.591637Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:24:31.591682Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:24:31.591714Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:31.591944Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.591993Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.592321Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:24:31.592420Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:24:31.592558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:31.592611Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:24:31.592675Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:24:31.592723Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:24:31.592759Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:24:31.592792Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:24:31.592838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:24:31.592936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.592969Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.593015Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:24:31.593409Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:24:31.593458Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:24:31.593564Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:31.593809Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:24:31.593879Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:24:31.593975Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:24:31.594019Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:24:31.594056Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:24:31.594115Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:24:31.594159Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:31.594462Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:24:31.594506Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:24:31.594559Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:24:31.594593Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:31.594653Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:24:31.594685Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:24:31.594719Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:24:31.594748Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:31.594772Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:24:31.596360Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:24:31.596416Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:24:31.607195Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:31.607295Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:31.607329Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:31.607389Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:24:31.607485Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:24:31.747060Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.747147Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.747264Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:24:31.747709Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:24:31.747754Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:24:31.747878Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:31.747940Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:24:31.747992Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:24:31.748032Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:24:31.752721Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:24:31.752792Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:31.753702Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.753759Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.753823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:3 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-04T17:24:58.529073Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:933:2774], 1001} after executionsCount# 1 2025-04-04T17:24:58.529103Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:933:2774], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:58.529148Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:933:2774], 1001} finished in read 2025-04-04T17:24:58.529175Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-04T17:24:58.529190Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T17:24:58.529208Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:24:58.529226Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:24:58.529263Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-04-04T17:24:58.529278Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:24:58.529297Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-04-04T17:24:58.529321Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T17:24:58.529381Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T17:24:58.529772Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:938:2779], Recipient [7:719:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.529797Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.529827Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:937:2778], serverId# [7:938:2779], sessionId# [0:0:0] 2025-04-04T17:24:58.529899Z node 7 :TX_DATASHARD TRACE: StateWork, received 
event# 269553169, Sender [7:936:2777], Recipient [7:719:2596]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-04T17:24:58.538852Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:941:2782], Recipient [7:719:2596]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.538952Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.538993Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:940:2781], serverId# [7:941:2782], sessionId# [0:0:0] 2025-04-04T17:24:58.539149Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:939:2780], Recipient [7:719:2596]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-04T17:24:58.539243Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-04-04T17:24:58.539283Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:24:58.539343Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-04T17:24:58.539402Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-04-04T17:24:58.539475Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-04T17:24:58.539505Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-04-04T17:24:58.539531Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:58.539592Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-04T17:24:58.539636Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-04-04T17:24:58.539667Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-04T17:24:58.539688Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:58.539709Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-04-04T17:24:58.539729Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-04-04T17:24:58.539797Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-04T17:24:58.539919Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:939:2780], 1002} after executionsCount# 1 2025-04-04T17:24:58.539962Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:939:2780], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:58.540076Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:939:2780], 1002} finished in read 2025-04-04T17:24:58.540121Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-04T17:24:58.540160Z 
node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-04-04T17:24:58.540183Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T17:24:58.540199Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-04-04T17:24:58.540229Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-04-04T17:24:58.540242Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T17:24:58.540258Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-04-04T17:24:58.540276Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-04T17:24:58.540334Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-04T17:24:58.541098Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:944:2785], Recipient [7:713:2594]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.541140Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.541168Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:943:2784], serverId# [7:944:2785], sessionId# [0:0:0] 2025-04-04T17:24:58.541260Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:942:2783], Recipient [7:713:2594]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-04T17:24:58.542064Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:947:2788], Recipient [7:713:2594]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.542105Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:58.542147Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:946:2787], serverId# [7:947:2788], sessionId# [0:0:0] 2025-04-04T17:24:58.542355Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:945:2786], Recipient [7:713:2594]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-04-04T17:24:58.542429Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-04-04T17:24:58.542463Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:24:58.542518Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-04-04T17:24:58.542566Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-04-04T17:24:58.542650Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-04T17:24:58.542683Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-04-04T17:24:58.542726Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:58.542765Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-04-04T17:24:58.542810Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-04-04T17:24:58.542857Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-04T17:24:58.542892Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:58.542914Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-04-04T17:24:58.542939Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-04-04T17:24:58.543029Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-04-04T17:24:58.543211Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:945:2786], 1003} after executionsCount# 1 2025-04-04T17:24:58.543264Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:945:2786], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:58.543324Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:945:2786], 1003} finished in read 2025-04-04T17:24:58.543373Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-04T17:24:58.543406Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-04-04T17:24:58.543434Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-04-04T17:24:58.543460Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-04-04T17:24:58.543504Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-04-04T17:24:58.543528Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-04-04T17:24:58.543555Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-04-04T17:24:58.543583Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-04-04T17:24:58.543681Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143788027.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788027.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788027.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788027.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788027.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788027.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788027.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786827.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788027.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788027.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786827.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786827.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786827.000000s;Name=;Codec=}; 2025-04-04T17:23:47.889200Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:47.959908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:47.984249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:47.984558Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:47.992258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:47.992458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:47.992675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:47.992779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:47.992896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:47.993030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:47.993151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:47.993254Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:47.993364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:47.993462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:47.993567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:47.993671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:48.020174Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:48.020366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:48.020422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:48.020583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:48.020766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:48.020837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:48.020894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:48.020988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:48.021048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:48.021095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:48.021125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:48.021266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:48.021322Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:48.021361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:48.021389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:48.021477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:48.021525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:48.021570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:48.021596Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:48.021668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:48.021702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:48.021728Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:48.021780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:48.021818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:48.021845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:48.022204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-04-04T17:23:48.022326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-04-04T17:23:48.022428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-04-04T17:23:48.022515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T17:23:48.022669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:48.022802Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:48.022843Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:48.023065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:48.023137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:48.023176Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... cpp:29;EXECUTE:finishLoadingTime=457; 2025-04-04T17:24:59.526882Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=50849; 2025-04-04T17:24:59.535478Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=8510; 2025-04-04T17:24:59.545718Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=9230; 2025-04-04T17:24:59.545869Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=10274; 2025-04-04T17:24:59.546061Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=112; 2025-04-04T17:24:59.546195Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=79; 2025-04-04T17:24:59.546370Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=125; 2025-04-04T17:24:59.546534Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=107; 2025-04-04T17:24:59.557014Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10388; 2025-04-04T17:24:59.570981Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=13826; 2025-04-04T17:24:59.571165Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=56; 2025-04-04T17:24:59.571275Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=31; 
2025-04-04T17:24:59.571335Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-04-04T17:24:59.571385Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-04T17:24:59.571435Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-04-04T17:24:59.571527Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=49; 2025-04-04T17:24:59.571584Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-04T17:24:59.571707Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=77; 2025-04-04T17:24:59.571770Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-04T17:24:59.571850Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2025-04-04T17:24:59.571956Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61; 2025-04-04T17:24:59.572373Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=373; 2025-04-04T17:24:59.572417Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=105263; 2025-04-04T17:24:59.572557Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:24:59.572670Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:24:59.572721Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:24:59.572794Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:24:59.590644Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:24:59.590837Z node 1 :TX_COLUMNSHARD 
DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:59.590934Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:59.591020Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:59.591094Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:59.591145Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:59.591216Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:59.591277Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:59.591410Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:59.592004Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:59.592101Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2612:4486];tablet_id=9437184;parent=[1:2572:4453];fline=manager.cpp:82;event=ask_data;request=request_id=146;1={portions_count=29};; 2025-04-04T17:24:59.593223Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:24:59.593602Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:24:59.593639Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:24:59.593670Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:24:59.593720Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:24:59.593786Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:24:59.593852Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=6; 2025-04-04T17:24:59.593921Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:24:59.593973Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:24:59.594029Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:59.594073Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:24:59.594185Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:24:59.595246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=29;path_id=1; 2025-04-04T17:24:59.596772Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2572:4453];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail 
[GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788023.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788023.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786823.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-04T17:23:45.329909Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:45.417299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:45.443261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:45.443585Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:45.454292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:45.454576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:45.454875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:45.455002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:45.455129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:45.455286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:45.455418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:45.455525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:45.455651Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:45.455749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:45.455846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:45.455949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:45.484636Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:45.484886Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:45.484947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:45.485116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:45.485291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:45.485368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:45.485414Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:45.485518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:45.485604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:45.485656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:45.485687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:45.485843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:45.485892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:45.485928Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:45.485953Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:45.486038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:45.486082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:45.486120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:45.486147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:45.486264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:45.486322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:45.486357Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:45.486404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:45.486442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:45.486481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:45.486888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-04T17:23:45.486986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-04T17:23:45.487069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-04-04T17:23:45.487206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T17:23:45.487400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:45.487459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:45.487492Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:45.487661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:45.487705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:45.487745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:45.487905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:45.487950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:45.487984Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:45.488176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... equest_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:33:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:81:8336:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:51:8408:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:22:8696:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:19:8576:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:32:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:34:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:83:2768:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host 
name;storage_id=/cold;blob=[9437184:7:1:255:97:2768:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:50:8536:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:4:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:88:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:70:9384:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:61:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:29:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:73:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:93:8368:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:64:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:58:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:35:8656:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:63:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:80:8368:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:36:8624:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:47:8544:0]; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:69:2768:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:56:9608:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:79:8408:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:27:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:59:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:89:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:41:2848:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:45:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:74:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:28:8712:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:30:8704:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:18:8592:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:44:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:49:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:96:8328:0]; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:20:2840:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:86:8568:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:66:8360:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:3:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:87:8552:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:98:9384:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:84:9608:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:72:8560:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:11:8672:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:6:8680:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:94:8360:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:77:8544:0]; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] Test command err: 2025-04-04T17:24:30.236004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:30.236563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:30.236622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016de/r3tmp/tmpWWuWru/pdisk_1.dat 2025-04-04T17:24:30.648940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.686162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:30.730723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.731904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.744736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:30.837492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.882616Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:24:30.883318Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:24:30.883632Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:24:30.883806Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:30.896208Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:24:30.922144Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:30.922325Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:24:30.924041Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:24:30.924122Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:24:30.924196Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:24:30.925263Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:24:30.925402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:24:30.925479Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:24:30.936302Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:24:30.962075Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:24:30.963533Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:24:30.963639Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:24:30.963666Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:24:30.963695Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:24:30.963719Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:30.963893Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:30.964727Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:30.965575Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:24:30.965688Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:24:30.965822Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:30.965869Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:24:30.965912Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:24:30.965954Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:24:30.965983Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:24:30.966009Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:24:30.966049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:24:30.966158Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:30.966197Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:30.966268Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:24:30.967846Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:24:30.967887Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:24:30.967963Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:30.968230Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:24:30.968270Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:24:30.968345Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:24:30.968379Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:24:30.968405Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:24:30.968427Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
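
(Editor's aside: the TX_DATASHARD TRACE entries above walk transaction 281474976715657 through a chain of execution units — CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan — where each attempt returns a status such as Executed, ExecutedNoMoreRestarts, DelayComplete, or "not ready". The sketch below is a schematic C++ reading of that loop only; it is not YDB's actual code, and every name in it is invented for illustration.)

    #include <deque>
    #include <string>

    // Hypothetical statuses mirroring the trace output above; the real log
    // also shows variants like ExecutedNoMoreRestarts and DelayCompleteNoMoreRestarts.
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;        // e.g. "CheckSchemeTx"
        EStatus (*Execute)();    // one execution attempt on this unit
    };

    // Schematic pipeline driver: try each unit in order; stop when a unit
    // reports the operation is not ready (WaitForPlan waits for TEvPlanStep).
    void RunPipeline(std::deque<TUnit>& plan) {
        while (!plan.empty()) {
            TUnit unit = plan.front();
            EStatus status = unit.Execute();   // "Trying to execute ... on unit X"
            if (status == EStatus::NotReady) {
                return;                        // "is not ready to execute on unit WaitForPlan"
            }
            plan.pop_front();                  // "Advance execution plan ... executing on unit X"
            // DelayComplete defers work to a later Complete() phase, which is what
            // the "Complete execution for ... on unit StoreSchemeTx" lines record.
        }
    }

    int main() {
        std::deque<TUnit> plan{
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},
        };
        RunPipeline(plan);
        return 0;
    }
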
2025-04-04T17:24:30.968457Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:30.968643Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:24:30.968664Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:24:30.968685Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:24:30.968713Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:30.968779Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:24:30.968797Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:24:30.968818Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:24:30.968838Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:30.968878Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:24:30.969925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:24:30.969957Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:24:30.980494Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:30.980547Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:30.980586Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:30.980629Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:24:30.980676Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:24:31.118802Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.118863Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.118889Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:24:31.119192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:24:31.119220Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:24:31.119326Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:31.119357Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:24:31.119386Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:24:31.119409Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:24:31.122369Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:24:31.122433Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:31.123909Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.123975Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.124016Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:3 ... 15660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:24:59.530797Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:24:59.530828Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715660 2025-04-04T17:24:59.530891Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1511 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-04-04T17:24:59.531009Z node 7 :TX_DATASHARD DEBUG: Complete [1511 : 281474976715660] from 72075186224037889 at tablet 72075186224037889 send result to client [7:909:2663], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:24:59.531566Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:59.531782Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:24:59.532356Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:59.533487Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:59.534609Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-04T17:24:59.534745Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:696:2584], Recipient [7:699:2586]: {TEvReadSet step# 1511 txid# 281474976715660 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-04-04T17:24:59.534793Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:24:59.534855Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 2025-04-04T17:24:59.535139Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-04T17:24:59.535274Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[7:699:2586], Recipient [7:696:2584]: {TEvReadSet step# 1511 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-04-04T17:24:59.535307Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-04-04T17:24:59.535342Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715660 ... validating table 2025-04-04T17:24:59.851521Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0xb80yc1ej6qxwkq3952dr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ODQ3MTY3YTAtNzM1NmQ1YzctYjMzNTAyLTFjY2IxNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:24:59.861457Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:964:2782], Recipient [7:696:2584]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1511 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T17:24:59.861630Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:24:59.861731Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-04-04T17:24:59.861874Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:24:59.861938Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:24:59.861985Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:59.862034Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:24:59.862092Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-04-04T17:24:59.862141Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:24:59.862170Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:59.862192Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:24:59.862220Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-04-04T17:24:59.862399Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1511 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:24:59.862751Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1511/18446744073709551615 2025-04-04T17:24:59.862823Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:964:2782], 0} after executionsCount# 1 2025-04-04T17:24:59.862890Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:964:2782], 0} sends rowCount# 1, bytes# 64, quota rows left# 1000, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:59.862990Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:964:2782], 0} finished in read 2025-04-04T17:24:59.863079Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:24:59.863123Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:24:59.863152Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:24:59.863181Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:24:59.863254Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-04-04T17:24:59.863279Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:24:59.863309Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-04-04T17:24:59.863364Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:24:59.863510Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:24:59.864827Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:964:2782], Recipient [7:696:2584]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:24:59.864904Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-04T17:24:59.865055Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:964:2782], Recipient [7:699:2586]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1511 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-04-04T17:24:59.865134Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-04-04T17:24:59.865192Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-04-04T17:24:59.865903Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T17:24:59.865942Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-04-04T17:24:59.865978Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-04-04T17:24:59.866009Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-04-04T17:24:59.866061Z node 7 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-04-04T17:24:59.866098Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T17:24:59.866137Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-04-04T17:24:59.866160Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-04-04T17:24:59.866185Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-04-04T17:24:59.866311Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { 
OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1511 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-04-04T17:24:59.866524Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1511/18446744073709551615 2025-04-04T17:24:59.866570Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:964:2782], 1} after executionsCount# 1 2025-04-04T17:24:59.866612Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:964:2782], 1} sends rowCount# 1, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:24:59.866677Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:964:2782], 1} finished in read 2025-04-04T17:24:59.866728Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T17:24:59.866755Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-04-04T17:24:59.866779Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-04-04T17:24:59.866804Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-04-04T17:24:59.866847Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-04-04T17:24:59.866866Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:24:59.866892Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-04-04T17:24:59.866921Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-04-04T17:24:59.867029Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-04-04T17:24:59.867873Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:964:2782], Recipient [7:699:2586]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-04-04T17:24:59.867928Z node 7 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 2 } items { int32_value: 3 } items { int32_value: 4 } }, { items { int32_value: 11 } items { int32_value: 12 } items { int32_value: 12 } items { int32_value: 12 } } >> KqpNewEngine::InShardsWrite [GOOD] >> KqpNewEngine::Join >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinWithPrecompute |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
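
(Editor's aside: the read-iterator traces in the DataShardWrite block above do simple quota bookkeeping — the request carries MaxRows: 1001 / MaxBytes: 5242880, the first shard's batch of rowCount# 1, bytes# 64 leaves "quota rows left# 1000, quota bytes left# 5242816", and the second read, issued with MaxRows: 1000, ends at 999. A minimal illustrative tracker assuming nothing beyond that arithmetic; not the YDB implementation:)

    #include <cassert>
    #include <cstdint>

    // Illustrative quota tracker matching the numbers in the trace above.
    struct TReadQuota {
        uint64_t RowsLeft;
        uint64_t BytesLeft;

        // Deduct one result batch; returns false once the quota is exhausted
        // and the iterator would have to stop and report unread queries.
        bool Consume(uint64_t rows, uint64_t bytes) {
            if (rows > RowsLeft || bytes > BytesLeft) return false;
            RowsLeft -= rows;
            BytesLeft -= bytes;
            return true;
        }
    };

    int main() {
        TReadQuota quota{1001, 5242880};   // MaxRows / MaxBytes from the request
        assert(quota.Consume(1, 64));      // first shard: rowCount# 1, bytes# 64
        assert(quota.RowsLeft == 1000 && quota.BytesLeft == 5242816);
        return 0;
    }
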
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788027.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788027.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788027.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788027.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788027.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788027.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786827.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788027.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788027.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786827.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786827.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786827.000000s;Name=;Codec=}; 2025-04-04T17:23:48.225516Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:48.304278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:48.329675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:48.330041Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:48.338463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:48.338696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:48.338970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:48.339094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:48.339243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:48.339412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:48.339526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:48.339635Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:48.339747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:48.339858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:48.339958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:48.340062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:48.368330Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:48.368523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:48.368610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:48.368784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:48.368964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:48.369040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:48.369084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:48.369189Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:48.369258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:48.369300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:48.369351Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:48.369500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:48.369557Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:48.369595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:48.369629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:48.369729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:48.369778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:48.369817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:48.369846Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:48.369924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:48.369962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:48.369995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:48.370043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:48.370084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:48.370117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:48.370523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-04T17:23:48.370617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-04-04T17:23:48.370717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-04T17:23:48.370805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-04-04T17:23:48.370962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:48.371018Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:48.371056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:48.371296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:48.371353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:48.371392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... p:29;EXECUTE:finishLoadingTime=299; 2025-04-04T17:25:01.086276Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=58014; 2025-04-04T17:25:01.095894Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=9541; 2025-04-04T17:25:01.108099Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11186; 2025-04-04T17:25:01.108220Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12226; 2025-04-04T17:25:01.108402Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=109; 2025-04-04T17:25:01.108532Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=76; 2025-04-04T17:25:01.108722Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=129; 2025-04-04T17:25:01.108858Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=73; 2025-04-04T17:25:01.123859Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14926; 2025-04-04T17:25:01.140798Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16823; 2025-04-04T17:25:01.140947Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=42; 2025-04-04T17:25:01.141032Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=31; 
2025-04-04T17:25:01.141085Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-04T17:25:01.141153Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-04T17:25:01.141216Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-04-04T17:25:01.141322Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=62; 2025-04-04T17:25:01.141367Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-04T17:25:01.141450Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=53; 2025-04-04T17:25:01.141492Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-04-04T17:25:01.141552Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-04-04T17:25:01.141629Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2025-04-04T17:25:01.141901Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=242; 2025-04-04T17:25:01.141938Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=122331; 2025-04-04T17:25:01.142090Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:25:01.142207Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:25:01.142281Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:25:01.142355Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:25:01.161845Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:25:01.162033Z node 1 :TX_COLUMNSHARD 
DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:25:01.162104Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:01.162183Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7; 2025-04-04T17:25:01.162274Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:25:01.162319Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:25:01.162359Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:01.162402Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:01.162509Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:25:01.163082Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:01.163188Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2641:4515];tablet_id=9437184;parent=[1:2599:4480];fline=manager.cpp:82;event=ask_data;request=request_id=150;1={portions_count=29};; 2025-04-04T17:25:01.163818Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:25:01.163953Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:25:01.163987Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
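
(Editor's aside: the composite_init entries above pair PRECHARGE:<stage>LoadingTime with EXECUTE:<stage>LoadingTime counters per stage and roll them into EXECUTE:composite_initLoadingTime=122331. A scoped-timer sketch of how such per-stage timings are commonly captured — illustrative only, not the actual TX_COLUMNSHARD instrumentation:)

    #include <chrono>
    #include <cstdio>

    // Illustrative RAII stage timer: prints a LoadingTime entry on scope exit,
    // mimicking the "EXECUTE:<stage>LoadingTime=<microseconds>" lines above.
    class TStageTimer {
        const char* Stage;
        std::chrono::steady_clock::time_point Start = std::chrono::steady_clock::now();
    public:
        explicit TStageTimer(const char* stage) : Stage(stage) {}
        ~TStageTimer() {
            auto us = std::chrono::duration_cast<std::chrono::microseconds>(
                std::chrono::steady_clock::now() - Start).count();
            std::printf("EXECUTE:%sLoadingTime=%lld\n", Stage, static_cast<long long>(us));
        }
    };

    int main() {
        TStageTimer total("composite_init");                      // outer scope rolls up the stages
        { TStageTimer t("insert_table");  /* load insert table */ }
        { TStageTimer t("tx_controller"); /* load tx controller */ }
        return 0;
    }
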
2025-04-04T17:25:01.164036Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:25:01.164084Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:25:01.164144Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:01.164201Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7; 2025-04-04T17:25:01.164261Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:25:01.164304Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:25:01.164350Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:01.164387Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:01.164479Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:25:01.165247Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=29;path_id=1; 2025-04-04T17:25:01.166869Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> 
TPQTestInternal::TestAsInt [GOOD]
>> TPQTestInternal::TestAsIntWide [GOOD]
>> TPQTestInternal::StoreKeys [GOOD]
>> KqpScripting::ScriptValidate
>> KqpNewEngine::BlindWriteParameters [GOOD]
>> KqpNewEngine::BlindWriteListParameter
|95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::StoreKeys [GOOD]
>> KqpKv::ReadRows_SpecificKey
>> KqpScripting::ExecuteYqlScriptScanScalar [GOOD]
>> KqpScripting::JoinIndexLookup
>> DataShardWrite::PreparedDistributedWritePageFault [GOOD]
>> KqpNewEngine::UpsertEmptyInput [GOOD]
>> KqpNotNullColumns::AlterAddNotNullColumn
>> TLocksTest::SetBreakSetEraseBreak [GOOD]
>> KqpNewEngine::LocksMultiShard [GOOD]
>> KqpNewEngine::LocksEffects
>> KqpSort::ReverseOptimized
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD]
Test command err:
Trying to start YDB, gRPC: 21825, MsgBus: 27938
2025-04-04T17:24:28.036917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510148669498538:2062];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:24:28.037018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001308/r3tmp/tmpoMIJfo/pdisk_1.dat
2025-04-04T17:24:28.382469Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21825, node 1
2025-04-04T17:24:28.435450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:24:28.435642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:24:28.437300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:24:28.547655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:24:28.547677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:24:28.547687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:24:28.547803Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27938
TClient is connected to server localhost:27938
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:24:29.198153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:29.226071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:29.382236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:29.499204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:29.570836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:30.766178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510157259434929:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:30.766336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:30.993775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.023503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.053555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.080273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.107336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.140696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:31.214678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510161554402741:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.214730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.214824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510161554402746:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:31.218269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:31.227967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510161554402748:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:31.306634Z node 1 :TX_PROXY ERROR: Actor# [1:7489510161554402804:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:31.920423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-04T17:24:32.557164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:24:32.946727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-04-04T17:24:33.036814Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510148669498538:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:33.036912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:33.347553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-04-04T17:24:33.724520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:24:34.096859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-04-04T17:24:34.429468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:24:34.458215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-04-04T17:24:36.269001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710716:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23450, MsgBus: 62621 2025-04-04T17:24:36.929268Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510183987413822:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:36.929401Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001308/r3tmp/tmpWpy1I4/pdisk_1.dat 2025-04-04T17:24:37.046048Z node 2 :IMPORT WARN: Table profiles were not loaded 
2025-04-04T17:24:37.078195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:37.078327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:37.080072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23450, node 2 2025-04-04T17:24:37.112409Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:37.112436Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:37.112444Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:37.112585Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62621 TClient is connected to server localhost:62621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 Parent ... OVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:42.386591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-04-04T17:24:42.947007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-04-04T17:24:43.450904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-04T17:24:43.998943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.546186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:24:44.584092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.777279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715732:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13283, MsgBus: 13897 2025-04-04T17:24:48.740638Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510234126689234:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:48.740702Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/001308/r3tmp/tmp4Jcyew/pdisk_1.dat 2025-04-04T17:24:48.880824Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:48.912459Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:48.912557Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:48.913628Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13283, node 3 2025-04-04T17:24:48.966882Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:48.966905Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:48.966914Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:48.967035Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13897 TClient is connected to server localhost:13897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:49.482499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.487304Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:49.500032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.567814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:49.722600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:49.812263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:52.546314Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510251306560204:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.546437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.590014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.622572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.696342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.735409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.811437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.855994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.920398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510251306560726:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.920497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.920819Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510251306560731:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.925570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:52.939278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489510251306560733:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:24:53.002070Z node 3 :TX_PROXY ERROR: Actor# [3:7489510251306560786:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:53.741073Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489510234126689234:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:53.741161Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:54.203875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-04-04T17:24:54.851356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.402025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-04-04T17:24:56.040553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-04-04T17:24:56.681713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-04T17:24:57.321761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-04-04T17:24:57.882538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T17:24:57.927926Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-04-04T17:25:02.143441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715734:0, at schemeshard: 72057594046644480 >> KqpScripting::SystemTables [GOOD] >> KqpYql::TableUseBeforeCreate >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::InsertNotNullPg+useSink |95.4%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-04-04T17:24:30.288064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:30.288434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:30.288476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0016bf/r3tmp/tmpsCcYY5/pdisk_1.dat 2025-04-04T17:24:30.649742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.686189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:30.730752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:30.731937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:30.744692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:30.837459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:30.886440Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:24:30.887348Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:24:30.887745Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:24:30.887951Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:24:30.898573Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:24:30.925645Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:24:30.925727Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:24:30.926975Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:24:30.927047Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:24:30.927101Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:24:30.927391Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:24:30.927483Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:24:30.927543Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:24:30.938344Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:24:30.968638Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:24:30.968776Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:24:30.968849Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:24:30.968880Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:24:30.968903Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:24:30.968929Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:30.969115Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:30.969178Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:30.969407Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:24:30.969486Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:24:30.969585Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:30.969619Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:24:30.969649Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:24:30.969679Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:24:30.969701Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:24:30.969722Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:24:30.969747Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:24:30.969801Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:30.969829Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:30.969865Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:24:30.970133Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:24:30.970170Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:24:30.970292Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:24:30.970468Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:24:30.970509Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:24:30.970573Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:24:30.970601Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:24:30.970628Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:24:30.970652Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-04-04T17:24:30.970688Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:30.970887Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:24:30.970914Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:24:30.970935Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:24:30.970955Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:30.970995Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:24:30.971022Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:24:30.971041Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:24:30.971070Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:30.971089Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:24:30.972127Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:24:30.972158Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:24:30.982729Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:24:30.982795Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:24:30.982836Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:24:30.982879Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:24:30.982940Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:24:31.121711Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.121765Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:24:31.121904Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:24:31.122283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:24:31.122321Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:24:31.122434Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:24:31.122483Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:24:31.122523Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:24:31.122583Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:24:31.127619Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:24:31.127688Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:24:31.128563Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.128614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:24:31.128663Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:24:3 ... 75186224037888 is Executed 2025-04-04T17:25:03.459191Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:25:03.459236Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:25:03.459391Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:25:03.459431Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:03.459755Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:809:2666], Recipient [7:809:2666]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:25:03.459794Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:25:03.459846Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:03.459894Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:25:03.459942Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:25:03.459994Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-04-04T17:25:03.460037Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-04-04T17:25:03.460103Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.460148Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-04-04T17:25:03.460190Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-04-04T17:25:03.460227Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-04-04T17:25:03.460522Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 
72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-04T17:25:03.460611Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-04T17:25:03.460661Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-04T17:25:03.460729Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-04-04T17:25:03.460866Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.460896Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-04-04T17:25:03.460915Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:25:03.460938Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:25:03.460993Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-04-04T17:25:03.461038Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-04-04T17:25:03.461075Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-04-04T17:25:03.461125Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.461151Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:25:03.461176Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-04-04T17:25:03.461193Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-04-04T17:25:03.461220Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.461234Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-04-04T17:25:03.461246Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-04-04T17:25:03.461260Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-04-04T17:25:03.461273Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.461285Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-04-04T17:25:03.461298Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-04-04T17:25:03.461310Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-04-04T17:25:03.461324Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.461337Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-04-04T17:25:03.461350Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-04-04T17:25:03.461362Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-04-04T17:25:03.461383Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.461395Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-04-04T17:25:03.461409Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-04-04T17:25:03.461427Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-04T17:25:03.461456Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-04T17:25:03.461749Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-04-04T17:25:03.461830Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-04-04T17:25:03.461880Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-04-04T17:25:03.461909Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:25:03.461950Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:25:03.461996Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:25:03.462032Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:25:03.462387Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:03.462429Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-04-04T17:25:03.462469Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-04-04T17:25:03.462708Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-04-04T17:25:03.462781Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-04-04T17:25:03.462838Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-04-04T17:25:03.462901Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-04-04T17:25:03.463029Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-04-04T17:25:03.463074Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-04-04T17:25:03.463149Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:25:03.463214Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:25:03.463255Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-04-04T17:25:03.463288Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-04-04T17:25:03.463320Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-04T17:25:03.463518Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-04-04T17:25:03.463560Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-04-04T17:25:03.463602Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:25:03.463632Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:25:03.463658Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-04-04T17:25:03.463678Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:25:03.463710Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-04-04T17:25:03.463740Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:25:03.463775Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:25:03.463809Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:25:03.463841Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:25:03.464311Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-04-04T17:25:03.464524Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:25:03.464565Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-04-04T17:25:03.464623Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:802:2660] 2025-04-04T17:25:03.464665Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiUsagePrecompute >> KqpNewEngine::DeleteOn+UseSink >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 11405, MsgBus: 18105 2025-04-04T17:24:51.840436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510246655062658:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:51.841919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001997/r3tmp/tmpQs6ECp/pdisk_1.dat 2025-04-04T17:24:52.191462Z node 1 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11405, node 1 2025-04-04T17:24:52.224397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:52.224489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:52.227056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:52.272099Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:52.272122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:52.272128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:52.272249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18105 TClient is connected to server localhost:18105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:52.755537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:52.789358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:52.953766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.117347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.195164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.716480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510259539966298:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.716570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.947932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.009907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.051621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.076930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.104417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.169479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.214631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510263834934113:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.214709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.214779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510263834934118:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.218422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:55.234352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510263834934120:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:55.325124Z node 1 :TX_PROXY ERROR: Actor# [1:7489510263834934174:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:56.368295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8666, MsgBus: 21072 2025-04-04T17:24:57.546465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510271095796886:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:57.570719Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001997/r3tmp/tmpDTCaqN/pdisk_1.dat 2025-04-04T17:24:57.650573Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8666, node 2 2025-04-04T17:24:57.665220Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:57.665314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:57.670669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:57.742798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:57.742820Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:57.742827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:57.742924Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21072 TClient is connected to server localhost:21072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:58.247777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:58.255503Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:58.277156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.356896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.500691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:58.572229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.983480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510283980700346:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.983543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.025027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.063588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.135419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.203061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.267709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.302646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.343859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510288275668163:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.343936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510288275668168:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.343935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.347713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:01.357363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510288275668170:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:01.440084Z node 2 :TX_PROXY ERROR: Actor# [2:7489510288275668224:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:02.536166Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510271095796886:2222];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:02.536234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:02.769174Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787502754, txId: 281474976715671] shutting down 2025-04-04T17:25:02.927722Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787502914, txId: 281474976715673] shutting down 2025-04-04T17:25:03.830960Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787503843, txId: 281474976715675] shutting down |95.4%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSqlIn::SimpleKey [GOOD] >> KqpSqlIn::SelectNotAllElements |95.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-04-04T17:24:38.818651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510192769772658:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:38.818882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9a/r3tmp/tmp6tn6n8/pdisk_1.dat 2025-04-04T17:24:39.073257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:39.089640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:39.089717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:39.091348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8103 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:39.337249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:39.368951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:39.486918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:39.527481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.993864Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510203531478333:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:41.993995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9a/r3tmp/tmp8RRpEW/pdisk_1.dat 2025-04-04T17:24:42.085623Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:42.136050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:42.136156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:42.138087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12957 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:42.276281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:42.298278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.352489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.428451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.367980Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510221969303676:2213];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9a/r3tmp/tmpmJ42Jd/pdisk_1.dat 2025-04-04T17:24:45.418326Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:24:45.457328Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:45.493030Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:45.493121Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:45.494232Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27664 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:45.655167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:45.669781Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.725501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.773702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.715664Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510232630342036:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:48.715726Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9a/r3tmp/tmpoSvdrx/pdisk_1.dat 2025-04-04T17:24:48.812633Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:48.853059Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:48.855937Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:48.857537Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20981 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:49.059552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:49.077670Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:24:49.082689Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.172201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.236286Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:52.763480Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489510252431612821:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:52.763599Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9a/r3tmp/tmpfuEf37/pdisk_1.dat 2025-04-04T17:24:52.860458Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:52.898758Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:52.898839Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:52.900201Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20620 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:24:53.100494Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.119569Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.173183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.218530Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.290779Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510269197344261:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:56.290879Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9a/r3tmp/tmpn8e8yV/pdisk_1.dat 2025-04-04T17:24:56.396209Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:56.425718Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:56.425809Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:56.430806Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23426 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:56.631816Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:24:56.656281Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:56.746279Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.817752Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.224542Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510284168762113:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:00.224613Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9a/r3tmp/tmpgqyDmd/pdisk_1.dat 2025-04-04T17:25:00.408962Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:00.423124Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:00.423234Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:00.425073Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19379 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:00.663837Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:00.699971Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:00.710439Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:25:00.715641Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.794308Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.874282Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> KqpRanges::IsNullPartial >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> TPQTest::TestPartitionTotalQuota >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpYql::EvaluateIf >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner >> KqpReturning::ReturningWorks-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete+QueryService >> KqpNotNullColumns::InsertNotNullPkPg+useSink >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_UnknownTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] Test command err: 2025-04-04T17:23:29.054827Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:29.143161Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:29.147877Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:29.148298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:29.176648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:29.177008Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:29.185926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:29.186173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:29.186447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:29.186683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:29.186814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:29.186929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:29.187067Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:29.187232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:29.187395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:29.187520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.187629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:29.187737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:29.214118Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:29.219041Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:29.219267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:29.219321Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:29.219516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.219715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:29.219791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:29.219837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:29.219927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:29.220013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:29.220071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:29.220123Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:29.220335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:29.220410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:29.220451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:29.220489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:29.220582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:29.220651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:29.220707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:29.220740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:29.220812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:29.220853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:29.220885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:29.220931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:29.220970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:29.220999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:29.221417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-04T17:23:29.221532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-04T17:23:29.221615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T17:23:29.221690Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-04T17:23:29.221974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:29.222043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:29.222088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:29.222331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:29.222392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.222429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:29.222562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:29.222623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:29.222659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:29.222868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:29.222911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:29.222963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... 
;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.953322Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.953359Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:25:04.953392Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:25:04.953518Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:25:04.953623Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.953659Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:25:04.953745Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-04T17:25:04.953797Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-04T17:25:04.953932Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[4:593:2609];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-04-04T17:25:04.954028Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.954134Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.954229Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.954376Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:25:04.954466Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.954552Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.954589Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:600:2616] finished for tablet 9437184 2025-04-04T17:25:04.955077Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[4:593:2609];stats={"p":[{"events":["f_bootstrap"],"t":0.065},{"events":["f_ProduceResults"],"t":0.512},{"events":["l_bootstrap"],"t":0.764},{"events":["f_processing","f_task_result"],"t":0.787},{"events":["l_task_result"],"t":8.348},{"events":["f_ack"],"t":8.388},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":9.237}],"full":{"a":1743787495717071,"name":"_full_task","f":1743787495717071,"d_finished":0,"c":0,"l":1743787504954645,"d":9237574},"events":[{"name":"bootstrap","f":1743787495782361,"d_finished":699475,"c":1,"l":1743787496481836,"d":699475},{"a":1743787504954357,"name":"ack","f":1743787504105687,"d_finished":786301,"c":903,"l":1743787504954276,"d":786589},{"a":1743787504954342,"name":"processing","f":1743787496504394,"d_finished":3929266,"c":4515,"l":1743787504954278,"d":3929569},{"name":"ProduceResults","f":1743787496229266,"d_finished":1550738,"c":5420,"l":1743787504954573,"d":1550738},{"a":1743787504954575,"name":"Finish","f":1743787504954575,"d_finished":0,"c":0,"l":1743787504954645,"d":70},{"name":"task_result","f":1743787496504423,"d_finished":3055158,"c":3612,"l":1743787504065419,"d":3055158}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.955156Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[4:593:2609];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:25:04.955640Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[4:593:2609];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.065},{"events":["f_ProduceResults"],"t":0.512},{"events":["l_bootstrap"],"t":0.764},{"events":["f_processing","f_task_result"],"t":0.787},{"events":["l_task_result"],"t":8.348},{"events":["f_ack"],"t":8.388},{"events":["l_ProduceResults","f_Finish"],"t":9.237},{"events":["l_ack","l_processing","l_Finish"],"t":9.238}],"full":{"a":1743787495717071,"name":"_full_task","f":1743787495717071,"d_finished":0,"c":0,"l":1743787504955198,"d":9238127},"events":[{"name":"bootstrap","f":1743787495782361,"d_finished":699475,"c":1,"l":1743787496481836,"d":699475},{"a":1743787504954357,"name":"ack","f":1743787504105687,"d_finished":786301,"c":903,"l":1743787504954276,"d":787142},{"a":1743787504954342,"name":"processing","f":1743787496504394,"d_finished":3929266,"c":4515,"l":1743787504954278,"d":3930122},{"name":"ProduceResults","f":1743787496229266,"d_finished":1550738,"c":5420,"l":1743787504954573,"d":1550738},{"a":1743787504954575,"name":"Finish","f":1743787504954575,"d_finished":0,"c":0,"l":1743787504955198,"d":623},{"name":"task_result","f":1743787496504423,"d_finished":3055158,"c":3612,"l":1743787504065419,"d":3055158}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:25:04.955721Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:24:55.646317Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-04T17:25:04.955761Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:25:04.955955Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpNewEngine::Join [GOOD] >> 
KqpNewEngine::JoinIdxLookup >> KqpReturning::ReturningWorksIndexedUpsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2025-04-04T17:24:54.488357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510261980396344:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:54.488485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:24:54.494318Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510260782027740:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:54.494547Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014bd/r3tmp/tmpcZVNys/pdisk_1.dat 2025-04-04T17:24:54.738877Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:24:54.740349Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:24:54.909757Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:54.927432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:54.927560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:54.933141Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:24:54.934211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:54.953765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:54.953853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 31875, node 1 2025-04-04T17:24:54.962315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:55.023065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0014bd/r3tmp/yandexLblXf3.tmp 2025-04-04T17:24:55.023109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0014bd/r3tmp/yandexLblXf3.tmp 2025-04-04T17:24:55.023314Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0014bd/r3tmp/yandexLblXf3.tmp 2025-04-04T17:24:55.023485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:55.080378Z INFO: TTestServer started on Port 11489 GrpcPort 31875 TClient is connected to server localhost:11489 PQClient connected to localhost:31875 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:55.340991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:55.392949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:24:57.744465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510274865299158:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:57.744561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510274865299194:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:57.744623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:57.748413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:24:57.781059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510274865299196:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:24:57.970443Z node 1 :TX_PROXY ERROR: Actor# [1:7489510274865299289:2815] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:58.003477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:58.011046Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510274865299302:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:24:58.011722Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzZlZmM2Y2EtODY5ZjZmODMtNjY0ZWJkZmEtZjU5YWM3Njc=, ActorId: [1:7489510274865299154:2339], ActorState: ExecuteState, TraceId: 01jr0xb67cdkjkjz2dx6mry25c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:24:58.014160Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:24:58.015555Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489510273666929999:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:24:58.015809Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmY3YTk3YTMtNGEyMGM2NmYtYWYxYmNmYWUtM2I0M2UzY2Q=, ActorId: [2:7489510273666929949:2306], ActorState: ExecuteState, TraceId: 01jr0xb6db80vm25cqpnbz7cr0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:24:58.016304Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:24:58.095850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:58.211412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:24:58.614303Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0xb6w870hp4395xh03smc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmY3Mzk1NWEtMWJkYWNmMi0xMjNlYzk4Zi04NTBlNjVjZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489510279160266982:3113] 2025-04-04T17:24:59.482396Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510261980396344:2277];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:59.482465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:59.494618Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510260782027740:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:59.494673Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2025-04-04T17:25:04.771593Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489510304930071216:3349] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-04-04T17:25:04.771646Z node 1 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [1:7489510304930071216:3349] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId 2025-04-04T17:25:05.672161Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489510309225038564:2483], TxId: 281474976710681, task: 1, CA Id [1:7489510309225038562:2483]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-04-04T17:25:05.707482Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489510309225038564:2483], TxId: 281474976710681, task: 1, CA Id [1:7489510309225038562:2483]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:25:05.751081Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489510309225038564:2483], TxId: 281474976710681, task: 1, CA Id [1:7489510309225038562:2483]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:25:05.812529Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489510309225038564:2483], TxId: 281474976710681, task: 1, CA Id [1:7489510309225038562:2483]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:25:05.953476Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489510309225038564:2483], TxId: 281474976710681, task: 1, CA Id [1:7489510309225038562:2483]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:25:06.067919Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489510309225038564:2483], TxId: 281474976710681, task: 1, CA Id [1:7489510309225038562:2483]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-04-04T17:25:06.206349Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710682. Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T17:25:06.206476Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7489510313520005919:2485] TxId: 281474976710682. Ctx: { TraceId: 01jr0xbe2yfecwnm7ypz4q521f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFjMjg2ZWMtYTVmODlhNzctMzM2ZDIwMTEtOTU5NDBkZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-04-04T17:25:06.206702Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjFjMjg2ZWMtYTVmODlhNzctMzM2ZDIwMTEtOTU5NDBkZjc=, ActorId: [1:7489510309225038580:2485], ActorState: ExecuteState, TraceId: 01jr0xbe2yfecwnm7ypz4q521f, Create QueryResponse for error on request, msg: 2025-04-04T17:25:06.207840Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jr0xbecj77w6hgawwfyvv67k" } } YdbStatus: UNAVAILABLE ConsumedRu: 202 } 2025-04-04T17:25:06.346355Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7489510309225038564:2483], TxId: 281474976710681, task: 1, CA Id [1:7489510309225038562:2483]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 >> TLocksTest::SetLockNothing [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD] >> KqpRanges::UpdateWhereInFullScan+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 29730, MsgBus: 14631 2025-04-04T17:24:55.432375Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510262960695127:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:55.434524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001993/r3tmp/tmpRaObgS/pdisk_1.dat 2025-04-04T17:24:55.762537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:55.765188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:55.765297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:55.770465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29730, node 1 2025-04-04T17:24:55.846743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:55.846772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:55.846779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:55.846886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14631 TClient is connected to server localhost:14631 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:56.378605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.411782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.575261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.745088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.830130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.572165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510275845598803:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:58.572308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:58.894599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:58.940396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.012741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.045541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.077214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.110358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.162590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510280140566610:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.162681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.163132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510280140566615:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.166881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:59.180385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510280140566617:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:24:59.273219Z node 1 :TX_PROXY ERROR: Actor# [1:7489510280140566672:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:00.433558Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510262960695127:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:00.433640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12046, MsgBus: 61499 2025-04-04T17:25:01.364946Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510291834754541:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:01.433644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001993/r3tmp/tmpnsYmhv/pdisk_1.dat 2025-04-04T17:25:01.602422Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:01.635992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:01.636073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:01.639506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12046, node 2 2025-04-04T17:25:01.708901Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:01.708928Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:01.708936Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:01.709043Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61499 TClient is connected to server localhost:61499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:02.211948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.226118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.295070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.492313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.577412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.772209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510304719658105:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:04.772298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:04.816729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:04.862978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:04.897075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:04.933747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:04.965829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:05.002937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:05.092176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510309014625918:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:05.092241Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:05.092434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510309014625923:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:05.097032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:05.108608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510309014625925:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:05.206618Z node 2 :TX_PROXY ERROR: Actor# [2:7489510309014625981:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:06.366533Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510291834754541:2137];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:06.366613Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> KqpNotNullColumns::InsertNotNullPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPg-useSink >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BrokenLocksAtROTx >> KqpExtractPredicateLookup::SimpleRange >> KqpScripting::JoinIndexLookup [GOOD] >> KqpYql::TableUseBeforeCreate [GOOD] >> KqpYql::ScriptUdf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-04-04T17:24:41.463849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510205140760797:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:41.464619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e93/r3tmp/tmplGOJIo/pdisk_1.dat 2025-04-04T17:24:41.786678Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:41.803865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:41.803993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:41.807271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2771 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:24:42.061336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:42.086601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:42.201654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:42.239061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:44.835379Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510217956255840:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:44.835426Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e93/r3tmp/tmp0Lp3Im/pdisk_1.dat 2025-04-04T17:24:44.937227Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:44.978923Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:44.979034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:44.980815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29980 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:45.148458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:45.166154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:24:45.247474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.295158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.612851Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510234271836579:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:48.612924Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e93/r3tmp/tmphVsM9l/pdisk_1.dat 2025-04-04T17:24:48.714601Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:48.747256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:48.747348Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:48.755012Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22637 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:48.903578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:48.925182Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.978939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.033182Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:52.234898Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510250471074603:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:52.234958Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e93/r3tmp/tmp7eFVju/pdisk_1.dat 2025-04-04T17:24:52.428679Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:52.428834Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:52.429081Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:52.444198Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23580 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:52.636477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:52.643578Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:52.657472Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.734760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:52.813290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:55.962156Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7489510262367765667:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:55.962217Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e93/r3tmp/tmpmQlTDs/pdisk_1.dat 2025-04-04T17:24:56.070635Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:56.075508Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:56.075599Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:56.077189Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24175 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:56.307915Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.313118Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.334868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.401089Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:56.458196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:00.038474Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510285249865362:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:00.038861Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e93/r3tmp/tmpNKPoo5/pdisk_1.dat 2025-04-04T17:25:00.201081Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:00.229504Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:00.229595Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:00.231690Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19252 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:00.483025Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.496251Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.508309Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:25:00.518350Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.604726Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.674236Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:04.224342Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510301362371909:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:04.224473Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e93/r3tmp/tmpkys87m/pdisk_1.dat 2025-04-04T17:25:04.380682Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:04.415013Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:04.415120Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:04.416697Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62389 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:04.667940Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.675498Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.689206Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:25:04.694844Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.794230Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.869809Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpSort::ReverseOptimized [GOOD] >> KqpSort::ReverseOptimizedWithPredicate >> KqpNotNullColumns::AlterAddNotNullColumn [GOOD] >> KqpNotNullColumns::AlterAddIndex >> HttpRequest::AnalyzeServerless [GOOD] >> KqpNotNullColumns::ReplaceNotNullPk >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpNewEngine::JoinProjectMulti >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::LeftSemiJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 27373, MsgBus: 27670 2025-04-04T17:25:05.041005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510306580419155:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:05.041137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00198a/r3tmp/tmpT8o5Q6/pdisk_1.dat 2025-04-04T17:25:05.459430Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27373, node 1 2025-04-04T17:25:05.502143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:05.502342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:05.506318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:05.551829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:05.551852Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:05.551870Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:05.551995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27670 TClient is connected to server localhost:27670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:06.127545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.144219Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:06.158391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:06.349373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.565373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.641637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:08.287555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510319465322795:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.287692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.518663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.544205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.572413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.598100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.669023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.709104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.771055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510319465323310:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.771113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.771232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510319465323315:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.774132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:08.786234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510319465323317:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:08.885987Z node 1 :TX_PROXY ERROR: Actor# [1:7489510319465323372:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> KqpNewEngine::DeleteOn+UseSink [GOOD] >> KqpNewEngine::DeleteOn-UseSink >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink [GOOD] >> KqpNewEngine::DependentSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7773, MsgBus: 27782 2025-04-04T17:24:57.335509Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510272584262913:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:57.335552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001992/r3tmp/tmpBOQW5B/pdisk_1.dat 2025-04-04T17:24:57.754820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:57.766101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:57.766233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:57.769575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7773, node 1 2025-04-04T17:24:57.853812Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:57.853834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:57.853841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:57.853968Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27782 TClient is connected to server localhost:27782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:58.459286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:58.481669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.617512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.777928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.837152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.615493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510285469166573:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.615597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.929768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.961834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.990226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.019140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.052231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.118181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.166584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510289764134380:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.166659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.167112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510289764134385:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.173213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:01.183492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510289764134387:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:01.246449Z node 1 :TX_PROXY ERROR: Actor# [1:7489510289764134440:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:02.335918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510272584262913:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:02.335994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:03.235056Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787503227, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 2772, MsgBus: 20189 2025-04-04T17:25:04.191645Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510300933122190:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:04.218529Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001992/r3tmp/tmplH40Wv/pdisk_1.dat 2025-04-04T17:25:04.326269Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:04.352040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:04.352152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:04.358794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2772, node 2 2025-04-04T17:25:04.510744Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:04.510776Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:04.510783Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:04.510906Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20189 TClient is connected to server localhost:20189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:05.085328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.124899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.213788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:05.372454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.441330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.524932Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510313818025696:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.525024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.548661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.578220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.611379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.652696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.690604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.732163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.783742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510313818026204:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.783858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.784026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510313818026209:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.788178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:07.799510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510313818026211:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:07.888882Z node 2 :TX_PROXY ERROR: Actor# [2:7489510313818026264:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:09.192507Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510300933122190:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:09.192562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: 2025-04-04T17:24:52.475047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:24:52.475471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:52.475652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000849/r3tmp/tmpKiwbv9/pdisk_1.dat 2025-04-04T17:24:52.937418Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8879, node 1 2025-04-04T17:24:53.193162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:53.193220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:53.193251Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:53.193728Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:53.196489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.285830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:53.285957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:53.301244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16021 2025-04-04T17:24:53.862742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:57.033367Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:24:57.065758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:57.065858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:57.097204Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:24:57.099647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:57.322611Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.323348Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.323936Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.324109Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.324386Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.324516Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.324596Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.324703Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.324788Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:24:57.518474Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:57.518637Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:57.536646Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:57.706578Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:57.752636Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:24:57.752748Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:24:57.789162Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:24:57.789571Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:24:57.789795Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:24:57.789858Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:24:57.789912Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:24:57.789967Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:24:57.790026Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:24:57.790084Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:24:57.791016Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:24:57.829961Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:57.830071Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:24:57.838255Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2603] 2025-04-04T17:24:57.841815Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1898:2613] 2025-04-04T17:24:57.842002Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1898:2613], schemeshard id = 72075186224037897 2025-04-04T17:24:57.851389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:24:57.876641Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:24:57.876721Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:24:57.876838Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:24:57.898492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:24:57.914494Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:24:57.914699Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:24:58.175470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:24:58.374281Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:24:58.461344Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:24:59.244089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.031842Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:00.192273Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:25:00.192343Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:25:00.192464Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2602:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:25:00.193945Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2610:2954] 2025-04-04T17:25:00.194564Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2610:2954], schemeshard id = 72075186224037899 2025-04-04T17:25:01.698271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2733:3251], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.698465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.739274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-04T17:25:02.222739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:25:02.223001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:25:02.223326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:25:02.223474Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:25:02.223600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:25:02.223733Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:25:02.223875Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:25:02.224002Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:25:02.224131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2885:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17: ... 
RD WARN: tablet_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:25:03.368462Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.375405Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.381518Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.386754Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.394311Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.399705Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.405552Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.412939Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.418477Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:03.423902Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:25:04.869624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3551:3350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:04.869924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:04.906218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899 2025-04-04T17:25:05.532755Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.533315Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.533932Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.535110Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.535869Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.536291Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.536704Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.537404Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.537836Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:05.538293Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-04-04T17:25:06.957817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4358:3427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:06.958130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:06.961849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-04T17:25:07.017850Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.018524Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.019031Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.021057Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.021564Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.022070Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.022550Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.023017Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.023483Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-04-04T17:25:07.024663Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; waiting actualization: 0/0.000018s 2025-04-04T17:25:09.126053Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4657:4497] 2025-04-04T17:25:09.130101Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4653:3512] , Record { OperationId: "\000\000\000\000\034B@=sr\'L\021]M\365" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2025-04-04T17:25:09.130187Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId= B@=sr'L]M 2025-04-04T17:25:09.130260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId= B@=sr'L]M , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. 
OperationId: 000000072280yq6wh79g8ntkfn'
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpScanArrowInChanels::AggregateWithFunction [GOOD]
>> KqpScanArrowInChanels::AggregateEmptySum
>> TFlatTest::AutoSplitMergeQueue [GOOD]
>> KqpKv::ReadRows_UnknownTable [GOOD]
>> KqpKv::ReadRows_SpecificReturnValue
>> KqpNotNullColumns::InsertNotNullPkPg+useSink [GOOD]
>> KqpNotNullColumns::InsertNotNullPkPg-useSink
>> KqpRanges::IsNullPartial [GOOD]
>> KqpRanges::LiteralOr
>> KqpSort::TopSortTableExprOffset
>> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK
>> TCdcStreamTests::DropTableWithIndexWithStream [GOOD]
>> KqpNewEngine::MultiUsagePrecompute [GOOD]
>> KqpNewEngine::MultiUsageInnerConnection
>> KqpYql::EvaluateIf [GOOD]
>> KqpYql::EvaluateFor
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK
------- [TM] {asan,
default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-04-04T17:24:31.912607Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510161951368956:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:31.912825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ea0/r3tmp/tmpRnAFiW/pdisk_1.dat 2025-04-04T17:24:32.205480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:32.205568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:32.206996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:32.244401Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14157 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:32.476099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:32.506974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:32.674607Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1211 647 2154)b }, ecr=1.000 2025-04-04T17:24:32.677874Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1085 521 2626)b }, ecr=1.000 2025-04-04T17:24:32.698007Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1519 647 6413)b }, ecr=1.000 2025-04-04T17:24:32.701782Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2272 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-04-04T17:24:32.793322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:24:32.793638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:24:32.794092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-04-04T17:24:32.794143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-04-04T17:24:32.794167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:24:32.794195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-04-04T17:24:32.794213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-04-04T17:24:32.794349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-04-04T17:24:32.794480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:24:32.795142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:24:32.795198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-04-04T17:24:32.795886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-04-04T17:24:32.796022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 
2025-04-04T17:24:32.796195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:24:32.796217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:24:32.796397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-04-04T17:24:32.796491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:24:32.796515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489510166246336780:2251], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-04-04T17:24:32.796536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489510166246336780:2251], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 waiting... 2025-04-04T17:24:32.796576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:24:32.796617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-04-04T17:24:32.796983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:32.797278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-04-04T17:24:32.799579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:32.799705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:32.799720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-04T17:24:32.799738Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-04-04T17:24:32.799760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-04-04T17:24:32.800070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:32.800144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-04-04T17:24:32.800165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-04-04T17:24:32.800220Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-04-04T17:24:32.800237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-04-04T17:24:32.800292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-04-04T17:24:32.800429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710676, at schemeshard: 72057594046644480 2025-04-04T17:24:32.800450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-04-04T17:24:32.800468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710676, at schemeshard: 72057594046644480 2025-04-04T17:24:32.800550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-04-04T17:24:32.800701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-04-04T17:24:32.800761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-04-04T17:24:32.800787Z node 1 :FLAT_TX_SC ... xId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 14 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-04-04T17:25:10.988566Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037904 not found 2025-04-04T17:25:10.988613Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037907 not found 2025-04-04T17:25:11.023341Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037908 not found 2025-04-04T17:25:11.023396Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037905 not found 2025-04-04T17:25:11.121487Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037909 not found 2025-04-04T17:25:11.128962Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037910 not found 2025-04-04T17:25:11.144515Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found 2025-04-04T17:25:11.144556Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037912 not found 2025-04-04T17:25:11.249374Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037913 not found 2025-04-04T17:25:11.249409Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037914 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 21 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 21 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 19 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743787477922 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 21 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 21 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 19 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) >> KqpScanArrowFormat::AllTypesColumns >> TLocksTest::Range_GoodLock1 [GOOD] >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpSqlIn::SimpleKey_In_And_In >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:23:19.832698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:23:19.832840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:19.832882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:23:19.832914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:23:19.834208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:23:19.834272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:23:19.834349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:23:19.834438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:23:19.836406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:23:19.921439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:23:19.921498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:23:19.929963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:23:19.930132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:23:19.930236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:23:19.939738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:23:19.940051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:23:19.943098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:19.943980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:23:19.950706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:19.959211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-04-04T17:23:19.959288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:19.959485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:23:19.959548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:19.959664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:23:19.959826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:23:19.966159Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:23:20.080122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:23:20.081276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.083399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:23:20.086514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:23:20.086586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.089343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:20.089473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:23:20.089662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.089769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:23:20.089860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:23:20.089911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:23:20.091559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.091600Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:23:20.091635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:23:20.093089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.093121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.093162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:20.093198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.097811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:23:20.099675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:23:20.099856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:23:20.100787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:23:20.100920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:23:20.100962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:20.102378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:23:20.102446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:23:20.102663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:23:20.102750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:23:20.104859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:23:20.104921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:23:20.105083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:23:20.105126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:23:20.105438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:23:20.105487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:23:20.105593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:20.105629Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.105679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:23:20.105718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.105755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:23:20.105789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:23:20.105820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:23:20.105848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:23:20.105905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:23:20.105954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:23:20.105983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:23:20.107796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:20.107896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:23:20.107929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:25:13.008661Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:25:13.008686Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:25:13.009845Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:25:13.009918Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:25:13.009945Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:25:13.010473Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-04-04T17:25:13.010543Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:25:13.010949Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 
2025-04-04T17:25:13.011180Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-04-04T17:25:13.011271Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-04-04T17:25:13.011342Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-04-04T17:25:13.011407Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-04-04T17:25:13.011487Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2025-04-04T17:25:13.011868Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:25:13.011935Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:25:13.011960Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:25:13.011989Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-04-04T17:25:13.012019Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-04-04T17:25:13.012089Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2025-04-04T17:25:13.012965Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:25:13.013010Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:25:13.013261Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:25:13.013365Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-04-04T17:25:13.013395Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-04T17:25:13.013433Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-04-04T17:25:13.013462Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-04T17:25:13.013494Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2025-04-04T17:25:13.013596Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [19:376:2344] message: TxId: 103 2025-04-04T17:25:13.013677Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-04-04T17:25:13.013753Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-04-04T17:25:13.013816Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-04-04T17:25:13.013968Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:25:13.014080Z node 19 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-04-04T17:25:13.014106Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-04-04T17:25:13.014141Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:25:13.014163Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-04-04T17:25:13.014187Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-04-04T17:25:13.014226Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-04-04T17:25:13.014268Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-04-04T17:25:13.014290Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-04-04T17:25:13.014318Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-04-04T17:25:13.014342Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2025-04-04T17:25:13.014364Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2025-04-04T17:25:13.014419Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-04-04T17:25:13.014915Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:25:13.015009Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-04-04T17:25:13.015120Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-04-04T17:25:13.015192Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-04-04T17:25:13.015249Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-04-04T17:25:13.017709Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:25:13.017803Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:25:13.017883Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:25:13.017915Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:25:13.017952Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:25:13.023199Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:25:13.023460Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:25:13.023546Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [19:758:2661] 2025-04-04T17:25:13.023728Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-04T17:25:13.024699Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:25:13.025092Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 454us result status StatusPathDoesNotExist 2025-04-04T17:25:13.025415Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:25:13.026122Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:25:13.026513Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 421us result status StatusPathDoesNotExist 2025-04-04T17:25:13.026730Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate >> TColumnShardTestSchema::RebootHotTiers [GOOD] |95.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD] >> KqpNewEngine::DqSourceSequentialLimit |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788039.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788039.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788039.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788039.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788039.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143788039.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786839.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788039.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123788039.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786839.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786839.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123786839.000000s;Name=;Codec=}; 2025-04-04T17:23:59.293499Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:59.374541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:59.398935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:59.399308Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:59.406979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:59.407203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:59.407428Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:59.407535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:59.407668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:59.407790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:59.407885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:59.408007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:59.408121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:59.408222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:59.408321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:59.408416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:59.437229Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:59.437444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:59.437522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:59.437705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:59.437897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:59.437977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:59.438030Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:59.438129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:59.438190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:59.438233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:59.438289Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:59.438445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:59.438510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:59.438553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:59.438588Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:59.438714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:59.438784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:59.438836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:59.438871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:59.438962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:59.439005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:59.439038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:59.439093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:59.439161Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:59.439199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:59.439600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-04T17:23:59.439716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=64; 2025-04-04T17:23:59.439821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-04-04T17:23:59.439899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-04T17:23:59.440065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:59.440130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:59.440163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:59.440395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:59.440450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:59.440487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... 
cpp:29;EXECUTE:finishLoadingTime=622; 2025-04-04T17:25:13.660335Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=54779; 2025-04-04T17:25:13.674662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=14211; 2025-04-04T17:25:13.688439Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12680; 2025-04-04T17:25:13.688562Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=13787; 2025-04-04T17:25:13.688739Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=108; 2025-04-04T17:25:13.688855Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2025-04-04T17:25:13.688984Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=85; 2025-04-04T17:25:13.689095Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=66; 2025-04-04T17:25:13.701536Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12370; 2025-04-04T17:25:13.718231Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16570; 2025-04-04T17:25:13.718413Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2025-04-04T17:25:13.718533Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=51; 2025-04-04T17:25:13.718596Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-04-04T17:25:13.718647Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-04T17:25:13.718694Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-04T17:25:13.718782Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-04-04T17:25:13.718835Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-04T17:25:13.718945Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66; 2025-04-04T17:25:13.719001Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-04T17:25:13.719079Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-04-04T17:25:13.719183Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=58; 2025-04-04T17:25:13.719572Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=329; 2025-04-04T17:25:13.719623Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=121969; 2025-04-04T17:25:13.719781Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:25:13.719909Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:25:13.719988Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:25:13.720072Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:25:13.740247Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:25:13.740421Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:25:13.740490Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:13.740565Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7; 2025-04-04T17:25:13.740632Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:25:13.740676Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:25:13.740731Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:13.740772Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:13.740873Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:25:13.741371Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:13.741464Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2641:4515];tablet_id=9437184;parent=[1:2599:4480];fline=manager.cpp:82;event=ask_data;request=request_id=150;1={portions_count=29};; 2025-04-04T17:25:13.742234Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:25:13.742715Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:25:13.742754Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-04T17:25:13.742780Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:25:13.742828Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:25:13.742901Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:13.742965Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7; 2025-04-04T17:25:13.743027Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:25:13.743072Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:25:13.743135Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:13.743179Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:13.743287Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:25:13.750799Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=29;path_id=1; 2025-04-04T17:25:13.752355Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg >> KqpNotNullColumns::InsertNotNullPg-useSink [GOOD] >> KqpNotNullColumns::FailedMultiEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2025-04-04T17:24:34.542816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510174278418351:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:34.542887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9d/r3tmp/tmpe774XH/pdisk_1.dat 2025-04-04T17:24:34.837575Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:34.905345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:34.905497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:34.907368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10814 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:35.064884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:35.085110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:35.196015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:35.236864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:37.324387Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510188663427055:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:37.324459Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9d/r3tmp/tmpV2mKZ2/pdisk_1.dat 2025-04-04T17:24:37.403946Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:37.448290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:37.448377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:37.450067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5895 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:37.571799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:37.596035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:37.644956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:37.714306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.535841Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510199387274572:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:40.535885Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9d/r3tmp/tmpXAvpEd/pdisk_1.dat 2025-04-04T17:24:40.647081Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:40.679997Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:40.680142Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:40.682107Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12285 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:40.856684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:40.873244Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:24:40.880345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.940003Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.991915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.857729Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510213681687313:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:43.857939Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9d/r3tmp/tmpE7WVPK/pdisk_1.dat 2025-04-04T17:24:43.954138Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:43.989774Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:43.989873Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:43.991420Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3965 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:44.147983Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:44.160343Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCom ... PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:55.362102Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:55.379796Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:55.464142Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:55.540803Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:59.479406Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489510279833069297:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:59.479502Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9d/r3tmp/tmp00OUIv/pdisk_1.dat 2025-04-04T17:24:59.574136Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:59.615246Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:59.615348Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:59.623132Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28327 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:59.846649Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:59.854993Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:59.868022Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:24:59.877788Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:59.973356Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.030482Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:03.895984Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489510299145993426:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:03.896099Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9d/r3tmp/tmpeinUUu/pdisk_1.dat 2025-04-04T17:25:04.059293Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:04.077548Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:04.077663Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:04.080637Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13471 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:04.390685Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.402658Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.422001Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:25:04.429952Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:04.526787Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:25:04.616284Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:08.955744Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489510319982295420:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:08.955818Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e9d/r3tmp/tmpH9uwz5/pdisk_1.dat 2025-04-04T17:25:09.150742Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:09.173944Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:09.174062Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:09.175900Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62118 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:09.459358Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.467740Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.476254Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:25:09.484277Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.566460Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.638524Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpYql::ScriptUdf [GOOD] >> KqpYql::SelectNoAsciiValue >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpKv::ReadRows_PgValue >> KqpSort::ReverseOptimizedWithPredicate [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788039.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788039.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788039.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788039.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143788039.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143788039.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786839.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123788039.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123788039.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786839.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123786839.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123786839.000000s;Name=;Codec=}; 2025-04-04T17:23:59.729414Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:59.801824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:59.815416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:59.815634Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:59.821231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:59.821377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:59.821524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:59.821608Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:59.821681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:59.821773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:59.821835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:59.821899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:59.821962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:59.822020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:59.822078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:59.822137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:59.838571Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:59.838727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:59.838770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:59.838881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:59.838999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:59.839055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:59.839084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:59.839162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-04-04T17:23:59.839208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:59.839235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:59.839253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:59.839349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:59.839384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:59.839406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:59.839425Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:59.839480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:59.839512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:59.839536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:59.839555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:59.839604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:59.839625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:59.839641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:59.839668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:59.839689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:59.839710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:59.840000Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-04-04T17:23:59.840061Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-04-04T17:23:59.840140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-04-04T17:23:59.840204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-04-04T17:23:59.840305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:59.840340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:59.840361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:59.840480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:59.840515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:59.840540Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 29;EXECUTE:finishLoadingTime=606; 2025-04-04T17:25:14.319355Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=55606; 2025-04-04T17:25:14.333956Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=14496; 2025-04-04T17:25:14.348034Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=12799; 2025-04-04T17:25:14.348169Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=14084; 2025-04-04T17:25:14.348368Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=119; 2025-04-04T17:25:14.348505Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=81; 2025-04-04T17:25:14.348662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=104; 2025-04-04T17:25:14.348790Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=78; 
2025-04-04T17:25:14.365689Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16811; 2025-04-04T17:25:14.385577Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=19735; 2025-04-04T17:25:14.385756Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=52; 2025-04-04T17:25:14.385854Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=35; 2025-04-04T17:25:14.385937Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-04-04T17:25:14.386010Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2025-04-04T17:25:14.386072Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2025-04-04T17:25:14.386170Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=52; 2025-04-04T17:25:14.386230Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-04T17:25:14.386365Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=67; 2025-04-04T17:25:14.386426Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-04-04T17:25:14.386510Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-04-04T17:25:14.386621Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2025-04-04T17:25:14.387065Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=394; 2025-04-04T17:25:14.387125Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=132704; 2025-04-04T17:25:14.387334Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:25:14.387456Z 
node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:25:14.387518Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:25:14.387598Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:25:14.410990Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=1; 2025-04-04T17:25:14.411194Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:25:14.411285Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:14.411370Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7; 2025-04-04T17:25:14.411444Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:25:14.411495Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:25:14.411554Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:14.411604Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:14.411719Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:25:14.412301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:14.412396Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2641:4515];tablet_id=9437184;parent=[1:2599:4480];fline=manager.cpp:82;event=ask_data;request=request_id=150;1={portions_count=29};; 2025-04-04T17:25:14.413180Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:25:14.413577Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:25:14.413617Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:25:14.413647Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:25:14.413699Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:25:14.413768Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:25:14.413832Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=7; 2025-04-04T17:25:14.413904Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-04-04T17:25:14.413955Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:25:14.414007Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:14.414056Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:25:14.414163Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:25:14.415424Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=29;path_id=1; 2025-04-04T17:25:14.416974Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2599:4480];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> 
TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> KqpRanges::UpdateWhereInFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink >> KqpNewEngine::BrokenLocksAtROTx [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> KqpScripting::ScriptStats [GOOD] >> KqpNewEngine::DeleteOn-UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin+UseSink >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpReturning::ReturningWorksIndexedDelete+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete-QueryService >> KqpNotNullColumns::AlterAddIndex [GOOD] >> KqpNewEngine::DependentSelect [GOOD] >> KqpNewEngine::DqSourceCount >> KqpNotNullColumns::InsertNotNullPkPg-useSink [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup >> KqpReturning::ReturningWorksIndexedReplace-QueryService [GOOD] >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate >> KqpScripting::QueryStats >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNewEngine::JoinMultiConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 16993, MsgBus: 27009 2025-04-04T17:25:03.671032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510300348585305:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:03.672533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00198e/r3tmp/tmpYJHkoN/pdisk_1.dat 2025-04-04T17:25:03.924347Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16993, node 1 2025-04-04T17:25:04.015123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:04.015147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:04.015154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:04.015311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:25:04.021251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:04.021372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:04.023236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27009 TClient is connected to server localhost:27009 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:04.611176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.631663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:04.637468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:04.849173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.058043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.136664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.076105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510317528456175:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.076232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.348953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.382886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.462630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.495201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.538170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.614835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:07.728604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510317528456698:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.728676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.728848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510317528456703:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.732733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:07.746166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510317528456705:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:07.836504Z node 1 :TX_PROXY ERROR: Actor# [1:7489510317528456762:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:08.673504Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510300348585305:2134];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:08.674334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29135, MsgBus: 13022 2025-04-04T17:25:09.525385Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510324123034295:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:09.526853Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00198e/r3tmp/tmpkL8KuP/pdisk_1.dat 2025-04-04T17:25:09.672952Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29135, node 2 2025-04-04T17:25:09.706057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:09.706134Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:09.709732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:09.782828Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:09.782862Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:09.782874Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:09.782989Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13022 TClient is connected to server localhost:13022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:10.205209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:10.218369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:10.298717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:10.433669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:10.491547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.759530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510337007937958:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:12.759620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:12.795046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:12.828335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:12.864142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:12.895205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:12.925882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:12.978277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:13.029137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510341302905763:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.029216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.029273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510341302905768:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.032937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:13.043899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510341302905770:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:13.112344Z node 2 :TX_PROXY ERROR: Actor# [2:7489510341302905823:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:14.526501Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510324123034295:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:14.526553Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:14.668468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.296922Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787515288, txId: 281474976715674] shutting down 2025-04-04T17:25:15.932439Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787515939, txId: 281474976715678] shutting down 2025-04-04T17:25:16.284282Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787516277, txId: 281474976715682] shutting down >> KqpNewEngine::LeftSemiJoin [GOOD] >> KqpNewEngine::LocksInRoTx >> KqpYql::EvaluateFor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: Trying to start YDB, gRPC: 2257, MsgBus: 31658 2025-04-04T17:24:41.264985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510205093953728:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:41.265068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ac/r3tmp/tmpW78peD/pdisk_1.dat 2025-04-04T17:24:41.604243Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2257, node 1 2025-04-04T17:24:41.654484Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:41.654520Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:41.654530Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:41.654719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:41.666853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:41.666971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:41.668817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31658 TClient is connected to server localhost:31658 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:42.136332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.164745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.277549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.425754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.496690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:44.131628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510217978857385:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.131735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.393218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.427381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.475632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.497939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.522193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.557810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.601744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510217978857895:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.601850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.602123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510217978857900:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:44.606035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:44.615209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510217978857902:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:44.702198Z node 1 :TX_PROXY ERROR: Actor# [1:7489510217978857956:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15552, MsgBus: 5375 2025-04-04T17:24:46.401462Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510226520968247:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:46.401538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ac/r3tmp/tmpUAzAk7/pdisk_1.dat 2025-04-04T17:24:46.492826Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15552, node 2 2025-04-04T17:24:46.537699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:46.537797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:46.541152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:46.570373Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:46.570400Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:46.570408Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:46.570513Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5375 TClient is connected to server localhost:5375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:46.923855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.940932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:24:47.007495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:47.161622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.231908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.302416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510239405871904:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:49.302533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.326720Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.326938Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510319990381045:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.331101Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:08.344079Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-04-04T17:25:08.344502Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510319990381047:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:08.407528Z node 5 :TX_PROXY ERROR: Actor# [5:7489510319990381104:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:09.334362Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510302810509570:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:09.334435Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:09.616413Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22943, MsgBus: 14368 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014ac/r3tmp/tmpKbFl7N/pdisk_1.dat 2025-04-04T17:25:10.877404Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:25:10.879238Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:10.916345Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:10.916452Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:10.918113Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22943, node 6 2025-04-04T17:25:11.031321Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:11.031342Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:11.031352Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:11.031496Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14368 TClient is connected to server localhost:14368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:11.639882Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.651467Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:11.665556Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.741397Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.984168Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.072284Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.031866Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510348117343802:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:15.031978Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:15.086135Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.144258Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.180767Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.231750Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.274572Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.319451Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.391694Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510348117344316:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:15.391819Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:15.392140Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510348117344321:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:15.396139Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:15.412553Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510348117344323:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:15.469079Z node 6 :TX_PROXY ERROR: Actor# [6:7489510348117344375:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:16.769561Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.856102Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-04-04T17:25:16.913780Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg [GOOD] >> KqpNotNullColumns::SelectNotNullColumns >> TLocksTest::GoodNullLock [GOOD] >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::UnionAllSortLimit >> KqpRanges::LiteralOr [GOOD] >> KqpRanges::LiteralOrCompisite >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> KqpNotNullColumns::FailedMultiEffects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 18592, MsgBus: 24102 2025-04-04T17:25:08.140343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510321465606569:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:08.140440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001989/r3tmp/tmpQhk9nG/pdisk_1.dat 2025-04-04T17:25:08.456019Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:08.474792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:08.474884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:08.476818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18592, node 1 2025-04-04T17:25:08.556724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:08.556755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:08.556766Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:08.556918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24102 TClient is connected to server localhost:24102 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:09.153943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.176419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:09.193118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.362275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.537804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.621245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.329912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510334350510226:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.334827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.598978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.632932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.670878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.707310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.735411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.770860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.853996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510334350510740:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.854062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.854567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510334350510745:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.858617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:11.869913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510334350510747:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:11.943827Z node 1 :TX_PROXY ERROR: Actor# [1:7489510334350510802:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:13.140706Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510321465606569:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:13.140808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10333, MsgBus: 19765 2025-04-04T17:25:13.843416Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510343439808604:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:13.843469Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001989/r3tmp/tmpotjU1v/pdisk_1.dat 2025-04-04T17:25:13.986845Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:14.007140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:14.007248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:14.011417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10333, node 2 2025-04-04T17:25:14.070986Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:14.071009Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:14.071016Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:14.071128Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19765 TClient is connected to server localhost:19765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
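The block above is the captured log of KqpYql::EvaluateFor. For orientation, a minimal YQL sketch of the EVALUATE FOR construct a test with this name would exercise — a hypothetical query written from YQL's documented syntax, not recovered from the log:

    -- EVALUATE FOR expands the body during query compilation, once per list element
    EVALUATE FOR $i IN AsList(1, 2, 3) DO BEGIN
        SELECT $i AS iteration;
    END DO;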
2025-04-04T17:25:14.523415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.529691Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:14.549009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.618512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.783424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.868603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.162373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510360619679519:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.162460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.193001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.227646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.255274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.291657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.327779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.400411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.452194Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510360619680030:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.452287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.452565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510360619680035:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.456412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:17.467208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510360619680037:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:17.561949Z node 2 :TX_PROXY ERROR: Actor# [2:7489510360619680091:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TCacheTest::RacyRecreateAndSync >> TCacheTest::List >> TCacheTest::MigrationCommon >> TCacheTest::CheckSystemViewAccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 17199, MsgBus: 13335 2025-04-04T17:24:48.776474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510236115225788:2241];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:48.777192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e88/r3tmp/tmpoZNnBb/pdisk_1.dat 2025-04-04T17:24:49.150925Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17199, node 1 2025-04-04T17:24:49.183424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:49.183545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:49.198557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:49.253673Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:49.253700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:49.253717Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:49.253842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13335 TClient is connected to server localhost:13335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:49.773959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:49.796307Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:24:49.805907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.966162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.124658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.198057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:51.888322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510249000129271:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.888463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.119424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.153520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.185596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.213204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.283340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.325395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.438568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510253295097086:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.438652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.439216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510253295097091:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:52.443280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:52.455365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510253295097093:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:52.539682Z node 1 :TX_PROXY ERROR: Actor# [1:7489510253295097149:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:53.810969Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510236115225788:2241];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:53.811365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:56.319586Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787494386, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 17927, MsgBus: 21898 2025-04-04T17:24:57.037033Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510271150780151:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:57.037096Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e88/r3tmp/tmp0vVQkZ/pdisk_1.dat 2025-04-04T17:24:57.133071Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:57.172084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:57.172172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:57.173891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17927, node 2 2025-04-04T17:24:57.228259Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:57.228284Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:57.228296Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:57.228418Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21898 TClient is connected to server localhost:21898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:57.707593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:57.713870Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:57.719358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:57.810668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:57.995754Z node 2 :FLAT_TX_SCHEMESHARD ... :25:08.638607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.678054Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.755665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.795884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.832214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.870543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.948123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510320608000912:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.948271Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.948490Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510320608000917:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.953432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:08.963530Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489510320608000919:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:09.056010Z node 3 :TX_PROXY ERROR: Actor# [3:7489510324902968271:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:10.209047Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489510307723096751:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:10.209120Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:11.176298Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787510731, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 13446, MsgBus: 16451 2025-04-04T17:25:12.114206Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510336300923773:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:12.114480Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e88/r3tmp/tmpP3gfr7/pdisk_1.dat 2025-04-04T17:25:12.309227Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:12.309329Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:12.318650Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:12.324627Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13446, node 4 2025-04-04T17:25:12.398846Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:12.398877Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:12.398890Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:12.399041Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16451 TClient is connected to server localhost:16451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:12.992203Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:13.002559Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:13.019115Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:13.086082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:13.268504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:13.350593Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:16.266749Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510353480794703:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.266843Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.318382Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.362377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.401386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.447082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.496757Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.571695Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.674045Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510353480795221:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.674178Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.674528Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510353480795226:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.679291Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:16.693971Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489510353480795228:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:16.757008Z node 4 :TX_PROXY ERROR: Actor# [4:7489510353480795283:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:17.116861Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489510336300923773:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:17.116950Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:18.841324Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787518515, txId: 281474976710671] shutting down >> TLocksTest::BrokenDupLock [GOOD] >> TPQTabletTests::DropTablet >> TLocksTest::GoodSameShardLock [GOOD] >> TPQTabletTests::DropTablet [GOOD] >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::FailedMultiEffects [GOOD] Test command err: Trying to start YDB, gRPC: 25468, MsgBus: 2912 2025-04-04T17:24:51.435689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510245510731784:2242];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:51.435783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00147b/r3tmp/tmpC5MtH6/pdisk_1.dat 2025-04-04T17:24:51.827054Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25468, node 1 2025-04-04T17:24:51.862860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:51.862966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:51.865085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:51.918777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:51.918804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:51.918811Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:51.918912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2912 TClient is connected to server localhost:2912 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:52.501377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:52.522831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:24:54.386187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510258395634146:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.386347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.645193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.773012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510258395634251:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.773099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.773528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510258395634256:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.777014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:24:54.786174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510258395634258:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:24:54.870134Z node 1 :TX_PROXY ERROR: Actor# [1:7489510258395634309:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:55.146916Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510262690601671:2362], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2025-04-04T17:24:55.147362Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjA0ZTU2MTEtOWRmZjA0YjgtOGU4ODJjYmUtNzdjNWY4OGM=, ActorId: [1:7489510258395634128:2328], ActorState: ExecuteState, TraceId: 01jr0xb3pq6s9esfdevk0tf4w2, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-04T17:24:55.164267Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510262690601680:2366], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:24:55.165405Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjA0ZTU2MTEtOWRmZjA0YjgtOGU4ODJjYmUtNzdjNWY4OGM=, ActorId: [1:7489510258395634128:2328], ActorState: ExecuteState, TraceId: 01jr0xb3qj426d9mjya002e2e5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 4715, MsgBus: 3717 2025-04-04T17:24:55.814650Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510265164232313:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:55.854857Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00147b/r3tmp/tmpPqfDcX/pdisk_1.dat 2025-04-04T17:24:55.942849Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:55.963059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:55.963140Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4715, node 2 2025-04-04T17:24:55.964688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:55.999675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:55.999702Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:55.999709Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:55.999811Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3717 TClient is connected to server localhost:3717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:56.440798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
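The two KQP_COMPILE_ACTOR failures on node 1 above are both type-annotation rejections of a write into /Root/TestInsertNotNullPk: the first input omits the primary-key column altogether (code 2029, status PRECONDITION_FAILED), the second binds NULL to a key the scheme types as non-optional Uint64 (code 2031, status GENERIC_ERROR). A minimal YQL sketch of the failing pattern follows; the table definition is an assumption reconstructed from the Struct<'Key':Uint64,'Value':String?> target type in the error, not the actual test fixture, and the exact statements the test compiled are not shown in the log.

-- Assumed schema, inferred from the target struct in the error message
CREATE TABLE TestInsertNotNullPk (
    Key Uint64 NOT NULL,
    Value String,
    PRIMARY KEY (Key)
);

-- Rejected at compile time with code 2029: the key column is missing from the input
INSERT INTO TestInsertNotNullPk (Value) VALUES ("foo");

-- Rejected with code 2031: Null cannot be converted to the non-optional Uint64 key
INSERT INTO TestInsertNotNullPk (Key, Value) VALUES (NULL, "bar");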
2025-04-04T17:24:56.449729Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:58.989628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510278049134843:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:58.989700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.005652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.073459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510282344102239:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.073557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.073826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510282344102244:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2 ... Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:10.911958Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:10.919547Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:13.830896Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510339681093532:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.831000Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.857972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:13.956397Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510339681093635:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.956473Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.956803Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510339681093640:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.961173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:25:14.013774Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510339681093642:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:25:14.072091Z node 5 :TX_PROXY ERROR: Actor# [5:7489510343976060989:2392] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:14.423852Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7489510343976061057:2363], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-04-04T17:25:14.425773Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YzBkNGMwZi1lOGFlYTQzMC1kZTYzNzY0Ny1mNDE2MDAzNw==, ActorId: [5:7489510339681093513:2329], ActorState: ExecuteState, TraceId: 01jr0xbpha9zgbzcp9fgd0jhmy, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:25:14.715554Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:25:14.745735Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7489510343976061067:2367], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-04-04T17:25:14.746628Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=YzBkNGMwZi1lOGFlYTQzMC1kZTYzNzY0Ny1mNDE2MDAzNw==, ActorId: [5:7489510339681093513:2329], ActorState: ExecuteState, TraceId: 01jr0xbpje59pmzn2nd1dkkv8w, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 11963, MsgBus: 25673 2025-04-04T17:25:15.764186Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510349795710267:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:15.765807Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00147b/r3tmp/tmphZXvcy/pdisk_1.dat 2025-04-04T17:25:15.925835Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:15.925948Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:15.929501Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:15.945186Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11963, node 6 2025-04-04T17:25:15.995038Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:15.995068Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:15.995080Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:15.995248Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25673 TClient is connected to server localhost:25673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:16.623998Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
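Node 5 exercises a NOT NULL constraint on a data column rather than on the key, and the two failures above show the check firing at different stages: code 2032 is a pure compile-time rejection (a NOT NULL column absent from the input must still be initialized), while code 2031 is raised by a TKqpEnsure check in TKqpLiteralExecuter, per the KQP_EXECUTER line above, when an explicit NULL reaches the column. A sketch of both shapes; the table name TestNotNullColumn is hypothetical, since the log shows neither the table path nor the query text.

-- Hypothetical schema with a mandatory non-key column
CREATE TABLE TestNotNullColumn (
    Key Uint64 NOT NULL,
    Value String NOT NULL,
    PRIMARY KEY (Key)
);

-- Code 2032: Value is NOT NULL, so omitting it from the column list fails at compile time
INSERT INTO TestNotNullColumn (Key) VALUES (1);

-- Code 2031 via TKqpEnsure: an explicit NULL reaches the NOT NULL column
INSERT INTO TestNotNullColumn (Key, Value) VALUES (2, NULL);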
2025-04-04T17:25:16.632213Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:19.366366Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510366975580087:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.366495Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.386477Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.439565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510366975580187:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.439638Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.439948Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510366975580192:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.444786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:25:19.457221Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510366975580194:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:25:19.523622Z node 6 :TX_PROXY ERROR: Actor# [6:7489510366975580245:2392] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:19.659649Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489510366975580284:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:55: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64,'Value':String>
:3:55: Error: Failed to convert 'Value': Null to String
:3:55: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:25:19.659867Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=M2ZjMDM1ZTktZWI4YmYzOWUtNWY3YTU0ZGYtMzJlNGMxMDM=, ActorId: [6:7489510366975580059:2327], ActorState: ExecuteState, TraceId: 01jr0xbvmqbh8vb46a4px36hnw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-04-04T17:24:40.079956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510199209238475:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:40.080078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e98/r3tmp/tmpu7fWEM/pdisk_1.dat 2025-04-04T17:24:40.411808Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:40.479331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:40.479531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:40.481902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8724 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:40.671167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.685280Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.704636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:40.812012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
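The node 6 failure shows the struct conversion is reported per column: the source is typed Struct<'Key':Int32,'Value':Null>, and only the Value conversion is listed as failing, so the Int32 literal key is evidently coercible to Uint64 while Null is not coercible to the non-optional String column. The :3:29/:3:55 positions point into a multi-line query; a hedged single-line equivalent of the failing write (table name hypothetical, and whether the test used INSERT, UPSERT, or REPLACE is not visible in the log, though the error shows the write compiling through KiWriteTable!):

-- Key: the Int32 literal widens to Uint64; Value: NULL cannot become a non-optional String, code 2031
UPSERT INTO TestNotNullColumn (Key, Value) VALUES (1, NULL);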
2025-04-04T17:24:40.881395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:42.966515Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510206425718771:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:42.966583Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e98/r3tmp/tmp3mUnci/pdisk_1.dat 2025-04-04T17:24:43.053057Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:43.099836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:43.099927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:43.101618Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5910 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:43.225848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.230699Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.248642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.322171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.392347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:46.075329Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510224748226565:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:46.075434Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e98/r3tmp/tmp7TrNIC/pdisk_1.dat 2025-04-04T17:24:46.174219Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:46.217705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:46.217784Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:46.219509Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19140 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:46.350935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.355498Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.370189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.447615Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.497365Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:49.596632Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510237186159165:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:49.596700Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e98/r3tmp/tmpqPRrqz/pdisk_1.dat 2025-04-04T17:24:49.679659Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:49.725272Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:49.725356Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:49.726951Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4863 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025- ... PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:02.003873Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.013436Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.030797Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:25:02.035268Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:02.116990Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.201750Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.839341Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489510308631552779:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:05.839495Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e98/r3tmp/tmphDjh4k/pdisk_1.dat 2025-04-04T17:25:05.968931Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:05.992782Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:05.992882Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:06.000943Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65450 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:06.272198Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.282787Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.320939Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:25:06.339052Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.431005Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:06.509030Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:10.216477Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489510329634910820:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:10.216570Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e98/r3tmp/tmp73m7Zj/pdisk_1.dat 2025-04-04T17:25:10.337315Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:10.372327Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:10.372432Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:10.374676Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26622 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:10.603453Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:10.628517Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:10.703576Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:10.781336Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.939731Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489510346463063297:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:14.940321Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e98/r3tmp/tmpsnS4f1/pdisk_1.dat 2025-04-04T17:25:15.164893Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:15.190471Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:15.190581Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:15.192458Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9318 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:15.485355Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.497747Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.517090Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-04-04T17:25:15.526810Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.630232Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.706747Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> TCacheTest::CheckSystemViewAccess [GOOD] >> TCacheTest::CookiesArePreserved >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TCacheTest::CookiesArePreserved [GOOD] >> TCacheTest::RacyCreateAndSync [GOOD] >> TPQTabletTests::Cancel_Tx >> KqpYql::SelectNoAsciiValue [GOOD] >> KqpYql::UuidPrimaryKeyDisabled >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> TPQTabletTests::Cancel_Tx [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseRangeOptimized >> KqpNewEngine::MultiUsageInnerConnection [GOOD] >> KqpNewEngine::MultipleBroadcastJoin >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpNewEngine::DqSourceLocksEffects >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> KqpAgg::AggWithSelfLookup2 [GOOD] >> KqpAgg::AggWithHop >> TCacheTest::MigrationCommit [GOOD] >> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD] >> KqpNewEngine::ItemsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2025-04-04T17:25:21.265799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.265848Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:21.461794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-04T17:25:21.474747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:21.481651Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:25:21.482398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-04T17:25:21.487556Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:204:2194], for# user1@builtin, access# DescribeSchema 2025-04-04T17:25:21.488444Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:210:2200], for# user1@builtin, access# 2025-04-04T17:25:21.890352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.890408Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:21.945236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-04T17:25:21.951271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:21.958133Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-04-04T17:25:21.267784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.267845Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:21.441570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:21.465332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:25:21.469184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait 
txId: 102 2025-04-04T17:25:21.507789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:25:21.529299Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-04T17:25:21.889060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.889129Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:21.943635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:21.956407Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 >> TCacheTest::Navigate >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2025-04-04T17:24:40.335624Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510197834311831:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:40.336288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e97/r3tmp/tmpJkbCOI/pdisk_1.dat 2025-04-04T17:24:40.670101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:40.746136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:40.746265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:40.748488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8725 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:40.961655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:40.986752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.104779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.153925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.784417Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510211156074574:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:43.785753Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e97/r3tmp/tmprhcmFX/pdisk_1.dat 2025-04-04T17:24:43.916524Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:43.933373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:43.933466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:43.935301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13581 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:44.077275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:24:44.095895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:44.148376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:44.198935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.029655Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510230188561820:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:47.029701Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e97/r3tmp/tmpFw1qJb/pdisk_1.dat 2025-04-04T17:24:47.153335Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:47.192095Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:47.192189Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:47.203177Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29154 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:47.371125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:47.392262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.471163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.544009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.282594Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510243731566149:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:50.282657Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e97/r3tmp/tmpnhmErq/pdisk_1.dat 2025-04-04T17:24:50.436698Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:50.439887Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:50.439972Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:50.442385Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28722 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:50.636554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.642720Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.657176Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... : SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:02.880013Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.886221Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.911090Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.994227Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:03.052872Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:07.046557Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489510315873015320:2066];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:07.047461Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e97/r3tmp/tmpGIpQZD/pdisk_1.dat 2025-04-04T17:25:07.221102Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:07.239047Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:07.239158Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:07.241554Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21031 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:07.519716Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.530640Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.538625Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:25:07.543194Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.600596Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.663726Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:11.783119Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489510331517227474:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:11.785604Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e97/r3tmp/tmpZupcAQ/pdisk_1.dat 2025-04-04T17:25:11.950869Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:11.957163Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:11.957286Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:11.960737Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10447 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:12.263998Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.271373Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.286985Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.359616Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.436315Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:16.405778Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489510356408587021:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:16.405836Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e97/r3tmp/tmpTGwmjV/pdisk_1.dat 2025-04-04T17:25:16.639951Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:16.644137Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:16.644253Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:16.645920Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5713 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:16.960399Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:16.984758Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.065824Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.135468Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
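[Editorial note] The repeated "WaitRootIsUp 'dc-1'..." / "TClient::Ls request ... WaitRootIsUp 'dc-1' success." pairs throughout this log are the test harness polling the scheme root until the schemeshard answers. A minimal sketch of that polling pattern, with hypothetical names (Ls here is a stub; the real TClient::Ls in ydb/core/client returns the full PathDescription shown above):

#include <chrono>
#include <string>
#include <thread>

enum class ELsStatus { Ok, NotReady };

// Stub standing in for TClient::Ls; pretends the root becomes visible on the 3rd poll.
ELsStatus Ls(const std::string& /*path*/) {
    static int calls = 0;
    return ++calls >= 3 ? ELsStatus::Ok : ELsStatus::NotReady;
}

// Poll until the root path answers Ok, as the "WaitRootIsUp '<root>' success." lines record.
bool WaitRootIsUp(const std::string& root, int attempts = 100) {
    for (int i = 0; i < attempts; ++i) {
        if (Ls(root) == ELsStatus::Ok) {
            return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false; // give up so the test fails instead of hanging
}

int main() { return WaitRootIsUp("dc-1") ? 0 : 1; }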
>> KqpNewEngine::DeleteWithBuiltin+UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink >> TPQTabletTests::All_New_Partitions_In_Another_Tablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 2502, MsgBus: 16892 2025-04-04T17:25:10.583336Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510329690964781:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:10.583433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001985/r3tmp/tmpC5j0yG/pdisk_1.dat 2025-04-04T17:25:11.054619Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:11.056079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:11.056179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:11.059679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2502, node 1 2025-04-04T17:25:11.146801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:11.146842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:11.146862Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:11.147030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16892 TClient is connected to server localhost:16892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:11.733348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.766672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:11.777034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:11.966138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.149216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.235105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:13.881731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510342575868435:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.881873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.157643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.190744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.222914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.261226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.300362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.374044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.437601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510346870836246:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.437706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.437791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510346870836251:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.441353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:14.454384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510346870836253:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:14.514527Z node 1 :TX_PROXY ERROR: Actor# [1:7489510346870836305:3438] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
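[Editorial note] The KQP_WORKLOAD_SERVICE warnings above ("Resource pool default not found", the "doublechecking" retry, and the TX_PROXY "path exist, request accepts it" message) trace a benign create-if-missing race: the first queries on a fresh database each try to fetch the default resource pool, get NOT_FOUND, and race to create it; the losers find the path already created and accept that as success. A minimal sketch of that idempotent-create flow, with hypothetical names (the real logic lives in YDB's workload service, not in this form):

#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// Stubs for illustration only; the real calls go through the schemeshard.
EStatus FetchPool(const std::string& /*id*/) { return EStatus::NotFound; }
EStatus CreatePool(const std::string& /*id*/) { return EStatus::AlreadyExists; }

EStatus EnsureDefaultPool() {
    if (FetchPool("default") == EStatus::Ok) {
        return EStatus::Ok; // fast path: pool already provisioned
    }
    // NOT_FOUND: create the pool, then tolerate losing the race to a concurrent
    // session, which is what "path exist, request accepts it" records above.
    EStatus st = CreatePool("default");
    return st == EStatus::AlreadyExists ? EStatus::Ok : st;
}

int main() { return EnsureDefaultPool() == EStatus::Ok ? 0 : 1; }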
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 2025-04-04T17:25:15.583656Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510329690964781:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:15.583744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21779, MsgBus: 25934 2025-04-04T17:25:16.391331Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510353658796872:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001985/r3tmp/tmpnfYVo5/pdisk_1.dat 2025-04-04T17:25:16.435651Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:25:16.535102Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:16.537415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:16.537480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:16.539178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21779, node 2 2025-04-04T17:25:16.624761Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:16.624782Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:16.624795Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:16.624914Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25934 TClient is connected to server localhost:25934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:17.068310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
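[Editorial note] The ":8:28: Error: Module not loaded for script type: Python3" trace above appears to be a deliberately provoked failure: a YQL query declares a Python3 script UDF in a build where that script runtime is not linked, and the test still reports [GOOD], so the error is asserted rather than unexpected. A hedged sketch of a query shaped like the one the trace implies (the exact query text is not in this log), embedded as a C++ raw string the way these unit tests submit YQL:

// Hypothetical query text; position markers like :8:28 would point into this string.
static const char kScriptUdfQuery[] = R"yql(
$script = @@
def f():
    return "text"
@@;
$f = Python3::f(Callable<()->String>, $script);
SELECT $f();
)yql";
// Submitting this through KQP in a build without the Python3 script module fails
// at type annotation ("At function: ScriptUdf"), matching the error lines above.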
2025-04-04T17:25:17.083301Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:17.093378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.178420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.318207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.407904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:19.510373Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510366543700346:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.510473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.548273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.580312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.646224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.675464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.709323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.785307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.836243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510366543700867:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.836334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.836433Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510366543700872:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:19.839849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:19.848736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510366543700874:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:19.949448Z node 2 :TX_PROXY ERROR: Actor# [2:7489510366543700930:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:20.952142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.371817Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510353658796872:2215];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:21.371880Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:21.425248Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787521462, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2025-04-04T17:24:40.505094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510198193749235:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:40.506271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e96/r3tmp/tmpUBzG89/pdisk_1.dat 2025-04-04T17:24:40.872830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:40.901148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:40.901260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:40.902916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62154 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:24:41.133178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:41.164859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.285502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:41.345571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.856174Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510213632945251:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:43.856278Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e96/r3tmp/tmpfQC3x5/pdisk_1.dat 2025-04-04T17:24:43.996900Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:44.007646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:44.007729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:44.009172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14276 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:24:44.177224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:44.184552Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:44.204097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:44.286171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:44.332117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.874739Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510226669244594:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:46.874808Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e96/r3tmp/tmpGAMOo4/pdisk_1.dat 2025-04-04T17:24:47.005389Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:47.019404Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:47.019485Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:47.021122Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5066 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:24:47.203499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.224432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.298960Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:47.361012Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.309451Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510241921709374:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:50.309525Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e96/r3tmp/tmpXXX7QD/pdisk_1.dat 2025-04-04T17:24:50.418454Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:50.445538Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:50.445630Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:50.447001Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17393 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-04-04T17:24:50.663741Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.683107Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-04-04T17:25:02.921796Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.949050Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.957698Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.048931Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:03.116096Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.214394Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7489510317042772698:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:07.214452Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e96/r3tmp/tmpLYU2wR/pdisk_1.dat 2025-04-04T17:25:07.336935Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:07.353886Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:07.353994Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:07.356068Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13728 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:07.617533Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.632165Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:07.648062Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-04-04T17:25:07.652818Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.725119Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.784147Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.761212Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7489510334497771001:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:11.761271Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e96/r3tmp/tmp0CwSoS/pdisk_1.dat 2025-04-04T17:25:11.999192Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:12.016536Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:12.016651Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:12.019779Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28056 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:12.339997Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:12.366995Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:12.455020Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.597721Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:16.725307Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489510355767873836:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:16.725389Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001e96/r3tmp/tmpJjca7z/pdisk_1.dat 2025-04-04T17:25:16.918368Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:16.933617Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:16.933739Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:16.935809Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19016 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:25:17.304250Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.312052Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.328089Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:17.414739Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:17.492206Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2025-04-04T17:25:21.262659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.262724Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:21.462007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-04-04T17:25:21.485042Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-04-04T17:25:21.485223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:25:21.485263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-04-04T17:25:21.897138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.897211Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:21.950622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 
72057594046678944 is [2:69:2108] sender: [2:174:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:177:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:178:2067] recipient: [2:176:2170] Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:180:2067] recipient: [2:176:2170] 2025-04-04T17:25:22.001553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:22.001627Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:210:2067] recipient: [2:24:2071] 2025-04-04T17:25:22.032218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T17:25:22.039985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:237:2212] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:237:2212] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:247:2067] recipient: [2:240:2214] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:247:2067] recipient: [2:240:2214] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:250:2218] sender: [2:254:2067] recipient: [2:237:2212] Leader for TabletID 72075186233409547 is [2:253:2220] sender: [2:255:2067] recipient: [2:240:2214] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-04T17:25:22.076725Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:250:2218] sender: [2:288:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:253:2220] sender: [2:289:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-04T17:25:22.157969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at 
schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:339:2067] recipient: [2:335:2284] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:339:2067] recipient: [2:335:2284] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:342:2288] sender: [2:343:2067] recipient: [2:335:2284] Leader for TabletID 72075186233409548 is [2:342:2288] sender: [2:346:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-04T17:25:22.313511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:414:2332] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:414:2332] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:422:2336] sender: [2:423:2067] recipient: [2:414:2332] 2025-04-04T17:25:22.353236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:22.353287Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:422:2336] sender: [2:450:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-04-04T17:25:22.401457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:25:22.401501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:25:22.401730Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-04T17:25:22.401826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:25:22.421308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-04T17:25:22.421702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown 
txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:511:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:513:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:515:2067] recipient: [2:514:2407] Leader for TabletID 72057594046678944 is [2:516:2408] sender: [2:517:2067] recipient: [2:514:2407] 2025-04-04T17:25:22.466538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:22.466599Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:516:2408] sender: [2:544:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter >> TCacheTest::MigrationLostMessage >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate >> KqpYql::UuidPrimaryKeyBulkUpsert >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TCacheTest::PathBelongsToDomain [GOOD] >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-04-04T17:25:23.019926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:23.019989Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:23.189159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 
TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:23.209756Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-04T17:25:23.476863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:23.476922Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:23.521256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-04T17:25:23.526432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:23.533197Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-04T17:25:23.543218Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:225:2203], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:25:23.543507Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:227:2205], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] >> TCacheTest::WatchRoot >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn >> KqpNewEngine::DqSourceCount [GOOD] >> KqpNewEngine::DqSource >> TCacheTest::Attributes |95.6%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |95.6%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo >> KqpRanges::UpdateWhereInFullScan-UseSink [GOOD] >> KqpRanges::ScanKeyPrefix >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] Test command err: 2025-04-04T17:25:21.539023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:25:21.543870Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:25:21.544242Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:25:21.544306Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:25:21.544344Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:25:21.544404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:25:21.544465Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:21.544524Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:25:21.565894Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2025-04-04T17:25:21.566030Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:25:21.586338Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:21.589430Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" 
ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:25:21.589607Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:21.590597Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:21.590751Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:25:21.591178Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:25:21.591613Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-04T17:25:21.592587Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:25:21.592668Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2025-04-04T17:25:21.592736Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:25:21.593310Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:25:21.593432Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-04T17:25:21.593476Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:25:21.593701Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:21.593888Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:21.597000Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:25:21.597477Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:192:2203], now have 1 active actors on pipe 2025-04-04T17:25:21.598111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:195:2205], now have 1 active actors on pipe 2025-04-04T17:25:21.598226Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:21.946593Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:25:21.950173Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:25:21.950448Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:25:21.950490Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:25:21.950540Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:25:21.950574Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:25:21.950611Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:21.950678Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:25:21.968881Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:177:2192], now have 1 active actors on pipe 2025-04-04T17:25:21.968983Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:25:21.969214Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:21.971289Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: 
"/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T17:25:21.971412Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:21.972337Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:21.972455Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:21.972513Z node 2 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:25:21.972937Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:25:21.973194Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-04T17:25:21.973988Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:25:21.974046Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:185:2198] 2025-04-04T17:25:21.974096Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:25:21.974511Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:25:21.974617Z node 2 :PERSQUEUE DEBUG: [PQ ... 
037927937] TxId 67891, NewState CALCULATED 2025-04-04T17:25:24.291160Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-04-04T17:25:24.291367Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 153 MaxStep: 30153 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805967 } Partitions { } 2025-04-04T17:25:24.291468Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:24.298195Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:25:24.298284Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-04T17:25:24.298343Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-04-04T17:25:24.298392Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-04-04T17:25:24.298442Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-04-04T17:25:24.298482Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-04-04T17:25:24.298535Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-04T17:25:24.298583Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-04T17:25:24.298672Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE 2025-04-04T17:25:24.304235Z node 6 :PERSQUEUE DEBUG: Client pipe to tablet 72057594037927937 from 22222 is reset Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:24.330516Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:24.332803Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:25:24.334177Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2025-04-04T17:25:24.334236Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 110, PlanTxId 67891, ExecStep 110, ExecTxId 67891 2025-04-04T17:25:24.334404Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2025-04-04T17:25:24.334496Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67890, Step: 100, State: EXECUTED, WriteId: 2025-04-04T17:25:24.334572Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. 
Key tx_00000000000000067891, Status 0 2025-04-04T17:25:24.334611Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67891, Step: 110, State: CALCULATED, WriteId: 2025-04-04T17:25:24.334638Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Fix tx state 2025-04-04T17:25:24.334698Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=2, PlannedTxs.size=2 2025-04-04T17:25:24.334745Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2025-04-04T17:25:24.334792Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67890 EXECUTED 0 2025-04-04T17:25:24.334831Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67891 PLANNED 0 2025-04-04T17:25:24.335422Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:24.335479Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:24.335600Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:25:24.335953Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:25:24.336236Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:361:2339] 2025-04-04T17:25:24.337118Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:24.338388Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-04-04T17:25:24.338707Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T17:25:24.339435Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T17:25:24.339722Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-04-04T17:25:24.339772Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T17:25:24.339832Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:25:24.339869Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:25:24.339929Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:361:2339] 2025-04-04T17:25:24.339988Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:25:24.340048Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:25:24.340143Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-04-04T17:25:24.340318Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
2025-04-04T17:25:24.340371Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-04T17:25:24.340514Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-04T17:25:24.340565Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-04T17:25:24.340605Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-04T17:25:24.340652Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:25:24.340696Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-04T17:25:24.340745Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:25:24.340792Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-04T17:25:24.340825Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:25:24.340862Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-04T17:25:24.340904Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-04-04T17:25:24.340932Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PLANNED 2025-04-04T17:25:24.340961Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State PLANNED FrontTxId 67891 2025-04-04T17:25:24.340991Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-04T17:25:24.341031Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 110, ExecTxId 67891 2025-04-04T17:25:24.341104Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATING 2025-04-04T17:25:24.341153Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:25:24.341480Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-04-04T17:25:24.341887Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-04-04T17:25:24.341928Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-04-04T17:25:24.341965Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-04-04T17:25:24.342003Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-04T17:25:24.342043Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-04-04T17:25:24.342108Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-04-04T17:25:24.342150Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-04T17:25:24.342195Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-04-04T17:25:24.342322Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-04-04T17:25:24.342518Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 153 MaxStep: 30153 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 25769805967 } Partitions { } 2025-04-04T17:25:24.342625Z node 6 
:PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:25:24.342928Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-04T17:25:24.342981Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:24.347535Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:25:24.347599Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-04T17:25:24.347642Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-04-04T17:25:24.347683Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-04-04T17:25:24.347724Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-04-04T17:25:24.347766Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-04-04T17:25:24.347812Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-04-04T17:25:24.347856Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-04T17:25:24.347947Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TPartitionTests::UserActCount >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> TCacheTest::SystemView >> TCacheTest::CheckAccess [GOOD] >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> TCacheTest::MigrationUndo [GOOD] >> TCacheTest::SystemView [GOOD] >> TCacheTest::TableSchemaVersion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2025-04-04T17:25:24.790505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:24.790575Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:24.959763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:24.982065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-04-04T17:25:25.228650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-04-04T17:25:25.228712Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:25.282843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T17:25:25.294258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-04-04T17:25:25.298213Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:196:2186], for# user1@builtin, access# DescribeSchema 2025-04-04T17:25:25.298719Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:200:2190], for# user1@builtin, access# DescribeSchema >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> KqpNewEngine::JoinMultiConsumer [GOOD] >> KqpNewEngine::JoinSameKey >> KqpNewEngine::LocksInRoTx [GOOD] >> KqpNewEngine::LiteralKeys >> TCacheTest::TableSchemaVersion [GOOD] >> KqpScripting::ScriptExplainCreatedTable >> KqpYql::UuidPrimaryKeyDisabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-04-04T17:25:23.714865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:23.714935Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:23.879281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:69:2108] sender: [1:174:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:69:2108] sender: [1:177:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:69:2108] sender: [1:178:2067] recipient: [1:176:2170] Leader for TabletID 72057594046678944 is [1:179:2171] sender: [1:180:2067] recipient: [1:176:2170] 2025-04-04T17:25:23.937611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:23.937673Z node 1 :IMPORT WARN: Table profiles were not loaded 
TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:179:2171] sender: [1:210:2067] recipient: [1:24:2071] 2025-04-04T17:25:23.968254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T17:25:23.979159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:246:2067] recipient: [1:237:2212] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:246:2067] recipient: [1:237:2212] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:247:2067] recipient: [1:240:2214] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:247:2067] recipient: [1:240:2214] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:250:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:250:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:249:2218] sender: [1:253:2067] recipient: [1:237:2212] Leader for TabletID 72075186233409547 is [1:254:2220] sender: [1:255:2067] recipient: [1:240:2214] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-04T17:25:24.009807Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:249:2218] sender: [1:288:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:254:2220] sender: [1:289:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-04T17:25:24.090047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:339:2067] recipient: [1:335:2284] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:339:2067] recipient: [1:335:2284] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:340:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:340:2067] recipient: 
[1:24:2071] Leader for TabletID 72075186233409548 is [1:342:2288] sender: [1:343:2067] recipient: [1:335:2284] Leader for TabletID 72075186233409548 is [1:342:2288] sender: [1:344:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-04T17:25:24.273980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:419:2067] recipient: [1:414:2332] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:419:2067] recipient: [1:414:2332] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:421:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:422:2336] sender: [1:423:2067] recipient: [1:414:2332] 2025-04-04T17:25:24.316813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:24.316893Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [1:422:2336] sender: [1:450:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-04-04T17:25:24.369699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:25:24.369758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:25:24.370081Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-04T17:25:24.370198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:25:24.388314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-04T17:25:24.388419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 
SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-04-04T17:25:24.430540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-04-04T17:25:24.544666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:624:2067] recipient: [1:614:2502] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:624:2067] recipient: [1:614:2502] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:626:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:626:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:627:2506] sender: [1:628:2067] recipient: [1:614:2502] Leader for TabletID 72075186233409550 is [1:627:2506] sender: [1:629:2067] recipient: [1:24:2071] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 TestWaitNotification: OK eventTxId 110 2025-04-04T17:25:25.018533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:25.018599Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:25.072571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-04T17:25:25.078136Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:173:2169], Recipient [2:69:2108]: NActors::TEvents::TEvPoison 2025-04-04T17:25:25.078933Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:174:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:177:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:178:2067] recipient: [2:176:2170] Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:180:2067] recipient: [2:176:2170] 2025-04-04T17:25:25.085118Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: StateInit, received ev ... : 2] was 2 2025-04-04T17:25:25.675549Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2025-04-04T17:25:25.675586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:25:25.675615Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2025-04-04T17:25:25.675643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-04-04T17:25:25.675768Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:25:25.675958Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.676156Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-04-04T17:25:25.676501Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.676618Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.677005Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.677077Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.677330Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.677497Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.677606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.677819Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.677906Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.678128Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.678373Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.678546Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.678599Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.678652Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.678921Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:25:25.680359Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:25:25.680526Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
[RootDataErasureManager] Stop 2025-04-04T17:25:25.681451Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [2:515:2401], Recipient [2:515:2401]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:25:25.681497Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:25:25.682156Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:25:25.682387Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:25:25.682650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:25:25.682707Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:25:25.682755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:25:25.682793Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:25:25.682946Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:531:2401], Recipient [2:515:2401]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:25:25.682983Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:25:25.683036Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:25:25.694878Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:159:2156], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:25:25.695040Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:159:2156], cacheItem# { Subscriber: { Subscriber: [2:381:2318] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:25:25.695377Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:543:2418], recipient# [2:542:2417], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] 
} { Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:25:25.695864Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:159:2156], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:25:25.696009Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:159:2156], cacheItem# { Subscriber: { Subscriber: [2:390:2321] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:25:25.696236Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:545:2420], recipient# [2:544:2419], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:25:25.696705Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:159:2156], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:25:25.696819Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:159:2156], cacheItem# { Subscriber: { 
Subscriber: [2:399:2324] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 250 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:25:25.697011Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:547:2422], recipient# [2:546:2421], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-04-04T17:25:25.527548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:25.527615Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:25.692232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-04T17:25:25.998007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:25.998091Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:26.048406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-04-04T17:25:26.083891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for 
txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:25:26.232818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 >> TCacheTest::Recreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 6032, MsgBus: 8275 2025-04-04T17:25:22.445554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510379444428351:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:22.445618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00197c/r3tmp/tmpzZHtUJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6032, node 1 2025-04-04T17:25:22.846600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:22.852170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:22.852284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:22.853427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:22.873728Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:25:22.873831Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:25:22.940005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:22.940042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:22.940056Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:22.940214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8275 TClient is connected to server localhost:8275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:23.502395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:23.523023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:25.383427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510392329330891:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.383574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.625294Z node 1 :TX_PROXY ERROR: Actor# [1:7489510392329330914:2307] txid# 281474976710658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-04-04T17:25:25.648520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510392329330922:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.648610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.664176Z node 1 :TX_PROXY ERROR: Actor# [1:7489510392329330929:2315] txid# 281474976710659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-04-04T17:25:25.673753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510392329330937:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.673845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.687806Z node 1 :TX_PROXY ERROR: Actor# [1:7489510392329330944:2323] txid# 281474976710660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2025-04-04T17:25:25.705900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510392329330952:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.706001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.716056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:25.874400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510392329331042:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:25.874626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> KqpReturning::ReturningWorksIndexedDelete-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService >> TCacheTest::SysLocks [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] >> TPQTest::TestSeveralOwners ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-04-04T17:25:27.385736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:27.385795Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:27.548322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:27.564701Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:25:27.566428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:25:27.597499Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:25:27.608244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-04-04T17:25:27.821903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:27.821962Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:27.872576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 14208, MsgBus: 17537 2025-04-04T17:25:23.789275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510385421500188:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:23.789312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001978/r3tmp/tmpMAt3Cx/pdisk_1.dat 2025-04-04T17:25:24.194671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:24.237862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:24.237998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:24.240164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14208, node 1 2025-04-04T17:25:24.313293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:24.313320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:24.313329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:24.313487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17537 TClient is connected to server localhost:17537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:24.841844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:26.942159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510398306402607:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:26.942308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.161426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.316137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510402601370014:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.316246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.316337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510402601370019:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.320744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:25:27.331097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510402601370021:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:25:27.431379Z node 1 :TX_PROXY ERROR: Actor# [1:7489510402601370073:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized >> KqpNewEngine::DeleteWithBuiltin-UseSink [GOOD] >> KqpNewEngine::DeleteON >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> KqpAgg::AggWithHop [GOOD] >> KqpAgg::GroupByLimit >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> KqpNewEngine::DqSourceLocksEffects [GOOD] >> KqpNewEngine::FullScanCount >> Cdc::KeysOnlyLog[PqRunner] >> Cdc::UuidExchange[PqRunner] >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> KqpNewEngine::ComplexLookupLimit >> KqpKv::ReadRows_PgKey [GOOD] >> KqpKv::ReadRows_Nulls >> KqpScripting::Pure [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> KqpYql::BinaryJsonOffsetBound >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup >> ReadOnlyVDisk::TestWrites >> ReadOnlyVDisk::TestStorageLoad >> KqpNewEngine::DqSource [GOOD] >> KqpNewEngine::DqSourceLiteralRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-04-04T17:25:24.714270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:24.714328Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:24.850698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:25:24.868811Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:25:24.870674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 
State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:25:24.900858Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-04-04T17:25:25.384447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:25.384503Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:25.431460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 13861, MsgBus: 21020 2025-04-04T17:25:18.585573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510364010403271:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:18.585746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001980/r3tmp/tmpJnzUwF/pdisk_1.dat 2025-04-04T17:25:19.043657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:19.043764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:19.046356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13861, node 1 2025-04-04T17:25:19.095468Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:19.122506Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:25:19.163213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:19.163246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:19.163255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:19.163392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21020 TClient is connected to server localhost:21020 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:19.710467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:19.737456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:19.871817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:20.034592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:20.107636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:21.782284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510376895306744:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.782421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.048221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.079293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.107710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.139368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.208547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.259427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.341298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510381190274559:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.341364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.341554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510381190274564:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.344544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:22.352932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510381190274566:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:22.433748Z node 1 :TX_PROXY ERROR: Actor# [1:7489510381190274621:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:23.572277Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510364010403271:2265];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:23.572351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:24.505225Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787524542, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 18090, MsgBus: 10707 2025-04-04T17:25:25.598364Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510392671465697:2288];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001980/r3tmp/tmpgaaTvx/pdisk_1.dat 2025-04-04T17:25:25.599807Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:25:25.661962Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18090, node 2 2025-04-04T17:25:25.708037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:25.708139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:25.713913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:25.754787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:25.754811Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:25.754819Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:25.754917Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10707 TClient is connected to server localhost:10707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:26.192729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:26.202318Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:26.216981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:26.297442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:26.462234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:26.534061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:28.723325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510405556369090:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.723443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.759896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.793155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.862591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.894363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.924615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.964150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.024155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510409851336902:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.024242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.024429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510409851336907:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.028180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:29.042987Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510409851336909:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:29.103714Z node 2 :TX_PROXY ERROR: Actor# [2:7489510409851336962:3438] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpRanges::ScanKeyPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 24978, MsgBus: 30002 2025-04-04T17:25:17.652233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510357037937706:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:17.652362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001981/r3tmp/tmpQZsD2Z/pdisk_1.dat 2025-04-04T17:25:17.989930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:18.038443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:18.038542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:18.040404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24978, node 1 2025-04-04T17:25:18.094963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:18.094994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:18.095008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:18.095144Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30002 TClient is connected to server localhost:30002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:18.688959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:18.714540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:18.897470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:19.086414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:19.159626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:20.916788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510369922841362:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:20.916937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.195903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.224047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.254685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.284183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.311892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.345552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.431692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510374217809173:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.431806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.431842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510374217809178:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.435182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:21.443857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510374217809180:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:21.528047Z node 1 :TX_PROXY ERROR: Actor# [1:7489510374217809235:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:22.652240Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510357037937706:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:22.652301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:22.856566Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787522876, txId: 281474976710671] shutting down 2025-04-04T17:25:23.216604Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787523247, txId: 281474976710673] shutting down 2025-04-04T17:25:23.898683Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787523912, txId: 281474976710677] shutting down Trying to start YDB, gRPC: 31548, MsgBus: 28218 2025-04-04T17:25:25.028632Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510393480824978:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:25.028707Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001981/r3tmp/tmpamL3xD/pdisk_1.dat 2025-04-04T17:25:25.127290Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:25.137101Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:25.137191Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:25.138832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31548, node 2 2025-04-04T17:25:25.231591Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:25.231615Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:25.231621Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:25.231700Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28218 TClient is connected to server localhost:28218 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:25.635691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:25.645801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:25.727503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:25.891449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:25.970029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:28.508760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510406365728637:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.508926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.531568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.562201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.592407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.624400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.655710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.727717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.780877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510406365729152:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.780990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510406365729157:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.780995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.784029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:28.793164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510406365729159:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:28.880133Z node 2 :TX_PROXY ERROR: Actor# [2:7489510406365729212:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:30.030340Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510393480824978:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:30.079209Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:30.137954Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787530163, txId: 281474976715671] shutting down 2025-04-04T17:25:30.481336Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787530513, txId: 281474976715673] shutting down >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> KqpNewEngine::MultipleBroadcastJoin [GOOD] >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral >> ReadOnlyVDisk::TestGarbageCollect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 27776, MsgBus: 6720 2025-04-04T17:24:47.697109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510228319410856:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:47.698183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001494/r3tmp/tmpcR0xUB/pdisk_1.dat 2025-04-04T17:24:48.051155Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27776, node 1 2025-04-04T17:24:48.117663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:48.117774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:48.119399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:48.130398Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:48.130433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:48.130444Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:48.130620Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6720 TClient is connected to server localhost:6720 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:48.665692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.700525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.848866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.010822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.107778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.762115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510241204314492:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.762219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.004191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.071438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.105377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.173514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.207988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.243391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.284662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510245499282303:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.284754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.284922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510245499282308:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.288194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:51.298740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510245499282310:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:51.378681Z node 1 :TX_PROXY ERROR: Actor# [1:7489510245499282363:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:52.697644Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510228319410856:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:52.697709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28142, MsgBus: 14501 2025-04-04T17:24:53.517709Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510257472626310:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:53.517762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001494/r3tmp/tmp5y6nBL/pdisk_1.dat 2025-04-04T17:24:53.624435Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28142, node 2 2025-04-04T17:24:53.650881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:53.650967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:53.652726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:53.682922Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:53.682948Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:53.682955Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:53.683061Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14501 TClient is connected to server localhost:14501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:24:54.082498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.089135Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:54.093801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.157304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.305031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.369309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... : 72057594046644480 2025-04-04T17:25:21.217299Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:21.272017Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510375021511443:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.272125Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.272247Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510375021511448:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:21.277924Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:21.288063Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510375021511450:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:21.381081Z node 5 :TX_PROXY ERROR: Actor# [5:7489510375021511504:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:21.903365Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510353546672688:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:21.903447Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:22.406212Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.688865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.875643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.075605Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.439056Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 10330, MsgBus: 5380 2025-04-04T17:25:25.122646Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510393912877505:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:25.122726Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001494/r3tmp/tmpaRtXKY/pdisk_1.dat 2025-04-04T17:25:25.280199Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:25.293753Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:25.293882Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:25.298287Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10330, node 6 2025-04-04T17:25:25.355329Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:25.355355Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:25.355366Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:25.355534Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5380 TClient is connected to server localhost:5380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:25.966610Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:25.973957Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:25.985644Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:26.069749Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:26.339501Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:26.427874Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:29.357955Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510411092748453:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.358083Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.395194Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.449006Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.498126Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.542556Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.583815Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.663117Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.711655Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510411092748969:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.711763Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.711804Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510411092748974:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.716496Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:29.729149Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510411092748976:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:29.824743Z node 6 :TX_PROXY ERROR: Actor# [6:7489510411092749029:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:30.123016Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510393912877505:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:30.123094Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD] Test command err: Trying to start YDB, gRPC: 22658, MsgBus: 23327 2025-04-04T17:24:42.598504Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510207323065106:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:42.598773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014aa/r3tmp/tmpsOQw27/pdisk_1.dat 2025-04-04T17:24:42.907584Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22658, node 1 2025-04-04T17:24:42.969923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:42.970067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:42.971736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:42.980360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:42.980411Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:42.980449Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:42.980584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23327 TClient is connected to server localhost:23327 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:43.442595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.461803Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.471107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:24:43.602215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.758059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:43.820533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:45.383382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510220207968780:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:45.383515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:45.627627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.684688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.719267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.747878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.814192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.882680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:45.928110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510220207969302:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:45.928175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510220207969307:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:45.928192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:45.931546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:45.940424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510220207969309:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:46.002804Z node 1 :TX_PROXY ERROR: Actor# [1:7489510220207969361:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19018, MsgBus: 14548 2025-04-04T17:24:47.987993Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510230368275032:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:47.988043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014aa/r3tmp/tmpya6T4J/pdisk_1.dat 2025-04-04T17:24:48.105375Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:48.118501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:48.118593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:48.119964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19018, node 2 2025-04-04T17:24:48.176093Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:48.176112Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:48.176117Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:48.176196Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14548 TClient is connected to server localhost:14548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:24:48.555889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.576537Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:48.589267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:48.663308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.846495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.922566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:51.106635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510247548145989:2405], DatabaseId: /Root, ... 474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:18.225866Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:18.262352Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:18.303160Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:18.343003Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:18.420185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:18.513709Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510364953560952:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:18.513798Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:18.513904Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510364953560957:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:18.525139Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:18.540868Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510364953560959:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:18.597934Z node 6 :TX_PROXY ERROR: Actor# [6:7489510364953561014:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:18.727144Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510343478722186:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:18.727227Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8968, MsgBus: 4956 2025-04-04T17:25:22.619978Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510380264133805:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:22.620024Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014aa/r3tmp/tmp0nzA6t/pdisk_1.dat 2025-04-04T17:25:22.765903Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:22.801823Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:22.801944Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:22.803501Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8968, node 7 2025-04-04T17:25:22.898858Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:22.898889Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:22.898899Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:22.899063Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4956 TClient is connected to server localhost:4956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:23.583937Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:23.611211Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:23.750171Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:23.972151Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:25:24.057149Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.021890Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510401738972064:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.022005Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.093326Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.169608Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.213402Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.255169Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.333267Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.372660Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.428304Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510401738972576:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.428401Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510401738972581:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.428409Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.432483Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:27.442592Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510401738972583:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:27.534759Z node 7 :TX_PROXY ERROR: Actor# [7:7489510401738972636:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:27.620258Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510380264133805:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:27.620345Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:28.915736Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.955035Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.100636Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 [] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] >> KqpSort::Offset >> KqpNewEngine::LiteralKeys [GOOD] >> KqpSqlIn::CantRewrite [GOOD] >> KqpSqlIn::ComplexKey >> KqpNewEngine::JoinSameKey [GOOD] >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg >> KqpScanArrowFormat::AggregateCountStar >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinDictWithPure |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPQTabletTests::ProposeTx_Missing_Operations >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: Trying to start YDB, gRPC: 11493, MsgBus: 4689 2025-04-04T17:24:46.813712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510225048886295:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:46.813763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001495/r3tmp/tmpGqjlzU/pdisk_1.dat 2025-04-04T17:24:47.138971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11493, node 1 2025-04-04T17:24:47.184159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:47.184275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:47.186566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:47.240810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:47.240840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:47.240851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:47.240982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4689 TClient is connected to server localhost:4689 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:47.760572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.790237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.896399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.049705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:48.127392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.912595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510237933789964:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:49.912724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.212366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:50.245891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:50.314263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:50.347708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:50.389491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:50.459435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:50.541595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510242228757783:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.541682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.541836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510242228757788:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:50.545359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:50.560446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510242228757790:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:50.632337Z node 1 :TX_PROXY ERROR: Actor# [1:7489510242228757845:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:51.541080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.713255Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:24:51.814382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510225048886295:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:51.814441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64602, MsgBus: 61014 2025-04-04T17:24:52.477353Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510252774582462:2056];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:52.477840Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001495/r3tmp/tmppIFiIA/pdisk_1.dat 2025-04-04T17:24:52.583666Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64602, node 2 2025-04-04T17:24:52.631418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:52.632249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:52.639042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:52.651629Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:52.651648Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:52.651656Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:52.651770Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61014 TClient is connected to server localhost:61014 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:53.084121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.104130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:53.182504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.322682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiti ... : [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510379046967741:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.718768Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.772272Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.823255Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.872663Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.946805Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.994829Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.049096Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.117918Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510383341935553:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:23.118050Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:23.118682Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510383341935558:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:23.124907Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:23.150407Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510383341935560:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:23.249579Z node 6 :TX_PROXY ERROR: Actor# [6:7489510383341935616:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:23.816598Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510361867096779:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:23.816710Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29956, MsgBus: 6120 2025-04-04T17:25:26.507206Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510397502022790:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:26.507289Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001495/r3tmp/tmpaQNsEK/pdisk_1.dat 2025-04-04T17:25:26.624548Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:26.639695Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:26.639805Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:26.643444Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29956, node 7 2025-04-04T17:25:26.778825Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:26.778854Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:26.778864Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:26.779004Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6120 TClient is connected to server localhost:6120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:27.417907Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.433013Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.513426Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.720067Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.813673Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:30.860433Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510414681893738:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.860559Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.914763Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.001099Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.041337Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.081914Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.117848Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.164993Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.215902Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510418976861546:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.215988Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510418976861551:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.216051Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.220934Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:31.234444Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510418976861553:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:31.316888Z node 7 :TX_PROXY ERROR: Actor# [7:7489510418976861606:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:31.507358Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510397502022790:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:31.507448Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-04-04T17:25:21.266602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.266670Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:21.441778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-04-04T17:25:21.457143Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:173:2169], Recipient [1:69:2108]: NActors::TEvents::TEvPoison 2025-04-04T17:25:21.457926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:69:2108] sender: [1:174:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:69:2108] sender: [1:177:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:69:2108] sender: [1:178:2067] recipient: [1:176:2170] Leader for TabletID 72057594046678944 is [1:179:2171] sender: [1:180:2067] recipient: [1:176:2170] 2025-04-04T17:25:21.467625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:176:2170], Recipient [1:179:2171]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:25:21.481674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:176:2170], Recipient [1:179:2171]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:25:21.481886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:176:2170], Recipient [1:179:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:25:21.487871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:25:21.487968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:25:21.488025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2025-04-04T17:25:21.488081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:25:21.488120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:25:21.488144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:25:21.488207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:25:21.488342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:25:21.488729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:25:21.505786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:25:21.507285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:25:21.507451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:25:21.507596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:179:2171]: TSystem::Undelivered 2025-04-04T17:25:21.507633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-04-04T17:25:21.507660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:21.507685Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:21.508047Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Clear operation queue and active pipes 2025-04-04T17:25:21.508098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:25:21.509081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-04-04T17:25:21.509209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.509308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.510103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.510200Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-04-04T17:25:21.511292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.511430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.511633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.511731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.511801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.512493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.512779Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.512893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.513882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.514011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.514846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.514928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.516325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.516533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.516633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.516812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.517601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.517804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.517905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.517961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.519304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:25:21.520598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:25:21.520712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T17:25:21.521200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [1:179:2171], Recipient [1:179:2171]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:25:21.521246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-04-04T17:25:21.521920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:25:21.521966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:25:21.522070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:25:21.522110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:25:21.522143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:25:21.522171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:25:21.522456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:195:2171], Recipient [1:179:2171]: 
NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:25:21.522494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-04-04T17:25:21.522535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:179:2171] sender: [1:210:2067] recipient: [1:24:2071] 2025-04-04T17:25:21.544276Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:209:2188], Recipient [1:179:2171]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-04-04T17:25:21.544339Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:25:21.674854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:25:21.675123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:25:21.675260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:25:21.675447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-04-04T17:25:21.675650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-04-04T17:25:21.675789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:25:21.675835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 7205759404667894 ... 
51615 2025-04-04T17:25:22.191392Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvSyncTenantSchemeShard 2025-04-04T17:25:22.191451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-04-04T17:25:22.191557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:25:22.191602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-04-04T17:25:22.191695Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:422:2336], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-04-04T17:25:22.191770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:25:22.191801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-04-04T17:25:22.191948Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:485:2381], Recipient [1:422:2336]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:522:2402] Leader: 1 Dead: 0 Generation: 4 VersionInfo: } 2025-04-04T17:25:22.191986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:25:22.192022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409549 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-04-04T17:25:22.478204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:22.478277Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-04-04T17:25:22.525250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:174:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:177:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:69:2108] sender: [2:178:2067] recipient: [2:176:2170] Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:180:2067] recipient: [2:176:2170] 2025-04-04T17:25:22.566278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:22.566339Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:179:2171] sender: [2:210:2067] recipient: [2:24:2071] 2025-04-04T17:25:22.593855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-04-04T17:25:22.600937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:237:2212] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:246:2067] recipient: [2:237:2212] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:247:2067] recipient: [2:240:2214] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:247:2067] recipient: [2:240:2214] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:249:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:251:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:250:2218] sender: [2:254:2067] recipient: [2:237:2212] Leader for TabletID 72075186233409547 is [2:253:2220] sender: [2:255:2067] recipient: [2:240:2214] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-04-04T17:25:22.619728Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:250:2218] sender: [2:288:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:253:2220] sender: [2:289:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait 
txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-04-04T17:25:22.662786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:339:2067] recipient: [2:335:2284] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:339:2067] recipient: [2:335:2284] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:342:2288] sender: [2:343:2067] recipient: [2:335:2284] Leader for TabletID 72075186233409548 is [2:342:2288] sender: [2:346:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-04-04T17:25:22.790610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:414:2332] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:419:2067] recipient: [2:414:2332] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:422:2336] sender: [2:423:2067] recipient: [2:414:2332] 2025-04-04T17:25:22.836467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:22.836529Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:422:2336] sender: [2:450:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-04-04T17:25:22.893475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:25:22.893533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-04-04T17:25:22.893851Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-04-04T17:25:22.893975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-04-04T17:25:22.912229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-04-04T17:25:22.912579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 
TestModificationResults wait txId: 108 2025-04-04T17:25:22.961090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:554:2067] recipient: [2:551:2440] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:554:2067] recipient: [2:551:2440] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:558:2444] sender: [2:559:2067] recipient: [2:551:2440] Leader for TabletID 72075186233409550 is [2:558:2444] sender: [2:562:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-04-04T17:25:24.070587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:25:24.070649Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:24.130356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:25:24.130411Z node 2 :IMPORT WARN: Table profiles were not loaded >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] >> KqpYql::BinaryJsonOffsetNormal >> KqpKv::ReadRows_Nulls [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> KqpMergeCn::SortBy_Int32 >> KqpRanges::LiteralOrCompisiteCollision [GOOD] >> KqpRanges::MergeRanges >> TPQTabletTests::ProposeTx_Unknown_Partition_1 |95.6%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpSort::TopParameter >> KqpNewEngine::DeleteON [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: Trying to start YDB, gRPC: 18491, MsgBus: 61295 2025-04-04T17:24:44.899952Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510215083389692:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:44.900365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014a2/r3tmp/tmpyt7DcY/pdisk_1.dat 2025-04-04T17:24:45.260312Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18491, node 1 2025-04-04T17:24:45.313203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:45.313423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:45.317505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:45.356810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:45.356843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:45.356852Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:45.356985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61295 TClient is connected to server localhost:61295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:45.833217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:45.845692Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:24:45.860252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:46.000064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:46.164074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:46.230110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:47.829819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510227968293374:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:47.829953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:48.027694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.087206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.118074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.153785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.182894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.214900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:48.298632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510232263261188:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:48.298744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:48.298996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510232263261193:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:48.302631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:48.314573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510232263261195:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:48.388991Z node 1 :TX_PROXY ERROR: Actor# [1:7489510232263261250:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19005, MsgBus: 4396 2025-04-04T17:24:50.309987Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510242426741159:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:50.310035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014a2/r3tmp/tmpoVoe29/pdisk_1.dat 2025-04-04T17:24:50.449558Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:50.476977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:50.477059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:50.478015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19005, node 2 2025-04-04T17:24:50.534368Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:50.534394Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:50.534403Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:50.534522Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4396 TClient is connected to server localhost:4396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:50.971335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:50.978870Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:51.000354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.082042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:51.214298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:51.274309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.524011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510255311644810:2406], DatabaseId: /Root, Po ... [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510378486003491:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.748955Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:22.809249Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.853744Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.926489Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:22.968382Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.011141Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.090634Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.160567Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510382780971308:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:23.160670Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:23.160981Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510382780971313:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:23.167327Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:23.177757Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510382780971315:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:23.235677Z node 6 :TX_PROXY ERROR: Actor# [6:7489510382780971368:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:23.699265Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510361306132547:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:23.699351Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5860, MsgBus: 27395 2025-04-04T17:25:26.473570Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510397261546940:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:26.473643Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014a2/r3tmp/tmpV3M20n/pdisk_1.dat 2025-04-04T17:25:26.657121Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:26.692313Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:26.692434Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:26.695749Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5860, node 7 2025-04-04T17:25:26.774801Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:26.774826Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:26.774839Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:26.775002Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27395 TClient is connected to server localhost:27395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
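The "WaitRootIsUp 'Root'... TClient::Ls ... WaitRootIsUp 'Root' success" lines just above come from the test client polling the scheme root until an Ls request answers with StatusCode SUCCESS. Below is a minimal sketch of that polling pattern; SchemeLsSucceeds() is a hypothetical stand-in for the real TClient::Ls call, not the actual test-framework API, and is stubbed so the sketch is self-contained:

    #include <chrono>
    #include <string>
    #include <thread>

    // Hypothetical stand-in for the TClient::Ls request seen in the log;
    // a real client would issue the Ls RPC and inspect the StatusCode.
    static bool SchemeLsSucceeds(const std::string& /*path*/) {
        return true; // stubbed: always "SUCCESS" so the sketch runs as-is
    }

    // Retry loop matching the "WaitRootIsUp 'Root'..." / "success" lines.
    bool WaitRootIsUp(const std::string& root, int attempts = 10) {
        for (int i = 0; i < attempts; ++i) {
            if (SchemeLsSucceeds(root)) {
                return true;
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return false;
    }

    int main() { return WaitRootIsUp("Root") ? 0 : 1; }
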
2025-04-04T17:25:27.390676Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.419472Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.495514Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.707631Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.799163Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:31.148068Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510418736385187:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.148193Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.206005Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.280301Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.329172Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.379207Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.422707Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.465577Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.474032Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510397261546940:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:31.474104Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:31.538071Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510418736385705:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.538213Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.538809Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510418736385710:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:31.543895Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:31.554270Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510418736385712:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:31.626689Z node 7 :TX_PROXY ERROR: Actor# [7:7489510418736385767:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TPQTabletTests::ProposeTx_Unknown_WriteId >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> ReadOnlyVDisk::TestReads >> ReadOnlyVDisk::TestWrites [GOOD] >> ReadOnlyVDisk::TestSync >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose >> KqpScripting::EndOfQueryCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 3214378662695064188 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-04-04T17:25:33.300473Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-04-04T17:25:33.305754Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-04T17:25:33.311062Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-04T17:25:33.314059Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-04-04T17:25:33.322761Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-04-04T17:25:33.325701Z 1 00h01m30.060512s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-04-04T17:25:33.328866Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-04-04T17:25:33.340382Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-04-04T17:25:34.464043Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:34.464184Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:34.464332Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:34.465346Z 1 00h03m30.111536s :BS_PROXY_PUT ERROR: [83018704788253a3] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# 
[1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-04-04T17:25:34.467326Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:34.467501Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:34.468761Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-04-04T17:25:34.470834Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:34.471828Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:34.472796Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# 
ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-04-04T17:25:34.474185Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:34.475449Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:34.476116Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-04-04T17:25:34.477390Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sende ... 
2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-04-04T17:25:36.945797Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:36.945978Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-04-04T17:25:36.950033Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:36.951884Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-04-04T17:25:36.958354Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-04-04T17:25:36.961696Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:36.961807Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-04-04T17:25:36.965378Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:36.965476Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-04-04T17:25:36.969324Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:36.969418Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-04-04T17:25:36.972798Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:36.973104Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2025-04-04T17:25:36.976692Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:36.976830Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-04-04T17:25:36.980203Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:36.980372Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService [GOOD] >> KqpReturning::ReturningTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] Test command err: 2025-04-04T17:25:35.992018Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:25:35.996723Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:25:35.997045Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:25:35.997101Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:25:35.997137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:25:35.997175Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:25:35.997236Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:35.997293Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:25:36.014447Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2025-04-04T17:25:36.014591Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:25:36.030856Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:36.033045Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:25:36.033164Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:36.033843Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } 
Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:36.033942Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:25:36.034255Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:25:36.034600Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-04T17:25:36.035522Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:25:36.035585Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2025-04-04T17:25:36.035638Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:25:36.036145Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:25:36.036251Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-04T17:25:36.036291Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:25:36.036452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:36.036579Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:36.038760Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:25:36.039166Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:192:2203], now have 1 active actors on pipe 2025-04-04T17:25:36.039791Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:195:2205], now have 1 active actors on pipe 2025-04-04T17:25:36.040454Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 4294969487 } TxId: 2 Data { Immediate: false } 2025-04-04T17:25:36.040509Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 empty list of operations 2025-04-04T17:25:36.040559Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-04-04T17:25:36.461456Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:25:36.465036Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:25:36.465299Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:25:36.465364Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:25:36.465399Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:25:36.465430Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:25:36.465472Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:36.465542Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:25:36.485746Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:177:2192], now have 1 active actors on pipe 2025-04-04T17:25:36.485868Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:25:36.486117Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:36.488650Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T17:25:36.488790Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:36.489466Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } 
Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:36.489598Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:25:36.489970Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:25:36.490276Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:185:2198] 2025-04-04T17:25:36.491178Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:25:36.491229Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:185:2198] 2025-04-04T17:25:36.491300Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:25:36.491702Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:25:36.491795Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 2 2025-04-04T17:25:36.491840Z n ... meSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:37.859683Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } 2025-04-04T17:25:37.859789Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:37.860508Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [5:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 5 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY 
AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 5 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:37.860626Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:25:37.860991Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:25:37.861269Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:185:2198] 2025-04-04T17:25:37.862204Z node 5 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:25:37.862302Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [5:185:2198] 2025-04-04T17:25:37.862358Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:25:37.862690Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:25:37.862774Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 5 2025-04-04T17:25:37.862819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 5 done 2025-04-04T17:25:37.862976Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:37.863114Z node 5 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:37.865681Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:25:37.866072Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:192:2203], now have 1 active actors on pipe 2025-04-04T17:25:37.866731Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:195:2205], now have 1 active actors on pipe 2025-04-04T17:25:37.866836Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:25:37.866881Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-04T17:25:37.866927Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] partition {0, {0, 3}, 100000} for WriteId {0, 3} 2025-04-04T17:25:37.867106Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvSubscribeLock for WriteId {0, 3} 2025-04-04T17:25:37.867200Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:37.869214Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:25:37.869756Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:25:37.870079Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:25:37.870341Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] bootstrapping {0, {0, 3}, 100000} [5:201:2210] 2025-04-04T17:25:37.871120Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDiskStatusStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:37.872235Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitMetaStep 2025-04-04T17:25:37.872469Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-04-04T17:25:37.872801Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-04-04T17:25:37.873000Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitDataStep 2025-04-04T17:25:37.873039Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-04-04T17:25:37.873079Z node 5 :PERSQUEUE INFO: [topic:{0, {0, 3}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:25:37.873119Z node 5 :PERSQUEUE DEBUG: [topic:{0, {0, 3}, 100000}:Initializer] Initializing completed. 
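The supportive-partition bootstrap above walks a fixed chain of initializer steps, each logged as "Start initializing step ..." and finished with "Initializing completed.". The following is a simplified sketch of that sequential step runner, using the step names taken from the log; the runner itself is a hypothetical reduction of the real initializer, which executes steps asynchronously over TEvKeyValue reads rather than in a plain loop:

    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
        // Step order as it appears in the log for partition {0, {0, 3}, 100000}.
        const std::vector<std::string> steps = {
            "TInitConfigStep",
            "TInitInternalFieldsStep",
            "TInitDiskStatusStep",
            "TInitMetaStep",
            "TInitInfoRangeStep",
            "TInitDataRangeStep",
            "TInitDataStep",
            "TInitEndWriteTimestampStep",
        };
        for (const auto& step : steps) {
            std::cout << "Start initializing step " << step << "\n";
            // The real steps issue KV requests and may skip work, e.g.
            // "Initializing EndWriteTimestamp skipped because already
            // initialized." in the log above.
        }
        std::cout << "Initializing completed.\n";
        return 0;
    }
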
2025-04-04T17:25:37.873169Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {0, 3}, 100000} generation 2 [5:201:2210] 2025-04-04T17:25:37.873222Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateInit] SYNC INIT topic topic partitition {0, {0, 3}, 100000} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:25:37.873269Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] Process pending events. Count 0 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:25:37.873660Z node 5 :PERSQUEUE INFO: new Cookie -=[ 0wn3r ]=-|a3f8633-b2f8e476-e407b039-21da3a98_0 generated for partition {0, {0, 3}, 100000} topic 'topic' owner -=[ 0wn3r ]=- 2025-04-04T17:25:37.873854Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::ReplyOwnerOk. Partition: {0, {0, 3}, 100000} 2025-04-04T17:25:37.873945Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-04-04T17:25:37.874344Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server disconnected, pipe [5:195:2205] destroyed 2025-04-04T17:25:37.874418Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {0, {0, 3}, 100000}, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:25:37.874567Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:213:2217], now have 1 active actors on pipe 2025-04-04T17:25:37.874788Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 21474838671 } TxId: 2 Data { Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Immediate: false WriteId { NodeId: 0 KeyId: 3 } } 2025-04-04T17:25:37.874852Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PartitionId {0, {0, 3}, 100000} for WriteId {0, 3} 2025-04-04T17:25:37.874913Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 has WriteId {0, 3} 2025-04-04T17:25:37.874948Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-04T17:25:37.875013Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 2, WriteId {0, 3} 2025-04-04T17:25:37.875052Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Link TxId 2 with WriteId {0, 3} 2025-04-04T17:25:37.875094Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:25:37.875129Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State UNKNOWN 2025-04-04T17:25:37.875166Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:25:37.875201Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARING 2025-04-04T17:25:37.875368Z node 5 :PERSQUEUE DEBUG: [TxId: 2] save tx TxId: 2 State: PREPARED MinStep: 231 MaxStep: 30231 Operations { PartitionId: 0 Path: "/topic" SupportivePartition: 100000 } Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 21474838671 } WriteId { NodeId: 0 KeyId: 3 } Partitions { } 2025-04-04T17:25:37.875462Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:37.878695Z node 5 :PERSQUEUE 
DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:25:37.878752Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:25:37.878785Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, State PREPARING 2025-04-04T17:25:37.878818Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2, NewState PREPARED 2025-04-04T17:25:37.879219Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:220:2223], now have 1 active actors on pipe 2025-04-04T17:25:37.879333Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:25:37.879375Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-04-04T17:25:37.879416Z node 5 :PERSQUEUE WARN: tablet 72057594037927937 topic 'topic error: it is forbidden to write after a commit 2025-04-04T17:25:37.879485Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error it is forbidden to write after a commit 2025-04-04T17:25:37.879518Z node 5 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId: error: it is forbidden to write after a commit >> ReadOnlyVDisk::TestDiscover >> KqpNewEngine::ComplexLookupLimit [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceLimit >> KqpNewEngine::FullScanCount [GOOD] >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> KqpScripting::ScriptExplain [GOOD] >> ReadOnlyVDisk::TestGetWithMustRestoreFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: Trying to start YDB, gRPC: 17139, MsgBus: 14571 2025-04-04T17:24:52.388819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510253134390249:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:52.389945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001476/r3tmp/tmpBjk1QI/pdisk_1.dat 2025-04-04T17:24:52.780511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:52.787267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:52.787370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:52.792409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17139, node 1 2025-04-04T17:24:52.914747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:52.914770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:52.914776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T17:24:52.914877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14571 TClient is connected to server localhost:14571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:53.417287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:53.435004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.564016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.715632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.781101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:55.367371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510266019293920:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.367508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.632452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.703052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.732459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.801109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.837506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.876840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.917210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510266019294438:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.917288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.917365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510266019294443:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:55.921013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:55.930639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510266019294445:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:56.026174Z node 1 :TX_PROXY ERROR: Actor# [1:7489510270314261794:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12968, MsgBus: 32255 2025-04-04T17:24:58.072673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510276223652027:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:58.072770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001476/r3tmp/tmp2R3BCK/pdisk_1.dat 2025-04-04T17:24:58.259026Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:58.278423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:58.278508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:58.282917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12968, node 2 2025-04-04T17:24:58.382718Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:58.382744Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:58.382753Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:58.382862Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32255 TClient is connected to server localhost:32255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:58.855815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.865314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
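[Editor's note] The KQP_WORKLOAD_SERVICE block above is a recurring startup pattern in this run, not a test failure: on the first query the workload manager looks up the `default` resource pool under `/Root/.metadata/workload_manager/pools/default`, gets NOT_FOUND, creates it via TPoolCreatorActor (with one "doublechecking" retry), and a racing creation attempt then reports "path exist, request accepts it" with severity 1, which is benign. For comparison, a pool can also be created explicitly; a minimal YQL sketch, assuming YDB's CREATE RESOURCE POOL statement (the pool name and option values below are made up, not taken from this log):

    -- Hypothetical example; CONCURRENT_QUERY_LIMIT/QUEUE_SIZE are assumed option names.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to run at once
        QUEUE_SIZE = 100              -- queries allowed to wait for a free slot
    );
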
2025-04-04T17:24:58.924694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:59.085609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:59.145025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:01.432072Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510289108555671:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:01.432154Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { ... de 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.925198Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.969214Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.006322Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.045731Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.107712Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510408173755110:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.107809Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.108018Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510408173755115:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.112056Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:28.122937Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510408173755117:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:28.218527Z node 6 :TX_PROXY ERROR: Actor# [6:7489510408173755173:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:28.705066Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510386698916358:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:28.705163Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:29.717631Z node 6 :TX_DATASHARD ERROR: Complete [1743787529764 : 281474976710673] from 72075186224037888 at tablet 72075186224037888, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:25:29.729711Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MWQ3OGQ5ZjgtZGQ4OGVlMjAtZTc5MGI4NDAtNWFkOTk4NDk=, ActorId: [6:7489510412468722731:2489], ActorState: ExecuteState, TraceId: 01jr0xc5ff4js9c4gd2dfvzffa, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 12377, MsgBus: 27535 2025-04-04T17:25:30.913056Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510414028093692:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:30.913127Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001476/r3tmp/tmpNXtzdY/pdisk_1.dat 2025-04-04T17:25:31.037825Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:31.077925Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:31.078066Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:31.080138Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12377, node 7 2025-04-04T17:25:31.174850Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:31.174881Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:31.174892Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:31.175040Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27535 TClient is connected to server localhost:27535 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:31.932252Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:31.951345Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:32.041900Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:32.285814Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:32.387118Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.757808Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510435502931946:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.758026Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.784291Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.861648Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.902083Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.913490Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510414028093692:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:35.913570Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:35.946811Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.993931Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.040169Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.164852Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510439797899765:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.164950Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.165125Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510439797899770:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.170815Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:36.194181Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510439797899772:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:36.271360Z node 7 :TX_PROXY ERROR: Actor# [7:7489510439797899827:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:37.748687Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpScanArrowFormat::AggregateWithFunction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: Trying to start YDB, gRPC: 16807, MsgBus: 27857 2025-04-04T17:24:48.099327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510235088318652:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:48.099367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001491/r3tmp/tmp2omtg8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16807, node 1 2025-04-04T17:24:48.442719Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:24:48.442778Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:24:48.443705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:48.503103Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:48.503143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:48.503153Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:48.503349Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:48.505575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:48.505818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:48.507558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27857 TClient is connected to server localhost:27857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:49.051128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.066079Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:24:49.086795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.253477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.414563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:49.492813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:51.391020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510247973222327:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.391146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.691328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.724194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.755875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.793793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.832508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.903846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:51.959777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510247973222842:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.959844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.960018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510247973222847:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:51.963600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:51.974503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510247973222849:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:52.056112Z node 1 :TX_PROXY ERROR: Actor# [1:7489510252268190197:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:53.100475Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510235088318652:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:53.100557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20175, MsgBus: 16881 2025-04-04T17:24:54.101147Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510260856826306:2274];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:54.101475Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001491/r3tmp/tmpfOMAlb/pdisk_1.dat 2025-04-04T17:24:54.180965Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:54.209415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:54.209520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20175, node 2 2025-04-04T17:24:54.211678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:54.262966Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:54.262990Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:54.262997Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:54.263101Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16881 TClient is connected to server localhost:16881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:24:54.662537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.670915Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:24:54.678756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.730208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:54.874807Z node 2 :FLAT_TX_SCHE ... part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.172047Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:27.264808Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510400751838419:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.264920Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.264947Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510400751838424:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:27.269978Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:27.282878Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510400751838426:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:27.341669Z node 6 :TX_PROXY ERROR: Actor# [6:7489510400751838481:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:27.602687Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510379276999665:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:27.602787Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:29.016050Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=OGE2NDY3MDItYTk4Y2YwMzctNmNkMDczZGUtZmZhMGJiMzQ=, ActorId: [6:7489510405046806038:2489], ActorState: ExecuteState, TraceId: 01jr0xc4pv3j2t5hg16s0n1j4j, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`, code: 2001 Trying to start YDB, gRPC: 30348, MsgBus: 10779 2025-04-04T17:25:30.362534Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510414872409229:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:30.362582Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001491/r3tmp/tmpP7VO1I/pdisk_1.dat 2025-04-04T17:25:30.547166Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:30.576009Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:30.576136Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:30.579750Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30348, node 7 2025-04-04T17:25:30.628953Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:30.628992Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:30.629008Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:30.629217Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10779 TClient is connected to server localhost:10779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:31.271313Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:31.281865Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:31.294158Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:31.384485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
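[Editor's note] The "Transaction locks invalidated. Table: `/Root/TwoShard`, code: 2001" error above is YDB's optimistic-concurrency abort: a concurrent commit invalidated the read set of an open transaction, and the aborted transaction is expected to be retried by the client. A minimal YQL sketch of an interleaving that can produce it (column names and values are illustrative, not taken from this log):

    -- Session A, interactive transaction: read a key it will later update.
    SELECT Key, Value2 FROM TwoShard WHERE Key = 1u;
    -- Session B commits a write to the same key between A's read and A's commit:
    --   UPSERT INTO TwoShard (Key, Value2) VALUES (1u, 42); COMMIT;
    -- Session A's commit now fails with code 2001 and should be retried:
    UPSERT INTO TwoShard (Key, Value2) VALUES (1u, 7); COMMIT;
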
2025-04-04T17:25:31.599120Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:31.697775Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.140689Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510436347247493:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.140808Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.205878Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.256638Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.308882Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.365622Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510414872409229:2058];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:35.366515Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:35.371814Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.433686Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.509952Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.569516Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510436347248008:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.569613Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.569796Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510436347248013:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.574301Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:35.588634Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510436347248015:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:35.665921Z node 7 :TX_PROXY ERROR: Actor# [7:7489510436347248070:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] >> KqpScanArrowFormat::AggregateCountStar [GOOD] Test command err: Trying to start YDB, gRPC: 21839, MsgBus: 13778 2025-04-04T17:25:26.655733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510395518245126:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:26.662608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00196f/r3tmp/tmpKLfBXy/pdisk_1.dat 2025-04-04T17:25:27.109313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:27.109486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:27.118879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:27.122230Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21839, node 1 2025-04-04T17:25:27.202805Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:27.202838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:27.202847Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:27.202989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13778 TClient is connected to server localhost:13778 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:27.749251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:27.780726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
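[Editor's note] The repeated warning above (code 1108) is about SQL three-valued logic: when the left operand of `IN` is nullable, `x IN (...)` can evaluate to NULL rather than false, so rows may be filtered in surprising ways; the suggested pragma opts into the ANSI behavior. A minimal sketch of the fix the warning proposes (the table and column are hypothetical; only the pragma name comes from this log):

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    SELECT * FROM SomeTable           -- hypothetical table
    WHERE NullableKey IN (1, 2, 3);   -- NullableKey is an Optional (nullable) column
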
2025-04-04T17:25:27.920944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:28.096976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:28.173436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.755766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510408403148645:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.755901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.025552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.051526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.078741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.108651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.143960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.213642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.304635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510412698116461:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.304728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.304779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510412698116466:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.308484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:30.318598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510412698116468:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:30.404016Z node 1 :TX_PROXY ERROR: Actor# [1:7489510412698116523:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:31.323863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.654394Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510395518245126:2195];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:31.694945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23487, MsgBus: 14764 2025-04-04T17:25:34.130357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510433789400497:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:34.130472Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00196f/r3tmp/tmpEuRP4A/pdisk_1.dat 2025-04-04T17:25:34.222003Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:34.243037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:34.243124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:34.244377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23487, node 2 2025-04-04T17:25:34.322930Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:34.322958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:34.322966Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:34.323096Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14764 TClient is connected to server localhost:14764 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:34.776100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:34.782035Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:34.788949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:34.837985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:34.991645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.064871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.486166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510446674304128:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:37.486234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:37.526545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.567138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.637538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.680827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.718581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.768068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.837935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510446674304642:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:37.838038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:37.838422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510446674304647:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:37.843025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:37.855133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510446674304649:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:37.911398Z node 2 :TX_PROXY ERROR: Actor# [2:7489510446674304702:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:39.130872Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510433789400497:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:39.130964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:39.492949Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489510455264239638:2495], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:25:39.493208Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjQ0NGZhOTctNWU3NmIxYmQtNGJjZjllZDMtNGUwOTcxYTc=, ActorId: [2:7489510455264239636:2494], ActorState: ExecuteState, TraceId: 01jr0xcf0kb3s5zycbap9e8g7c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpScanArrowFormat::AggregateByColumn >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> KqpYql::AnsiIn [GOOD] >> KqpSort::Offset [GOOD] >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPQRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 4460, MsgBus: 15572 2025-04-04T17:25:14.030491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510348050683737:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:14.030540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e82/r3tmp/tmpvCaK9B/pdisk_1.dat 2025-04-04T17:25:14.485676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:14.485818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:14.487551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:14.506512Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4460, node 1 2025-04-04T17:25:14.590446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:14.590469Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:14.590476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:14.590610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15572 TClient is connected to server localhost:15572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
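The SCHEME_ERROR chain a few entries above is a single compile-time failure read top-down: type annotation (code 1030) descends DataQueryBlocks -> TKiDataQueryBlock -> KiEffects -> KiWriteTable! and bottoms out at code 2003, a missing table; the ":1:168"-style positions are column offsets into the one-line program text. Because the query never passes compilation, the session only logs ReplyQueryCompileError and removes the tx. A hedged reproduction sketch — only the table path comes from the log, the column names are assumptions:

    // KiWriteTable! in the chain means the failing effect was a table
    // write; any DML against a table that was never created fails at
    // type-annotation time, before a transaction starts.
    const char* kQuery =
        "UPSERT INTO ScriptingTest (Key, Value) VALUES (1u, \"one\");";
    // Compiled against a database with no /Root/ScriptingTest, this
    // yields: "Cannot find table 'db.[/Root/ScriptingTest]' because it
    // does not exist or you do not have access permissions." (code 2003)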
2025-04-04T17:25:15.240637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.270034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.457818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.639969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:15.714797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.354797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510360935587391:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.354953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.572997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.601042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.628722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.656671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.686125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.715454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.766345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510360935587899:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.766472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.766766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510360935587904:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.771015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:17.780899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510360935587906:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:17.853242Z node 1 :TX_PROXY ERROR: Actor# [1:7489510360935587959:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:18.918785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:19.034359Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510348050683737:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:19.043431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:19.597108Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787519614, txId: 281474976710675] shutting down 864000000000 Trying to start YDB, gRPC: 65008, MsgBus: 29889 2025-04-04T17:25:20.309805Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510370947966730:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:20.309860Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e82/r3tmp/tmp3jZMbA/pdisk_1.dat 2025-04-04T17:25:20.420551Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65008, node 2 2025-04-04T17:25:20.451114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:20.451205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:20.454394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:20.484097Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:20.484122Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:20.484130Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:20.484245Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29889 TClient is connected to server localhost:29889 WaitRootIsUp 'Root'... 
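The sequence above is the lazy creation of the default workload-manager pool, and the ERROR severity on the TX_PROXY line is misleading: concurrent first queries all get NOT_FOUND from TPoolFetcherActor, each proposes ESchemeOpCreateResourcePool, one wins, and the losers see "path exist, request accepts it" — that is, the create is treated as already satisfied, so the warnings and the TX_PROXY error are benign in this run. A sketch of that idempotent-create shape, with CreatePool and EStatus as assumed stand-ins for the scheme operation:

    enum class EStatus { Success, AlreadyExists, Error };

    // Assumed stand-in for the ESchemeOpCreateResourcePool round trip.
    static EStatus CreatePool(const char* /*path*/) {
        return EStatus::AlreadyExists;  // stub: the "path exist" outcome
    }

    bool EnsureDefaultPool() {
        switch (CreatePool("/Root/.metadata/workload_manager/pools/default")) {
            case EStatus::Success:
            case EStatus::AlreadyExists:   // lost the create race: still fine
                return true;
            case EStatus::Error:
                return false;              // -> "Scheduled retry" in the log
        }
        return false;
    }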
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:20.931379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:20.944962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:21.022845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:21.210079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation t ... 04T17:25:29.680416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.709848Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.742545Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.792754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.823619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.897540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.944850Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510409892039530:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.944936Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.945137Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510409892039535:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.949821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:29.965119Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489510409892039537:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:30.021438Z node 3 :TX_PROXY ERROR: Actor# [3:7489510414187006886:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:31.338348Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489510397007135377:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:31.338434Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:33.314873Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787531794, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 2060, MsgBus: 6776 2025-04-04T17:25:34.070096Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510433559084994:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:34.070188Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e82/r3tmp/tmpokHrQL/pdisk_1.dat 2025-04-04T17:25:34.202709Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:34.216388Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:34.216463Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:34.218072Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2060, node 4 2025-04-04T17:25:34.312134Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:34.312165Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:34.312174Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:34.312316Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6776 TClient is connected to server localhost:6776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:34.900413Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:34.908290Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:34.922504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.006394Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.189025Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.277761Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.919905Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510446443988652:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:37.920012Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:37.972404Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.015057Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.055174Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.094140Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.132757Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.177351Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.237962Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510450738956462:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:38.238056Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:38.238197Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510450738956467:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:38.242796Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:38.255071Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489510450738956469:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:38.317523Z node 4 :TX_PROXY ERROR: Actor# [4:7489510450738956522:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:39.070232Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489510433559084994:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:39.070332Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:40.442261Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787539984, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 13580498445377485981 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-04-04T17:25:35.108305Z 1 00h01m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-04-04T17:25:35.112975Z 1 00h01m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-04T17:25:35.943875Z 1 00h03m20.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:35.944519Z 2 00h03m20.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-04-04T17:25:36.376437Z 1 00h04m20.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:36.376617Z 2 00h04m20.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 
{[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-04T17:25:36.700878Z 1 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:36.701969Z 2 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:36.703053Z 3 00h05m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:36.703361Z 1 00h05m00.310512s :BS_PROXY_PUT ERROR: [b9da72651a488a3c] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-04-04T17:25:37.114784Z 1 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:37.114968Z 2 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:37.115022Z 3 00h06m00.311024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-04-04T17:25:37.788753Z 1 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:37.788932Z 2 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:37.788986Z 3 00h07m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:37.789030Z 4 00h07m40.410512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5323:719] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-04-04T17:25:38.059500Z 1 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:38.059685Z 2 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:38.059810Z 3 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:38.059856Z 4 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5323:719] 2025-04-04T17:25:38.059906Z 5 00h08m20.460512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5330:726] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-04-04T17:25:38.241406Z 1 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:38.241533Z 2 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:38.241571Z 3 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:38.241610Z 4 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5323:719] 2025-04-04T17:25:38.241639Z 5 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5330:726] 2025-04-04T17:25:38.241668Z 6 00h09m00.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5337:733] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-04-04T17:25:38.400416Z 1 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5302:698] 2025-04-04T17:25:38.400602Z 2 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:38.400657Z 3 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:38.400703Z 4 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5323:719] 2025-04-04T17:25:38.400750Z 5 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5330:726] 2025-04-04T17:25:38.400796Z 6 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5337:733] 2025-04-04T17:25:38.400841Z 7 00h09m40.560512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5344:740] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-04-04T17:25:38.616990Z 2 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5309:705] 2025-04-04T17:25:38.617074Z 3 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:38.617128Z 4 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5323:719] 2025-04-04T17:25:38.617198Z 5 
00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5330:726] 2025-04-04T17:25:38.617252Z 6 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5337:733] 2025-04-04T17:25:38.617297Z 7 00h10m20.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5344:740] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-04-04T17:25:38.830140Z 3 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5316:712] 2025-04-04T17:25:38.830227Z 4 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5323:719] 2025-04-04T17:25:38.830303Z 5 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5330:726] 2025-04-04T17:25:38.830349Z 6 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5337:733] 2025-04-04T17:25:38.830395Z 7 00h11m00.660512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5344:740] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-04-04T17:25:39.076273Z 4 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5323:719] 2025-04-04T17:25:39.076354Z 5 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5330:726] 2025-04-04T17:25:39.076404Z 6 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5337:733] 2025-04-04T17:25:39.076448Z 7 00h11m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5344:740] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-04-04T17:25:39.403614Z 5 00h12m20.760512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5330:726] 2025-04-04T17:25:39.403728Z 6 00h12m20.760512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5337:733] 2025-04-04T17:25:39.403779Z 7 00h12m20.760512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5344:740] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-04-04T17:25:40.238811Z 6 00h14m00.811536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5337:733] 2025-04-04T17:25:40.238893Z 7 00h14m00.811536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5344:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-04-04T17:25:40.636141Z 7 00h14m40.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5344:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key 
[1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} >> KqpNewEngine::JoinDictWithPure [GOOD] >> KqpNewEngine::IdxLookupExtractMembers >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 12222, MsgBus: 20086 2025-04-04T17:25:31.260168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510417689994892:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:31.260233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00196d/r3tmp/tmpukKARm/pdisk_1.dat 2025-04-04T17:25:31.673761Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:31.678647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:31.678781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:31.681391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12222, node 1 2025-04-04T17:25:31.770777Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:31.770801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:31.770807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:31.770901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20086 TClient is connected to server localhost:20086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:32.392846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
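The ReadOnlyVDisk::TestGarbageCollect trace above also documents the write-failure budget of a block-4-2 group: puts keep returning OK with one and two vdisks in read-only, and the put fails with EBS_DISINTEGRATED exactly when the third disk (position 2) goes read-only, because a blob needs 6 part placements (4 data + 2 parity, visible as the 6-slot "Situations" strings) among the group's 8 disks. Once disks return to normal, the final TEvPut succeeds again. A sketch of that arithmetic — the constants are the standard block-4-2 layout, the helper itself is illustrative:

    #include <bitset>

    constexpr int kDisks = 8;         // vdisks in a block-4-2 group
    constexpr int kPartsPerBlob = 6;  // 4 data + 2 parity parts

    bool CanWrite(const std::bitset<kDisks>& readOnly) {
        const int writable = kDisks - static_cast<int>(readOnly.count());
        return writable >= kPartsPerBlob;  // need 6 distinct placements
    }
    // CanWrite with 2 disks read-only -> true (the OK puts above);
    // with 3 read-only -> false (the EBS_DISINTEGRATED put).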
2025-04-04T17:25:32.413690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:32.428893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:32.579586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:32.735127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:32.805929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:34.535381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510430574898549:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:34.535487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:34.808927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:34.840902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:34.910898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:34.940417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.011666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.083376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.132559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510434869866369:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.132628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.132679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510434869866374:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.135535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:35.145831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510434869866376:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:35.216993Z node 1 :TX_PROXY ERROR: Actor# [1:7489510434869866429:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:36.260288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510417689994892:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:36.260374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:36.272834Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=MzJkYWY3ZDItMjZhNmZmZTItMzY1YjNjMjQtMWIwZWU1ODQ=, ActorId: [1:7489510439164833983:2488], ActorState: ExecuteState, TraceId: 01jr0xcbsg5e5c3r36sph42zj6, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2025-04-04T17:25:36.272908Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJkYWY3ZDItMjZhNmZmZTItMzY1YjNjMjQtMWIwZWU1ODQ=, ActorId: [1:7489510439164833983:2488], ActorState: ExecuteState, TraceId: 01jr0xcbsg5e5c3r36sph42zj6, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer Trying to start YDB, gRPC: 28584, MsgBus: 19024 2025-04-04T17:25:37.073835Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510445319044322:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:37.073894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00196d/r3tmp/tmpHgmJzy/pdisk_1.dat 2025-04-04T17:25:37.226125Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:37.253282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:37.253368Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:37.254678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28584, node 2 2025-04-04T17:25:37.303869Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:37.303894Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:37.303901Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:37.304005Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19024 TClient is connected to server localhost:19024 WaitRootIsUp 'Root'... 
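The internal error in the run above, "yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer", is a reader-side precondition: serialized BinaryJson addresses string payloads by (offset, length) into the blob, and a corrupted blob trips the bounds check, which the session then surfaces as a QueryResponse error rather than crashing the node. A sketch of such a check — names and types are illustrative, not the actual reader:

    #include <cstdint>
    #include <stdexcept>
    #include <string_view>

    std::string_view ReadString(std::string_view buffer,
                                uint32_t offset, uint32_t length) {
        // Overflow-safe: reject offsets/lengths that escape the blob
        // before dereferencing, mirroring the logged precondition.
        if (offset > buffer.size() || length > buffer.size() - offset) {
            throw std::runtime_error("StringOffset must be inside buffer");
        }
        return buffer.substr(offset, length);
    }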
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:37.749106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.758707Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:37.768051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:37.827585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.977982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:38.050329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.199359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510458203947962:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.199427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.238888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.268254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.298659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.367225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.401950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.451207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.503383Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510458203948474:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.503461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.503503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510458203948479:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.508374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:40.518114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510458203948481:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:40.593757Z node 2 :TX_PROXY ERROR: Actor# [2:7489510458203948534:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSort::TopParameter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::Offset [GOOD] Test command err: Trying to start YDB, gRPC: 1155, MsgBus: 10027 2025-04-04T17:24:50.808558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510244087502984:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:50.810575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001482/r3tmp/tmpq1nn6P/pdisk_1.dat 2025-04-04T17:24:51.183084Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1155, node 1 2025-04-04T17:24:51.229145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:51.229313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:51.231504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:51.283025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:51.283047Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:51.283064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:51.283234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10027 TClient is connected to server localhost:10027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:51.802747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:24:51.825937Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:24:51.837324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:51.983440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:52.155914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:24:52.236436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.985583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510256972406632:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:53.985722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.226008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.253325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.276467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.308008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.377696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.448902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.536412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510261267374449:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.536467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.536677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510261267374454:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:54.541527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:24:54.552840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510261267374456:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:54.647929Z node 1 :TX_PROXY ERROR: Actor# [1:7489510261267374513:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:55.608315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.647325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.726407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:24:55.808665Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510244087502984:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:55.808723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16011, MsgBus: 6401 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001482/r3tmp/tmp74UUH1/pdisk_1.dat 2025-04-04T17:24:59.376559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:24:59.429562Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:59.461844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:59.461930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:59.463653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16011, node 2 2025-04-04T17:24:59.503831Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:59.503855Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:59.503861Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:59.503975Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6401 TClient is connected to server localhost:6401 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:59.995640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.004729Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:00.026043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2 ... WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510414249742438:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.151591Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.199490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.235303Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.271971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.306016Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.345443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.395997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.483750Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510414249742953:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.483837Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.484189Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510414249742958:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:30.488176Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:30.500683Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510414249742960:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:30.603183Z node 5 :TX_PROXY ERROR: Actor# [5:7489510414249743015:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:31.140835Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510397069871541:2108];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:31.140890Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10246, MsgBus: 13546 2025-04-04T17:25:34.286471Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510430265146052:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:34.289554Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001482/r3tmp/tmpR4QKOL/pdisk_1.dat 2025-04-04T17:25:34.437887Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:34.479274Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:34.479432Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:34.481582Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10246, node 6 2025-04-04T17:25:34.546962Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:34.546990Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:34.546999Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:34.547127Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13546 TClient is connected to server localhost:13546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:35.223796Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.241086Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.324991Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.609460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.726693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:38.782063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510447445016956:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:38.782166Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:38.842116Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.877075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.911335Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.946832Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:38.983950Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.023517Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.077026Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510451739984767:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.077118Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.077281Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510451739984772:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.081152Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:39.092468Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510451739984774:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:39.169400Z node 6 :TX_PROXY ERROR: Actor# [6:7489510451739984827:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:39.286770Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510430265146052:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:39.286852Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect >> KqpRanges::MergeRanges [GOOD] >> KqpRanges::Like >> KqpMergeCn::SortBy_Int32 [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium >> ReadOnlyVDisk::TestDiscover [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: Trying to start YDB, gRPC: 8891, MsgBus: 30835 2025-04-04T17:25:10.811910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510327514947124:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:10.812003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001381/r3tmp/tmpwNu2Rk/pdisk_1.dat 2025-04-04T17:25:11.265219Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:11.284914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:11.285002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:11.287223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8891, node 1 2025-04-04T17:25:11.415027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:11.415078Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:11.415095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:11.415292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30835 TClient is connected to server localhost:30835 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:12.017989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.054824Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:14.128357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510344694816974:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.128499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.339711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.500486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510344694817077:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.500583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.500852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510344694817082:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.504634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:25:14.522439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510344694817084:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:25:14.621870Z node 1 :TX_PROXY ERROR: Actor# [1:7489510344694817136:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:14.791065Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510344694817174:2356], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2025-04-04T17:25:14.791833Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI3ZjA0OWMtM2JlYzlkYTItYjY3ODQ4ZTAtMzVlOWE0YmI=, ActorId: [1:7489510344694816947:2328], ActorState: ExecuteState, TraceId: 01jr0xbpwb3f9pf3yrz1384hnd, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-04-04T17:25:14.812684Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510344694817184:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:25:14.813960Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI3ZjA0OWMtM2JlYzlkYTItYjY3ODQ4ZTAtMzVlOWE0YmI=, ActorId: [1:7489510344694816947:2328], ActorState: ExecuteState, TraceId: 01jr0xbpxg07fn9ww883s9cb2t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 8131, MsgBus: 29535 2025-04-04T17:25:15.691497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510349857726320:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:15.691573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001381/r3tmp/tmpGGMMgJ/pdisk_1.dat 2025-04-04T17:25:15.827213Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:15.841749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:15.841831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:15.844609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8131, node 2 2025-04-04T17:25:15.969652Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:15.969677Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:15.969684Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:15.969839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29535 TClient is connected to server localhost:29535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:16.415122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:16.420705Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:18.752422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510362742628868:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:18.752491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:18.779876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:18.844263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510362742628970:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:18.844366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:18.844665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510362742628975:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permission ... 4037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8309, node 6 2025-04-04T17:25:35.582958Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:35.582984Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:35.582998Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:35.583159Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23995 TClient is connected to server localhost:23995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:36.233923Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:36.243620Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:39.367709Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510454428603228:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.367801Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.380903Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.472935Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510454428603380:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.473048Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.473268Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510454428603385:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.477847Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:25:39.491957Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510454428603387:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:25:39.551306Z node 6 :TX_PROXY ERROR: Actor# [6:7489510454428603438:2431] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:40.338480Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510437248733396:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:40.338590Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:40.708214Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7489510458723570894:2382], TxId: 281474976715664, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=. TraceId : 01jr0xcfrddwwsax5d8zytz9x8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2025-04-04T17:25:40.708772Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7489510458723570895:2383], TxId: 281474976715664, task: 2. Ctx: { SessionId : ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=. CustomerSuppliedId : . TraceId : 01jr0xcfrddwwsax5d8zytz9x8. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7489510458723570890:2329], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-04T17:25:40.709018Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7489510458723570896:2384], TxId: 281474976715664, task: 3. Ctx: { SessionId : ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=. TraceId : 01jr0xcfrddwwsax5d8zytz9x8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7489510458723570890:2329], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-04T17:25:40.709133Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7489510458723570897:2385], TxId: 281474976715664, task: 4. Ctx: { TraceId : 01jr0xcfrddwwsax5d8zytz9x8. SessionId : ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7489510458723570890:2329], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-04-04T17:25:40.711355Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=, ActorId: [6:7489510454428603225:2329], ActorState: ExecuteState, TraceId: 01jr0xcfrddwwsax5d8zytz9x8, Create QueryResponse for error on request, msg: 2025-04-04T17:25:40.732657Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-04-04T17:25:40.754991Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489510458723570922:2389], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-04T17:25:40.757063Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=, ActorId: [6:7489510454428603225:2329], ActorState: ExecuteState, TraceId: 01jr0xcg833dz7wnqbhft5fh4h, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:25:40.787155Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489510458723570941:2397], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-04T17:25:40.789158Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=, ActorId: [6:7489510454428603225:2329], ActorState: ExecuteState, TraceId: 01jr0xcg93b47h6fyqy3t972e7, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:25:40.814431Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489510458723570961:2405], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-04-04T17:25:40.816396Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=, ActorId: [6:7489510454428603225:2329], ActorState: ExecuteState, TraceId: 01jr0xcg9yanzytct4128ahmmx, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:25:41.220723Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:25:41.224334Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489510458723570981:2413], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-04T17:25:41.224576Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=, ActorId: [6:7489510454428603225:2329], ActorState: ExecuteState, TraceId: 01jr0xcgazdrmy68vwyn3ss4y3, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:25:41.863554Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:25:41.871186Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489510463018538304:2422], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-04T17:25:41.872865Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=, ActorId: [6:7489510454428603225:2329], ActorState: ExecuteState, TraceId: 01jr0xcgqp574reyk7e1a8xfhe, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:25:42.537335Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-04-04T17:25:42.544286Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7489510463018538338:2434], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-04-04T17:25:42.549124Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDAwNGRkMmYtZDc0NzY0NmYtNmMxZjFhMjAtZjkzYzMyZDM=, ActorId: [6:7489510454428603225:2329], ActorState: ExecuteState, TraceId: 01jr0xchbvcnyc79r1vzzpzqrz, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 3386794738166539216 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-04T17:25:40.884293Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5300:697] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-04T17:25:41.150229Z 1 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5300:697] 2025-04-04T17:25:41.151333Z 2 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5307:704] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-04-04T17:25:41.396421Z 3 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5314:711] 2025-04-04T17:25:41.397312Z 1 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5300:697] 2025-04-04T17:25:41.397959Z 2 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5307:704] 2025-04-04T17:25:41.398289Z 1 00h02m30.160512s :BS_PROXY_PUT ERROR: [ce43e2cee29d1960] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# 
[82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key 
[1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/cost/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 4824597078609154573 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-04-04T17:25:42.483793Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-04-04T17:25:42.488984Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-04T17:25:42.494480Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-04T17:25:42.497576Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-04-04T17:25:42.506789Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-04-04T17:25:42.509790Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-04-04T17:25:42.513069Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-04-04T17:25:42.524990Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in 
read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-04-04T17:25:44.350209Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.350341Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] 2025-04-04T17:25:44.350460Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] 2025-04-04T17:25:44.351065Z 1 00h05m30.211024s :BS_PROXY_PUT ERROR: [0e82909982926c61] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 
Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-04-04T17:25:44.352313Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.353630Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] 2025-04-04T17:25:44.354528Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-04-04T17:25:44.356707Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.357584Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] 2025-04-04T17:25:44.358732Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { 
OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-04-04T17:25:44.360356Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] 2025-04-04T17:25:44.361841Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.362618Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-04-04T17:25:44.363625Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] 2025-04-04T17:25:44.363687Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] 2025-04-04T17:25:44.364301Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:15:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2025-04-04T17:25:44.365546Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] 2025-04-04T17:25:44.365607Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] 2025-04-04T17:25:44.366327Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 
2181038080 BlobId# [1:1:16:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2025-04-04T17:25:44.367481Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.367637Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] 2025-04-04T17:25:44.367673Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2025-04-04T17:25:44.369064Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.369195Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] 2025-04-04T17:25:44.369267Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2025-04-04T17:25:44.370891Z 1 
00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.371048Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] 2025-04-04T17:25:44.371101Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2025-04-04T17:25:44.372612Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5299:697] 2025-04-04T17:25:44.372745Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5313:711] 2025-04-04T17:25:44.372886Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5306:704] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2025-04-04T17:25:44.380300Z 1 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5300:698] 2025-04-04T17:25:44.380580Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5307:705] 2025-04-04T17:25:44.380653Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5314:712] 2025-04-04T17:25:44.381131Z 1 00h05m30.211024s :BS_PROXY_GET ERROR: [550421360e1b4045] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only 
mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2025-04-04T17:25:44.381249Z 2 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5307:705] 2025-04-04T17:25:44.381294Z 3 00h05m30.211024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5314:712] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameter [GOOD] Test command err: Trying to start YDB, gRPC: 32479, MsgBus: 12885 2025-04-04T17:25:04.580850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510301645093609:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:04.580895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013a2/r3tmp/tmpjNzfcS/pdisk_1.dat 2025-04-04T17:25:05.119485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:05.122004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:05.122151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:05.124287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32479, node 1 2025-04-04T17:25:05.201712Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:05.201746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:05.201761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:05.201893Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12885 TClient is connected to server localhost:12885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:05.750858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.776701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.937456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:06.114623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:25:06.197740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.938407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510314529997260:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.938593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.262982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.329344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.357319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.385166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.413699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.483443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:08.529110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510318824965075:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.529198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.529430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510318824965080:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:08.532683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:08.541133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510318824965082:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:08.607169Z node 1 :TX_PROXY ERROR: Actor# [1:7489510318824965137:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:09.583738Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510301645093609:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:09.583802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16264, MsgBus: 1199 2025-04-04T17:25:10.653907Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510329121367256:2244];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:10.654741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013a2/r3tmp/tmpoXdxk7/pdisk_1.dat 2025-04-04T17:25:10.787521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:10.803720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:10.803812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:10.807272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16264, node 2 2025-04-04T17:25:10.871614Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:10.871639Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:10.871647Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:10.871749Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1199 TClient is connected to server localhost:1199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
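The verdict and status markers in the output above follow a fixed grammar: completed tests appear as "... >> Suite::Test [GOOD]" and every storage write is echoed as "TEvPutResult: TEvPutResult {Id# [...] Status# OK|ERROR ...}". A minimal extraction sketch (Python; the two regexes are read off this log, the function name and summary layout are illustrative):

import re
import sys
from collections import Counter

# Patterns taken from the log format above; adjust if your ya output differs.
VERDICT_RE = re.compile(r">> (\S+::\S+) \[(\w+)\]")
PUT_STATUS_RE = re.compile(r"TEvPutResult \{Id# (\[[^\]]+\]) Status# (\w+)")

def summarize(log_text: str) -> None:
    # List each test with its verdict, e.g. "GOOD  ReadOnlyVDisk::TestGetWithMustRestoreFirst".
    for test, verdict in VERDICT_RE.findall(log_text):
        print(f"{verdict:8} {test}")
    # Tally write outcomes, e.g. {'OK': 40, 'ERROR': 12}.
    statuses = Counter(s for _, s in PUT_STATUS_RE.findall(log_text))
    print("TEvPutResult statuses:", dict(statuses))

if __name__ == "__main__":
    summarize(sys.stdin.read())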
2025-04-04T17:25:11.287930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.316450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.405341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:11.557487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.631599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:13.806260Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [ ... [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510426904985154:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:33.250205Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:33.305576Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:33.349274Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:33.423064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:33.475386Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:33.518153Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:33.589241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:33.654881Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510426904985676:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:33.655018Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:33.655055Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510426904985681:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:33.659947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:33.671905Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510426904985683:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:33.728352Z node 5 :TX_PROXY ERROR: Actor# [5:7489510426904985736:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:34.414679Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510409725114208:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:34.414783Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7843, MsgBus: 13819 2025-04-04T17:25:36.460124Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510439024934367:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:36.460189Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013a2/r3tmp/tmpLhRSnt/pdisk_1.dat 2025-04-04T17:25:36.630890Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:36.664180Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:36.664296Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:36.666743Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7843, node 6 2025-04-04T17:25:36.729209Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:36.729234Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:36.729244Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:36.729387Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13819 TClient is connected to server localhost:13819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
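Each blobstorage ERROR above also carries a "Part situations#" vector: one six-character string per disk order number, one character per erasure part. On one reading of the letters (an assumption, not stated in the log: 'E' = the disk answered with an error, 'P'/'S' = part present/sent, 'U' = unknown, 'A' = acknowledged), the group is declared EBS_DISINTEGRATED once more disks report errors than a 4+2 erasure scheme can lose, i.e. more than two. A self-contained sketch of that check, under those assumptions:

# Assumed letter semantics (see note above): only 'E' counts as a failed disk.
FAILED_MARKS = {"E"}

def failed_disks(situations: dict[int, str]) -> list[int]:
    # A disk is treated as failed if its situation string reports an error
    # for any part.
    return [n for n, s in situations.items() if FAILED_MARKS & set(s)]

def disintegrated(situations: dict[int, str], max_failures: int = 2) -> bool:
    # Assumed block-4-2 erasure: the group survives at most `max_failures`
    # failed disks.
    return len(failed_disks(situations)) > max_failures

# The vector reported for blob [1:1:11:0:0:32768:0] above: disks 0, 1 and 2
# (the three read-only VDisks) show 'E', which exceeds the tolerated two.
example = {
    5: "SUUUUU", 6: "USUUUU", 7: "UUSUUU",
    0: "UUUEUU", 1: "UUUUEU", 2: "UUUUUE",
    3: "UUUSUU", 4: "UUUUUS",
}
assert disintegrated(example)  # matches the EBS_DISINTEGRATED verdict in the log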
2025-04-04T17:25:37.337924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.353334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.441734Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.649422Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.740085Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:40.574587Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510456204805330:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.574725Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.614345Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.690641Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.759232Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.799862Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.850664Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.907965Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.969964Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510456204805850:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.970084Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.970468Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510456204805855:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.975948Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:40.989014Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510456204805857:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:41.092212Z node 6 :TX_PROXY ERROR: Actor# [6:7489510460499773208:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:41.460274Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510439024934367:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:41.460346Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: Trying to start YDB, gRPC: 6630, MsgBus: 26116 2025-04-04T17:25:04.097466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510302981942410:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:04.097605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013b3/r3tmp/tmp0xuhAa/pdisk_1.dat 2025-04-04T17:25:04.509166Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:04.511785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:04.512198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:04.517074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6630, node 1 2025-04-04T17:25:04.678835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:04.678854Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:04.678860Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:04.678957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26116 TClient is connected to server localhost:26116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
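The KQP startup noise above repeats a handful of records in the shape "<timestamp> node <n> :<COMPONENT> <SEVERITY>: <message>"; the "Resource pool default not found" NOT_FOUND warnings, for instance, recur on every node until the scheduled retry finds the pool created. Counting records by component and severity separates this expected churn from real failures. A hedged sketch, with the record shape read off this log and the rest illustrative:

import re
import sys
from collections import Counter

# Matches the KQP-style records above, e.g.
# "2025-04-04T17:25:04.580850Z node 1 :METADATA_PROVIDER WARN: ...".
# The BS_SKELETON records earlier in the log use a different shape and are
# deliberately not covered here.
RECORD_RE = re.compile(r"\S+Z node \d+ :(\w+) (ERROR|WARN|INFO):")

def triage(log_text: str) -> Counter:
    # Count (component, severity) pairs across the whole log.
    return Counter(RECORD_RE.findall(log_text))

if __name__ == "__main__":
    for (component, severity), count in triage(sys.stdin.read()).most_common():
        print(f"{count:5}  {severity:5}  {component}")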
2025-04-04T17:25:05.244580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:05.279650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:07.356862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510315866844969:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.356957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:07.559529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 30887, MsgBus: 1265 2025-04-04T17:25:08.458647Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510319992586839:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:08.458736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013b3/r3tmp/tmp8rhGSp/pdisk_1.dat 2025-04-04T17:25:08.548948Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30887, node 2 2025-04-04T17:25:08.610045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:08.614333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:08.616362Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:08.642711Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:08.642730Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:08.642738Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:08.642824Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1265 TClient is connected to server localhost:1265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:09.103805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.118949Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:11.623275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510332877489378:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.623376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.654395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.714674Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' Trying to start YDB, gRPC: 5385, MsgBus: 4030 2025-04-04T17:25:12.501183Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489510338595357138:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:12.501224Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013b3/r3tmp/tmpbxIOHv/pdisk_1.dat 2025-04-04T17:25:12.612598Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5385, node 3 2025-04-04T17:25:12.643346Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:12.643443Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:12.648513Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:12.698862Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:12.698889Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:12.698899Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:12.699029Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4030 TClient is connected to server localhost:4030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:13.091696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:15.519586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 19072, MsgBus: 26301 2025-04-04T17:25:16.409482Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510355359835943:2206];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013b3/r3tmp/tmpVHC9lF/pdisk_1.dat 2025-04-04T17:25:16.463811Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:25:16.553891Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:16.576331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: ... pe: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.138151Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037890 not found Trying to start YDB, gRPC: 24338, MsgBus: 17352 2025-04-04T17:25:31.008801Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510417380417500:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:31.008870Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013b3/r3tmp/tmpL4WQnW/pdisk_1.dat 2025-04-04T17:25:31.133602Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:31.161436Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:31.161537Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:31.163277Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24338, node 6 2025-04-04T17:25:31.235929Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:31.235960Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:31.235969Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:31.236135Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17352 TClient is connected to server localhost:17352 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:31.783739Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:35.003192Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510434560287342:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:35.003354Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.133595Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 13070, MsgBus: 62248 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0013b3/r3tmp/tmpQHJsBK/pdisk_1.dat 2025-04-04T17:25:36.338465Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:25:36.343708Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13070, node 7 2025-04-04T17:25:36.371436Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:36.371536Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:36.373026Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:36.429861Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:36.429887Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:36.429895Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:36.430054Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62248 TClient is connected to server localhost:62248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:37.040009Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.049596Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:37.061979Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:37.166559Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:37.388740Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:37.489804Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:40.586602Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510455919994128:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:40.586736Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:40.621377Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:25:40.693970Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:25:40.737617Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:25:40.781737Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:25:40.822282Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:25:40.901092Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:25:41.001570Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510460214961945:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:41.001688Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:41.001863Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510460214961950:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:41.007418Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:25:41.020376Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510460214961952:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:41.109619Z node 7 :TX_PROXY ERROR: Actor# [7:7489510460214962007:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:42.517054Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:43.357142Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787543386, txId: 281474976715673] shutting down >> KqpCost::OltpWriteRow+isSink >> KqpCost::IndexLookupJoin-StreamLookupJoin >> KqpReturning::ReturningTypes [GOOD] >> KqpCost::OlapRangeFullScan |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink >> KqpCost::IndexLookup+useSink >> KqpCost::QuerySeviceRangeFullScan >> KqpCost::IndexLookupAndTake+useSink >> KqpYql::Closure [GOOD] >> KqpCost::ScanScriptingRangeFullScan+SourceRead >> KqpNewEngine::DqSourceLimit [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> ReadOnlyVDisk::TestReads [GOOD] >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningTypes [GOOD] Test command err: Trying to start YDB, gRPC: 12758, MsgBus: 62977 2025-04-04T17:24:52.048234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510252184373265:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:52.048291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00147a/r3tmp/tmpmeXg0Q/pdisk_1.dat 2025-04-04T17:24:52.424230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:52.465781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:52.465957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12758, node 1 2025-04-04T17:24:52.468998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:52.550262Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:52.550286Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:52.550297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:52.550407Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62977 TClient is connected to server localhost:62977 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:53.058182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.089208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:53.234347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:24:53.386061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:53.452565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:54.948644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510260774309407:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:54.948749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:55.226157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:24:55.257836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:24:55.283439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:24:55.311398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:24:55.339740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:24:55.414811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:24:55.472927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510265069277220:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:55.473016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:55.473288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510265069277225:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:55.476646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:24:55.486451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510265069277227:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:24:55.568554Z node 1 :TX_PROXY ERROR: Actor# [1:7489510265069277280:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:24:56.489128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:24:56.530468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:24:56.609328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:24:57.047282Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510252184373265:2266];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:57.047364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12736, MsgBus: 25704 2025-04-04T17:24:59.974040Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510283208867404:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:59.974100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00147a/r3tmp/tmpamPzII/pdisk_1.dat 2025-04-04T17:25:00.087011Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:00.114937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:00.115021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:00.117203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12736, node 2 2025-04-04T17:25:00.178861Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:00.178887Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:00.178896Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:00.179020Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25704 TClient is connected to server localhost:25704 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:00.716866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.742527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.809078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is ... 474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.244745Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.283784Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.328710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.398072Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.441768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.503317Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510423678897492:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:32.503444Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:32.503683Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510423678897497:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:32.507781Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:25:32.519082Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510423678897499:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:32.590354Z node 5 :TX_PROXY ERROR: Actor# [5:7489510423678897552:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:33.054899Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510406499026007:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:33.054992Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:33.889382Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:33.972092Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:34.084592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2086, MsgBus: 29424 2025-04-04T17:25:38.329796Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510450034265611:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:38.329885Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00147a/r3tmp/tmpnKHaDc/pdisk_1.dat 2025-04-04T17:25:38.460339Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:38.489944Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:38.490057Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:38.491675Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2086, node 6 2025-04-04T17:25:38.560506Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:38.560538Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:38.560550Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:38.560740Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29424 TClient is connected to server localhost:29424 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:39.145752Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:39.164196Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:39.242526Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.423001Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:39.508261Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.648844Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510467214136543:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:42.648948Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:42.704664Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:25:42.749702Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:25:42.796061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:25:42.840428Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:25:42.889060Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:25:42.975780Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:25:43.040803Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510471509104354:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:43.040931Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:43.041018Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510471509104359:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:43.045588Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:25:43.062499Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510471509104361:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:43.138082Z node 6 :TX_PROXY ERROR: Actor# [6:7489510471509104417:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:43.330211Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510450034265611:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:43.330385Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 5938, MsgBus: 23348 2025-04-04T17:25:36.154386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510438797901963:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:36.154450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00196c/r3tmp/tmpL4lz7U/pdisk_1.dat 2025-04-04T17:25:36.559219Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:36.591828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:36.591933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5938, node 1 2025-04-04T17:25:36.595742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:36.650994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:36.651027Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:36.651043Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:36.651148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23348 TClient is connected to server localhost:23348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:37.230471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.246949Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:37.259801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.413933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:37.584687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.652073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.344546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510451682805625:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:39.344656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:39.613320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.638467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.665819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.695143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.725064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.758563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.841055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510451682806137:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:39.841122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:39.841171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510451682806142:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:39.845040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:25:39.855570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510451682806144:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:39.930995Z node 1 :TX_PROXY ERROR: Actor# [1:7489510451682806199:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:41.154795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510438797901963:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:41.154904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6009, MsgBus: 5919 2025-04-04T17:25:41.865753Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510460212935165:2162];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:41.870155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00196c/r3tmp/tmpmQut34/pdisk_1.dat 2025-04-04T17:25:41.969088Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6009, node 2 2025-04-04T17:25:42.019043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:42.019120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:42.038572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:42.074828Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:42.074856Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:42.074863Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:42.074980Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5919 TClient is connected to server localhost:5919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
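The suites interleaved in this log (KqpCost::*, KqpYql::Closure, KqpNewEngine::DqSourceLimit, KqpScanArrowFormat::*) can be re-run individually when triaging a failure. A sketch, assuming the same release + address-sanitizer profile as this run and ya's -F/--test-filter test selection; the target directory and filter pattern here are illustrative, following the unittest paths printed above:

./ya make -t --build release --sanitize=address ydb/core/kqp/ut/cost -F 'KqpCost::OltpWriteRow*'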
2025-04-04T17:25:42.487680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.493273Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:42.496564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.569242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.732869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.803501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.029713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510477392806005:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:45.029793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:45.074831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:25:45.126139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:25:45.208195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:25:45.242886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:25:45.286875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:25:45.338774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:25:45.422934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510477392806522:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:45.423019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:45.423302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510477392806527:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:45.427282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:25:45.436895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510477392806529:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:45.524646Z node 2 :TX_PROXY ERROR: Actor# [2:7489510477392806584:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> KqpAgg::GroupByLimit [GOOD] >> KqpExtractPredicateLookup::OverflowLookup |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLimit [GOOD] Test command err: Trying to start YDB, gRPC: 25440, MsgBus: 28287 2025-04-04T17:24:59.363956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510283311393848:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:59.364052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001453/r3tmp/tmpsAIlKw/pdisk_1.dat 2025-04-04T17:24:59.750840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:59.750981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:59.753824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:59.754843Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25440, node 1 2025-04-04T17:24:59.857032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:59.857061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:59.857068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:59.857212Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28287 TClient is connected to server localhost:28287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:00.505318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.527156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:00.537244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.709325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:00.867099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.947249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.839795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510296196297503:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:02.839947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.119809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.153185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.221010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.265388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.303036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.348778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.401583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510300491265315:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.401647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.402840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510300491265320:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.405990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:03.414116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510300491265322:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:03.493408Z node 1 :TX_PROXY ERROR: Actor# [1:7489510300491265376:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:04.365390Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510283311393848:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:04.365454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24726, MsgBus: 19398 2025-04-04T17:25:05.653989Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510308123738255:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:05.654043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001453/r3tmp/tmpEwvFwf/pdisk_1.dat 2025-04-04T17:25:05.768091Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24726, node 2 2025-04-04T17:25:05.812571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:05.812741Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:05.819633Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:05.838737Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:05.838760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:05.838767Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:05.838866Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19398 TClient is connected to server localhost:19398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:25:06.290001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:25:06.303892Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:06.312070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.401129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:06.573777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting ... [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510438847579117:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.570566Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.613870Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.655063Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.711506Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.755155Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.796159Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.877243Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:36.987831Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510438847579635:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.987962Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.988194Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510438847579640:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:36.993364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:37.007356Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510438847579642:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:37.017046Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510421667708169:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:37.017130Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:37.099696Z node 6 :TX_PROXY ERROR: Actor# [6:7489510443142546995:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5343, MsgBus: 21887 2025-04-04T17:25:39.919713Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510453648305245:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:39.919870Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001453/r3tmp/tmpCh6eMy/pdisk_1.dat 2025-04-04T17:25:40.089073Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:40.094430Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:40.094525Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:40.096118Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5343, node 7 2025-04-04T17:25:40.147543Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:40.147567Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:40.147576Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:40.147685Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21887 TClient is connected to server localhost:21887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:40.780077Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:40.803354Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:40.873934Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:41.097170Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:41.226790Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:44.410274Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510475123143489:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.410423Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.467990Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.518286Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.568340Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.616952Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.689085Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.740525Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.791783Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510475123144004:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.791897Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.792142Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510475123144009:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.799654Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:44.815611Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510475123144011:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:44.911894Z node 7 :TX_PROXY ERROR: Actor# [7:7489510475123144067:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:44.920309Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510453648305245:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:44.920388Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 7261457295164858681 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 11456, MsgBus: 27260 2025-04-04T17:25:08.390334Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510318928636934:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:08.397800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/00138f/r3tmp/tmpcIxI4V/pdisk_1.dat 2025-04-04T17:25:08.836860Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:08.872184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:08.872314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:08.874216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11456, node 1 2025-04-04T17:25:08.957073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:08.957103Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:08.957118Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:08.957305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27260 TClient is connected to server localhost:27260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:09.579229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.595065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:11.530899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510331813539475:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.531004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.568520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.694411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510331813539578:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.694505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.695109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510331813539583:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:11.699650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:25:11.715835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510331813539585:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:25:11.790861Z node 1 :TX_PROXY ERROR: Actor# [1:7489510331813539636:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:12.020923Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510331813539692:2329], TxId: 281474976710662, task: 1. Ctx: { TraceId : 01jr0xbm3qdvvrdqeprr2n6prp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTg2ZTIzNDktOGY3ZDlmNTQtYTA4OWYyMmMtYzU4M2YyYTk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-04-04T17:25:12.021521Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTg2ZTIzNDktOGY3ZDlmNTQtYTA4OWYyMmMtYzU4M2YyYTk=, ActorId: [1:7489510331813539457:2329], ActorState: ExecuteState, TraceId: 01jr0xbm3qdvvrdqeprr2n6prp, Create QueryResponse for error on request, msg: 2025-04-04T17:25:12.115927Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510336108507004:2329], TxId: 281474976710663, task: 1. Ctx: { TraceId : 01jr0xbm76fbeb2xtdcvx5b7zr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTg2ZTIzNDktOGY3ZDlmNTQtYTA4OWYyMmMtYzU4M2YyYTk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-04-04T17:25:12.116286Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTg2ZTIzNDktOGY3ZDlmNTQtYTA4OWYyMmMtYzU4M2YyYTk=, ActorId: [1:7489510331813539457:2329], ActorState: ExecuteState, TraceId: 01jr0xbm76fbeb2xtdcvx5b7zr, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 21267, MsgBus: 9776 2025-04-04T17:25:13.026112Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510343023942367:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:13.026201Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00138f/r3tmp/tmpaQ6ApG/pdisk_1.dat 2025-04-04T17:25:13.165262Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:13.195566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:13.195681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:13.197396Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21267, node 2 2025-04-04T17:25:13.252432Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:13.252456Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:13.252464Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:13.252569Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9776 TClient is connected to server localhost:9776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:13.729400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:16.299953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510355908844905:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.300031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.309727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.394593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510355908845008:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.394687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.394952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510355908845013:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.399545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part ... 4976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.712595Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.761241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.805972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.852517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:35.941795Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510437131612758:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.941857Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.941907Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510437131612763:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:35.948291Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:35.960523Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510437131612765:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:36.016483Z node 5 :TX_PROXY ERROR: Actor# [5:7489510441426580116:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:36.750767Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510419951741289:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:36.750842Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:37.303301Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:37.472982Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63875, MsgBus: 10069 2025-04-04T17:25:39.648199Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510454392536542:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:39.648253Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00138f/r3tmp/tmplsSohI/pdisk_1.dat 2025-04-04T17:25:39.800022Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:39.824237Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:39.824376Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:39.827664Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63875, node 6 2025-04-04T17:25:39.901990Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:39.902014Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:39.902025Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:39.902137Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10069 TClient is connected to server localhost:10069 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:40.579340Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:40.598813Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:40.697453Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:40.876002Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:40.986519Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:43.939976Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510471572407501:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:43.940104Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:43.989227Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.025725Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.060602Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.096163Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.131026Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.174984Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.225833Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510475867375307:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.225956Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.226056Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510475867375312:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:44.230881Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:44.243149Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510475867375314:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:44.327261Z node 6 :TX_PROXY ERROR: Actor# [6:7489510475867375367:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:44.649258Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510454392536542:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:44.649347Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:45.540373Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:45.752392Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpCost::OlapRange |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 62053, MsgBus: 19215 2025-04-04T17:25:37.917392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510445270241834:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:37.920175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001965/r3tmp/tmp3skVoa/pdisk_1.dat 2025-04-04T17:25:38.278428Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:38.300757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:38.300938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62053, node 1 2025-04-04T17:25:38.304346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:38.402797Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:38.402829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:38.402839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:38.402987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19215 TClient is connected to server localhost:19215 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:38.920814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:38.944123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:39.076638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:39.206275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:39.263588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:41.152334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510462450112803:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.152459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.468865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.509745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.563148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.612588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.653351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.695897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.796012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510462450113316:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.796084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.796326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510462450113321:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.800036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:41.813394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510462450113323:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:41.914521Z node 1 :TX_PROXY ERROR: Actor# [1:7489510462450113379:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:42.918425Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510445270241834:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:42.918525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:42.951374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:43.449745Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787543484, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 17128, MsgBus: 13447 2025-04-04T17:25:44.170443Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510476781596737:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:44.176225Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001965/r3tmp/tmpejNZ2C/pdisk_1.dat 2025-04-04T17:25:44.270484Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:44.303952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:44.304057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:44.307311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17128, node 2 2025-04-04T17:25:44.374798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:44.374819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:44.374827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:44.374943Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13447 TClient is connected to server localhost:13447 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:44.840789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:44.860133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:44.934935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.112941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.184173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.223602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510489666500245:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:47.223678Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:47.271112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:47.310946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:47.348237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:47.400228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:47.445718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:47.488395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:47.578438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510489666500760:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:47.578524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:47.579069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510489666500765:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:47.582857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:47.596363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510489666500767:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:47.660900Z node 2 :TX_PROXY ERROR: Actor# [2:7489510489666500822:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpCost::ScanQueryRangeFullScan-SourceRead
>> KqpRanges::Like [GOOD]
>> KqpNewEngine::IdxLookupExtractMembers [GOOD]
>> KqpCost::OlapWriteRow
>> KqpCost::OltpWriteRow+isSink [GOOD]
>> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD]
>> KqpCost::QuerySeviceRangeFullScan [GOOD]
>> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD]
>> Cdc::NewAndOldImagesLog[PqRunner] [GOOD]
>> Cdc::NewAndOldImagesLog[YdsRunner]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::IdxLookupExtractMembers [GOOD]
Test command err: Trying to start YDB, gRPC: 17951, MsgBus: 31647 2025-04-04T17:24:56.349247Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510268639549111:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:56.349441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001469/r3tmp/tmpzGkd7u/pdisk_1.dat 2025-04-04T17:24:56.683438Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17951, node 1 2025-04-04T17:24:56.759933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:56.761985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:56.763642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:56.763660Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:56.763671Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:56.763782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:56.767346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31647 TClient is connected to server localhost:31647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl...
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:57.297408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:57.317505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:24:57.328446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:57.475017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:57.662870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:57.740703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:59.507332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510281524452755:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.507436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:24:59.731720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.766509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.801176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.835333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.870200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:24:59.917673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.014399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510285819420564:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.014475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.014552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510285819420569:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.017852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:00.030831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510285819420571:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:00.116183Z node 1 :TX_PROXY ERROR: Actor# [1:7489510285819420626:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:01.353089Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510268639549111:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:01.353176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12293, MsgBus: 19530 2025-04-04T17:25:02.243390Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510295467757462:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:02.243432Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001469/r3tmp/tmpZzmfxA/pdisk_1.dat 2025-04-04T17:25:02.328152Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12293, node 2 2025-04-04T17:25:02.367083Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:02.370441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:02.393653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:02.454898Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:02.454935Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:02.454943Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:02.455062Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19530 TClient is connected to server localhost:19530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:02.847966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.861542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.917472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:03.108526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.170828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510454051760811:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.854484Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.912809Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.951289Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.994095Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.041403Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.091047Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.173386Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.249311Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510458346728623:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.249430Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.249647Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510458346728628:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.253855Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:40.269580Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510458346728630:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:40.344124Z node 6 :TX_PROXY ERROR: Actor# [6:7489510458346728685:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:40.470212Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510436871889852:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:40.470518Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6196, MsgBus: 19196 2025-04-04T17:25:43.597416Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510471024329741:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:43.597488Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001469/r3tmp/tmpi8MxLu/pdisk_1.dat 2025-04-04T17:25:43.719303Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:43.750986Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:43.751100Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:43.752937Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6196, node 7 2025-04-04T17:25:43.821131Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:43.821159Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:43.821170Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:43.821330Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19196 TClient is connected to server localhost:19196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-04-04T17:25:44.548248Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:44.563747Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:44.651634Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:44.855266Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:44.953544Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.366421Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510492499167977:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.366552Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.425629Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.504639Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.600476Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510471024329741:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:48.600569Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:48.613201Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.672412Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.727803Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.780267Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.890055Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510492499168500:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.890177Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.890402Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510492499168505:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.896924Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:48.917726Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510492499168507:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:48.986047Z node 7 :TX_PROXY ERROR: Actor# [7:7489510492499168561:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpCost::IndexLookup+useSink [GOOD]
>> TPartitionTests::UserActCount [GOOD]
>> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD]
>> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink [GOOD]
>> KqpCost::IndexLookupAndTake+useSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD]
Test command err: Trying to start YDB, gRPC: 10828, MsgBus: 18332 2025-04-04T17:25:06.473779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510312042810948:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:06.474131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001392/r3tmp/tmpCrKxt3/pdisk_1.dat 2025-04-04T17:25:06.883404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10828, node 1 2025-04-04T17:25:06.927038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:06.927184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:06.929227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:06.990750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:06.990778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:06.990786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:06.990891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18332 TClient is connected to server localhost:18332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:07.555358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.584280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.726398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.873634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.950070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.776665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510324927714381:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:09.776760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:10.048916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:10.078104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:10.108352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:10.138626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:10.167453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:10.197759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:10.241315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510329222682191:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:10.241546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:10.242107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510329222682196:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:10.247125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:10.257808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510329222682198:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:10.327434Z node 1 :TX_PROXY ERROR: Actor# [1:7489510329222682251:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:11.160722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.381171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.478515Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510312042810948:2271];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:11.478630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:11.595552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.769598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:25:12.125152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10357, MsgBus: 29485 2025-04-04T17:25:13.247924Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510342123293874:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:13.247961Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001392/r3tmp/tmp8RKIoi/pdisk_1.dat 2025-04-04T17:25:13.352382Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:13.385607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:13.385706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10357, node 2 2025-04-04T17:25:13.388102Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:13.430274Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:13.430299Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:13.430307Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:13.430455Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29485 TClient is connected to 
server localhost:29485 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:13.853906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.521988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.564709Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.614012Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.681979Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:40.734784Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510458625134049:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.734895Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.735341Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510458625134054:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.739551Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:40.752867Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510458625134056:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:40.854163Z node 5 :TX_PROXY ERROR: Actor# [5:7489510458625134112:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:41.347446Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510441445262601:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:41.347647Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:42.089020Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:42.788896Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787542819, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 21363, MsgBus: 15240 2025-04-04T17:25:44.287898Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510475588657678:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:44.287992Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001392/r3tmp/tmp3BK56K/pdisk_1.dat 2025-04-04T17:25:44.416406Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:44.432119Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:44.432238Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:44.433947Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21363, node 6 2025-04-04T17:25:44.496338Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:44.496364Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:44.496374Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:44.496521Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15240 TClient is connected to server localhost:15240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:45.104376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.122527Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:45.139234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.245614Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.497915Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.611671Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.719675Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510492768528648:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.719773Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.757765Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.812567Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.854488Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.908721Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:48.960846Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.043843Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.124083Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510497063496462:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.124204Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.124310Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510497063496467:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.128779Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:49.141046Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510497063496469:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:49.217763Z node 6 :TX_PROXY ERROR: Actor# [6:7489510497063496524:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:49.290458Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510475588657678:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:49.290516Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:50.557009Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
>> TPartitionTests::TooManyImmediateTxs
>> KqpSqlIn::Dict [GOOD]
>> KqpSqlIn::Delete
>> KqpCost::OlapRangeFullScan [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD]
Test command err: Trying to start YDB, gRPC: 19290, MsgBus: 29012 2025-04-04T17:25:47.279759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510489499804416:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:47.279816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001595/r3tmp/tmpzxIfu4/pdisk_1.dat 2025-04-04T17:25:47.806633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:47.806755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:47.808575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:47.835077Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19290, node 1 2025-04-04T17:25:47.947552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:47.947584Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:47.947593Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:47.948241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29012 TClient is connected to server localhost:29012 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:48.566961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.587142Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:48.597575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.756513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.916976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.987643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.666870Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510502384708072:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.666995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.952110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.983178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.022480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.090086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.129708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.210013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.310558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510506679675896:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.310642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.310644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510506679675901:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.314193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:51.324300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510506679675903:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:51.399355Z node 1 :TX_PROXY ERROR: Actor# [1:7489510506679675958:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:52.282882Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510489499804416:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:52.282933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2252, MsgBus: 20567 2025-04-04T17:25:45.782587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510477264858764:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:45.789976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0015a3/r3tmp/tmpFwRx5y/pdisk_1.dat 2025-04-04T17:25:46.233575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:46.234110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:46.235770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:46.253799Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2252, node 1 2025-04-04T17:25:46.290857Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:25:46.290883Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:25:46.449100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:46.449125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:46.449133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:46.449280Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20567 TClient is connected to server localhost:20567 WaitRootIsUp 'Root'... 
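The TClient::Ls request/response dumps (one follows immediately below) are TPathDescription protobufs printed while the test client polls WaitRootIsUp; the "... (TRUNCATED)" tail is the harness cutting the dump, so the elided fields cannot be recovered from the log. Outside the harness, roughly the same description is available through the scheme client; a sketch with illustrative endpoint and path:

// Sketch: describe a scheme path, analogous to the TClient::Ls dumps above.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>
#include <util/stream/output.h>

int main() {
    auto driver = NYdb::TDriver(
        NYdb::TDriverConfig().SetEndpoint("localhost:20567").SetDatabase("/Root"));
    NYdb::NScheme::TSchemeClient scheme(driver);

    auto result = scheme.DescribePath("/Root").GetValueSync();
    if (result.IsSuccess()) {
        const auto& entry = result.GetEntry();  // name, owner, entry type
        Cout << entry.Name << " owner=" << entry.Owner << Endl;
    } else {
        Cerr << result.GetIssues().ToString();
    }
    driver.Stop(true);
}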
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:47.264131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.308467Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:47.333064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.533174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.713878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.802392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:49.229096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510494444729572:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.229196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.830463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.874001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.910196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.947619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.992491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.071168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.148138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510498739697382:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.148227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.148418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510498739697387:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.153133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:50.166558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510498739697389:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:50.234683Z node 1 :TX_PROXY ERROR: Actor# [1:7489510498739697445:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:50.792076Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510477264858764:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:50.792169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:51.409914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.439069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.508240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow+isSink [GOOD] Test command err: Trying to start YDB, gRPC: 24644, MsgBus: 63938 2025-04-04T17:25:45.942868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510478670550853:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:45.942923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00159f/r3tmp/tmpoObhgG/pdisk_1.dat 2025-04-04T17:25:46.292908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:46.297925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:46.298008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:46.300316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24644, node 1 2025-04-04T17:25:46.450974Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:46.451010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:46.451022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:46.451150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63938 TClient is connected to server localhost:63938 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:47.277830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.317182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.533080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.754345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:47.829399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:49.461239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510495850421830:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.461391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.831496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.871972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.947737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.977320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.005234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.046770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.166409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510500145389644:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.166531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.167399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510500145389650:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.172196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:50.182387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510500145389652:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:50.254679Z node 1 :TX_PROXY ERROR: Actor# [1:7489510500145389706:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:50.943229Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510478670550853:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:50.964220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:51.363911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 5313 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1247 affected_shards: 1 } compilation { duration_us: 50378 cpu_time_us: 47220 } process_cpu_time_us: 406 total_duration_us: 59133 total_cpu_time_us: 48873 query_phases { duration_us: 9991 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 999 affected_shards: 1 } compilation { duration_us: 47708 cpu_time_us: 45108 } process_cpu_time_us: 440 total_duration_us: 60022 total_cpu_time_us: 46547 2025-04-04T17:25:51.736406Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=5; 2025-04-04T17:25:51.758290Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:25:51.758474Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:25:51.758655Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510504440357495:2497], Table: `/Root/TestTable` ([72057594046644480:16:1]), SessionActorId: [1:7489510504440357298:2497]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037919, Sink=[1:7489510504440357495:2497].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:25:51.759111Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510504440357488:2497], SessionActorId: [1:7489510504440357298:2497], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489510504440357298:2497]. isRollback=0 2025-04-04T17:25:51.759297Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWYwZmUwOGEtN2I2YTBiNTgtNmU2NDJjZjctMTlkOGMwZGM=, ActorId: [1:7489510504440357298:2497], ActorState: ExecuteState, TraceId: 01jr0xctxq305ks4gr7m1ts519, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489510504440357489:2497] from: [1:7489510504440357488:2497] 2025-04-04T17:25:51.759390Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489510504440357489:2497] TxId: 281474976710675. Ctx: { TraceId: 01jr0xctxq305ks4gr7m1ts519, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYwZmUwOGEtN2I2YTBiNTgtNmU2NDJjZjctMTlkOGMwZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:25:51.760274Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWYwZmUwOGEtN2I2YTBiNTgtNmU2NDJjZjctMTlkOGMwZGM=, ActorId: [1:7489510504440357298:2497], ActorState: ExecuteState, TraceId: 01jr0xctxq305ks4gr7m1ts519, Create QueryResponse for error on request, msg: query_phases { duration_us: 32935 cpu_time_us: 1184 affected_shards: 1 } compilation { duration_us: 52824 cpu_time_us: 50451 } process_cpu_time_us: 613 total_duration_us: 87994 total_cpu_time_us: 52248 query_phases { duration_us: 4360 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1501 affected_shards: 1 } compilation { duration_us: 45997 cpu_time_us: 43571 } process_cpu_time_us: 477 total_duration_us: 52223 total_cpu_time_us: 45549 query_phases { duration_us: 2581 cpu_time_us: 1322 affected_shards: 1 } compilation { duration_us: 99373 cpu_time_us: 96857 } process_cpu_time_us: 480 total_duration_us: 104157 total_cpu_time_us: 98659 query_phases { duration_us: 4105 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1555 affected_shards: 1 } compilation { duration_us: 67087 cpu_time_us: 64625 } process_cpu_time_us: 508 total_duration_us: 75103 total_cpu_time_us: 66688 query_phases { duration_us: 4028 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1320 affected_shards: 1 } compilation { duration_us: 44345 cpu_time_us: 42015 } process_cpu_time_us: 457 total_duration_us: 50584 total_cpu_time_us: 43792 query_phases { duration_us: 4670 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1504 affected_shards: 1 } compilation { duration_us: 58027 cpu_time_us: 55137 } process_cpu_time_us: 593 total_duration_us: 64948 total_cpu_time_us: 57234 >> KqpCost::OltpWriteRow-isSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] >> Cdc::UpdatesLog[YdsRunner] [GOOD] Test command err: Trying to start YDB, gRPC: 27273, MsgBus: 19400 2025-04-04T17:25:47.229993Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510487282875052:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:47.230041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00159a/r3tmp/tmpe7LekX/pdisk_1.dat 2025-04-04T17:25:47.711247Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:47.729190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:47.729270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:47.732896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27273, node 1 2025-04-04T17:25:47.910933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:47.910965Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:47.910974Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
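The query_phases { ... } blocks in the OltpWriteRow output above are per-phase execution statistics: each phase reports its duration and CPU in microseconds plus table_access counters (rows and bytes per update or delete), followed by compilation cost and run totals, which is exactly what the KqpCost assertions consume. The STATUS_CONSTRAINT_VIOLATION / PRECONDITION_FAILED pair just before them is the normal shape of a duplicate-key insert: the datashard rejects the write and KQP surfaces it as issue code 2012. A hedged sketch of observing both from a client (the table name and literals are assumptions, not taken from the test):

// Sketch: collect the query_phases statistics seen in the log and handle the
// duplicate-key PRECONDITION_FAILED outcome.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/stream/output.h>

void InsertAndReport(NYdb::NTable::TSession session) {
    using namespace NYdb;
    using namespace NYdb::NTable;

    auto settings = TExecDataQuerySettings()
        .CollectQueryStats(ECollectQueryStatsMode::Basic);

    auto result = session.ExecuteDataQuery(
        // Running this twice violates the primary key, as in the test above.
        "INSERT INTO TestTable (Key, Value) VALUES (1u, \"a\");",
        TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx(),
        settings
    ).GetValueSync();

    if (result.GetStatus() == EStatus::PRECONDITION_FAILED) {
        // e.g. "Duplicate keys have been found., code: 2012"
        Cerr << result.GetIssues().ToString();
    } else if (result.GetStats()) {
        // Prints the same protobuf text seen in the log: query_phases { ... }
        Cout << result.GetStats()->ToString() << Endl;
    }
}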
2025-04-04T17:25:47.911136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19400 TClient is connected to server localhost:19400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:48.514991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.536554Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:48.554161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.724070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.904069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.992757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.664336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510500167778720:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.664447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.888458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.917237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.970009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.013149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.043953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.079781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.159619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510504462746528:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.159733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.160063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510504462746533:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.164593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:51.173763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510504462746535:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:51.252023Z node 1 :TX_PROXY ERROR: Actor# [1:7489510504462746593:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:51.939770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.231185Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510487282875052:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:52.239553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> Cdc::UpdatesLog[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 27789, MsgBus: 4608 2025-04-04T17:25:47.502797Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510489669778556:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:47.503379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001587/r3tmp/tmpIMWVWC/pdisk_1.dat 2025-04-04T17:25:48.022193Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:48.026951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:48.027037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:48.029239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27789, node 1 2025-04-04T17:25:48.158439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:48.158462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:48.158476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:48.158615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4608 TClient is connected to server localhost:4608 WaitRootIsUp 'Root'... 
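Triples such as "/Root/SecondaryKeys/Index/indexImplTable 1 8" printed at the end of the cost tests are per-table summaries; on the most natural reading they are path, rows read, and bytes read, aggregated over the table_access entries of the collected statistics. Under that assumption the same totals can be recomputed from the stats protobuf; a sketch (the accessor header path is itself an assumption about the in-tree layout):

// Sketch: sum per-table reads from collected query stats, matching the
// "<path> <rows> <bytes>" summaries printed by the cost tests above.
#include <ydb/public/sdk/cpp/client/ydb_proto/accessor.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/generic/hash.h>
#include <util/stream/output.h>

void PrintTableReads(const NYdb::NTable::TQueryStats& stats) {
    const Ydb::TableStats::QueryStats& proto = NYdb::TProtoAccessor::GetProto(stats);
    THashMap<TString, std::pair<ui64, ui64>> totals;  // path -> {rows, bytes}
    for (const auto& phase : proto.query_phases()) {
        for (const auto& access : phase.table_access()) {
            auto& [rows, bytes] = totals[access.name()];
            rows += access.reads().rows();
            bytes += access.reads().bytes();
        }
    }
    for (const auto& [path, rb] : totals) {
        Cout << path << " " << rb.first << " " << rb.second << Endl;
    }
}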
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:48.782896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.815232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:48.822015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:48.960790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.125127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:49.197809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.935424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510502554682059:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.935535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.225891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.258826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.321736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.351948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.394108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.433020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.513124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510506849649874:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.513197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.513450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510506849649879:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.517198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:51.527828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510506849649881:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:51.619559Z node 1 :TX_PROXY ERROR: Actor# [1:7489510506849649939:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:52.490578Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510489669778556:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:52.490644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:52.639163Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787552647, txId: 281474976710671] shutting down >> KqpCost::Range [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 11864, MsgBus: 19265 2025-04-04T17:25:47.360956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510489121237795:2248];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:47.361140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001588/r3tmp/tmpCbmazA/pdisk_1.dat 2025-04-04T17:25:47.798370Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:47.817550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:47.817679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:47.819807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11864, node 1 2025-04-04T17:25:47.928957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:47.928974Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:47.928982Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:47.929118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19265 TClient is connected to server localhost:19265 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:48.517764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.538900Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:48.564896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.765270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.958578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:49.070177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.686367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510502006141263:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.686511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.929304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.976332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.024211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.057533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.128668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.164006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.220456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510506301109076:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.220567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.220960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510506301109081:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.225073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:51.234583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510506301109083:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:51.295245Z node 1 :TX_PROXY ERROR: Actor# [1:7489510506301109135:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:52.056954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.355922Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510489121237795:2248];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:52.355989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 65208, MsgBus: 9349 2025-04-04T17:25:46.688465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510482914715235:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:46.688579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00159b/r3tmp/tmp5MSrI0/pdisk_1.dat 2025-04-04T17:25:47.092191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:47.095917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:47.096023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:47.098610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65208, node 1 2025-04-04T17:25:47.258371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:47.258389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:47.258395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:47.258489Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9349 TClient is connected to server localhost:9349 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:48.013819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.041814Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:48.056084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.254598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.444722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.534591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.262485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510500094586151:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.262628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.534804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.571357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.606099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.637473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.668528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.747609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.803340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510500094586668:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.803447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.803736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510500094586673:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.807370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:50.822432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510500094586675:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:50.911868Z node 1 :TX_PROXY ERROR: Actor# [1:7489510500094586730:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:51.682868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510482914715235:2129];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:51.682921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:51.805300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.834130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.860826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 /Root/Join1_2 1 19 /Root/Join1_1 8 136 |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21240, MsgBus: 63472 2025-04-04T17:25:05.983995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510308603234800:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:05.984094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001398/r3tmp/tmpN3HCUr/pdisk_1.dat 2025-04-04T17:25:06.368368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:06.412655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:06.412766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:06.414413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21240, node 1 2025-04-04T17:25:06.465765Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:06.465805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:06.465812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:06.465926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:63472 TClient is connected to server localhost:63472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:06.998945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.027182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.205234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.361123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.432925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:09.091155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510325783105760:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:09.091289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:09.357587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:09.389066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:09.420570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:09.452273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:09.483373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:09.514936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:09.602079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510325783106272:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:09.602147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:09.602466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510325783106277:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:09.606454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:09.616881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510325783106279:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:09.718129Z node 1 :TX_PROXY ERROR: Actor# [1:7489510325783106334:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:10.984772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510308603234800:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:10.984874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23360, MsgBus: 4804 2025-04-04T17:25:11.607156Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510332608146262:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:11.655586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001398/r3tmp/tmpPms4CP/pdisk_1.dat 2025-04-04T17:25:11.768928Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:11.780573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:11.780660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:11.784182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23360, node 2 2025-04-04T17:25:11.830613Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:11.830635Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:11.830648Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:11.830754Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4804 TClient is connected to server localhost:4804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
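Note: the KQP_WORKLOAD_SERVICE warnings that repeat throughout these test logs trace a single bootstrap pattern: the first query on a fresh database fails to fetch the "default" resource pool (NOT_FOUND), that failure triggers an ESchemeOpCreateResourcePool, the TPoolCreatorActor schedules a "doublechecking" retry, and a creator that loses the race gets TX_PROXY's "path exist, request accepts it" and proceeds as if it had won. A minimal sketch of that handshake; every name in it (Status, FetchPool, CreatePool, EnsureDefaultPool) is illustrative, not YDB API:

    #include <iostream>

    enum class Status { Success, NotFound, AlreadyExists };

    static bool g_poolExists = false;  // stand-in for schemeshard state

    Status FetchPool() { return g_poolExists ? Status::Success : Status::NotFound; }

    Status CreatePool() {
        if (g_poolExists) return Status::AlreadyExists;  // lost the creation race
        g_poolExists = true;
        return Status::Success;
    }

    bool EnsureDefaultPool(int maxAttempts = 3) {
        for (int i = 0; i < maxAttempts; ++i) {
            switch (FetchPool()) {
                case Status::Success:
                    return true;                  // pool is present, nothing to do
                case Status::NotFound: {
                    Status st = CreatePool();
                    // "path exist, request accepts it" maps to AlreadyExists here:
                    // losing the race still counts as success, re-fetch to verify.
                    if (st == Status::Success || st == Status::AlreadyExists)
                        continue;
                    return false;
                }
                default:
                    return false;
            }
        }
        return false;
    }

    int main() { std::cout << EnsureDefaultPool() << "\n"; }  // prints 1

Under this reading the NOT_FOUND warnings are the expected first-touch path on every freshly started node, which is consistent with the surrounding tests still finishing [GOOD].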
2025-04-04T17:25:12.346198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.352115Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:12.365035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.435046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.560323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:12.624922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:40.959777Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.040981Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.077786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.112969Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.149552Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.188291Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.263314Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:41.323031Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510462268663762:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.323178Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.323443Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510462268663767:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:41.328661Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:41.341958Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510462268663769:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:41.427891Z node 6 :TX_PROXY ERROR: Actor# [6:7489510462268663823:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:41.662025Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510440793825006:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:41.662113Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19836, MsgBus: 4300 2025-04-04T17:25:44.783021Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489510473314226827:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:44.783143Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001398/r3tmp/tmpjTGyk2/pdisk_1.dat 2025-04-04T17:25:44.912893Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:44.948648Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:44.948772Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:44.952065Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19836, node 7 2025-04-04T17:25:45.014851Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:45.014880Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:45.014891Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:45.015058Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4300 TClient is connected to server localhost:4300 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
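Note: each "waiting..." marker interleaved with the schemeshard warnings is the test client blocking until a scheme transaction completes, and the "NotifyTxCompletion, unknown transaction" WARN appears to be the benign case where the completion subscription arrives after the transaction has already finished. A polling sketch of that wait, assuming a hypothetical IsTxComplete(txId) predicate (the real client subscribes for a notification rather than polling):

    #include <chrono>
    #include <iostream>
    #include <thread>

    bool IsTxComplete(unsigned long long /*txId*/) {
        static int polls = 0;
        return ++polls >= 3;  // stub: reports completion on the third poll
    }

    bool WaitTx(unsigned long long txId, std::chrono::milliseconds timeout) {
        const auto deadline = std::chrono::steady_clock::now() + timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (IsTxComplete(txId)) return true;
            std::cout << "waiting...\n";  // mirrors the marker in the log
            std::this_thread::sleep_for(std::chrono::milliseconds(50));
        }
        return false;  // gives up, like the METADATA_PROVIDER error=timeout lines
    }

    int main() {
        std::cout << WaitTx(281474976710657ULL, std::chrono::seconds(1)) << "\n";
    }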
2025-04-04T17:25:45.743992Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.763442Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:45.777433Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.886485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:46.086121Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:46.173316Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:49.740148Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510494789065063:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.740285Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.770700Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.790343Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7489510473314226827:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:49.790447Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:49.859923Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.914575Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.962182Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.019313Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.132488Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.226313Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510499084032884:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.226461Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.226932Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7489510499084032889:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.231960Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:50.246854Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7489510499084032891:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:50.333242Z node 7 :TX_PROXY ERROR: Actor# [7:7489510499084032946:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] >> VDiskBalancing::TestRandom_Block42 [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] >> TPartitionTests::TooManyImmediateTxs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 28239, MsgBus: 63395 2025-04-04T17:25:47.090983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510485822932886:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:47.091291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001597/r3tmp/tmp59l4uf/pdisk_1.dat 2025-04-04T17:25:47.639612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:47.639704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:47.642135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28239, node 1 2025-04-04T17:25:47.674689Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:47.922856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:47.922881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:47.922894Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:47.923043Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63395 TClient is connected to server localhost:63395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
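Note: the TX_COLUMNSHARD warnings a little further down walk every column-shard tablet through a fixed chain of schema normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks), emitting normalizer_switched when a step starts and normalizer_finished with its id when it completes. A sketch of that dispatch loop, with illustrative types rather than the real TTxUpdateSchema machinery:

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    struct Normalizer {
        std::string name;
        int id;
        std::function<void()> run;
    };

    void RunChain(const std::vector<Normalizer>& chain) {
        for (const auto& n : chain) {
            std::cout << "event=normalizer_switched;CLASS_NAME=" << n.name << "\n";
            n.run();  // one migration step over the tablet's local data (illustrative)
            std::cout << "event=normalizer_finished;CLASS_NAME=" << n.name
                      << ";id=" << n.id << "\n";
        }
        std::cout << "event=normalization_finished\n";
    }

    int main() {
        RunChain({{"Granules", 1, [] {}}, {"Chunks", 2, [] {}}});
    }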
2025-04-04T17:25:48.514812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.575449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.749266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.917993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.992110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.859381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510498707836378:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.859543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.083761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.122927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.154229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.219500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.250673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.287064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.346037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510503002804190:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.346119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.350554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510503002804195:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.354424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:51.369217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510503002804197:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:51.465942Z node 1 :TX_PROXY ERROR: Actor# [1:7489510503002804253:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:52.088902Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510485822932886:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:52.088970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:52.404777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.573382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7489510507297771946:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:25:52.573386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:25:52.573590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:25:52.573865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:25:52.573994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:25:52.574057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7489510507297771946:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:25:52.574072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:25:52.574184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:25:52.574216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7489510507297771946:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:25:52.574285Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:25:52.574350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7489510507297771946:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:25:52.574371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510507297771943:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:25:52.574451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7489510507297771946:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:25:52.574524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7489510507297771946:2503];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:25:52.574523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72 ... UMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:25:52.709588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:25:52.709661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:25:52.709700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:25:52.709738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:25:52.709758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:25:52.709778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:25:52.709809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:25:52.710846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:25:52.710895Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:25:52.711072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:25:52.711153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:25:52.711303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:25:52.711328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:25:52.711504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:25:52.711527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:25:52.711621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:25:52.711640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:25:52.712055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:25:52.712088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:25:52.712181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:25:52.712203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:25:52.712409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:25:52.712449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:25:52.712529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:25:52.712573Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:25:52.712647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:25:52.712669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:25:52.712704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:25:52.712726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:25:52.713699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:25:52.713749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:25:52.713919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:25:52.713941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:25:52.714094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:25:52.714118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:25:52.714297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:25:52.714320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:25:52.714406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:25:52.714426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:25:52.772257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.772257Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.778364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.778367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.783648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.784202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.788815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.789713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.794663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:52.794996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:53.012093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T17:25:53.012092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T17:25:53.012701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; query_phases { duration_us: 221162 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 72 } } cpu_time_us: 81858 } compilation { duration_us: 277700 cpu_time_us: 274664 } process_cpu_time_us: 345 total_duration_us: 503213 total_cpu_time_us: 356867 >> TPartitionTests::WriteSubDomainOutOfSpace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink [GOOD] Test command err: Trying to start YDB, gRPC: 17023, MsgBus: 21599 2025-04-04T17:25:47.255799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510486333407762:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:47.256329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001594/r3tmp/tmpeXfCtl/pdisk_1.dat 2025-04-04T17:25:47.683352Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17023, node 1 2025-04-04T17:25:47.727295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:47.727406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:47.731126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:47.834748Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:47.834781Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:47.834788Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:47.834889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21599 TClient is connected to server localhost:21599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:48.457545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.475515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:48.492552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.650129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.849059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:48.950566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
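Note: the query_phases blocks these KqpCost tests print (one appears at the end of the OlapRangeFullScan output above, more follow below) are the cost profile of a query: per-phase duration_us/cpu_time_us with table_access rows/bytes, a compilation block, and a process_cpu_time_us remainder. The CPU numbers add up: in the OlapRangeFullScan output, total_cpu_time_us (356867) is exactly phase cpu_time_us (81858) + compilation cpu_time_us (274664) + process_cpu_time_us (345). A sketch of that aggregation over an illustrative struct whose field names mirror the text dump:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    struct PhaseStats { uint64_t duration_us = 0, cpu_time_us = 0; };

    struct QueryStats {
        std::vector<PhaseStats> query_phases;
        PhaseStats compilation;
        uint64_t process_cpu_time_us = 0;
    };

    uint64_t TotalCpuUs(const QueryStats& s) {
        uint64_t total = s.compilation.cpu_time_us + s.process_cpu_time_us;
        for (const auto& p : s.query_phases) total += p.cpu_time_us;
        return total;
    }

    int main() {
        // Numbers taken verbatim from the OlapRangeFullScan stats above.
        QueryStats s{{{221162, 81858}}, {277700, 274664}, 345};
        std::cout << TotalCpuUs(s) << "\n";  // prints 356867, matching the log
    }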
2025-04-04T17:25:50.760468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510499218311428:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:50.760615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.040696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.070809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.101898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.134264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.168003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.209736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.299164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510503513279241:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.299237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.299331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510503513279246:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:51.303020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:51.313004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510503513279248:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:51.386127Z node 1 :TX_PROXY ERROR: Actor# [1:7489510503513279302:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:52.256556Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510486333407762:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:52.256625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:52.379507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 486 cpu_time_us: 486 } query_phases { duration_us: 2510 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 881 affected_shards: 1 } compilation { duration_us: 65446 cpu_time_us: 62463 } process_cpu_time_us: 801 total_duration_us: 70233 total_cpu_time_us: 64631 query_phases { duration_us: 512 cpu_time_us: 512 } query_phases { duration_us: 3110 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 889 affected_shards: 1 } compilation { duration_us: 73740 cpu_time_us: 70923 } process_cpu_time_us: 909 total_duration_us: 79127 total_cpu_time_us: 73233 2025-04-04T17:25:52.976116Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510507808247070:2527], TxId: 281474976710676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODg0MjAzZDgtZGNlMDgyYWEtODI1YjM0NWUtYWM0MjcxN2U=. TraceId : 01jr0xcvym3d6nfb67sskeg7cp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:25:52.977100Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510507808247071:2528], TxId: 281474976710676, task: 2. Ctx: { TraceId : 01jr0xcvym3d6nfb67sskeg7cp. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODg0MjAzZDgtZGNlMDgyYWEtODI1YjM0NWUtYWM0MjcxN2U=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489510507808247067:2489], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:25:52.977422Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg0MjAzZDgtZGNlMDgyYWEtODI1YjM0NWUtYWM0MjcxN2U=, ActorId: [1:7489510507808246855:2489], ActorState: ExecuteState, TraceId: 01jr0xcvym3d6nfb67sskeg7cp, Create QueryResponse for error on request, msg: query_phases { duration_us: 907 cpu_time_us: 907 } query_phases { duration_us: 8774 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 6969 affected_shards: 1 } query_phases { duration_us: 24116 cpu_time_us: 24073 } compilation { duration_us: 214246 cpu_time_us: 199672 } process_cpu_time_us: 1802 total_duration_us: 252772 total_cpu_time_us: 233423 query_phases { duration_us: 702 cpu_time_us: 702 } query_phases { duration_us: 2860 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 2561 affected_shards: 1 } query_phases { duration_us: 1556 cpu_time_us: 1900 } query_phases { duration_us: 3662 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1186 affected_shards: 1 } compilation { duration_us: 199960 cpu_time_us: 195672 } process_cpu_time_us: 1827 total_duration_us: 214175 total_cpu_time_us: 203848 query_phases { duration_us: 658 cpu_time_us: 658 } query_phases { duration_us: 5382 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 5375 affected_shards: 1 } query_phases { duration_us: 995 cpu_time_us: 572 affected_shards: 1 } compilation { duration_us: 191145 cpu_time_us: 186665 } process_cpu_time_us: 1420 total_duration_us: 201877 total_cpu_time_us: 194690 query_phases { duration_us: 501 cpu_time_us: 501 } query_phases { duration_us: 3393 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 2996 affected_shards: 1 } query_phases { duration_us: 3995 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1099 affected_shards: 1 } compilation { duration_us: 206995 cpu_time_us: 202551 } process_cpu_time_us: 1253 total_duration_us: 220430 total_cpu_time_us: 208400 query_phases { duration_us: 471 cpu_time_us: 471 } query_phases { duration_us: 3180 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 949 affected_shards: 1 } compilation { duration_us: 51373 cpu_time_us: 48784 } process_cpu_time_us: 806 total_duration_us: 56527 total_cpu_time_us: 51010 query_phases { duration_us: 521 cpu_time_us: 521 } query_phases { duration_us: 2996 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 881 affected_shards: 1 } compilation { duration_us: 94208 cpu_time_us: 90629 } process_cpu_time_us: 870 total_duration_us: 99476 total_cpu_time_us: 92901 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 29120, MsgBus: 1954 2025-04-04T17:25:49.127522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510498259616199:2201];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:49.128375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001581/r3tmp/tmpVkryE9/pdisk_1.dat 2025-04-04T17:25:49.625916Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:49.643185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:49.643297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:49.646312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29120, node 1 2025-04-04T17:25:49.737392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:49.737420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:49.737428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:49.737567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1954 TClient is connected to server localhost:1954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:50.296198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.325213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.459580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.620052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.685054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:52.379352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510511144519719:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.379480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.590376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.618070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.686269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.718461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.784438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.815143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.854682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510511144520233:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.854758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.854951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510511144520238:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.858382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:52.869867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510511144520240:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:52.953017Z node 1 :TX_PROXY ERROR: Actor# [1:7489510511144520294:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] >> ReadOnlyVDisk::TestSync [GOOD] >> TPartitionTests::TestNonConflictingActsBatchOk >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 17844, MsgBus: 1533 2025-04-04T17:25:29.132681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510409127906467:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:29.133220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00196e/r3tmp/tmpazCach/pdisk_1.dat 2025-04-04T17:25:29.453266Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17844, node 1 2025-04-04T17:25:29.509947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:29.514370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:29.522532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:29.551852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:29.551878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:29.551885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:29.551989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1533 TClient is connected to server localhost:1533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:30.035829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:30.051991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:30.202644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-04-04T17:25:30.361050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:30.438160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:32.200061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510422012809999:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:32.200163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:32.457843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.490082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.517485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.553516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.581845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.658817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.743578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510422012810520:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:32.743711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:32.744017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510422012810525:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:32.747895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:32.760438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510422012810528:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:32.849706Z node 1 :TX_PROXY ERROR: Actor# [1:7489510422012810582:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:34.145398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510409127906467:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:34.181488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:34.187061Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534104, txId: 281474976710671] shutting down 2025-04-04T17:25:34.189242Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534104, txId: 281474976710673] shutting down 2025-04-04T17:25:34.206917Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534104, txId: 281474976710672] shutting down 2025-04-04T17:25:34.248025Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534104, txId: 281474976710674] shutting down 2025-04-04T17:25:34.282225Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534300, txId: 281474976710679] shutting down 2025-04-04T17:25:34.601559Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534503, txId: 281474976710681] shutting down 2025-04-04T17:25:34.602425Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534503, txId: 281474976710682] shutting down 2025-04-04T17:25:34.625781Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534636, txId: 281474976710686] shutting down 2025-04-04T17:25:34.626544Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534636, txId: 281474976710685] shutting down 2025-04-04T17:25:34.821447Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534825, txId: 281474976710690] shutting down 2025-04-04T17:25:34.823393Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534825, txId: 281474976710689] shutting down 2025-04-04T17:25:34.981977Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534965, txId: 281474976710693] shutting down 2025-04-04T17:25:34.987757Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534972, txId: 281474976710694] shutting down 2025-04-04T17:25:35.124138Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535147, txId: 281474976710698] shutting down 2025-04-04T17:25:35.126490Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535140, txId: 281474976710697] shutting down 2025-04-04T17:25:35.264786Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535266, txId: 281474976710701] shutting down 2025-04-04T17:25:35.278877Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535266, txId: 281474976710702] shutting down 2025-04-04T17:25:35.489516Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535497, txId: 281474976710706] shutting down 2025-04-04T17:25:35.490391Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535497, txId: 281474976710705] shutting down 2025-04-04T17:25:35.635588Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535658, txId: 281474976710709] shutting down 2025-04-04T17:25:35.768476Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535784, txId: 281474976710712] shutting down 2025-04-04T17:25:35.770697Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535784, txId: 281474976710711] shutting down 2025-04-04T17:25:35.940821Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapsh ... 17:25:45.372695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:45.386460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510478932722875:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:45.459274Z node 2 :TX_PROXY ERROR: Actor# [2:7489510478932722929:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:46.506677Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510461752851498:2150];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:46.506764Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:46.816555Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787546753, txId: 281474976715671] shutting down 2025-04-04T17:25:46.817637Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787546753, txId: 281474976715672] shutting down 2025-04-04T17:25:46.904936Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787546865, txId: 281474976715676] shutting down 2025-04-04T17:25:46.908856Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787546865, txId: 281474976715675] shutting down 2025-04-04T17:25:47.187351Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547082, txId: 281474976715680] shutting down 2025-04-04T17:25:47.201980Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547082, txId: 281474976715679] shutting down 2025-04-04T17:25:47.227559Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547089, txId: 281474976715681] shutting down 2025-04-04T17:25:47.241772Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547250, txId: 281474976715685] shutting down 2025-04-04T17:25:47.263388Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547264, txId: 281474976715686] shutting down 2025-04-04T17:25:47.490884Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547432, txId: 281474976715689] shutting down 2025-04-04T17:25:47.496130Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547432, txId: 281474976715690] shutting down 2025-04-04T17:25:47.558821Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547537, txId: 281474976715693] shutting down 2025-04-04T17:25:47.803822Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547705, txId: 281474976715696] shutting down 2025-04-04T17:25:47.808248Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547705, txId: 281474976715695] shutting down 2025-04-04T17:25:47.866646Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547859, txId: 281474976715699] shutting down 2025-04-04T17:25:47.891028Z node 2 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787547859, txId: 281474976715700] shutting down 2025-04-04T17:25:48.068791Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548062, txId: 281474976715703] shutting down 2025-04-04T17:25:48.076754Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548062, txId: 281474976715705] shutting down 2025-04-04T17:25:48.079819Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548062, txId: 281474976715704] shutting down 2025-04-04T17:25:48.289447Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548279, txId: 281474976715710] shutting down 2025-04-04T17:25:48.290299Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548279, txId: 281474976715709] shutting down 2025-04-04T17:25:48.469820Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548482, txId: 281474976715713] shutting down 2025-04-04T17:25:48.470599Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548482, txId: 281474976715714] shutting down 2025-04-04T17:25:48.701451Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548713, txId: 281474976715717] shutting down 2025-04-04T17:25:48.707553Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548706, txId: 281474976715718] shutting down 2025-04-04T17:25:48.891142Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787548902, txId: 281474976715721] shutting down 2025-04-04T17:25:49.046716Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549049, txId: 281474976715723] shutting down 2025-04-04T17:25:49.171448Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549168, txId: 281474976715725] shutting down 2025-04-04T17:25:49.176735Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549168, txId: 281474976715726] shutting down 2025-04-04T17:25:49.404129Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549406, txId: 281474976715729] shutting down 2025-04-04T17:25:49.498659Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549525, txId: 281474976715731] shutting down 2025-04-04T17:25:49.652155Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549679, txId: 281474976715734] shutting down 2025-04-04T17:25:49.653913Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549679, txId: 281474976715733] shutting down 2025-04-04T17:25:49.922060Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787549945, txId: 281474976715737] shutting down 2025-04-04T17:25:50.045275Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787550071, txId: 281474976715739] shutting down 2025-04-04T17:25:50.153178Z node 2 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787550183, txId: 281474976715741] shutting down 2025-04-04T17:25:50.350104Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787550372, txId: 281474976715743] shutting down 2025-04-04T17:25:50.479481Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787550498, txId: 281474976715745] shutting down 2025-04-04T17:25:50.623385Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787550645, txId: 281474976715747] shutting down 2025-04-04T17:25:50.767010Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787550792, txId: 281474976715749] shutting down 2025-04-04T17:25:50.934356Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787550939, txId: 281474976715751] shutting down 2025-04-04T17:25:51.140224Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787551163, txId: 281474976715753] shutting down 2025-04-04T17:25:51.256065Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787551282, txId: 281474976715755] shutting down 2025-04-04T17:25:51.445387Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787551464, txId: 281474976715757] shutting down 2025-04-04T17:25:51.619036Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787551646, txId: 281474976715759] shutting down 2025-04-04T17:25:51.777150Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787551807, txId: 281474976715761] shutting down 2025-04-04T17:25:51.996003Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787552017, txId: 281474976715763] shutting down 2025-04-04T17:25:52.180804Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787552206, txId: 281474976715765] shutting down 2025-04-04T17:25:52.372239Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787552360, txId: 281474976715767] shutting down 2025-04-04T17:25:52.494722Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787552528, txId: 281474976715769] shutting down 2025-04-04T17:25:52.675122Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787552710, txId: 281474976715771] shutting down 2025-04-04T17:25:52.856740Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787552892, txId: 281474976715773] shutting down 2025-04-04T17:25:53.046302Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787553081, txId: 281474976715775] shutting down 2025-04-04T17:25:53.254901Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787553291, txId: 281474976715777] shutting down 2025-04-04T17:25:53.469400Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDM0ODNhOGEtNmMwOWUwMTItM2EzZjE4MmQtZmViOTMwZWI=, ActorId: [2:7489510513292467324:3509], ActorState: ExecuteState, TraceId: 01jr0xcwfkbttqseq0fex8b8xy, Create 
QueryResponse for error on request, msg: 2025-04-04T17:25:53.574118Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787553508, txId: 281474976715779] shutting down 2025-04-04T17:25:53.758017Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787553774, txId: 281474976715781] shutting down 2025-04-04T17:25:53.882911Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787553914, txId: 281474976715783] shutting down 2025-04-04T17:25:54.052118Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787554089, txId: 281474976715785] shutting down 2025-04-04T17:25:54.251130Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787554285, txId: 281474976715787] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD] Test command err: Trying to start YDB, gRPC: 30968, MsgBus: 4864 2025-04-04T17:24:57.160240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510273540043594:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:57.161166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001467/r3tmp/tmpbzPH5K/pdisk_1.dat 2025-04-04T17:24:57.521046Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:57.544174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:57.544287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:57.550491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30968, node 1 2025-04-04T17:24:57.607093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:57.607135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:57.607150Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:57.607322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4864 TClient is connected to server localhost:4864 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:58.171538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.192178Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:24:58.199114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.404878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.560809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:24:58.637450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.424192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510286424947245:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.424375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.704299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.743328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.780308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.810499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.842330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.875545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:00.939837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510286424947754:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.939903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.939931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510286424947759:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:00.944038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:00.953118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510286424947761:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:01.054046Z node 1 :TX_PROXY ERROR: Actor# [1:7489510290719915113:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:02.074454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:02.150588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:02.160636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510273540043594:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:02.173402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:02.194779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 18994, MsgBus: 18302 2025-04-04T17:25:06.249055Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510309468466692:2081];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:06.249741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001467/r3tmp/tmp4grouY/pdisk_1.dat 2025-04-04T17:25:06.367772Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:06.399677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:06.399777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:06.400964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18994, node 2 2025-04-04T17:25:06.472245Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (e ... 8.167378Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510447025567060:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:38.257751Z node 5 :TX_PROXY ERROR: Actor# [5:7489510447025567117:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:38.508827Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510425550728267:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:38.508910Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:39.596949Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.642705Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:39.692324Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 29068, MsgBus: 21228 2025-04-04T17:25:44.316910Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510474880840899:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:44.317022Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001467/r3tmp/tmpIKWmKV/pdisk_1.dat 2025-04-04T17:25:44.449733Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:44.477976Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:44.478060Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:44.481733Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29068, node 6 2025-04-04T17:25:44.551003Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:44.551047Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:44.551058Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:44.551262Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21228 TClient is connected to server localhost:21228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:25:45.148445Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:25:45.155064Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:45.175701Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:45.268082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.490004Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:45.591167Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:49.021021Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510496355679130:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.021143Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.072618Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.160724Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.205659Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.255812Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.314393Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.318066Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510474880840899:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:49.318676Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:49.394825Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.493485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510496355679659:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.493617Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.493950Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510496355679664:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:49.501294Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:49.513565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510496355679666:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:49.587280Z node 6 :TX_PROXY ERROR: Actor# [6:7489510496355679720:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:50.995966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.065883Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:51.148334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
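The code-1108 warning that opened this block, and that recurs in the warning group just below, names its own remedy: the AnsiInForEmptyOrNullableItemsCollections pragma. A minimal YQL sketch of applying it — the table and column names are placeholders, not taken from the failing test:

    -- Hypothetical query; `t` and `val` are illustrative names only.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT *
    FROM t
    WHERE val IN (1, 2, 3);
    -- With the pragma enabled, IN over a nullable `val` (or an empty collection)
    -- follows ANSI semantics instead of the surprising default the warning flags.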
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 12838177016426096394 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2025-04-04T17:25:39.009716Z 1 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:8798:939] 2025-04-04T17:25:39.010134Z 2 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8805:946] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-04-04T17:25:41.208806Z 3 00h06m00.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:8812:953] 2025-04-04T17:25:41.208942Z 2 00h06m00.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8805:946] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-04-04T17:25:46.097395Z 5 00h14m00.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8826:967] 2025-04-04T17:25:46.097495Z 4 00h14m00.860512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:8819:960] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-04-04T17:25:48.665603Z 6 00h18m01.050512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8833:974] 2025-04-04T17:25:48.665706Z 5 00h18m01.050512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8826:967] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-04-04T17:25:51.523191Z 7 00h22m01.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8840:981] 2025-04-04T17:25:51.523321Z 6 00h22m01.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8833:974] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2025-04-04T17:25:54.086472Z 7 00h26m01.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8840:981] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:25:55.735386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:25:55.735487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:25:55.735534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:25:55.735575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:25:55.736997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:25:55.737053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:25:55.737160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:25:55.737241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:25:55.738557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:25:55.810097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:25:55.810169Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:55.825048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:25:55.825330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:25:55.825602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:25:55.837350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:25:55.837672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:25:55.840826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:25:55.841943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:25:55.852191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:25:55.860965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:25:55.861071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:25:55.861316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:25:55.861375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:25:55.861527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:25:55.861672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:25:55.870572Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:25:56.008712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:25:56.009989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.010952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:25:56.012964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:25:56.013064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.016585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:25:56.016806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:25:56.017092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.017153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:25:56.017229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:25:56.017266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:25:56.020551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.020641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:25:56.020686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:25:56.022828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.022878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.022917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:25:56.022977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:25:56.027687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:25:56.030066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:25:56.030300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:25:56.031451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:25:56.031616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:25:56.031665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:25:56.033050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:25:56.033140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:25:56.033322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:25:56.033404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:25:56.035868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:25:56.035917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:25:56.036090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:25:56.036146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:25:56.036547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.036591Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:25:56.036703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:25:56.036740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:25:56.036803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:25:56.036852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-04-04T17:25:56.036892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:25:56.036943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:25:56.036984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:25:56.037017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:25:56.037089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:25:56.037131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:25:56.037167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:25:56.039050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:25:56.039318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:25:56.039359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... NFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:25:56.077852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:25:56.077877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:25:56.077926Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-04-04T17:25:56.077957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:25:56.078018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-04-04T17:25:56.080726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.080775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId# 100:0 at tablet72057594046678944 2025-04-04T17:25:56.080813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-04-04T17:25:56.081960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:25:56.082272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:25:56.083655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.083708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 
2025-04-04T17:25:56.083771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-04-04T17:25:56.083957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:25:56.085442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-04-04T17:25:56.085629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-04-04T17:25:56.085934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:25:56.086045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:25:56.086102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-04-04T17:25:56.086201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-04-04T17:25:56.086411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:25:56.086499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:25:56.089511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:25:56.089554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:25:56.089692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:25:56.089781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:25:56.089812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-04-04T17:25:56.089842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 FAKE_COORDINATOR: Erasing txId 100 2025-04-04T17:25:56.090155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-04-04T17:25:56.090210Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-04-04T17:25:56.090326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 
2025-04-04T17:25:56.090361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:25:56.090415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-04-04T17:25:56.090447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:25:56.090499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-04-04T17:25:56.090541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-04-04T17:25:56.090577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-04-04T17:25:56.090604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-04-04T17:25:56.090676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:25:56.090717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-04-04T17:25:56.090748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:25:56.090775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-04-04T17:25:56.091395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:25:56.091473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:25:56.091502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:25:56.091558Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:25:56.091596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:25:56.093071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:25:56.093142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-04-04T17:25:56.093168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-04-04T17:25:56.093196Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-04-04T17:25:56.093227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:25:56.093306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-04-04T17:25:56.095594Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-04-04T17:25:56.096858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-04-04T17:25:56.097129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-04-04T17:25:56.097171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-04-04T17:25:56.098834Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-04-04T17:25:56.098935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-04-04T17:25:56.098971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:309:2300] TestWaitNotification: OK eventTxId 100 2025-04-04T17:25:56.099503Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:25:56.099671Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 202us result status StatusSuccess 2025-04-04T17:25:56.100095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpCost::OlapRange [GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] 
|96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 32397, MsgBus: 29844 2025-04-04T17:25:49.668121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510495693331211:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:49.668276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00157d/r3tmp/tmpMV1lRz/pdisk_1.dat 2025-04-04T17:25:50.106007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:50.117013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:50.117398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:50.140485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32397, node 1 2025-04-04T17:25:50.202957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:50.202983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:50.202997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:50.203140Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29844 TClient is connected to server localhost:29844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
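The KQP_WORKLOAD_SERVICE warnings that follow show the service bootstrapping its own default pool: the fetch of /Root/.metadata/workload_manager/pools/default returns NOT_FOUND, a retry is scheduled after the create transaction, and the later TX_PROXY "path exist, request accepts it" error is the benign race with that concurrent creation. For comparison, a pool can also be declared explicitly; a sketch assuming workload-manager DDL is available — the pool name and limit values here are invented for illustration:

    -- Hypothetical pool; the name and parameter values are assumptions, not from this run.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );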
2025-04-04T17:25:50.825480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:50.847336Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:50.855986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:51.001493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:51.174459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:51.250379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:53.042219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510512873202151:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:53.042412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:53.322996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:53.362420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:53.387078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:53.413522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:53.441642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:53.480622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:53.543478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510512873202659:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:53.543550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:53.543602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510512873202664:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:53.548055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:53.559513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510512873202666:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:53.661047Z node 1 :TX_PROXY ERROR: Actor# [1:7489510512873202722:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:54.626975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.668484Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510495693331211:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:54.668575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |96.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 27255, MsgBus: 6477 2025-04-04T17:25:51.107702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510504970196389:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:51.108522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001563/r3tmp/tmp5ddSSj/pdisk_1.dat 2025-04-04T17:25:51.497220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27255, node 1 2025-04-04T17:25:51.530706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:51.530917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:51.542446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:51.560293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:51.560322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:51.560338Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:51.560459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6477 TClient is connected to server localhost:6477 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:52.090784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:52.129188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:52.273862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:52.432077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:52.497942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:54.139074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510517855100063:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.139232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.375135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.404625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.433621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.463297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.491027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.522113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.572933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510517855100571:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.573017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.573207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510517855100576:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.577471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:54.587648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510517855100578:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:54.689542Z node 1 :TX_PROXY ERROR: Actor# [1:7489510517855100634:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:55.737463Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-04-04T17:25:57.124909Z, after 1.389159s 2025-04-04T17:25:55.737610Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:25:55.747502Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-04-04T17:25:55.881498Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7489510522150068224:2488] 2025-04-04T17:25:55.881535Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7489510522150068191:2488] 2025-04-04T17:25:55.886225Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1743787555930:281474976710671 created 2025-04-04T17:25:55.886498Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-04-04T17:25:55.886564Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-04-04T17:25:55.886595Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-04-04T17:25:55.886729Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-04-04T17:25:55.886889Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-04-04T17:25:55.886934Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-04-04T17:25:55.887050Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. Shard resolve complete, resolved shards: 1 2025-04-04T17:25:55.887080Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Shards nodes resolved, success: 1, failed: 0 2025-04-04T17:25:55.887116Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-04-04T17:25:55.887134Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-04-04T17:25:55.887193Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (KqpWideReadTableRanges $1 (Void) $2 '() '())) (return (FromFlow (WideFilter $3 (lambda '($4 $5 $6 $7) (Coalesce (< $4 (Uint64 '"5000")) (Bool 'false))) (Uint64 '1)))) )))) ) 2025-04-04T17:25:55.888058Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, ... 4T17:25:55.902218Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-04-04T17:25:55.902225Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489510522150068241:2496], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl. TraceId : 01jr0xcywp11dav1jqvxnq93ks. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-04-04T17:25:55.902326Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
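The stage AST logged above — KqpWideReadTableRanges over /Root/Test reading Amount, Comment, Group and Name, then a WideFilter keeping rows with Amount < 5000, capped at one row — corresponds to a scan query of roughly this shape; a reconstruction from the plan, not the test's literal source:

    -- Approximate YQL recovered from the logged AST; column order and exact text are guesses.
    SELECT `Group`, Name, Amount, Comment
    FROM Test
    WHERE Amount < 5000
    LIMIT 1;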
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T17:25:55.902444Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7489510522150068239:2495], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 3624 Tasks { TaskId: 1 CpuTimeUs: 929 FinishTimeMs: 1743787555902 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } IngressBytes: 96 IngressRows: 3 ComputeCpuTimeUs: 168 BuildCpuTimeUs: 761 Sources { IngressName: "CS" Ingress { Bytes: 96 Rows: 3 FirstMessageMs: 1743787555901 LastMessageMs: 1743787555901 } ExternalPartitions { ExternalRows: 3 ExternalBytes: 96 FirstMessageMs: 1743787555901 LastMessageMs: 1743787555901 PartitionId: "72075186224037914" } } HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743787555889 } MaxMemoryUsage: 1048576 } 2025-04-04T17:25:55.902565Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T17:25:55.902605Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7489510522150068239:2495] 2025-04-04T17:25:55.902776Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7489510522150068241:2496], 2025-04-04T17:25:55.902971Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7489510522150068191:2488], seqNo: 1, nRows: 1 2025-04-04T17:25:55.904873Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7489510522150068243:2496] 2025-04-04T17:25:55.904952Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489510522150068241:2496], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl. TraceId : 01jr0xcywp11dav1jqvxnq93ks. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-04-04T17:25:55.905014Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-04-04T17:25:55.905033Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-04-04T17:25:55.905058Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489510522150068241:2496], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl. TraceId : 01jr0xcywp11dav1jqvxnq93ks. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-04-04T17:25:55.905138Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-04-04T17:25:55.905238Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-04-04T17:25:55.905419Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-04-04T17:25:55.905618Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7489510522150068241:2496], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 7551 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 743 FinishTimeMs: 1743787555904 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 168 BuildCpuTimeUs: 575 HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743787555890 } MaxMemoryUsage: 1048576 } 2025-04-04T17:25:55.905678Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7489510522150068241:2496] 2025-04-04T17:25:55.907412Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 19834 DurationUs: 19206 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ExecuterCpuTimeUs: 8659 StartTimeMs: 1743787555886 FinishTimeMs: 1743787555905 Stages { StageGuid: "f2473620-e6a5fb09-33305688-bdc4eab8" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n (let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n (let $3 (KqpWideReadTableRanges $1 (Void) $2 \'() \'()))\n (return (FromFlow (WideFilter $3 (lambda \'($4 $5 $6 $7) (Coalesce (< $4 (Uint64 \'\"5000\")) (Bool \'false))) (Uint64 \'1))))\n))))\n)\n" ComputeActors { CpuTimeUs: 3624 Tasks { TaskId: 1 CpuTimeUs: 929 FinishTimeMs: 1743787555902 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } IngressBytes: 96 IngressRows: 3 ComputeCpuTimeUs: 168 BuildCpuTimeUs: 761 Sources { IngressName: "CS" Ingress { Bytes: 96 Rows: 3 FirstMessageMs: 1743787555901 LastMessageMs: 1743787555901 } ExternalPartitions { ExternalRows: 3 ExternalBytes: 96 FirstMessageMs: 1743787555901 LastMessageMs: 1743787555901 PartitionId: "72075186224037914" } } HostName: "ghrun-mup5cka3jq" NodeId: 1 CreateTimeMs: 1743787555889 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743787555901 } Stages { StageId: 1 StageGuid: "b739d6fb-508de8ea-50e40d21-23363b89" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'1)) (lambda \'($2 $3 $4 $5) (AsStruct \'(\'\"Amount\" $2) \'(\'\"Comment\" $3) \'(\'\"Group\" $4) \'(\'\"Name\" $5)))))))\n)\n" ComputeActors { CpuTimeUs: 7551 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 743 FinishTimeMs: 1743787555904 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 168 BuildCpuTimeUs: 575 HostName: "ghrun-mup5cka3jq" 
NodeId: 1 CreateTimeMs: 1743787555890 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743787555901 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit-Filter-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"1\",\"Name\":\"Limit\"},{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"item.Amount \\u003C 5000\"},{\"E-Cost\":\"0\",\"E-Rows\":\"0\",\"E-Size\":\"0\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"ReadRanges\":[\"Group (-∞, +∞)\",\"Name (-∞, +∞)\"],\"Scan\":\"Parallel\",\"Table\":\"Test\"}],\"PlanNodeId\":1,\"StageGuid\":\"f2473620-e6a5fb09-33305688-bdc4eab8\",\"Stats\":{\"BaseTimeMs\":1743787555901,\"ComputeNodes\":[{\"CpuTimeUs\":3624,\"Tasks\":[{\"ComputeTimeUs\":168,\"FinishTimeMs\":1743787555902,\"Host\":\"ghrun-mup5cka3jq\",\"IngressBytes\":96,\"IngressRows\":3,\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"TaskId\":1}]}],\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"Test\"]}],\"SortColumns\":[\"Group (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"b739d6fb-508de8ea-50e40d21-23363b89\",\"Stats\":{\"BaseTimeMs\":1743787555901,\"ComputeNodes\":[{\"CpuTimeUs\":7551,\"Tasks\":[{\"ComputeTimeUs\":168,\"FinishTimeMs\":1743787555904,\"Host\":\"ghrun-mup5cka3jq\",\"InputBytes\":19,\"InputRows\":1,\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"ResultBytes\":19,\"ResultRows\":1,\"TaskId\":2}]}],\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1691 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\250\034\020\377:\030\247W \002" } } 2025-04-04T17:25:55.907469Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-04-04T17:25:55.907519Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7489510522150068235:2488] TxId: 281474976710672. Ctx: { TraceId: 01jr0xcywp11dav1jqvxnq93ks, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMzYjY2YTMtZTk0NmEwMGYtNDkxYTIzMmUtMTA2MjJl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.011175s ReadRows: 3 ReadBytes: 96 ru: 7 rate limiter was not found force flag: 1 2025-04-04T17:25:55.908630Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787555930, txId: 281474976710671] shutting down 2025-04-04T17:25:56.108072Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510504970196389:2068];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:56.108197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD] Test command err: Trying to start YDB, gRPC: 10681, MsgBus: 28916 2025-04-04T17:25:50.644711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510499326763735:2237];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:50.644799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001578/r3tmp/tmpA6bjYa/pdisk_1.dat 2025-04-04T17:25:51.036913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10681, node 1 2025-04-04T17:25:51.060701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:51.060825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:51.062566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:51.158729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:51.158754Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:51.158760Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:51.158907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28916 TClient is connected to server localhost:28916 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:51.719001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:51.732955Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:51.742479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:51.880295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:52.026915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:52.092739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:53.759511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510512211667219:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:53.759657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.004228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.032988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.059639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.125563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.153772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.186144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:54.237552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510516506635026:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.237629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.237633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510516506635031:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:54.240894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:54.249247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510516506635033:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:54.305276Z node 1 :TX_PROXY ERROR: Actor# [1:7489510516506635085:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:55.380786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:55.523903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489510520801602794:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:25:55.523902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:25:55.524203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:25:55.524508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:25:55.524635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:25:55.524718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489510520801602794:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:25:55.524785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:25:55.524908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489510520801602794:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:25:55.524914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:25:55.525024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489510520801602794:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:25:55.525073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-04-04T17:25:55.525145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489510520801602794:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:25:55.525186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:25:55.525284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489510520801602794:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:25:55.525330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489510520801602804:2500];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:25:55.525407Z node ... abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:25:55.693351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:25:55.693360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:25:55.693442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:25:55.693444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:25:55.693464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:25:55.693473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:25:55.693518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:25:55.693543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:25:55.694069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:25:55.694111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:25:55.694282Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:25:55.694305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:25:55.694440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:25:55.694466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:25:55.694640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:25:55.694664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:25:55.694763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:25:55.694786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:25:55.695688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:25:55.695741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:25:55.695830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:25:55.695853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:25:55.696024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:25:55.696054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:25:55.696127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:25:55.696176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:25:55.696248Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:25:55.696276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:25:55.696310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:25:55.696331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:25:55.697770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:25:55.697818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:25:55.697996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:25:55.698031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:25:55.698170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:25:55.698220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:25:55.698393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:25:55.698415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:25:55.698538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:25:55.698556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:25:55.709601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:55.747250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.747302Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.753339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.753374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.759244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.759244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.765208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.765210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.771107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.771111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T17:25:55.892194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T17:25:55.892209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T17:25:55.892817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 6434524255223473765 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 
4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-04-04T17:23:11.611010Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2025-04-04T17:23:11.785911Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut 
with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2025-04-04T17:23:12.707233Z 3 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:288:57] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2025-04-04T17:23:12.707396Z 6 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:291:60] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2025-04-04T17:23:12.707473Z 5 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7653:15] ServerId# [1:7662:1091] TabletId# 72057594037932033 PipeClientId# [5:7653:15] 2025-04-04T17:23:12.707537Z 4 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:289:58] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2025-04-04T17:23:12.707644Z 2 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:287:56] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2025-04-04T17:23:12.707731Z 7 00h01m30.111024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# [1:292:61] TabletId# 72057594037932033 PipeClientId# [7:209:16] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# 
[1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} 
Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Status ... ND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# 
[1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 3 2025-04-04T17:25:08.900348Z 1 00h28m30.819893s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 4 2025-04-04T17:25:10.265303Z 1 00h28m40.833472s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 7 
2025-04-04T17:25:12.609911Z 1 00h29m10.835008s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8
Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658}
Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658}
Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658}
Stop node 1
2025-04-04T17:25:13.084002Z 1 00h29m20.856624s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2
Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Start node 1
Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683}
Stop node 1
2025-04-04T17:25:14.044202Z 1 00h29m40.862048s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2
Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646}
Starting nodes
Start compaction 1
Start checking
|96.1%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log}
|96.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log}
>> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD]
>> Cdc::UpdatesLog[TopicRunner] [GOOD]
>> Cdc::VirtualTimestamps[PqRunner]
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD]
|96.1%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|96.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpExtractPredicateLookup::SimpleRange [GOOD]
>> KqpExtractPredicateLookup::PointJoin
>> TPartitionTests::TestNonConflictingActsBatchOk [GOOD]
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TPartitionTests::TestTxBatchInFederation
>> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns
>> TraverseDatashard::TraverseOneTable
>> AnalyzeColumnshard::AnalyzeSameOperationId
>> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TraverseDatashard::TraverseTwoTablesServerless
>> AnalyzeColumnshard::AnalyzeServerless
>> AnalyzeDatashard::AnalyzeOneTable
>> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate
>> AnalyzeDatashard::DropTableNavigateError
>> TraverseColumnShard::TraverseColumnTableRebootColumnshard
>> TraverseColumnShard::TraverseServerlessColumnTable
>> TraverseColumnShard::TraverseColumnTable
>> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse
>> TraverseDatashard::TraverseOneTableServerless
>> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs
>> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution
>> RetryPolicy::TWriteSession_SeqNoShift [GOOD]
>> RetryPolicy::RetryWithBatching
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD]
>> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD]
>> Cdc::NewAndOldImagesLogDebezium
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode
>> KqpScanArrowFormat::AggregateEmptySum [GOOD]
>> AnalyzeColumnshard::AnalyzeMultiOperationId
|96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
>> KqpSqlIn::Delete [GOOD]
>> AnalyzeColumnshard::AnalyzeDeadline
>> Cdc::VirtualTimestamps[PqRunner] [GOOD]
>> Cdc::VirtualTimestamps[YdsRunner]
>> ReadOnlyVDisk::TestStorageLoad [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD]
Test command err:
2025-04-04T17:24:50.265619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510244878863450:2074];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:24:50.265742Z node 1
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:24:50.335385Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510240737740426:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:50.335620Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:24:50.569625Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0014c1/r3tmp/tmpBFPcVT/pdisk_1.dat 2025-04-04T17:24:50.575686Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:24:50.801558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:50.821034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:50.821160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:50.824096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:50.824157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:50.828136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:24:50.835933Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:24:50.838237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3107, node 1 2025-04-04T17:24:50.974902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0014c1/r3tmp/yandexZkWjrC.tmp 2025-04-04T17:24:50.974933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0014c1/r3tmp/yandexZkWjrC.tmp 2025-04-04T17:24:50.975080Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0014c1/r3tmp/yandexZkWjrC.tmp 2025-04-04T17:24:50.975221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:24:51.034400Z INFO: TTestServer started on Port 19935 GrpcPort 3107 TClient is connected to server localhost:19935 PQClient connected to localhost:3107 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:24:51.393922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:24:51.448732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:24:53.826477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510253622642701:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:53.826558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510253622642684:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:53.826623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:53.832268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-04-04T17:24:53.859167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510253622642712:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-04-04T17:24:54.066367Z node 2 :TX_PROXY ERROR: Actor# [2:7489510253622642739:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:24:54.119436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:24:54.124149Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489510257917610042:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-04-04T17:24:54.125857Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmM0ZTdjYzUtMjYxOWM1MTQtZjNiMDE0ODktMmY4Yjk0ZmY=, ActorId: [2:7489510253622642671:2310], ActorState: ExecuteState, TraceId: 01jr0xb2dy1aqkjy6mv4wr63nq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-04-04T17:24:54.124080Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510257763766502:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:24:54.125843Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjI2ZGMxNDgtNGNiMDJlZmMtZmZhNjM3NjUtYmI5MzU5NTg=, ActorId: [1:7489510257763766454:2337], ActorState: ExecuteState, TraceId: 01jr0xb2es8xx3efa2atq9yawt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:24:54.128964Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:24:54.128144Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:24:54.218334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:24:54.367485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:24:54.661487Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0xb326476fzt22wkmh754e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQ2NWEwOTEtODY3ZGI2YWItMmRlNzAyOGYtNjY2OTZmNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489510262058734248:3087] 2025-04-04T17:24:55.261361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510244878863450:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:55.261432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:24:55.327887Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510240737740426:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:55.327952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:25:00.693473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480 2025-04-04T17:25:01.333315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-04-04T17:25:01.845104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-04-04T17:25:02.388233Z node 1 :FLAT_TX_SCHEM ... 2025-04-04T17:25:49.802625Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:49.828941Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7489510476075607081:2259];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:49.829524Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:49.913645Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7489510497187366315:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:25:49.916303Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=ZjU1NzRkNDQtOTAzNmYyMTQtYWNhMDBlMjEtMTc2OGMwOWU=, ActorId: [9:7489510497187366282:2343], ActorState: ExecuteState, TraceId: 01jr0xcs5y6e7ge9exp52tcebx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:25:49.916785Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:25:49.939422Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.089950Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:50.244914Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:25:50.636167Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0xcsnsctzvzdsm27wf6c8d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=OWQzYTBlY2YtODkyY2Y3ODItMjU5MTM4MS1mNjhlMGY1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7489510501482333994:3081] === CheckClustersList. 
Ok 2025-04-04T17:25:55.751673Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:1, at schemeshard: 72057594046644480 2025-04-04T17:25:56.443494Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-04-04T17:25:57.311535Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.129948Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.971773Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2025-04-04T17:25:59.692315Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.071831Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:26:00.071876Z node 9 :IMPORT WARN: Table profiles were not loaded Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1743787560407, 1743787560407, 0, 13); 2025-04-04T17:26:00.527172Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715705. Ctx: { TraceId: 01jr0xd3fb3y7r4n6ngqx73xdf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=N2Y5OTIzN2QtNWQ5MGJiYTUtOTQ2YmI4MmItYjM4MmJhYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:26:00.541138Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:26:00.541170Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:26:00.541182Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:26:00.541209Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489510544432008419:3932] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2025-04-04T17:26:00.541384Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7489510544432008420:3932], Recipient [9:7489510522957170823:3271]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7489510544432008419:3932] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-04T17:26:00.541498Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7489510544432008419:3932], Recipient [9:7489510522957170823:3271]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2025-04-04T17:26:00.541597Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7489510522957170823:3271], Recipient [9:7489510544432008419:3932]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-04T17:26:00.541640Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489510544432008419:3932] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-04T17:26:00.541749Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7489510544432008419:3932], Recipient [9:7489510522957170823:3271]: NActors::TEvents::TEvPoison 2025-04-04T17:26:00.541880Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7489510475712528597:2070], Recipient [9:7489510544432008419:3932]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-04T17:26:00.541915Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489510544432008419:3932] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2025-04-04T17:26:00.544623Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7489510475712528733:2192], Recipient [9:7489510544432008419:3932]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=Y2EwZDQzZGUtNDI5NTQzODMtYmVmODE5ZTItNzQzM2RhYzQ=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-04T17:26:00.544672Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489510544432008419:3932] (SourceId=A_Source_10, PreferedPartition=1) Select from the table 2025-04-04T17:26:00.705568Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7489510475712528733:2192], Recipient [9:7489510544432008419:3932]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=Y2EwZDQzZGUtNDI5NTQzODMtYmVmODE5ZTItNzQzM2RhYzQ=" PreparedQuery: "91220d44-49ff61c7-53559a10-6b6ed740" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jr0xd3qs5pnyxm4py0gh0pcm" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1743787560407 } items { uint64_value: 1743787560407 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 100 2025-04-04T17:26:00.705804Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489510544432008419:3932] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2025-04-04T17:26:00.705852Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489510544432008419:3932] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen 2025-04-04T17:26:00.705930Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7489510544432008419:3932] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Received TEvChooseError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2025-04-04T17:26:00.865326Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715708. Ctx: { TraceId: 01jr0xd3rp7q90svr8cx6k57gt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NmNhNzcwYWItYzVhYWEzMjktNWU5ZTllYjUtMWUwODJjNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 2911860247953324297 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-04-04T17:25:38.315960Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.318272Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.320866Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.325337Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.325471Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.518485Z 1 00h02m38.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.535794Z 1 00h02m38.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.753223Z 1 00h02m38.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.764353Z 1 00h02m38.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.877488Z 1 00h02m38.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:38.992522Z 1 00h02m38.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.117099Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.117510Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.131254Z 1 00h02m39.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.211358Z 1 00h02m39.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.488245Z 1 00h02m39.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.512498Z 1 00h02m39.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.589069Z 1 00h02m39.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.625976Z 1 00h02m40.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.641071Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.643638Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.657953Z 1 00h02m40.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.782170Z 1 00h02m40.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.796157Z 1 00h02m40.410512s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:39.954649Z 1 00h02m40.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.025962Z 1 00h02m40.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.086747Z 1 00h02m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.184633Z 1 00h02m40.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.206584Z 1 00h02m40.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.223363Z 1 00h02m41.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.237609Z 1 00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.238176Z 1 00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.339154Z 1 00h02m41.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.354625Z 1 00h02m41.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.524818Z 1 00h02m41.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.629982Z 1 00h02m41.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.658388Z 1 00h02m41.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.679780Z 1 00h02m42.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.813890Z 1 00h02m42.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:40.814543Z 1 00h02m42.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.098156Z 1 00h02m42.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.117734Z 1 00h02m42.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.209295Z 1 00h02m42.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.226275Z 1 00h02m42.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.259020Z 1 00h02m42.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.280176Z 1 00h02m43.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.293420Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.293961Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.447464Z 1 00h02m43.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 
2025-04-04T17:25:41.461277Z 1 00h02m43.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.475615Z 1 00h02m43.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.489171Z 1 00h02m43.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.510076Z 1 00h02m43.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.523216Z 1 00h02m43.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.546278Z 1 00h02m43.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.570459Z 1 00h02m43.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.598686Z 1 00h02m44.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.626298Z 1 00h02m44.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.777466Z 1 00h02m44.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.790817Z 1 00h02m44.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.819588Z 1 00h02m44.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.961430Z 1 00h02m44.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:41.988064Z 1 00h02m44.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.002548Z 1 00h02m44.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.247646Z 1 00h02m45.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.262980Z 1 00h02m45.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.276540Z 1 00h02m45.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.309612Z 1 00h02m45.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.326009Z 1 00h02m45.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.356394Z 1 00h02m45.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.372948Z 1 00h02m45.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.729045Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.729206Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:42.729758Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 
2025-04-04T17:25:44.208183Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:44.208357Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5308:706] 2025-04-04T17:25:44.212083Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:44.212289Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5308:706] 2025-04-04T17:25:44.216183Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:44.216375Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5308:706] 2025-04-04T17:25:44.222749Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:44.223038Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5308:706] 2025-04-04T17:25:44.223184Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:44.224109Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5308:706] 2025-04-04T17:25:44.239931Z 1 00h05m16.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:44.240055Z 2 00h05m16.3 ... 25-04-04T17:25:51.543651Z 1 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5301:699] 2025-04-04T17:25:51.543805Z 2 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5308:706] 2025-04-04T17:25:51.543856Z 3 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5315:713] 2025-04-04T17:25:51.543901Z 4 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5322:720] 2025-04-04T17:25:51.543946Z 5 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5329:727] 2025-04-04T17:25:51.543988Z 6 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5336:734] 2025-04-04T17:25:51.544033Z 7 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5343:741] 2025-04-04T17:25:51.544078Z 8 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:51.544431Z 1 00h10m24.561024s :BS_LOAD_TEST ERROR: TabletId# 1 Generation# 4 recieved not OK, msg# TEvBlockResult {Status# ERROR ErrorReason# "Status# ERROR From# [82000000:1:0:2:0] NodeId# 3 QuorumTracker# {Erroneous# 00000111 Successful# 00000000}"} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 
0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-04-04T17:25:54.343653Z 8 00h20m55.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.346443Z 8 00h20m55.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.351909Z 8 00h20m55.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.357193Z 8 00h20m55.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.357791Z 8 00h20m55.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.531646Z 8 00h20m55.152560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.543279Z 8 00h20m55.252560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.795148Z 8 00h20m55.452560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:54.916465Z 8 00h20m55.552560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.003060Z 8 00h20m55.752560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.098681Z 8 00h20m55.852560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.139411Z 8 00h20m56.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.141102Z 8 00h20m56.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.271010Z 8 00h20m56.152560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.476400Z 8 00h20m56.352560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.491283Z 8 00h20m56.452560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.530714Z 8 00h20m56.652560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.551982Z 8 00h20m56.752560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.591572Z 8 00h20m56.952560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.618914Z 8 00h20m57.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.620172Z 8 00h20m57.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.711844Z 8 00h20m57.152560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.738410Z 8 00h20m57.352560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.753016Z 8 00h20m57.452560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.765983Z 8 00h20m57.552560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.853519Z 8 
00h20m57.652560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:55.867143Z 8 00h20m57.752560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.019889Z 8 00h20m57.852560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.037170Z 8 00h20m57.952560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.082714Z 8 00h20m58.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.083569Z 8 00h20m58.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.205493Z 8 00h20m58.252560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.218643Z 8 00h20m58.352560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.577922Z 8 00h20m58.552560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.593797Z 8 00h20m58.652560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.623051Z 8 00h20m58.852560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.637964Z 8 00h20m58.952560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.658485Z 8 00h20m59.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.660005Z 8 00h20m59.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.745610Z 8 00h20m59.252560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:56.975475Z 8 00h20m59.352560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.013484Z 8 00h20m59.552560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.029378Z 8 00h20m59.652560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.060539Z 8 00h20m59.852560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.078341Z 8 00h20m59.952560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.108142Z 8 00h21m00.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.109925Z 8 00h21m00.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.261009Z 8 00h21m00.252560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.275301Z 8 00h21m00.352560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.304079Z 8 00h21m00.552560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.320468Z 8 00h21m00.652560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# 
[1:5350:748] 2025-04-04T17:25:57.356477Z 8 00h21m00.852560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.527118Z 8 00h21m00.952560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.574984Z 8 00h21m01.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.576109Z 8 00h21m01.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.589991Z 8 00h21m01.152560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.603806Z 8 00h21m01.252560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.742180Z 8 00h21m01.352560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.756110Z 8 00h21m01.452560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:57.769127Z 8 00h21m01.552560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.110107Z 8 00h21m01.752560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.135554Z 8 00h21m01.852560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.253429Z 8 00h21m02.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.364216Z 8 00h21m02.152560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.377171Z 8 00h21m02.252560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.509373Z 8 00h21m02.452560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.523500Z 8 00h21m02.552560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.549886Z 8 00h21m02.752560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.567681Z 8 00h21m02.852560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.607462Z 8 00h21m03.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.607931Z 8 00h21m03.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] 2025-04-04T17:25:58.610806Z 8 00h21m03.052560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5350:748] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 20544, MsgBus: 14108 2025-04-04T17:25:35.476708Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510434178793287:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:35.477529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/i5tq/000e7e/r3tmp/tmpQStK4P/pdisk_1.dat 2025-04-04T17:25:35.855649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:35.886312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:35.886422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 20544, node 1 2025-04-04T17:25:35.888233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:35.973060Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:35.973089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:35.973102Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:35.973221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14108 TClient is connected to server localhost:14108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:36.570847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:36.587756Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:36.610663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:36.785169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:36.930406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:37.018482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:38.710667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510447063696941:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:38.710783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:38.907421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:25:38.932951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:25:38.957892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:25:38.984534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.011666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.042661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:25:39.083956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510451358664747:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.084073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.084116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510451358664752:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:39.087896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:39.097928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510451358664754:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:39.203333Z node 1 :TX_PROXY ERROR: Actor# [1:7489510451358664809:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:40.476997Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510434178793287:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:40.477092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:40.995913Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787540530, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 25280, MsgBus: 24900 2025-04-04T17:25:41.842570Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510461507035944:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e7e/r3tmp/tmpSFRL8t/pdisk_1.dat 2025-04-04T17:25:41.870606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:25:41.972547Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:42.005163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:42.005289Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:42.006874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25280, node 2 2025-04-04T17:25:42.069670Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:42.069696Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:42.069706Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:42.069855Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24900 TClient is connected to server localhost:24900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:42.553849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.569730Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:25:42.584909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.656758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:42.807525Z node 2 :FLAT_TX_SCHEMESHARD ... tch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.031362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.101749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.136577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.171002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.210561Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.250812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:52.306761Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510508653964420:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.306858Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.309522Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489510508653964425:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:52.314762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:52.327630Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489510508653964427:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:52.389194Z node 3 :TX_PROXY ERROR: Actor# [3:7489510508653964480:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:53.459243Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489510491474093132:2204];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:53.471281Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:56.354905Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787554306, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 1228, MsgBus: 8154 2025-04-04T17:25:57.132461Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489510530650722849:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:57.132529Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e7e/r3tmp/tmpVTGJWe/pdisk_1.dat 2025-04-04T17:25:57.250380Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:57.278118Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:57.278222Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:57.279970Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1228, node 4 2025-04-04T17:25:57.323490Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:57.323510Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:57.323517Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:57.323632Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8154 TClient is connected to server localhost:8154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:57.800815Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:57.814430Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:57.897096Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:58.094552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:58.181709Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:00.310899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510543535626518:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:00.310973Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:00.342875Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.375355Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.404666Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.435584Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.466338Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.499294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.557529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510543535627031:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:00.557620Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:00.557723Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489510543535627036:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:00.560932Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:26:00.569660Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489510543535627038:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:26:00.648935Z node 4 :TX_PROXY ERROR: Actor# [4:7489510543535627092:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:02.416688Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489510530650722849:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:02.417147Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:26:02.490882Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787562153, txId: 281474976715671] shutting down |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::Delete [GOOD] Test command err: Trying to start YDB, gRPC: 62431, MsgBus: 5430 2025-04-04T17:25:13.366279Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510339797741257:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:13.366339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00136b/r3tmp/tmp4FLWZq/pdisk_1.dat 2025-04-04T17:25:13.731866Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:13.752653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:13.752734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:13.754113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62431, node 1 2025-04-04T17:25:13.848364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:13.848414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:13.848423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:13.848575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5430 TClient is connected to server localhost:5430 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:14.558138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.583115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:14.590057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.753951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.914501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.003856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:16.672103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510352682644915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.672219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.955100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.035799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.076897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.116679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.152074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.185532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.233707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510356977612722:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.233786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.234114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510356977612727:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.237293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:17.247043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510356977612729:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:17.336046Z node 1 :TX_PROXY ERROR: Actor# [1:7489510356977612783:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:18.369379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510339797741257:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:18.369436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25285, MsgBus: 2505 2025-04-04T17:25:20.106363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510373273115623:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:20.106406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00136b/r3tmp/tmpwx1UIL/pdisk_1.dat 2025-04-04T17:25:20.197495Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:20.218662Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:20.218754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:20.220366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25285, node 2 2025-04-04T17:25:20.262712Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:20.262736Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:20.262744Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:20.262854Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2505 TClient is connected to server localhost:2505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:20.687349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:20.692409Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:20.708097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:20.758116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:20.886297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 20 ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.326945Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7489510490291879011:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:48.332470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:48.357015Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7489510490291879013:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:48.452535Z node 5 :TX_PROXY ERROR: Actor# [5:7489510490291879068:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:48.686343Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7489510468817040261:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:48.686429Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:49.821810Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.893282Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:49.966588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
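The SqlIn warning above is the compiler naming its own fix: enable ANSI semantics for IN over empty or nullable collections via the suggested pragma (the identical warning group repeats below, once per query in the test). A minimal sketch of where that pragma would go; only the pragma line is taken verbatim from the warning, while the table path and columns are hypothetical:

    -- Hypothetical query for illustration; table `/Root/Test` and its columns are assumptions.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM `/Root/Test`
    WHERE Value IN (1, 2, NULL)   -- nullable items in the collection: the case warning code 1108 is about
    ORDER BY Key;

Per the warning text, without the pragma an IN over nullable arguments may produce unexpected results; adding the pragma opts the query into the ANSI behavior the pragma name describes and silences code 1108.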
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 22577, MsgBus: 12156 2025-04-04T17:25:54.291099Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7489510519501910130:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:54.291192Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00136b/r3tmp/tmpgePpjz/pdisk_1.dat 2025-04-04T17:25:54.450779Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:54.471622Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:54.471743Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:54.474034Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22577, node 6 2025-04-04T17:25:54.537897Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:54.537927Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:54.537937Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:54.538127Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12156 TClient is connected to server localhost:12156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:55.272553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:55.297299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:55.388521Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:55.597358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:55.685050Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:58.463001Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510536681781097:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:58.463129Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:58.509270Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.552079Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.592251Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.630604Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.668104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.708063Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:58.755454Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510536681781605:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:58.755590Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:58.755731Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7489510536681781610:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:58.759918Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:58.772583Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7489510536681781612:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:25:58.859149Z node 6 :TX_PROXY ERROR: Actor# [6:7489510536681781665:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:59.291212Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7489510519501910130:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:59.291337Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:26:00.073287Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.114276Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:26:00.158018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate |96.2%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AnalyzeColumnshard::AnalyzeStatus >> AnalyzeColumnshard::AnalyzeTable |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> TPartitionTests::TestTxBatchInFederation [GOOD] >> AnalyzeColumnshard::Analyze >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] Test command err: 2025-04-04T17:25:25.519479Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:25.519546Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:25.534712Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:178:2193] 2025-04-04T17:25:25.535467Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" 
StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" 
StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup 
to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\00 ... 
d TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER
Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER
Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR
Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR
Create distr tx with id = 0 and act no: 1
Create immediate tx with id = 3 and act no: 4
Create immediate tx with id = 6 and act no: 7
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Create distr tx with id = 8 and act no: 9
Create distr tx with id = 10 and act no: 11
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DEFRAG_SCHEDULER
Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER
Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER
Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR
Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR
Got batch complete: 17
Wait batch completion
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase
Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX
Captured TEvents::TSystem::Wakeup to PDISK_ACTOR
Wait kv request
Wait tx committed for tx 0
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR
Wait immediate tx complete 3
Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3
Wait immediate tx complete 6
Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6
Wait tx committed for tx 10
>> Cdc::VirtualTimestamps[YdsRunner] [GOOD]
>> Cdc::VirtualTimestamps[TopicRunner]
>> AnalyzeDatashard::AnalyzeTwoTables
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow
Test command err:
Trying to start YDB, gRPC: 3668, MsgBus: 1484
2025-04-04T17:25:53.055093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510513713085031:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:25:53.055708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00155d/r3tmp/tmpEXNv8Z/pdisk_1.dat
2025-04-04T17:25:53.377723Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 3668, node 1
2025-04-04T17:25:53.442998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:25:53.443074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:25:53.445097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:25:53.458863Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:25:53.458887Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:25:53.458910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:25:53.459026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1484
TClient is connected to server localhost:1484
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:25:53.941516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:53.968593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:54.113767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:54.281867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:54.352929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:25:56.025584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510526597988695:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:56.025696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:56.271577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:25:56.295680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:25:56.321460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:25:56.345288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:25:56.371924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:25:56.402521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:25:56.439050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510526597989204:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:56.439134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:56.439203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510526597989209:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:25:56.442313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:25:56.450176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510526597989211:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:25:56.549133Z node 1 :TX_PROXY ERROR: Actor# [1:7489510526597989266:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
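The sequence above — a NOT_FOUND from the pool fetcher, a scheduled retry once the create transaction completes, and finally a TX_PROXY "path exist, request accepts it" error — is the usual create-if-missing race: several actors try to create the default resource pool concurrently, and the losers treat "already exists" as success. A schematic sketch of that idiom, using a toy in-memory stand-in rather than the actual workload-service/schemeshard code:

```cpp
#include <iostream>
#include <set>
#include <string>

enum class EStatus { Ok, NotFound, AlreadyExists };

// Toy stand-in for the scheme state: a set of existing paths. The real
// workload service drives this through schemeshard transactions and actors.
static std::set<std::string> g_paths;

static EStatus FetchPool(const std::string& path) {
    return g_paths.count(path) ? EStatus::Ok : EStatus::NotFound;
}

static EStatus CreatePool(const std::string& path) {
    return g_paths.insert(path).second ? EStatus::Ok : EStatus::AlreadyExists;
}

// Create-if-missing with "already exists" treated as success, mirroring the
// "path exist, request accepts it" resolution seen in the log above.
static void EnsureDefaultPool(const std::string& path) {
    if (FetchPool(path) == EStatus::Ok) return;   // NOT_FOUND -> try to create
    EStatus st = CreatePool(path);
    if (st == EStatus::Ok || st == EStatus::AlreadyExists) return;  // lost the race: fine
}

int main() {
    EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default");
    EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default");  // idempotent
    std::cout << "pools present: " << g_paths.size() << "\n";
}
```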
2025-04-04T17:25:57.263436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-04-04T17:25:57.417674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-04T17:25:57.417842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-04T17:25:57.418073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-04T17:25:57.418154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-04-04T17:25:57.418233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-04-04T17:25:57.418319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-04-04T17:25:57.418384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-04-04T17:25:57.418474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-04-04T17:25:57.418538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-04-04T17:25:57.418603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-04-04T17:25:57.418692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-04-04T17:25:57.418781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7489510530892957013:2507];tablet_id=72075186224037928;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-04-04T17:25:57.423732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7489510530892957004:2506];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-04-04T17:25:57.423795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7489510530892957004:2506];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-04-04T17:25:57.423970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7489510530892957004:2506];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-04-04T17:25:57.424079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7489510530892957004:2506];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline ... tFromChunks;id=13;
2025-04-04T17:25:57.599142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-04-04T17:25:57.599340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-04-04T17:25:57.599382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-04-04T17:25:57.599494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-04-04T17:25:57.599526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-04-04T17:25:57.638846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.639165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.644856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.644856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.650680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.650680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.656571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.656571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.662533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.662533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-04-04T17:25:57.783000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-04-04T17:25:57.783136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;self_id=[1:7489510530892957025:2509];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037927;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037928;receive=72075186224037923;
2025-04-04T17:25:57.783630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-04-04T17:25:57.783867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-04-04T17:25:57.866546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675;
query_phases { duration_us: 7426 cpu_time_us: 2210 affected_shards: 1 }
query_phases { duration_us: 6339 cpu_time_us: 268 affected_shards: 1 }
compilation { duration_us: 59286 cpu_time_us: 56200 }
process_cpu_time_us: 678
total_duration_us: 76699
total_cpu_time_us: 59356
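As a quick consistency check on the query-stats block above (an observation on the reported numbers, not part of the test output): total_cpu_time_us matches the sum of the per-phase, compilation, and process CPU times exactly, 2210 + 268 + 56200 + 678 = 59356 µs, while total_duration_us (76699 µs) exceeds the sum of the phase and compilation wall times (7426 + 6339 + 59286 = 73051 µs), presumably because it also covers scheduling gaps between phases.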
AddressSanitizer:DEADLYSIGNAL
=================================================================
==765209==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000017d741cf bp 0x7ffe2a6fabc0 sp 0x7ffe2a6faa20 T0)
==765209==The signal is caused by a READ memory access.
==765209==Hint: address points to the zero page.
2025-04-04T17:25:58.054871Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510513713085031:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:25:58.058270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10
warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40
warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70
warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0
warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0
warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30
warning: address range table at offset 0xa50 has a premature terminator entry at offset 0xa60
warning: address range table at offset 0xa80 has a premature terminator entry at offset 0xa90
warning: address range table at offset 0xab0 has a premature terminator entry at offset 0xac0
warning: address range table at offset 0xae0 has a premature terminator entry at offset 0xaf0
warning: address range table at offset 0xb10 has a premature terminator entry at offset 0xb20
warning: address range table at offset 0xb40 has a premature terminator entry at offset 0xb50
warning: address range table at offset 0xb70 has a premature terminator entry at offset 0xb80
warning: address range table at offset 0xba0 has a premature terminator entry at offset 0xbb0
warning: address range table at offset 0xbd0 has a premature terminator entry at offset 0xbe0
warning: address range table at offset 0xc00 has a premature terminator entry at offset 0xc10
warning: address range table at offset 0xc30 has a premature terminator entry at offset 0xc40
warning: address range table at offset 0xc60 has a premature terminator entry at offset 0xc70
warning: address range table at offset 0xc90 has a premature terminator entry at offset 0xca0
warning: address range table at offset 0xcc0 has a premature terminator entry at offset 0xcd0
warning: address range table at offset 0xcf0 has a premature terminator entry at offset 0xd00
warning: address range table at offset 0xd20 has a premature terminator entry at offset 0xd30
warning: address range table at offset 0xd50 has a premature terminator entry at offset 0xd60
warning: address range table at offset 0xd80 has a premature terminator entry at offset 0xd90
warning: address range table at offset 0x1420 has a premature terminator entry at offset 0x1430
warning: address range table at offset 0x14b0 has a premature terminator entry at offset 0x14c0
warning: address range table at offset 0x14e0 has a premature terminator entry at offset 0x14f0
warning: address range table at offset 0x1510 has a premature terminator entry at offset 0x1520
warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550
warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580
warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0
warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0
warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610
2025-04-04T17:26:08.372121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T17:26:08.372158Z node 1 :IMPORT WARN: Table profiles were not loaded
#0 0x17d741cf in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x17d741cf in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x17d741cf in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x17d741cf in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x17d741cf in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x17d97987 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x17d97987 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x17d97987 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#8 0x17d97987 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12
#9 0x17d97987 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311:10
#10 0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12
#11 0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10
#12 0x186e0e55 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x186b1b78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#14 0x17d96975 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x186b3445 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#16 0x186db78c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7f21125bfd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7f21125bfe3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x15ad0028 in _start (/home/runner/.ya/build/build_root/i5tq/00155d/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x15ad0028) (BuildId: b39ae28ee39da90c850ce26824d563677aab4934)
AddressSanitizer can not provide additional info.
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==765209==ABORTING
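The stack makes the failure mode fairly clear: kqp_cost_ut.cpp:636 calls table_access(i) on a query-phase stats message, which lands in protobuf's RepeatedPtrField Get for a field that is evidently empty; Get is unchecked in release builds, so the resulting near-null read (address 0x8, "points to the zero page") trips ASan. A minimal self-contained sketch of the same bug and the guarded pattern — a simplified container standing in for the generated protobuf class, not the actual test code:

```cpp
#include <cassert>
#include <cstdint>
#include <iostream>
#include <vector>

// Simplified stand-in for a generated protobuf message with a repeated field.
// protobuf's RepeatedPtrField::Get(i) asserts only in debug builds; in release
// it is unchecked, so Get(0) on an empty field dereferences garbage, which is
// consistent with the ASan report above.
struct TQueryPhaseStats {
    std::vector<int64_t> TableAccessReads;  // stands in for repeated TableAccessStats

    int table_access_size() const { return static_cast<int>(TableAccessReads.size()); }
    const int64_t& table_access(int i) const {
        assert(i < table_access_size());    // debug-only check, like protobuf
        return TableAccessReads[i];         // unchecked in release, like Get(i)
    }
};

int main() {
    TQueryPhaseStats phase;                 // no table access recorded in this phase
    // Unsafe pattern (what the crashing test presumably hit at kqp_cost_ut.cpp:636):
    //   auto reads = phase.table_access(0);  // UB on an empty repeated field
    // Guarded pattern:
    if (phase.table_access_size() > 0) {
        std::cout << "reads: " << phase.table_access(0) << "\n";
    } else {
        std::cout << "phase has no table access stats\n";
    }
}
```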
>> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK
|96.2%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log}
|96.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log}
>> Cdc::OldImageLogDebezium [GOOD]
>> Cdc::NewImageLogDebezium
>> TargetDiscoverer::Dirs
>> TargetDiscoverer::Transfer
>> Cdc::VirtualTimestamps[TopicRunner] [GOOD]
>> Cdc::Write[PqRunner]
>> KqpScripting::StreamOperationTimeout [GOOD]
>> TraverseDatashard::TraverseOneTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD]
Test command err:
Trying to start YDB, gRPC: 23843, MsgBus: 20487
2025-04-04T17:24:50.474211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510244884088780:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:24:50.474299Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00199b/r3tmp/tmposHS6O/pdisk_1.dat
2025-04-04T17:24:50.862401Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:24:50.879472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:24:50.879555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:24:50.882753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23843, node 1
2025-04-04T17:24:50.988104Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:24:50.988129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:24:50.988139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:24:50.988275Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20487
TClient is connected to server localhost:20487
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:24:51.539711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:24:51.561151Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-04-04T17:24:51.573704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:24:51.716438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-04-04T17:24:51.884552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-04T17:24:51.954067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:24:53.606222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510257768992444:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:53.606401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:53.919804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:24:53.948464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:24:53.978014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:24:54.008445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:24:54.039602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:24:54.079740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:24:54.161674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510262063960258:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:54.161822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:54.162127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510262063960263:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:24:54.165419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:24:54.177522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510262063960265:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-04-04T17:24:54.245943Z node 1 :TX_PROXY ERROR: Actor# [1:7489510262063960320:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T17:24:55.365609Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787495394, txId: 281474976710672] shutting down
2025-04-04T17:24:55.475169Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510244884088780:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:24:55.475257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T17:24:55.655164Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787495681, txId: 281474976710675] shutting down
2025-04-04T17:24:55.958654Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787495989, txId: 281474976710678] shutting down
2025-04-04T17:24:56.257090Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787496290, txId: 281474976710681] shutting down
2025-04-04T17:24:56.520398Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787496542, txId: 281474976710684] shutting down
2025-04-04T17:24:56.839971Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787496843, txId: 281474976710687] shutting down
2025-04-04T17:24:57.138565Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787497165, txId: 281474976710690] shutting down
2025-04-04T17:24:57.459739Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787497487, txId: 281474976710693] shutting down
2025-04-04T17:24:57.798940Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787497823, txId: 281474976710696] shutting down
2025-04-04T17:24:58.160598Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787498187, txId: 281474976710699] shutting down
2025-04-04T17:24:58.518714Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787498551, txId: 281474976710702] shutting down
2025-04-04T17:24:58.827752Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787498859, txId: 281474976710705] shutting down
2025-04-04T17:24:59.138310Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787499167, txId: 281474976710708] shutting down
2025-04-04T17:24:59.424988Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787499454, txId: 281474976710711] shutting down
2025-04-04T17:24:59.691583Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787499727, txId: 281474976710714] shutting down
2025-04-04T17:24:59.997831Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787500028, txId: 281474976710717] shutting down
2025-04-04T17:25:00.279338Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787500301, txId: 281474976710720] shutting down
2025-04-04T17:25:00.678466Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787500707, txId: 281474976710723] shutting down
2025-04-04T17:25:01.003545Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787501036, txId: 281474976710726] shutting down
2025-04-04T17:25:01.306755Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787501323, txId: 281474976710729] shutting down
2025-04-04T17:25:01.643660Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787501659, txId: 281474976710732] shutting down
2025-04-04T17:25:02.002984Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 174 ... 78990Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787558079, txId: 281474976711197] shutting down
2025-04-04T17:25:58.443804Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787558471, txId: 281474976711200] shutting down
2025-04-04T17:25:58.906171Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787558933, txId: 281474976711203] shutting down
2025-04-04T17:25:59.246447Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787559276, txId: 281474976711206] shutting down
2025-04-04T17:25:59.577654Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787559605, txId: 281474976711209] shutting down
2025-04-04T17:25:59.917577Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787559948, txId: 281474976711212] shutting down
2025-04-04T17:26:00.221295Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787560249, txId: 281474976711215] shutting down
2025-04-04T17:26:00.583496Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787560613, txId: 281474976711218] shutting down
2025-04-04T17:26:00.923411Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787560956, txId: 281474976711221] shutting down
2025-04-04T17:26:01.329159Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787561355, txId: 281474976711224] shutting down
2025-04-04T17:26:01.711958Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787561740, txId: 281474976711227] shutting down
2025-04-04T17:26:02.134507Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787562153, txId: 281474976711230] shutting down
2025-04-04T17:26:02.509174Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787562531, txId: 281474976711233] shutting down
2025-04-04T17:26:02.917954Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787562944, txId: 281474976711236] shutting down
2025-04-04T17:26:03.317202Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787563336, txId: 281474976711239] shutting down
2025-04-04T17:26:03.698680Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787563721, txId: 281474976711242] shutting down
2025-04-04T17:26:04.079995Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787564078, txId: 281474976711245] shutting down
2025-04-04T17:26:04.422264Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787564449, txId: 281474976711248] shutting down
2025-04-04T17:26:04.796768Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787564820, txId: 281474976711251] shutting down
2025-04-04T17:26:05.166500Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787565191, txId: 281474976711254] shutting down
2025-04-04T17:26:05.537222Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787565562, txId: 281474976711257] shutting down
2025-04-04T17:26:05.865473Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787565898, txId: 281474976711260] shutting down
2025-04-04T17:26:06.189750Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787566220, txId: 281474976711263] shutting down
2025-04-04T17:26:06.550048Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787566577, txId: 281474976711266] shutting down
2025-04-04T17:26:07.032583Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787566990, txId: 281474976711269] shutting down
Trying to start YDB, gRPC: 15726, MsgBus: 11559
2025-04-04T17:26:08.008949Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510579878861551:2060];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:26:08.009046Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00199b/r3tmp/tmpLgnHMz/pdisk_1.dat
2025-04-04T17:26:08.164572Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-04-04T17:26:08.186667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:26:08.186805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:26:08.188269Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15726, node 2
2025-04-04T17:26:08.249175Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:26:08.249208Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:26:08.249218Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:26:08.249423Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11559
TClient is connected to server localhost:11559
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:26:08.736826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:08.825027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:08.883959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:09.066457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:09.132559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:11.313663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510592763765218:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:11.313722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:11.344309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-04-04T17:26:11.371720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-04-04T17:26:11.398806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-04-04T17:26:11.424608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-04-04T17:26:11.449368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-04-04T17:26:11.481368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-04-04T17:26:11.553531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510592763765734:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:11.553609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510592763765739:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:11.553640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:11.556664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-04-04T17:26:11.563813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510592763765741:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-04-04T17:26:11.621779Z node 2 :TX_PROXY ERROR: Actor# [2:7489510592763765795:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TargetDiscoverer::Transfer [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD]
Test command err:
2025-04-04T17:26:05.069416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-04-04T17:26:05.069795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-04-04T17:26:05.069975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019ba/r3tmp/tmpgQQQ9F/pdisk_1.dat
2025-04-04T17:26:05.403135Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18713, node 1
2025-04-04T17:26:05.697298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:26:05.697349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:26:05.697400Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:26:05.697913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:26:05.703571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-04-04T17:26:05.793092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:26:05.794108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:26:05.824071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:7856
2025-04-04T17:26:06.311546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-04-04T17:26:09.708884Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-04-04T17:26:09.755476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:26:09.755592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:26:09.783899Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-04-04T17:26:09.785650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-04-04T17:26:10.007840Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-04T17:26:10.008353Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-04T17:26:10.008772Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-04T17:26:10.008905Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-04T17:26:10.008989Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-04-04T17:26:10.009197Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.009402Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.009490Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.183478Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.183609Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.196569Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.362393Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.412517Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.412619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.448956Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.449161Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.449414Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.449481Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.449555Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.449621Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.449683Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.449745Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.450981Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.481237Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.481363Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.486904Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:10.491956Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:10.492191Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:10.501767Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.526893Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.526967Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.527045Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.538684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.546982Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.547118Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.769727Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.914987Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.991730Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.978999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.979143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.995755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:12.338031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2541:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.338195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.339578Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2546:3125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:12.339803Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:12.339890Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2548:3127] 2025-04-04T17:26:12.339994Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2548:3127] 2025-04-04T17:26:12.340479Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2549:2991] 2025-04-04T17:26:12.340688Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2548:3127], server id = [2:2549:2991], tablet id = 72075186224037894, status = OK 2025-04-04T17:26:12.340888Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2549:2991], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:26:12.340949Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:12.341290Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:12.341384Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2546:3125], StatRequests.size() = 1 2025-04-04T17:26:12.360764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2553:3131], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.360888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.361383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3136], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.367046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:26:12.540594Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:26:12.540675Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:26:12.635487Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2548:3127], schemeshard count = 1 2025-04-04T17:26:13.065560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2560:3138], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-04-04T17:26:13.266850Z node 1 :TX_PROXY ERROR: Actor# [1:2687:3215] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:13.275298Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2710:3231]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:13.275424Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:13.275468Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2710:3231], StatRequests.size() = 1 2025-04-04T17:26:13.333578Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0xdf38czhpqz9axsn2exhq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjU5MGNkNjMtZWI0YmExYTUtZDJiNmFmOWUtYzBmYmMwN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:13.379309Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2755:3046]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:13.381472Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:13.381516Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:26:13.381852Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:13.381901Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:26:13.381939Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:26:13.400623Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-04T17:26:13.401652Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> TargetDiscoverer::Dirs [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-04-04T17:26:12.867807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510593074105092:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:12.867906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001745/r3tmp/tmpxf9JV7/pdisk_1.dat 2025-04-04T17:26:13.191048Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:13.236169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.237567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.240623Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23870 TServer::EnableGrpc on GrpcPort 18625, node 1 2025-04-04T17:26:13.439567Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:13.439584Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:13.439591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:13.439676Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:13.860378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:26:14.022160Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1743787573997, tx_id: 281474976710658 } } } 2025-04-04T17:26:14.022198Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-04-04T17:26:14.047412Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-04-04T17:26:14.047445Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-04-04T17:26:14.047471Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-04-04T17:26:12.867791Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510595119487760:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:12.867870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001751/r3tmp/tmp5qdG11/pdisk_1.dat 2025-04-04T17:26:13.164660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:13.242848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.243035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.244660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22451 TServer::EnableGrpc on GrpcPort 18522, node 1 2025-04-04T17:26:13.439759Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:13.439787Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:13.439800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:13.439935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22451 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:13.836895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:26:13.872606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:14.020622Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743787573899, tx_id: 1 } } } 2025-04-04T17:26:14.021229Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-04T17:26:14.028358Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743787573913, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-04T17:26:14.028382Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-04T17:26:14.034298Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787573990, tx_id: 281474976710659 } }] } } 2025-04-04T17:26:14.034314Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-04-04T17:26:15.130770Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787573990, tx_id: 281474976710659 } } } 2025-04-04T17:26:15.130807Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-04-04T17:26:15.130828Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> TraverseDatashard::TraverseOneTableServerless [GOOD] >> 
Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> TargetDiscoverer::Basic >> TargetDiscoverer::Negative |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2025-04-04T17:26:04.906329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.906624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.906741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00198c/r3tmp/tmpuCjRcd/pdisk_1.dat 2025-04-04T17:26:05.282434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14100, node 1 2025-04-04T17:26:05.697119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697171Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.697724Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.823922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3114 2025-04-04T17:26:06.331805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.614669Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.645443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.645524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.672889Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.674891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.916990Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.917638Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.918074Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.918196Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.918282Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.918430Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.918498Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.918574Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.918622Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.090118Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.090220Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.102837Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.215681Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.252821Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.252905Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.278225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.278387Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.278538Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.278578Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.278616Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.278674Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.278723Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.278766Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.279593Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.303816Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.303904Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.309374Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.313447Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.314062Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.317689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:26:10.332412Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.332454Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.332535Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:26:10.348030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.354999Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.355148Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.523406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.657016Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.745022Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.581125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.327725Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.485300Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:26:12.485363Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.485494Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2586:2943], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.486698Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2588:2945] 2025-04-04T17:26:12.486990Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2588:2945], schemeshard id = 72075186224037899 2025-04-04T17:26:13.673314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2725:3246], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.673463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.687080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-04T17:26:13.943092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3028:3294], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.952379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.953703Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3033:3298]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:13.953874Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:13.954012Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-04T17:26:13.954065Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3036:3301] 2025-04-04T17:26:13.954138Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3036:3301] 2025-04-04T17:26:13.954743Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3037:3185] 2025-04-04T17:26:13.955058Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3036:3301], server id = [2:3037:3185], tablet id = 72075186224037894, status = OK 2025-04-04T17:26:13.955304Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3037:3185], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:26:13.955362Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:13.955609Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:13.955691Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3033:3298], StatRequests.size() = 1 2025-04-04T17:26:13.969166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3041:3305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.969355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.969941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3046:3310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.975353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-04T17:26:14.085410Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:26:14.085486Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:26:14.173552Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3036:3301], schemeshard count = 1 2025-04-04T17:26:14.565913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3048:3312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:26:14.798915Z node 1 :TX_PROXY ERROR: Actor# [1:3169:3381] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:14.809555Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3192:3397]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:14.809796Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:14.809842Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3192:3397], StatRequests.size() = 1 2025-04-04T17:26:14.882273Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0xdgnn0c7ez4xh19y7e15a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjYwOTJmYmMtMjZjNjVjNTUtZTJjYmUzYWUtZmE2NzFlZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:14.945981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2025-04-04T17:26:15.261964Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3515:3459]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:15.262090Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:26:15.262121Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3515:3459], StatRequests.size() = 1 2025-04-04T17:26:15.278463Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3524:3468]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:15.278566Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-04T17:26:15.278586Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3524:3468], StatRequests.size() = 1 2025-04-04T17:26:15.332173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0xdhym9bk2r6cpc6mdhv4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFkMzc0ZWQtMzBhY2ZiMTgtZDdiMjM3YWUtZjQ5ZGFkODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:26:15.427743Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3564:3436]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:15.430420Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:15.430470Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:26:15.430682Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:15.430711Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:26:15.430745Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:26:15.441263Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-04T17:26:15.441463Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-04T17:26:15.441727Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3589:3449]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:15.443551Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:15.443595Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:26:15.443800Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:15.443834Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:26:15.443863Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:26:15.445400Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-04T17:26:15.445592Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2025-04-04T17:26:04.921470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.921782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.921910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019bc/r3tmp/tmpHtof1j/pdisk_1.dat 2025-04-04T17:26:05.239344Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3815, node 1 2025-04-04T17:26:05.697079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697133Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.697698Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.823990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3585 2025-04-04T17:26:06.327417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:10.111122Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:10.143547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.143634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.171005Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:10.173131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.387814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.388395Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.388849Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.388991Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.389060Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.389259Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.389358Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.389464Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.389518Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.562574Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.562682Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.575618Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.691161Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.724422Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.724492Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.748701Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.748845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.749060Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.749127Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.749167Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.749208Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.749250Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.749298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.750074Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.772916Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.773044Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.777699Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.781932Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.782557Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.786162Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:26:10.801708Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.801755Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.801815Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:26:10.816404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.823793Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.823938Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:11.008847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:11.142223Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:11.230679Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:12.073657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.824994Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.974923Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:26:12.974977Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.975076Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2586:2943], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.976152Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2588:2945] 2025-04-04T17:26:12.976436Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2588:2945], schemeshard id = 72075186224037899 2025-04-04T17:26:14.155241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2723:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.155440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.174431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-04T17:26:14.482326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3030:3294], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.482621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.483940Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3035:3298]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:14.484077Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:14.484183Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-04T17:26:14.484237Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3038:3301] 2025-04-04T17:26:14.484292Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3038:3301] 2025-04-04T17:26:14.484737Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3039:3186] 2025-04-04T17:26:14.485192Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3038:3301], server id = [2:3039:3186], tablet id = 72075186224037894, status = OK 2025-04-04T17:26:14.485369Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3039:3186], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:26:14.485422Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:14.485600Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:14.485650Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3035:3298], StatRequests.size() = 1 2025-04-04T17:26:14.540876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3043:3305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.540981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.541199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3048:3310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.545936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-04T17:26:14.695857Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:26:14.695953Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:26:14.717063Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3038:3301], schemeshard count = 1 2025-04-04T17:26:14.975737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3050:3312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:26:15.198035Z node 1 :TX_PROXY ERROR: Actor# [1:3173:3382] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:15.206898Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3196:3398]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:15.207124Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:15.207173Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3196:3398], StatRequests.size() = 1 2025-04-04T17:26:15.263771Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0xdh691dyz1ayayk9xr2k5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBhZjBmYmItZGNmOTRlYzgtZTIzMmI0OGEtODY4NjFiMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:15.305262Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3234:3239]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:15.307654Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:15.307701Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:26:15.308050Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:15.308084Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:26:15.308119Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:26:15.351166Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-04T17:26:15.351484Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> KqpExtractPredicateLookup::OverflowLookup [GOOD] >> KqpExtractPredicateLookup::ComplexRange >> TargetDiscoverer::IndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: 2025-04-04T17:26:05.264907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:05.265241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:05.265333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019a4/r3tmp/tmp8NemSr/pdisk_1.dat 2025-04-04T17:26:05.616840Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26881, node 1 2025-04-04T17:26:05.825562Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.825601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.825633Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.826073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.827929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.910380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.910486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.923676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26207 2025-04-04T17:26:06.411992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.127262Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.159385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.159491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.186666Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.188379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.417663Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.418263Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.418768Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.418931Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.419026Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.419298Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.419403Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.419520Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.419601Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.596083Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.596208Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.609483Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.765730Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:09.813372Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:09.813489Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:09.848314Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:09.849662Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:09.849945Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:09.850028Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:09.850103Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:09.850176Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:09.850265Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:09.850324Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:09.850717Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:09.878214Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.878364Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.884913Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:09.891212Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:09.892185Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:09.897535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:26:09.919276Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:09.919381Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:09.919470Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:26:09.936167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:09.944247Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:09.944397Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.121121Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.309055Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.399070Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.237168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:26:11.988169Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.132256Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:26:12.132319Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.132402Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2585:2946], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.133486Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2587:2948] 2025-04-04T17:26:12.134139Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2587:2948], schemeshard id = 72075186224037899 2025-04-04T17:26:13.148383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:26:13.689148Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:13.911829Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-04T17:26:13.911900Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-04T17:26:13.912015Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3069:3147], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-04-04T17:26:13.913425Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3070:3148] 2025-04-04T17:26:13.913574Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3070:3148], schemeshard id = 72075186224037905 2025-04-04T17:26:14.995642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3199:3404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.995796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.015444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-04-04T17:26:15.299047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3502:3451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.299186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.309266Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3507:3455]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:15.309442Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:15.309595Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-04T17:26:15.309651Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3510:3458] 2025-04-04T17:26:15.309718Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3510:3458] 2025-04-04T17:26:15.310227Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3511:3387] 2025-04-04T17:26:15.310551Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3510:3458], server id = [2:3511:3387], tablet id = 72075186224037894, status = OK 2025-04-04T17:26:15.310670Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3511:3387], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:26:15.310722Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:15.310927Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:15.310995Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3507:3455], StatRequests.size() = 1 2025-04-04T17:26:15.325041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3515:3462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.325233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.325705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3520:3467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.331258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-04-04T17:26:15.566137Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:26:15.566229Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:26:15.656197Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3510:3458], schemeshard count = 1 2025-04-04T17:26:15.942682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3522:3469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-04-04T17:26:16.122709Z node 1 :TX_PROXY ERROR: Actor# [1:3650:3544] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:16.131132Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3673:3560]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:16.131275Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:16.131317Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3673:3560], StatRequests.size() = 1 2025-04-04T17:26:16.197263Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0xdj015m0pjyz2h5bvdy47, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgxZmMyMWEtYjRkMDAyMWEtYWQ2OTM0MGQtYmM4NzVlNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:16.274618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905 2025-04-04T17:26:16.621536Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4026:3626]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:16.621725Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:26:16.622073Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-04-04T17:26:16.622114Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:16.622363Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:16.622421Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:4026:3626], StatRequests.size() = 1 2025-04-04T17:26:16.641837Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4035:3635]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:16.642020Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-04T17:26:16.642051Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:4035:3635], StatRequests.size() = 1 2025-04-04T17:26:16.687748Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0xdk93d4hzbggj4b26h9n8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmIyYmJiZWMtZTgxMDIwYWItN2UxMmY2NTMtZjRhN2Y3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:26:16.723567Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4075:3655]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:16.726007Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:16.726056Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:26:16.726374Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:16.726424Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:26:16.726477Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:26:16.739258Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-04T17:26:16.739478Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-04T17:26:16.739863Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4100:3668]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:16.742030Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:16.742075Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:26:16.742549Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:26:16.742593Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:26:16.742631Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:26:16.744794Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-04T17:26:16.744965Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TargetDiscoverer::Basic [GOOD] >> TargetDiscoverer::SystemObjects >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage >> TargetDiscoverer::Negative [GOOD] >> AnalyzeColumnshard::AnalyzeTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-04-04T17:26:17.247228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510617093983494:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:17.247316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001735/r3tmp/tmpfCvhw0/pdisk_1.dat 2025-04-04T17:26:17.503825Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:17.579500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:17.579625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-04T17:26:17.581340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27517 TServer::EnableGrpc on GrpcPort 14166, node 1 2025-04-04T17:26:17.654543Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:17.654568Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:17.654585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:17.654685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:17.873753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:17.908509Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-04-04T17:26:17.908578Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-04-04T17:26:16.995761Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510612637703561:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:16.997029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001738/r3tmp/tmp6G7p1y/pdisk_1.dat 2025-04-04T17:26:17.234644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:17.239219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:17.239349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:17.241656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6988 TServer::EnableGrpc on GrpcPort 4260, node 1 2025-04-04T17:26:17.427482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:17.427513Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:17.427527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:17.427643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:17.671850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:17.695154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:26:17.832968Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743787577735, tx_id: 1 } } } 2025-04-04T17:26:17.833001Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-04T17:26:17.840378Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787577798, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-04T17:26:17.840404Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-04T17:26:19.015226Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787577798, tx_id: 281474976715658 } } } 2025-04-04T17:26:19.015256Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-04T17:26:19.015277Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2025-04-04T17:26:08.136173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:08.136483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:08.136595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001947/r3tmp/tmpHAOF4W/pdisk_1.dat 2025-04-04T17:26:08.517878Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20854, node 1 2025-04-04T17:26:08.745536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:08.745587Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:08.745624Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:08.745991Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:08.748039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:08.838668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:08.838807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:08.852336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23817 2025-04-04T17:26:09.351732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.097884Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:12.127790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.127878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:12.155228Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:12.156765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:12.388400Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.389078Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.389598Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.389760Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.389848Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.390113Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.390208Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.390343Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.390429Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.566836Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.566958Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:12.579732Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:12.705017Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.745117Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:12.745211Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:12.775863Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:12.776036Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:12.776216Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:12.776266Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:12.776307Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:12.776367Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:12.776411Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:12.776450Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:12.777481Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:12.802868Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:12.802960Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:12.807198Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:12.811979Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:12.812178Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:12.821149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:12.842735Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:12.842778Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:12.842830Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:12.850464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:12.855795Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:12.855879Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:13.007235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:13.183105Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:13.270810Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:14.251935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.252056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.270013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:14.352703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:14.352887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:14.353092Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:14.353199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:14.353298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:14.353378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:14.353456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:14.353546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:14.353637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:14.353710Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:14.353800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:14.353880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:14.378400Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:14.378505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:26:14.378634Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:26:14.378677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:26:14.378879Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:26:14.378950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:26:14.379058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:26:14.379105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:26:14.379172Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:26:14.379219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:26:14.379279Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:26:14.379361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:26:14.380059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:26:14.380128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:26:14.380350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:26:14.380393Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:26:14.380558Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:26:14.380623Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:26:14.380814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:26:14.380854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:26:14.380987Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:26:14.381034Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:26:14.503441Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:26:15.741934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2584:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.742089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.745473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-04T17:26:15.877467Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:26:16.805304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2681:3167], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:16.805448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:16.808181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-04T17:26:16.836719Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000015s FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::InitRootAgain >> TSchemeShardTest::MkRmDir >> TargetDiscoverer::IndexedTable [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::RmDirTwice >> TargetDiscoverer::SystemObjects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-04-04T17:26:18.768370Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510622389673962:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:18.768456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001724/r3tmp/tmphBTXuz/pdisk_1.dat 2025-04-04T17:26:19.014266Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:19.089342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:19.089475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:19.091099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25221 TServer::EnableGrpc on GrpcPort 63028, node 1 2025-04-04T17:26:19.163529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:19.163602Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:19.163612Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:19.163736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25221 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:19.393297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:19.408972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:19.675944Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743787579450, tx_id: 1 } } } 2025-04-04T17:26:19.675972Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-04T17:26:19.682321Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787579660, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-04T17:26:19.682352Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-04T17:26:20.793687Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787579660, tx_id: 281474976710658 } } } 2025-04-04T17:26:20.793725Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-04T17:26:20.793753Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-04-04T17:26:20.793852Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::DependentOps 
>> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-04-04T17:26:19.592561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510626652423839:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:19.592715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00171f/r3tmp/tmpBc48HX/pdisk_1.dat 2025-04-04T17:26:19.806853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:19.933501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:19.933611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:19.935343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4009 TServer::EnableGrpc on GrpcPort 29567, node 1 2025-04-04T17:26:19.977139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:19.977163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:19.977177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:19.977312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:20.226486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:20.237922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:26:20.339469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:20.381704Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743787580283, tx_id: 1 } } } 2025-04-04T17:26:20.381736Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-04-04T17:26:20.388197Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787580339, tx_id: 281474976715658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743787580381, tx_id: 281474976715659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-04-04T17:26:20.388230Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-04-04T17:26:21.599946Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743787580339, tx_id: 281474976715658 } } } 2025-04-04T17:26:21.599993Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-04-04T17:26:21.600024Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale |96.3%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::InternalTTL_Types [GOOD] >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::CreateTable >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL_Types [GOOD] Test command err: 2025-04-04T17:23:33.613970Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:33.697464Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:23:33.703411Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:23:33.703904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:33.730393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:33.730793Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:33.739927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:33.740181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:33.740457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:33.740683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:33.740820Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:33.740942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:33.741086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:33.741208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:33.741367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:33.741488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:33.741605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:33.741748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:33.766399Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:101:2135], Recipient [1:137:2169]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:23:33.771533Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:33.771790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:33.771845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:33.772073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:33.772272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:33.772350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:33.772402Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:33.772516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:33.772605Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:33.772674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:33.772718Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:33.772902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:33.772983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:33.773027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:33.773084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:33.773198Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:33.773270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:33.773312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:33.773347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:33.773428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:33.773469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:33.773497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:33.773550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:33.773599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:33.773637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:33.774102Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-04-04T17:23:33.774271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=76; 2025-04-04T17:23:33.774385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-04-04T17:23:33.774503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=62; 2025-04-04T17:23:33.774701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:33.774770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:33.774813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:33.775110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:33.775168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:33.775202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:33.775374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:33.775441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:33.775477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:33.775660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:33.775696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:33.775757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T1 ... 
);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.438612Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.438635Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:26:22.438660Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:26:22.438724Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:26:22.438814Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.438839Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:26:22.438893Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-04-04T17:26:22.438920Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-04-04T17:26:22.439002Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[4:593:2609];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-04-04T17:26:22.439066Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.439234Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.439329Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.439401Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:26:22.439459Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.439511Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.439533Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:600:2616] finished for tablet 9437184 2025-04-04T17:26:22.439893Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[4:593:2609];stats={"p":[{"events":["f_bootstrap"],"t":0.209},{"events":["f_ProduceResults"],"t":1.473},{"events":["l_bootstrap"],"t":2.903},{"events":["f_processing","f_task_result"],"t":3.006},{"events":["l_task_result"],"t":20.068},{"events":["f_ack"],"t":20.099},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":24.799}],"full":{"a":1743787557640309,"name":"_full_task","f":1743787557640309,"d_finished":0,"c":0,"l":1743787582439572,"d":24799263},"events":[{"name":"bootstrap","f":1743787557849646,"d_finished":2694306,"c":1,"l":1743787560543952,"d":2694306},{"a":1743787582439390,"name":"ack","f":1743787577740235,"d_finished":4648837,"c":903,"l":1743787582439347,"d":4649019},{"a":1743787582439383,"name":"processing","f":1743787560646829,"d_finished":16222286,"c":4515,"l":1743787582439349,"d":16222475},{"name":"ProduceResults","f":1743787559113637,"d_finished":8197051,"c":5420,"l":1743787582439524,"d":8197051},{"a":1743787582439525,"name":"Finish","f":1743787582439525,"d_finished":0,"c":0,"l":1743787582439572,"d":47},{"name":"task_result","f":1743787560646848,"d_finished":11494193,"c":3612,"l":1743787577709184,"d":11494193}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.439944Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[4:593:2609];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:26:22.440237Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[4:593:2609];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.209},{"events":["f_ProduceResults"],"t":1.473},{"events":["l_bootstrap"],"t":2.903},{"events":["f_processing","f_task_result"],"t":3.006},{"events":["l_task_result"],"t":20.068},{"events":["f_ack"],"t":20.099},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":24.799}],"full":{"a":1743787557640309,"name":"_full_task","f":1743787557640309,"d_finished":0,"c":0,"l":1743787582439969,"d":24799660},"events":[{"name":"bootstrap","f":1743787557849646,"d_finished":2694306,"c":1,"l":1743787560543952,"d":2694306},{"a":1743787582439390,"name":"ack","f":1743787577740235,"d_finished":4648837,"c":903,"l":1743787582439347,"d":4649416},{"a":1743787582439383,"name":"processing","f":1743787560646829,"d_finished":16222286,"c":4515,"l":1743787582439349,"d":16222872},{"name":"ProduceResults","f":1743787559113637,"d_finished":8197051,"c":5420,"l":1743787582439524,"d":8197051},{"a":1743787582439525,"name":"Finish","f":1743787582439525,"d_finished":0,"c":0,"l":1743787582439969,"d":444},{"name":"task_result","f":1743787560646848,"d_finished":11494193,"c":3612,"l":1743787577709184,"d":11494193}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-04-04T17:26:22.440288Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:25:57.560395Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-04-04T17:26:22.440312Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:26:22.440482Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSchemeShardTest::SchemeErrors [GOOD] >> 
TSchemeShardTest::SerializedCellVec >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableById >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-04-04T17:19:11.860923Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:11.945557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:11.969636Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:11.969905Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:11.978105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:11.978344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:11.978567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:11.978696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:11.978824Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:11.978928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:11.979028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:11.979171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:11.979317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:11.979450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:11.979559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:11.979658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:12.007117Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:12.007321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:12.007375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:12.007546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.007693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:12.007757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:12.007797Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:12.007883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:12.007951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:12.007999Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:12.008032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:12.008189Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.008249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:12.008287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:12.008317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:12.008400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:12.008470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:12.008527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:12.008559Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:12.008638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:12.008676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:12.008705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:12.008750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:12.008786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:12.008815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:12.009183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-04T17:19:12.009283Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-04T17:19:12.009352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-04-04T17:19:12.009424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-04-04T17:19:12.009575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:12.009640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:12.009685Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:12.009878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:12.009926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.009960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.010112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:12.010153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:12.010192Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:12.010396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:12.010437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:12.010465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:12.010596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:12.010631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:12.010687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
MNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=293480; 2025-04-04T17:26:23.678999Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:10534:12488];tablet_id=9437184;parent=[1:10496:12457];fline=manager.cpp:82;event=ask_data;request=request_id=341;1={portions_count=128};; 2025-04-04T17:26:23.680709Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=128;path_id=1; 2025-04-04T17:26:23.688868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T17:26:24.229397Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:24.229482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-04-04T17:26:24.241384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11813; 2025-04-04T17:26:24.253569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11019; 2025-04-04T17:26:24.253672Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12201; 2025-04-04T17:26:24.253818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=86; 2025-04-04T17:26:24.253921Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2025-04-04T17:26:24.254062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=104; 2025-04-04T17:26:24.254177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=77; 2025-04-04T17:26:24.254378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=161; 2025-04-04T17:26:24.254415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=24890; 2025-04-04T17:26:24.258285Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:24.258367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-04-04T17:26:24.260932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2487; 2025-04-04T17:26:24.281341Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=20319; 2025-04-04T17:26:24.281457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2025-04-04T17:26:24.281519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-04T17:26:24.281560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-04T17:26:24.281602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-04T17:26:24.281642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-04T17:26:24.281708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-04-04T17:26:24.281748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-04-04T17:26:24.281833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-04-04T17:26:24.281876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-04T17:26:24.281935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-04-04T17:26:24.282019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=48; 2025-04-04T17:26:24.282094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=41; 2025-04-04T17:26:24.282127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23714; 2025-04-04T17:26:24.282316Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:26:24.283164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:26:24.283225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:26:24.283290Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:26:24.283353Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T17:26:24.283529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:24.283592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:24.283770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=20; 2025-04-04T17:26:24.283830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:24.283874Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:24.283916Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:24.283954Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:24.284050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:26:24.287992Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:24.291628Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:26:24.293345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:26:24.293391Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:26:24.293419Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:26:24.293465Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:24.293538Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:24.293779Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=20; 2025-04-04T17:26:24.293850Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:24.293906Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:24.293958Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:24.293999Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:24.294096Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-04T17:26:24.294147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> 
TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath >> RetryPolicy::RetryWithBatching [GOOD] >> TSchemeShardTest::Boot >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TSchemeShardTest::RejectSystemViewPath [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-04-04T17:21:19.923167Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.923205Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.923244Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-04-04T17:21:19.923785Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T17:21:19.923865Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.923909Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.925413Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009442s 2025-04-04T17:21:19.926069Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-04-04T17:21:19.926111Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.926137Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.926184Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008939s 2025-04-04T17:21:19.926673Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-04-04T17:21:19.926701Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.926721Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:21:19.926763Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006463s 2025-04-04T17:21:19.939190Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1743787279939163 2025-04-04T17:21:20.268203Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489509338886449142:2070];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:20.268296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:21:20.315842Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489509341862054112:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:21:20.317285Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00170f/r3tmp/tmproX11k/pdisk_1.dat 2025-04-04T17:21:20.535671Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:21:20.535745Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:21:20.692731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.692961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:21:20.697581Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:21:20.698932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:20.725124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:21:20.725199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11907, node 1 2025-04-04T17:21:20.730260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:21:20.756088Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:20.756278Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:21:20.757965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:21:20.779687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/00170f/r3tmp/yandexl006Xf.tmp 2025-04-04T17:21:20.779714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/00170f/r3tmp/yandexl006Xf.tmp 2025-04-04T17:21:20.779892Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/00170f/r3tmp/yandexl006Xf.tmp 2025-04-04T17:21:20.780043Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:21:20.836491Z INFO: TTestServer started on Port 5517 GrpcPort 11907 TClient is connected to server 
localhost:5517 PQClient connected to localhost:11907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:21:21.127080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:21:23.490390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509354746956323:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.490521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489509354746956301:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.490985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:21:23.496152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:21:23.512883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489509354746956330:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:21:23.715752Z node 2 :TX_PROXY ERROR: Actor# [2:7489509354746956358:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:21:23.743143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.748131Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489509351771352129:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.748475Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWU3Mzk0NGUtOGIxYmQzMGItMTc5NWI5MS1jN2Q4MmMwNw==, ActorId: [1:7489509351771352082:2336], ActorState: ExecuteState, TraceId: 01jr0x4n1k5g6p7zv3xxsvn4gq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.750804Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.762556Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489509354746956373:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:21:23.762787Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTM4ZWYyN2YtNTc2ZjgxZWUtNTc4N2RkZWUtOTRjMTZlMmI=, ActorId: [2:7489509354746956299:2308], ActorState: ExecuteState, TraceId: 01jr0x4n0z3bspvnzxzewt7npk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:21:23.763219Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:21:23.866530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:21:23.989647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11907", true, true, 1000); 2025-04-04T17:21:24.215484Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0x4njs0yptsah8jezvvs6m, Database: , DatabaseId: /Root, Sess ... dle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2025-04-04T17:26:26.681979Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2025-04-04T17:26:26.682829Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1743787586682 2025-04-04T17:26:26.683000Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:26:26.683113Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-04-04T17:26:26.687285Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7489510647380933076:2625] 2025-04-04T17:26:26.687444Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:26:26.687515Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687541Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-04-04T17:26:26.687607Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T17:26:26.687657Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687689Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-04-04T17:26:26.687707Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687737Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-04T17:26:26.687765Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687806Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-04-04T17:26:26.687827Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687849Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-04T17:26:26.687866Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687893Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-04T17:26:26.687913Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687940Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-04-04T17:26:26.687959Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.687986Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-04-04T17:26:26.688006Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-04T17:26:26.688033Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-04-04T17:26:26.688054Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:26:26.688081Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-04-04T17:26:26.688100Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T17:26:26.688181Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:26:26.688252Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:26:26.688308Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-04T17:26:26.688476Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-04-04T17:26:26.688527Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-04-04T17:26:26.688690Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-04T17:26:26.688719Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:26:26.688775Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743787586665 queuesize 0 startOffset 0 2025-04-04T17:26:26.689088Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 6 queued_in_partition_duration_ms: 15 } 2025-04-04T17:26:26.689151Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 1 2025-04-04T17:26:26.689186Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 2 2025-04-04T17:26:26.689213Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 3 2025-04-04T17:26:26.689244Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 4 2025-04-04T17:26:26.689285Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 5 2025-04-04T17:26:26.689344Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 6 2025-04-04T17:26:26.689388Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 7 2025-04-04T17:26:26.689430Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 8 2025-04-04T17:26:26.689496Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 9 2025-04-04T17:26:26.689518Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: acknoledged message 10 2025-04-04T17:26:26.689825Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:26:26.689900Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session will now close 2025-04-04T17:26:26.689970Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: aborting 2025-04-04T17:26:26.690689Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:26:26.690748Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0] Write session: destroy 2025-04-04T17:26:26.691391Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0 grpc read done: success: 0 data: 2025-04-04T17:26:26.691421Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0 grpc read failed 2025-04-04T17:26:26.691459Z node 17 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 7 sessionId: test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0 2025-04-04T17:26:26.691479Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|3288d84-8b14017d-618bf6b-64f311f4_0 is DEAD 2025-04-04T17:26:26.691867Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:26:26.692008Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7489510655970867972:2657] destroyed 2025-04-04T17:26:26.692061Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
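The exchange above is the SDK side of a topic write test: ten messages under one message group id are written, the tablet acknowledges them as offsets 0..9, and the session is closed with a zero timeout. A minimal sketch of the same flow against the public C++ topic SDK follows; it is an illustration, not the test's actual code — the header path and exact Write/Close signatures vary between SDK versions, and the endpoint, topic, and group names are simply the ones visible in this run's log.

// Sketch only: mirrors the write-session trace above. Header layout
// differs by SDK version (older trees use
// ydb/public/sdk/cpp/client/ydb_topic/topic.h).
#include <ydb-cpp-sdk/client/topic/client.h>
#include <util/string/builder.h>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
                             .SetEndpoint("localhost:11907")  // GrpcPort from this run
                             .SetDatabase("/Root"));
    NYdb::NTopic::TTopicClient client(driver);

    // Blocking write session keyed by the message group id seen in the log.
    auto session = client.CreateSimpleBlockingWriteSession(
        NYdb::NTopic::TWriteSessionSettings()
            .Path("test-topic")
            .MessageGroupId("test-message-group-id"));

    for (int i = 1; i <= 10; ++i) {                  // acked as offsets 0..9 above
        session->Write(TStringBuilder() << "message-" << i);
    }
    session->Close(TDuration::Zero());               // "Write session: close. Timeout = 0 ms"
    driver.Stop(true);
    return 0;
}

The blocking session hides per-message acknowledgments; the asynchronous TWriteSession the test exercises surfaces them as the write-response and "acknoledged message N" events recorded above.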
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2025-04-04T17:25:13.562739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510339850362997:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:13.562849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:25:13.779663Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000f58/r3tmp/tmpRa76us/pdisk_1.dat 2025-04-04T17:25:14.038877Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:14.047289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:14.047412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:14.053287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5253, node 1 2025-04-04T17:25:14.211040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000f58/r3tmp/yandexF5TTs8.tmp 2025-04-04T17:25:14.211084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000f58/r3tmp/yandexF5TTs8.tmp 2025-04-04T17:25:14.211262Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000f58/r3tmp/yandexF5TTs8.tmp 2025-04-04T17:25:14.211393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:25:14.267083Z INFO: TTestServer started on Port 20869 GrpcPort 5253 TClient is connected to server localhost:20869 PQClient connected to localhost:5253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:14.618921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.636569Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:14.649784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:14.818855Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.830664Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-04T17:25:16.799743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510352735265674:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.799965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.802405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510352735265708:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:16.807947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:25:16.821113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510352735265710:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:25:16.999844Z node 1 :TX_PROXY ERROR: Actor# [1:7489510352735265774:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:17.031448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.069533Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510357030233079:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:25:17.071275Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWVjYTk1NWQtNGQ3NmE3Y2MtYWJkMDcyOGYtZjA1Mzk2YzI=, ActorId: [1:7489510352735265668:2335], ActorState: ExecuteState, TraceId: 01jr0xbrvfdq83jqqv1frkvdbc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:25:17.073243Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:25:17.101783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.192739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489510357030233364:2629] 2025-04-04T17:25:18.565896Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510339850362997:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:18.565971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:25:23.453730Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:25:23.471157Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:25:23.472494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489510382800037446:2803], Recipient [1:7489510339850363432:2203]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:25:23.472537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:25:23.472567Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:25:23.472624Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489510382800037442:2800], Recipient [1:7489510339850363432:2203]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:25:23.472642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:25:23.550464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:25:23.550935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:25:23.551211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:25:23.551266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:25:23.551308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-04 ... 
5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7489510659361994698:2490], Partition 1, Sender [5:7489510659361994698:2490], Recipient [5:7489510659361994770:2496], Cookie: 0 2025-04-04T17:26:28.350417Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7489510659361994698:2490], Recipient [5:7489510659361994770:2496]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-04-04T17:26:28.350435Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-04-04T17:26:28.350973Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][origin] Send TEvPeriodicTopicStats PathId: 14 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-04-04T17:26:28.351233Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][origin] ProcessPendingStats. PendingUpdates size 0 2025-04-04T17:26:28.351370Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [5:7489510659361994561:2470], Recipient [5:7489510620707287734:2142]: NKikimrPQ.TEvPeriodicTopicStats PathId: 14 Generation: 1 Round: 1 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-04-04T17:26:28.351390Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-04-04T17:26:28.351402Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 14] DataSize 0 UsedReserveSize 0 2025-04-04T17:26:28.351418Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099997s, queue# 1 2025-04-04T17:26:28.357785Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7489510659361994564:2471], Partition 0, Sender [5:7489510659361994629:2476], Recipient [5:7489510659361994625:2474], Cookie: 0 2025-04-04T17:26:28.357851Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7489510659361994629:2476], Recipient [5:7489510659361994625:2474]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T17:26:28.357873Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T17:26:28.361823Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510659361994564:2471], Partition 0, Sender [0:0:0], Recipient [5:7489510659361994625:2474], Cookie: 0 2025-04-04T17:26:28.361888Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510659361994625:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.361907Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.361944Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:28.362006Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:28.362033Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:28.362054Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:26:28.400415Z node 5 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-04-04T17:26:28.400520Z node 5 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/dir/origin" 2025-04-04T17:26:28.400653Z node 5 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/dir/origin 2025-04-04T17:26:28.416878Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7489510659361994694:2489], Partition 2, Sender [5:7489510659361994777:2500], Recipient [5:7489510659361994773:2498], Cookie: 0 2025-04-04T17:26:28.416929Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7489510659361994777:2500], Recipient [5:7489510659361994773:2498]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T17:26:28.416945Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T17:26:28.418124Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7489510659361994698:2490], Partition 1, Sender [5:7489510659361994780:2501], Recipient [5:7489510659361994770:2496], Cookie: 0 2025-04-04T17:26:28.418188Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7489510659361994780:2501], Recipient [5:7489510659361994770:2496]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T17:26:28.418210Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-04-04T17:26:28.420615Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510659361994694:2489], Partition 2, Sender [0:0:0], Recipient [5:7489510659361994773:2498], Cookie: 0 2025-04-04T17:26:28.420658Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510659361994773:2498]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.420671Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.420703Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:28.420761Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:28.420781Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:28.420802Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:26:28.421150Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510659361994698:2490], Partition 1, Sender [0:0:0], Recipient [5:7489510659361994770:2496], Cookie: 0 2025-04-04T17:26:28.421192Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510659361994770:2496]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.421210Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.421232Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:28.421277Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:28.421289Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:28.421328Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:26:28.451985Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [5:7489510620707287734:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-04T17:26:28.452043Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-04T17:26:28.452068Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:26:28.452079Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T17:26:28.452134Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-04-04T17:26:28.452473Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [5:7489510620707287734:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-04-04T17:26:28.452509Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-04-04T17:26:28.452529Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:26:28.462155Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510659361994564:2471], Partition 0, Sender [0:0:0], Recipient [5:7489510659361994625:2474], Cookie: 0 2025-04-04T17:26:28.462208Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510659361994625:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.462224Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.462269Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:28.462320Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:28.462333Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:28.462351Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:26:28.521064Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510659361994694:2489], Partition 2, Sender [0:0:0], Recipient [5:7489510659361994773:2498], Cookie: 0 2025-04-04T17:26:28.521139Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510659361994773:2498]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.521160Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.521198Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:28.521264Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:28.521292Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:28.521316Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:26:28.521476Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510659361994698:2490], Partition 1, Sender [0:0:0], Recipient [5:7489510659361994770:2496], Cookie: 0 2025-04-04T17:26:28.521515Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510659361994770:2496]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.521529Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:28.521551Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:28.521579Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:28.521596Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:28.521610Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table |96.3%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning |96.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_PQv1 >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:26:22.194453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:22.194562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:22.194615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:22.194663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:22.194725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:22.194766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:22.194835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:22.194934Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:22.195261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:22.272274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:22.272320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:22.280716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:22.280901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:22.281032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:22.287856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:22.288105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:22.288561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:22.289139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:22.293640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:22.294649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:22.294701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:22.294858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:22.294909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:22.294941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:22.295028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.300246Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:22.405985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:22.406178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.406350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:22.406551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:22.406598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-04-04T17:26:22.408909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:22.409105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:22.409272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.409325Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:22.409359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:22.409401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:22.410931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.410979Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:22.411006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:22.412280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.412318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.412358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:22.412412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.416252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:22.418163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:22.418361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:22.419464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:22.419607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:22.419676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:22.419992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-04-04T17:26:22.420051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:22.420238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:22.420362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:22.422468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:22.422556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:22.422736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:22.422778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:22.423115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.423162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:22.423269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:22.423322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.423375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:22.423419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.423459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:22.423501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.423537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:22.423576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:22.423652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:22.423720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:22.423759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:22.425747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:22.425874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:22.425916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
oseTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.866653Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-04T17:26:32.866681Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-04-04T17:26:32.867084Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-04-04T17:26:32.867160Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-04T17:26:32.867204Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-04T17:26:32.867235Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.867261Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-04-04T17:26:32.867286Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-04-04T17:26:32.867528Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-04-04T17:26:32.867604Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-04T17:26:32.867664Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000005 2025-04-04T17:26:32.867696Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.867721Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-04-04T17:26:32.867923Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-04-04T17:26:32.868174Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-04T17:26:32.877410Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.877577Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 
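The tx 104 trace above shows the pattern schemeshard uses to commit the topic alter: it counts down one TEvProposeTransactionResult per participating PQ tablet ("left await: 2 ... 1 ... 0") and only switches the operation state 128 -> 240 once the last shard reports COMPLETE. A stripped-down model of that countdown barrier is sketched below, with hypothetical names and deliberately none of YDB's actual types:

#include <cstdint>
#include <initializer_list>
#include <unordered_set>

// Illustrative only -- not YDB code. Tracks which shards still owe a
// COMPLETE reply; the operation may advance only when the set drains.
struct TTxBarrier {
    std::unordered_set<uint64_t> Pending;

    TTxBarrier(std::initializer_list<uint64_t> shards) : Pending(shards) {}

    // Returns true when "left await" hits 0, i.e. every participating
    // shard has confirmed and the state machine can move 128 -> 240.
    bool OnShardResult(uint64_t shardId) {
        Pending.erase(shardId);
        return Pending.empty();
    }
};

Fed the three tablet ids from the trace (72075186233409548, 72075186233409549, 72075186233409551), OnShardResult returns false, false, then true — matching the "CollectPQConfigChanged: false / false / true" sequence recorded above.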
2025-04-04T17:26:32.877656Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.877728Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.877840Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.877947Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.878125Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:32.878157Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:26:32.878438Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:32.878473Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-04-04T17:26:32.878790Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-04-04T17:26:32.878837Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-04-04T17:26:32.879010Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:26:32.879060Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:26:32.879116Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-04-04T17:26:32.879179Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:26:32.879241Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-04-04T17:26:32.879330Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-04-04T17:26:32.879403Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-04-04T17:26:32.879467Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-04-04T17:26:32.879686Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-04-04T17:26:32.879770Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-04-04T17:26:32.879830Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:26:32.880428Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:26:32.880503Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-04-04T17:26:32.880555Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-04-04T17:26:32.880617Z node 13 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:26:32.880675Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-04T17:26:32.880790Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-04-04T17:26:32.888859Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-04-04T17:26:32.915452Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-04-04T17:26:32.915522Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-04-04T17:26:32.916023Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-04-04T17:26:32.916116Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-04-04T17:26:32.916155Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1463:3264] TestWaitNotification: OK eventTxId 104 2025-04-04T17:26:32.916693Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:32.916883Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 217us result status StatusSuccess 2025-04-04T17:26:32.917402Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" 
ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> YdbIndexTable::OnlineBuild >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-04-04T17:19:18.057544Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:18.133071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:18.159833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:18.160258Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:18.169331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:18.169556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:18.169798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:18.169914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:18.170031Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:18.170135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:18.170230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:18.170365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:18.170519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:18.170660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:18.170769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:18.170884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:18.200221Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:18.200441Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:18.200508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:18.200698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:18.200882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:18.200953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:18.200997Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:18.201088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:18.201162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:18.201221Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:18.201259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:18.201438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:18.201503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:18.201543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:18.201572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:18.201660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:18.201732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:18.201784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:18.201829Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:18.201907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:18.201944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:18.201974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:18.202023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:18.202061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:18.202088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:18.202509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-04-04T17:19:18.202658Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=83; 2025-04-04T17:19:18.202745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-04T17:19:18.202820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-04-04T17:19:18.202990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:18.203052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:18.203085Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:18.203269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:18.203327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:18.203364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:18.203543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:18.203586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:18.203619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:18.203813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:18.203855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:18.203885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:18.203990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:18.204028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:18.204093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
UMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=283672; 2025-04-04T17:26:31.699250Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:10534:12488];tablet_id=9437184;parent=[1:10496:12457];fline=manager.cpp:82;event=ask_data;request=request_id=341;1={portions_count=128};; 2025-04-04T17:26:31.700749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=128;path_id=1; 2025-04-04T17:26:31.709197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T17:26:32.242328Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:32.242415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-04-04T17:26:32.254153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11646; 2025-04-04T17:26:32.266590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11232; 2025-04-04T17:26:32.266692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12450; 2025-04-04T17:26:32.266847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=95; 2025-04-04T17:26:32.266956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2025-04-04T17:26:32.267107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=109; 2025-04-04T17:26:32.267221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=78; 2025-04-04T17:26:32.267423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=157; 2025-04-04T17:26:32.267469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25005; 2025-04-04T17:26:32.272344Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:32.272421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-04-04T17:26:32.274800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2303; 2025-04-04T17:26:32.295057Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=20171; 2025-04-04T17:26:32.295185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=41; 2025-04-04T17:26:32.295254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=24; 2025-04-04T17:26:32.295296Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-04T17:26:32.295352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-04T17:26:32.295391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-04-04T17:26:32.295465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=39; 2025-04-04T17:26:32.295511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-04T17:26:32.295594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2025-04-04T17:26:32.295634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-04T17:26:32.295701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2025-04-04T17:26:32.295794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2025-04-04T17:26:32.295876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=50; 2025-04-04T17:26:32.295912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23443; 2025-04-04T17:26:32.296100Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:26:32.296956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:26:32.297040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:26:32.297118Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:26:32.297168Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T17:26:32.297367Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:32.297428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:32.297624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=20; 2025-04-04T17:26:32.297688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:32.297731Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:32.297775Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:32.297817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:32.297914Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:26:32.300445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:32.303356Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:26:32.303996Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:26:32.304034Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:26:32.304060Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:26:32.304110Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:32.304179Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:32.304251Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=20; 2025-04-04T17:26:32.304330Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:32.304376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:32.304426Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:32.304467Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:32.304549Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-04T17:26:32.304615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10496:12457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> YdbIndexTable::MultiShardTableOneUniqIndex >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> YdbIndexTable::MultiShardTableOneIndex >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TSchemeShardTest::PQGroupExplicitChannels 
[GOOD] >> TSchemeShardTest::ReadOnlyMode >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQSmallRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:26:21.709341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:21.709434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:21.709466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:21.709491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:21.710576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:21.710605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:21.710654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:21.710756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:21.711999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:21.775722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:21.775764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:21.784164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:21.784342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:21.784472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:21.794403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:21.795674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:21.797919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.800026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:21.806442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814915Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:21.814967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:21.815007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:21.815098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.820226Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:21.920608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:21.922051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.923529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:21.924891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:21.924964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.927882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.928008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:21.928181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.928242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:21.928280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:21.928313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:21.930157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.930232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:21.930284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:21.931876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.931931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.931982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.932025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.935622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:21.937303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:21.937447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:21.938389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.938506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:21.938546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.939264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:21.939336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.939489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:21.939585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:21.942057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:21.942294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:21.942608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:21.942734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:21.942766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 
progress is 1/1 2025-04-04T17:26:21.942843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:21.942910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:21.942968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:21.943018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:21.943071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:21.943100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:21.944762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:21.944877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:21.944914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4T17:26:35.539745Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:26:35.541577Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:26:35.541758Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:26:35.541806Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:26:35.541896Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:26:35.541998Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-04-04T17:26:35.542201Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:35.544613Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-04-04T17:26:35.544747Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-04-04T17:26:35.545138Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-04-04T17:26:35.545329Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 64424511594 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:35.545413Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:26:35.545810Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-04-04T17:26:35.545891Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-04-04T17:26:35.546197Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:35.546320Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:26:35.546393Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:26:35.548544Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:35.548589Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:35.548721Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:26:35.548863Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:35.548903Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-04-04T17:26:35.548950Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-04-04T17:26:35.549304Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-04-04T17:26:35.549366Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-04-04T17:26:35.549568Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:26:35.549632Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:26:35.549700Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-04-04T17:26:35.549770Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:26:35.549842Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-04-04T17:26:35.549916Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-04-04T17:26:35.549997Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-04-04T17:26:35.550057Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 
2025-04-04T17:26:35.550300Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:26:35.550390Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-04-04T17:26:35.550479Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-04-04T17:26:35.550533Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-04-04T17:26:35.551217Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:26:35.551294Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:26:35.551356Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:26:35.551442Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-04-04T17:26:35.551506Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:35.552185Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:26:35.552270Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-04-04T17:26:35.552303Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-04-04T17:26:35.552337Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-04-04T17:26:35.552371Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:26:35.552462Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-04-04T17:26:35.555661Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-04-04T17:26:35.555763Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-04-04T17:26:35.556087Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-04-04T17:26:35.556170Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-04-04T17:26:35.556746Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 
2025-04-04T17:26:35.556874Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-04-04T17:26:35.556946Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [15:407:2373] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-04-04T17:26:35.561184Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "Topic1" TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 121 } MeteringMode: METERING_MODE_RESERVED_CAPACITY } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:35.561667Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /MyRoot/USER_1/Topic1, opId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:35.562024Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, at schemeshard: 72057594046678944 2025-04-04T17:26:35.564909Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_1/Topic1\', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:35.565173Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_1, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, operation: CREATE PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:26:35.565564Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:26:35.565641Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:26:35.566234Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:26:35.566408Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:26:35.566477Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:414:2380] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-04-04T17:19:11.942676Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:12.025695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:12.048059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:12.048323Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:12.055851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:12.056071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:12.056302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:12.056389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:12.056505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:12.056594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:12.056668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:12.056770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:12.056872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:12.056980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.057069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:12.057199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:12.082451Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:12.082990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:12.083058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:12.083243Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 
chunks found; 2025-04-04T17:19:12.083434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:12.083513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:12.083551Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:12.083657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:12.083722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:12.083767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:12.083802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:12.083970Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.084023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:12.084057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:12.084082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:12.084172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:12.084232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:12.084289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:12.084317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:12.084379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:12.084420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:12.084459Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:12.084506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:12.084545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:12.084585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:12.084938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-04-04T17:19:12.085025Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-04-04T17:19:12.085111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T17:19:12.085184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-04T17:19:12.085340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:12.085391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:12.085436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:12.085607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:12.085646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.085672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.085805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:12.085844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:12.085881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:12.086064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
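The TTxUpdateSchema output above walks an ordered chain of startup normalizers (Granules, Chunks, TablesCleaner, ..., RestoreV2Chunks): each one is initialized, reports how many chunks it had to repair ("message=0 chunks found"), finishes, and the chain switches to the next entry until "normalization_finished". A toy sketch of that sequential pattern, under assumed structure rather than the real NKikimr::NOlap normalizer API:

#include <iostream>
#include <memory>
#include <string>
#include <vector>

// Assumed interface; the real normalizers also carry tablet state and DB access.
struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string ClassName() const = 0;
    // Returns the number of chunks that needed repair.
    virtual size_t Apply() = 0;
};

struct TGranulesNormalizer : INormalizer {
    std::string ClassName() const override { return "Granules"; }
    size_t Apply() override { return 0; } // nothing to fix in this sketch
};

struct TChunksNormalizer : INormalizer {
    std::string ClassName() const override { return "Chunks"; }
    size_t Apply() override { return 0; }
};

void RunNormalizationChain(const std::vector<std::unique_ptr<INormalizer>>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        const auto& n = chain[i];
        std::cout << "normalizer_init; type=" << n->ClassName() << "\n";
        std::cout << n->ClassName() << "; message=" << n->Apply() << " chunks found\n";
        std::cout << "normalizer_finished; CLASS_NAME=" << n->ClassName() << "\n";
        if (i + 1 < chain.size()) {
            std::cout << "normalizer_switched; next=" << chain[i + 1]->ClassName() << "\n";
        }
    }
    std::cout << "normalization_finished\n";
}

int main() {
    std::vector<std::unique_ptr<INormalizer>> chain;
    chain.push_back(std::make_unique<TGranulesNormalizer>());
    chain.push_back(std::make_unique<TChunksNormalizer>());
    RunNormalizationChain(chain);
}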
2025-04-04T17:19:12.086145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:12.086172Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:12.086319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:12.086356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:12.086416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... HARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=245557; 2025-04-04T17:26:33.215072Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:14895:16849];tablet_id=9437184;parent=[1:14857:16818];fline=manager.cpp:82;event=ask_data;request=request_id=340;1={portions_count=126};; 2025-04-04T17:26:33.216648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14857:16818];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=126;path_id=1; 2025-04-04T17:26:33.225135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14857:16818];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T17:26:33.747282Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:33.747418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-04-04T17:26:33.769149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=21610; 2025-04-04T17:26:33.783779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=13122; 2025-04-04T17:26:33.783887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=14627; 2025-04-04T17:26:33.784053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=107; 2025-04-04T17:26:33.784162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2025-04-04T17:26:33.784307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=109; 2025-04-04T17:26:33.784423Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=81; 2025-04-04T17:26:33.784591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=132; 2025-04-04T17:26:33.784623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=37150; 2025-04-04T17:26:33.788914Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:33.788997Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-04-04T17:26:33.791497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2426; 2025-04-04T17:26:33.814550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=22950; 2025-04-04T17:26:33.814707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-04-04T17:26:33.814794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=35; 2025-04-04T17:26:33.814848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-04T17:26:33.814899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-04T17:26:33.814949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-04-04T17:26:33.815042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=49; 2025-04-04T17:26:33.815099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-04-04T17:26:33.815208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=69; 2025-04-04T17:26:33.815259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-04T17:26:33.815352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=53; 2025-04-04T17:26:33.815473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=76; 2025-04-04T17:26:33.815575Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=61; 2025-04-04T17:26:33.815621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=26573; 2025-04-04T17:26:33.815842Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113965260;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=169434716;raw_bytes=262645956;count=79;records=2743332} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:26:33.817139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14857:16818];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:26:33.817245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:14857:16818];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:26:33.817340Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:26:33.817407Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T17:26:33.817682Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:33.817776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:33.818028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=20; 2025-04-04T17:26:33.818118Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:33.818165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:33.818210Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:33.818262Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:33.818358Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:26:33.822642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:33.825984Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:26:33.827616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:26:33.827667Z node 1 
:TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-04-04T17:26:33.827704Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:26:33.827765Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:33.827856Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:33.827934Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=20; 2025-04-04T17:26:33.828026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:33.828096Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:33.828163Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:33.828231Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:33.828336Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-04T17:26:33.828406Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:14857:16818];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::ManyDirs >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> 
TSchemeShardTest::AdoptDropSolomon >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 15082, MsgBus: 20067 2025-04-04T17:25:25.407586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510393193798325:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:25.407689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001977/r3tmp/tmp5rADvs/pdisk_1.dat 2025-04-04T17:25:25.729367Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15082, node 1 2025-04-04T17:25:25.806211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:25.806353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:25.814376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:25.865204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:25.865229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:25.865238Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:25.865387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20067 TClient is connected to server localhost:20067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:26.444783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
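The "Resource pool default not found" warnings, the "Scheduled retry ... doublechecking" line, and the "path exist, request accepts it" error that follow show an idempotent bootstrap: several actors race to create /Root/.metadata/workload_manager/pools/default, one wins, and the rest accept the existing path as success. A hedged sketch of that create-or-accept-existing loop, with invented helpers standing in for the real workload service:

#include <iostream>
#include <string>

// Invented status codes and helpers for illustration only.
enum class EStatus { Success, NotFound, AlreadyExists, Retryable };

namespace fake {
    bool poolExists = false; // simulated catalog state

    EStatus FetchPool(const std::string&) {
        return poolExists ? EStatus::Success : EStatus::NotFound;
    }
    EStatus CreatePool(const std::string&) {
        if (poolExists) return EStatus::AlreadyExists; // a concurrent creator won
        poolExists = true;
        return EStatus::Success;
    }
}

// Ensure the default pool exists: fetch, create on NOT_FOUND, and treat
// "path exist, request accepts it" as success, since another actor may
// have created the pool between our fetch and our create.
bool EnsureDefaultPool(const std::string& path, int maxRetries = 3) {
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        if (fake::FetchPool(path) == EStatus::Success) {
            return true;
        }
        switch (fake::CreatePool(path)) {
            case EStatus::Success:
            case EStatus::AlreadyExists:
                return true;
            case EStatus::Retryable: // "Scheduled retry for error ..."
                continue;
            default:
                return false;
        }
    }
    return false;
}

int main() {
    std::cout << EnsureDefaultPool("/Root/.metadata/workload_manager/pools/default") << "\n";
}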
2025-04-04T17:25:26.464698Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:26.471837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:26.620900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:26.806563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:26.889754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:28.523388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510406078701961:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.523521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:28.813648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.846914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.875925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.906385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:28.939503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.021532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:29.101173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510410373669776:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.101261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.101475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510410373669781:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:29.104803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:29.115096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510410373669783:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:29.201380Z node 1 :TX_PROXY ERROR: Actor# [1:7489510410373669837:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:30.410378Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510393193798325:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:30.420585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:30.472617Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787530492, txId: 281474976710672] shutting down 2025-04-04T17:25:30.801428Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787530835, txId: 281474976710675] shutting down 2025-04-04T17:25:31.098133Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787531129, txId: 281474976710678] shutting down 2025-04-04T17:25:31.390650Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787531423, txId: 281474976710681] shutting down 2025-04-04T17:25:31.673100Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787531703, txId: 281474976710684] shutting down 2025-04-04T17:25:31.976852Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787532004, txId: 281474976710687] shutting down 2025-04-04T17:25:32.334965Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787532368, txId: 281474976710690] shutting down 2025-04-04T17:25:32.650523Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787532683, txId: 281474976710693] shutting down 2025-04-04T17:25:32.927343Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787532956, txId: 281474976710696] shutting down 2025-04-04T17:25:33.253828Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787533292, txId: 281474976710699] shutting down 2025-04-04T17:25:33.517606Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787533551, txId: 281474976710702] shutting down 2025-04-04T17:25:33.821721Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787533852, txId: 281474976710705] shutting down 2025-04-04T17:25:34.120275Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534153, txId: 281474976710708] shutting down 2025-04-04T17:25:34.440523Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534461, txId: 281474976710711] shutting down 2025-04-04T17:25:34.727486Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787534762, txId: 281474976710714] shutting down 2025-04-04T17:25:35.014829Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535042, txId: 281474976710717] shutting down 2025-04-04T17:25:35.416712Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535392, txId: 281474976710720] shutting down 2025-04-04T17:25:35.715370Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787535749, txId: 281474976710723] shutting down 2025-04-04T17:25:35.974741Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787536008, txId: 281474976710726] shutting down 2025-04-04T17:25:36.246466Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787536274, txId: 281474976710729] shutting down 2025-04-04T17:25:36.606756Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787536631, txId: 281474976710732] shutting down 2025-04-04T17:25:36.889638Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 174 ... 62566Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787585498, txId: 281474976711200] shutting down 2025-04-04T17:26:25.717890Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787585750, txId: 281474976711203] shutting down 2025-04-04T17:26:25.992770Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787586023, txId: 281474976711206] shutting down 2025-04-04T17:26:26.258542Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787586289, txId: 281474976711209] shutting down 2025-04-04T17:26:26.525009Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787586555, txId: 281474976711212] shutting down 2025-04-04T17:26:26.862256Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787586891, txId: 281474976711215] shutting down 2025-04-04T17:26:27.164361Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787587192, txId: 281474976711218] shutting down 2025-04-04T17:26:27.479672Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787587514, txId: 281474976711221] shutting down 2025-04-04T17:26:27.737451Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787587766, txId: 281474976711224] shutting down 2025-04-04T17:26:27.992116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787588025, txId: 281474976711227] shutting down 2025-04-04T17:26:28.294912Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787588326, txId: 281474976711230] shutting down 2025-04-04T17:26:28.582316Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787588613, txId: 281474976711233] shutting down 2025-04-04T17:26:28.861064Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787588893, txId: 281474976711236] shutting down 2025-04-04T17:26:29.176011Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787589208, 
txId: 281474976711239] shutting down 2025-04-04T17:26:29.498849Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787589530, txId: 281474976711242] shutting down 2025-04-04T17:26:29.814014Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787589845, txId: 281474976711245] shutting down 2025-04-04T17:26:30.138307Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787590174, txId: 281474976711248] shutting down 2025-04-04T17:26:30.461799Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787590447, txId: 281474976711251] shutting down 2025-04-04T17:26:30.756931Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787590783, txId: 281474976711254] shutting down 2025-04-04T17:26:31.071784Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787591105, txId: 281474976711257] shutting down 2025-04-04T17:26:31.350268Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787591385, txId: 281474976711260] shutting down 2025-04-04T17:26:31.621328Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787591658, txId: 281474976711263] shutting down 2025-04-04T17:26:31.919345Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787591952, txId: 281474976711266] shutting down 2025-04-04T17:26:32.208407Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787592239, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 18172, MsgBus: 12972 2025-04-04T17:26:32.912034Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510681959748133:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:32.912093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001977/r3tmp/tmp2Fri19/pdisk_1.dat 2025-04-04T17:26:32.988484Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18172, node 2 2025-04-04T17:26:33.033951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:33.034068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:33.035681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:33.048845Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:33.048864Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:33.048869Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:33.048985Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12972 TClient is connected to server localhost:12972 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:33.402395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:33.419023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:33.466946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:33.594343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:33.664913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.801429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510694844651793:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:35.801517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:35.815988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:26:35.839978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:26:35.864543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:26:35.888835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:26:35.913741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:26:35.981606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.052064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510699139619607:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:36.052129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:36.052221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510699139619612:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:36.054600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:26:36.060942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510699139619614:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:26:36.131409Z node 2 :TX_PROXY ERROR: Actor# [2:7489510699139619667:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:37.691028Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787597720, txId: 281474976715671] shutting down >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> DataShardReadTableSnapshots::ReadTableSnapshot >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> TSchemeShardTest::CreateSystemColumn [GOOD] >> Cdc::SupportedTypes [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: 
"4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } result: ACE { AccessType: 0 
AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 
59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@sta ... 
athType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:41.880412Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:41.880761Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 345us result status StatusSuccess 2025-04-04T17:26:41.881220Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } 
DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:41.881944Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:41.882225Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 287us result status StatusSuccess 2025-04-04T17:26:41.882842Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:41.883744Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:41.883974Z node 14 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 231us result status StatusSuccess 2025-04-04T17:26:41.884419Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:41.885409Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:41.885734Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 341us result status StatusSuccess 2025-04-04T17:26:41.886107Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 10 Type: 
EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:26:24.383903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:24.383978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:24.384009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:24.384036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:24.384083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:24.384104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:24.384153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:24.384231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:24.384479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:24.439047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-04-04T17:26:24.439094Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:24.446965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:24.447161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:24.447321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:24.453848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:24.454094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:24.454555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:24.455220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:24.459151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:24.460167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:24.460209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:24.460372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:24.460410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:24.460438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:24.460521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.465819Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:24.550656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:24.550852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.551028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:24.551200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:24.551240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.553208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:24.553342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-04T17:26:24.553481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.553521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:24.553548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:24.553572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:24.554922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.554962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:24.554987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:24.556159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.556195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.556238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:24.556276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:24.558818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:24.560206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:24.560345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:24.561291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:24.561425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:24.561475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:24.561725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:24.561773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:24.561928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:24.562004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:24.563249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:24.563309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:24.563441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:24.563468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:24.563710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:24.563753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:24.563825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:24.563849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:24.563882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:24.563911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:24.563936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:24.563969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:24.563997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:24.564020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:24.564060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:24.564098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:24.564124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:24.565393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:24.565466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:24.565492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
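
Read end to end, the txId-1 trace above walks a fixed numeric state machine for each sub-operation: TCreateParts finishes with "2 -> 3", TConfigureParts with "3 -> 128", TPropose waits for the coordinator's plan step and moves "128 -> 240", and TDone then reports "progress is 1/1". A compact sketch of that progression follows; the numeric values are copied from the "Change state for txid" lines, while the enum names merely mirror the part names printed beside each transition and are an assumption, not YDB's actual enum.

#include <cstdio>

enum class ETxState {
    CreateParts    = 2,    // create/boot shards (a no-op here: "no shards to create")
    ConfigureParts = 3,    // push config to the affected tablets
    Propose        = 128,  // wait for a plan step from the coordinator
    Done           = 240,  // publish paths and notify subscribers
};

ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts; // "2 -> 3"
        case ETxState::ConfigureParts: return ETxState::Propose;        // "3 -> 128"
        case ETxState::Propose:        return ETxState::Done;           // "128 -> 240"
        case ETxState::Done:           return ETxState::Done;           // terminal
    }
    return s;
}

int main() {
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done; s = Next(s))
        std::printf("state %d -> %d\n", static_cast<int>(s), static_cast<int>(Next(s)));
}
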
eadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:26:42.283101Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:26:42.283166Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:26:42.283231Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:26:42.283352Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:340:2319] message: TxId: 102 2025-04-04T17:26:42.283449Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:26:42.283522Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:26:42.283584Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:26:42.283770Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:26:42.285787Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:26:42.285877Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:341:2320] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-04-04T17:26:42.289543Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:42.289950Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:26:42.290659Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-04-04T17:26:42.290766Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-04-04T17:26:42.290830Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:26:42.290915Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-04-04T17:26:42.291057Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-04-04T17:26:42.291309Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:42.292011Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:42.292100Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-04-04T17:26:42.295412Z node 15 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-04-04T17:26:42.295660Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-04-04T17:26:42.295983Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:42.296068Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:42.296353Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:26:42.296484Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:42.296556Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-04-04T17:26:42.296623Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-04-04T17:26:42.297199Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:26:42.297298Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-04-04T17:26:42.297673Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-04T17:26:42.298826Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:42.298989Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:42.299056Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:26:42.299135Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-04-04T17:26:42.299208Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-04-04T17:26:42.300109Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:42.300196Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:42.300227Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:26:42.300260Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2025-04-04T17:26:42.300295Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:26:42.300381Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-04-04T17:26:42.303828Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-04-04T17:26:42.304080Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2025-04-04T17:26:42.304169Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-04T17:26:42.304958Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-04-04T17:26:42.305321Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-04-04T17:26:42.305551Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-04T17:26:42.305619Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-04-04T17:26:42.305797Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-04T17:26:42.305881Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-04-04T17:26:42.306013Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-04-04T17:26:42.306146Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-04-04T17:26:42.307400Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:26:42.309456Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:26:42.310998Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-04-04T17:26:42.311430Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2025-04-04T17:26:42.311500Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2025-04-04T17:26:42.311582Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2025-04-04T17:26:42.316234Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-04-04T17:26:42.316454Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-04-04T17:26:42.316549Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2025-04-04T17:26:42.316585Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus >> DataShardReadTableSnapshots::ReadTableDropColumn >> DataShardReadTableSnapshots::ReadTableSplitBefore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:26:29.050452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:29.050521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:29.050546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:29.050579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:29.050621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:29.050645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:29.050683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:29.050749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:29.050974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:29.101368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:29.101407Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:29.108523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:29.108679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:29.108841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:29.114842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:29.115072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:29.115502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:29.116110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:29.119855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:29.120809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:29.120867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:29.121022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:29.121054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:29.121081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:29.121168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.125726Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:29.214665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:29.214820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.214964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:29.215137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:29.215189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.216798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:29.216953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:29.217088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.217125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:29.217147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:29.217171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:29.218593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.218632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:29.218654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:29.219826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.219859Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.219885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:29.219916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:29.222264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:29.223562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:29.223713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:29.224424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:29.224502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:29.224545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:29.224727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:29.224765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-04-04T17:26:29.224876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:29.224953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:29.226205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:29.226261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:29.226381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:29.226407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:29.226636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:29.226667Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:29.226740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:29.226771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:29.226800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:29.226824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:29.226846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:29.226872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:29.226892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:29.226912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:29.226949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:29.226981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:29.227001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:29.228114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:29.228202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:29.228228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
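
The same boot trace also shows the publication handshake that keeps an operation "in-flight" after it is otherwise done: the transaction records which (path, version) pairs it must publish ("Publication details: tx: 1, [OwnerId: ..., LocalPathId: 1], 3") and only fully completes once a TEvUpdateAck with an equal or newer version arrives for each of them. Here is a toy model of that bookkeeping; the types and names are chosen for illustration and are not schemeshard's real structures.

#include <cstdint>
#include <cstdio>
#include <map>

struct TPublication {
    std::map<uint64_t, uint64_t> Pending;  // pathId -> version still awaited

    void Add(uint64_t pathId, uint64_t version) { Pending[pathId] = version; }

    // Mirrors what TTxAckPublishToSchemeBoard does in the log: an ack at
    // version >= expected removes the path from the in-flight set.
    void OnUpdateAck(uint64_t pathId, uint64_t version) {
        auto it = Pending.find(pathId);
        if (it != Pending.end() && version >= it->second)
            Pending.erase(it);
    }

    bool InFlight() const { return !Pending.empty(); }  // "Publication in-flight"
};

int main() {
    TPublication p;
    p.Add(/*pathId=*/1, /*version=*/3);  // "Publication details: tx: 1, [..., 1], 3"
    std::printf("in-flight: %d\n", p.InFlight());
    p.OnUpdateAck(1, 3);                 // "Handle TEvUpdateAck ... Version: 3"
    std::printf("in-flight: %d\n", p.InFlight());
}
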
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 31 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:44.014966Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:44.015240Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" took 289us result status StatusSuccess 2025-04-04T17:26:44.015664Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" PathDescription { Self { Name: "DirA" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 29 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:44.016766Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:44.017090Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" took 336us result status StatusSuccess 2025-04-04T17:26:44.017510Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 32 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:44.018799Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2025-04-04T17:26:44.019025Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" took 245us result status StatusSuccess 2025-04-04T17:26:44.019433Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:44.020558Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:44.020874Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" took 338us result status StatusSuccess 2025-04-04T17:26:44.021296Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: 
REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTest::CreateDropKesus [GOOD]
>> TSchemeShardTest::CreateAlterKesus
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD]
>> TPQTest::TestWriteOffsetWithBigMessage [GOOD]
>> TSchemeShardTest::CreateAlterKesus [GOOD]
>> TSchemeShardTest::CreateDropSolomon
>> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD]
>> DataShardReadTableSnapshots::ReadTableMaxRows
>> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD]
>> DataShardReadTableSnapshots::ReadTableSplitAfter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:24:44.037532Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:24:44.037616Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] 2025-04-04T17:24:44.055436Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2025-04-04T17:24:44.055573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:24:44.072512Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:24:44.075624Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-04T17:24:44.075794Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:24:44.077935Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-04-04T17:24:44.078053Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:24:44.078124Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:24:44.078158Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:24:44.078189Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:24:44.078763Z node 1 :PERSQUEUE DEBUG: 
[rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:24:44.079117Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:24:44.081544Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-04-04T17:24:44.081616Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2025-04-04T17:24:44.081686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:24:44.083931Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:24:44.084038Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-04T17:24:44.084087Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:24:44.084142Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-04-04T17:24:44.084189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-04-04T17:24:44.084442Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:24:44.084505Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:24:44.084650Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:24:44.084938Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:24:44.085171Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-04T17:24:44.087001Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2025-04-04T17:24:44.087057Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] 2025-04-04T17:24:44.087100Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:24:44.088834Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-04-04T17:24:44.088917Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-04-04T17:24:44.088997Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-04-04T17:24:44.089034Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2025-04-04T17:24:44.089071Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2025-04-04T17:24:44.089244Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:24:44.089284Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:24:44.089467Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:24:44.089847Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:24:44.090110Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:186:2199] 2025-04-04T17:24:44.091929Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Initializing completed. 2025-04-04T17:24:44.091981Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:186:2199] 2025-04-04T17:24:44.092030Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:24:44.093709Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-04-04T17:24:44.093789Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit request with generation 1 2025-04-04T17:24:44.093828Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user reinit with generation 1 done 2025-04-04T17:24:44.093878Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit request with generation 1 2025-04-04T17:24:44.093909Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user test reinit with generation 1 done 2025-04-04T17:24:44.094082Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 2, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 2 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04 ... PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:44.476323Z node 77 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [78:102:2057] recipient: [78:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [78:102:2057] recipient: [78:100:2134] Leader for TabletID 72057594037927937 is [78:106:2138] sender: [78:107:2057] recipient: [78:100:2134] 2025-04-04T17:26:44.928882Z node 78 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:26:44.928973Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [78:148:2057] recipient: [78:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [78:148:2057] recipient: [78:146:2169] Leader for TabletID 72057594037927938 is [78:152:2173] sender: [78:153:2057] recipient: [78:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [78:106:2138] sender: [78:178:2057] recipient: [78:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:44.949102Z node 78 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:26:44.950177Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 76 actor [78:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 76 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 76 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 76 Important: true } 2025-04-04T17:26:44.951091Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [78:184:2197] 2025-04-04T17:26:44.953098Z node 78 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [78:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:26:44.954566Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [78:185:2198] 2025-04-04T17:26:44.956112Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [78:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:26:44.957502Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [78:186:2199] 2025-04-04T17:26:44.958991Z node 78 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [78:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:44.983689Z node 78 :PERSQUEUE INFO: new Cookie default|f0506005-df448634-2ce3404b-6bce0bdf_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:45.056715Z node 78 :PERSQUEUE INFO: new Cookie default|902b584f-16bd0b62-5f191d19-9ab4b2fa_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:45.133904Z node 78 :PERSQUEUE INFO: new Cookie default|5fbae73c-1dca20b-55c32d45-53ff3e6b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:26:45.242415Z node 78 :PERSQUEUE INFO: new Cookie default|fb8e0ffa-5bf088a-d5e7fe22-50ab31fe_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:45.289888Z node 78 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-04T17:26:45.295723Z node 78 :PERSQUEUE INFO: new Cookie default|9d58dec0-1414b8e4-f1d9d9e2-7e5348bd_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:45.343690Z node 78 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [79:102:2057] recipient: [79:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [79:102:2057] recipient: [79:100:2134] Leader for TabletID 72057594037927937 is [79:106:2138] sender: [79:107:2057] recipient: [79:100:2134] 2025-04-04T17:26:45.783851Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:26:45.783933Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [79:148:2057] recipient: [79:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [79:148:2057] recipient: [79:146:2169] Leader for TabletID 72057594037927938 is [79:152:2173] sender: [79:153:2057] recipient: [79:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [79:106:2138] sender: [79:178:2057] recipient: [79:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:45.808223Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:26:45.809417Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 77 actor [79:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 77 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 77 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 77 Important: true } 2025-04-04T17:26:45.810581Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:184:2197] 2025-04-04T17:26:45.813654Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [79:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:26:45.815818Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:185:2198] 2025-04-04T17:26:45.818006Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [79:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:26:45.819933Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [79:186:2199] 2025-04-04T17:26:45.822101Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [79:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured 
TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:45.851709Z node 79 :PERSQUEUE INFO: new Cookie default|31c5d6e0-55f5d95d-8d1fc140-d2c8a38d_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:45.938495Z node 79 :PERSQUEUE INFO: new Cookie default|683b0efc-5a423231-ea1fd526-ed9bce1f_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:46.030008Z node 79 :PERSQUEUE INFO: new Cookie default|ed945eef-18a63da4-bea8fdab-af8bfad1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:26:46.154079Z node 79 :PERSQUEUE INFO: new Cookie default|90bd8fba-2738bb0a-3e9c7aa-64009d86_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:46.196164Z node 79 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-04T17:26:46.203733Z node 79 :PERSQUEUE INFO: new Cookie default|9fb6c414-744f157f-d7688007-7ecac7c_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:26:46.255655Z node 79 :PERSQUEUE NOTICE: Have to remove new data from cache. 
Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0
>> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder
>> TSchemeShardTest::CreateDropSolomon [GOOD]
>> TSchemeShardTest::CreateAlterDropSolomon
>> TTabletPipeTest::TestSendAfterReboot
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD]
Test command err: 2025-04-04T17:19:09.852095Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:09.942381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:09.967781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:09.968045Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:09.975882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:09.976122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:09.976436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:09.976593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:09.976724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:09.976831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:09.976929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:09.977044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:09.977179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:09.977319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:09.977423Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:09.977519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:10.000152Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:10.000403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:10.000470Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:10.000656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:10.000848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:10.000937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:10.000996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:10.001094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:10.001157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:10.001201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:10.001256Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:10.001503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:10.001589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:10.001643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:10.001691Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:10.001776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:10.001854Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:10.001956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:10.001999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:10.002073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:10.002106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:10.002131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:10.002183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:10.002265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:10.002317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:10.002793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-04-04T17:19:10.002903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-04-04T17:19:10.003005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-04-04T17:19:10.003105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T17:19:10.003311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:10.003384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:10.003419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:10.003580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:10.003624Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:10.003647Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:10.003791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:10.003836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:10.003872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:10.004117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:10.004178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:10.004210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:10.004331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:10.004379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:10.004476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=307731; 2025-04-04T17:26:44.386701Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:15757:17708];tablet_id=9437184;parent=[1:15718:17676];fline=manager.cpp:82;event=ask_data;request=request_id=372;1={portions_count=135};; 2025-04-04T17:26:44.388515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15718:17676];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=135;path_id=1; 2025-04-04T17:26:44.397675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15718:17676];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T17:26:45.020215Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:45.020300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-04-04T17:26:45.028299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7913; 2025-04-04T17:26:45.037046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7399; 2025-04-04T17:26:45.037142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8757; 2025-04-04T17:26:45.037298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=96; 2025-04-04T17:26:45.037398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=58; 2025-04-04T17:26:45.037554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=115; 2025-04-04T17:26:45.037697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=95; 2025-04-04T17:26:45.037879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=143; 2025-04-04T17:26:45.037911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17571; 2025-04-04T17:26:45.043554Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T17:26:45.043632Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-04-04T17:26:45.050943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7227; 2025-04-04T17:26:45.078890Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=27848; 2025-04-04T17:26:45.079009Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=36; 2025-04-04T17:26:45.079072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-04-04T17:26:45.079114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-04T17:26:45.079159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-04T17:26:45.079203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-04T17:26:45.079279Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2025-04-04T17:26:45.079334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-04T17:26:45.079414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2025-04-04T17:26:45.079453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-04T17:26:45.079524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-04-04T17:26:45.079607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=50; 2025-04-04T17:26:45.079684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=45; 2025-04-04T17:26:45.079716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=36041; 2025-04-04T17:26:45.079873Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T17:26:45.080447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15718:17676];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T17:26:45.080502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15718:17676];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T17:26:45.080571Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15718:17676];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T17:26:45.080621Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T17:26:45.080774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:45.080838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:45.081008Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=22; 2025-04-04T17:26:45.081077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:45.081119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:45.081160Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:45.081195Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:45.081286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T17:26:45.083756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:45.088091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T17:26:45.090219Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T17:26:45.090286Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T17:26:45.090311Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T17:26:45.090355Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T17:26:45.090419Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T17:26:45.090476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=22; 2025-04-04T17:26:45.090549Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T17:26:45.090607Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T17:26:45.090677Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:45.090726Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T17:26:45.090808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-04T17:26:45.090858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15718:17676];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes;
>> TSchemeShardTest::CreateAlterDropSolomon [GOOD]
>> TSchemeShardCheckProposeSize::CopyTables [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameterless
>> TTabletPipeTest::TestSendAfterReboot [GOOD]
>> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD]
>> DataShardReadTableSnapshots::CorruptedDyNumber
>> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD]
>> DataShardReadTableSnapshots::ReadTableSplitFinished
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD]
Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437185 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437184 is [1:115:2144] sender: [1:116:2057] recipient: [1:103:2136] Leader for TabletID 9437185
is [1:117:2145] sender: [1:118:2057] recipient: [1:104:2137] Leader for TabletID 9437184 is [1:115:2144] sender: [1:155:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:157:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:160:2057] recipient: [1:100:2135] Leader for TabletID 9437185 is [1:117:2145] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:164:2057] recipient: [1:163:2175] Leader for TabletID 9437185 is [1:165:2176] sender: [1:166:2057] recipient: [1:163:2175] Leader for TabletID 9437185 is [1:165:2176] sender: [1:194:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:115:2144] sender: [1:197:2057] recipient: [1:99:2134] Leader for TabletID 9437184 is [1:115:2144] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:115:2144] sender: [1:201:2057] recipient: [1:199:2199] Leader for TabletID 9437184 is [1:202:2200] sender: [1:203:2057] recipient: [1:199:2199] Leader for TabletID 9437184 is [1:202:2200] sender: [1:231:2057] recipient: [1:14:2061]
>> TTabletPipeTest::TestShutdown
>> TSchemeShardDecimalTypesInTables::Parameterless [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:26:23.598648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:23.598728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:23.598758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:23.598787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:23.598832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:23.598854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:23.598893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:23.598962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:23.599218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:23.653854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:23.653899Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:23.661920Z node 1 :FLAT_TX_SCHEMESHARD
DEBUG: TxInitSchema.Complete 2025-04-04T17:26:23.662126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:23.662328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:23.669335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:23.669573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:23.670043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:23.670752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:23.674534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:23.675492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:23.675538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:23.675671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:23.675709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:23.675739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:23.675831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:23.680543Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:23.765916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:23.766089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:23.766231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:23.766419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:23.766496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:23.768206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:23.768347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:23.768545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-04-04T17:26:23.768595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:23.768627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:23.768650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:23.770095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:23.770147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:23.770176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:23.771484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:23.771523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:23.771559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:23.771608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:23.774146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:23.775499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:23.775628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:23.776343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:23.776443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:23.776482Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:23.776717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:23.776761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:23.776877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:23.776944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-04-04T17:26:23.778403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:23.778448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:23.778596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:23.778625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:23.778852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:23.778893Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:23.778976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:23.779000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:23.779034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:23.779064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:23.779093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:23.779119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:23.779149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:23.779173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:23.779226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:23.779264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:23.779308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:23.780591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:23.780664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:23.780692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
], 18446744073709551615 2025-04-04T17:26:48.726698Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:48.726760Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:48.726795Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:26:48.726856Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-04-04T17:26:48.726904Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-04-04T17:26:48.727313Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:48.727376Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-04-04T17:26:48.727395Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-04-04T17:26:48.727418Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-04-04T17:26:48.727439Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:48.727486Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-04-04T17:26:48.729091Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:26:48.729155Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:26:48.729177Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:26:48.729195Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-04-04T17:26:48.730541Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-04-04T17:26:48.731794Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-04-04T17:26:48.732028Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-04-04T17:26:48.732442Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-04-04T17:26:48.733709Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:48.733979Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:26:48.734510Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-04-04T17:26:48.734729Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-04-04T17:26:48.734898Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-04-04T17:26:48.735661Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409549 2025-04-04T17:26:48.736231Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-04-04T17:26:48.736354Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2025-04-04T17:26:48.736773Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-04-04T17:26:48.736851Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:26:48.736962Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:48.737344Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:26:48.737518Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-04-04T17:26:48.739021Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-04-04T17:26:48.739095Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-04-04T17:26:48.741169Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-04-04T17:26:48.741222Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-04-04T17:26:48.741306Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-04-04T17:26:48.741334Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-04-04T17:26:48.741391Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-04-04T17:26:48.741446Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-04-04T17:26:48.741686Z node 15 
:FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-04-04T17:26:48.742023Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-04-04T17:26:48.742094Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-04-04T17:26:48.742640Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-04-04T17:26:48.742742Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-04-04T17:26:48.742794Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:535:2489] TestWaitNotification: OK eventTxId 103 2025-04-04T17:26:48.743450Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:48.743687Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 292us result status StatusPathDoesNotExist 2025-04-04T17:26:48.743878Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-04-04T17:26:48.744348Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-04-04T17:26:48.744454Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-04-04T17:26:48.744512Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-04-04T17:26:48.744588Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-04-04T17:26:48.745164Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:26:48.745398Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 269us result status StatusSuccess 2025-04-04T17:26:48.745831Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD]
>> Cdc::ShouldDeliverChangesOnSplitMerge
>> TTabletPipeTest::TestShutdown [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true
|96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD]
>> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD]
>> TTabletPipeTest::TestKillClientBeforServerIdKnown
>> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false
>> TTabletCountersPercentile::WithoutZero [GOOD]
>> TTabletCountersPercentile::StartFromZero [GOOD]
>> Cdc::RacyRebootAndSplitWithTxInflight [GOOD]
>> Cdc::RacyActivateAndEnqueue
>> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD]
Test command err:
2025-04-04T17:26:43.719783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:43.720352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:43.720409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ce1/r3tmp/tmp1r1EP9/pdisk_1.dat 2025-04-04T17:26:44.184200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:44.243041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:44.286512Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:26:44.287481Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:26:44.289506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:44.290184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:44.303147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:44.391526Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:26:44.391618Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:26:44.393592Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:26:44.506578Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:26:44.506706Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:26:44.508114Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:26:44.508254Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:26:44.508605Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:26:44.508869Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:26:44.508973Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:26:44.509284Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:26:44.512827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:44.513925Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:26:44.513997Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:26:44.555733Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:26:44.556985Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:26:44.557499Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:26:44.557750Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:44.571723Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:26:44.598209Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:44.598390Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:26:44.599995Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:26:44.600082Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:26:44.600137Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:26:44.602078Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:26:44.602261Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:26:44.602344Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:26:44.613164Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:26:44.647445Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:26:44.648939Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:26:44.649107Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:26:44.649151Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:26:44.649187Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:26:44.649240Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:26:44.649445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:26:44.649498Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:44.650516Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:26:44.650637Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:26:44.650755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:44.650827Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:44.650887Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:26:44.650927Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:44.650953Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:44.650988Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:26:44.651079Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:44.651211Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:44.651260Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:44.651346Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:26:44.652856Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:26:44.652906Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:26:44.653034Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:26:44.653290Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:26:44.653342Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:26:44.653426Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:26:44.653530Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:26:44.653572Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:26:44.653614Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:26:44.653643Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:26:44.653961Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:26:44.654017Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:26:44.654069Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:26:44.654102Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:26:44.654147Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:26:44.654179Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:26:44.654275Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:26:44.654310Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:26:44.654337Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:26:44.656061Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:26:44.656154Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:26:44.667084Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:50.185148Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2025-04-04T17:26:50.185165Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2025-04-04T17:26:50.185195Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2025-04-04T17:26:50.185225Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-04T17:26:50.185244Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-04T17:26:50.185262Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2025-04-04T17:26:50.185290Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-04T17:26:50.185454Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2025-04-04T17:26:50.185485Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:50.185509Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-04T17:26:50.185544Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-04T17:26:50.185575Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T17:26:50.185618Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:26:50.185964Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:999:2803], Recipient [2:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:50.185991Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:50.186042Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:999:2803], Recipient [2:969:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-04T17:26:50.186059Z node 2 :TX_PROXY 
DEBUG: [ReadTable [2:969:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-04T17:26:50.186085Z node 2 :TX_PROXY DEBUG: [ReadTable [2:969:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:999:2803] 2025-04-04T17:26:50.186165Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-04T17:26:50.186425Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:50.186532Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:999:2803], Recipient [2:969:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-04T17:26:50.186556Z node 2 :TX_PROXY DEBUG: [ReadTable [2:969:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-04T17:26:50.186572Z node 2 :TX_PROXY TRACE: [ReadTable [2:969:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:999:2803] ShardId# 72075186224037890 2025-04-04T17:26:50.186657Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-04T17:26:50.186751Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:999:2803], Recipient [2:969:2775]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-04-04T17:26:50.186767Z node 2 :TX_PROXY DEBUG: [ReadTable [2:969:2775] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-04T17:26:50.186961Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:968:2775], Recipient [2:969:2775]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-04T17:26:50.186983Z node 2 :TX_PROXY DEBUG: [ReadTable [2:969:2775] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:50.186999Z node 2 :TX_PROXY DEBUG: [ReadTable [2:969:2775] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:999:2803] 2025-04-04T17:26:50.187032Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-04-04T17:26:50.187071Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:50.187141Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:999:2803], Recipient [2:969:2775]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-04-04T17:26:50.187155Z node 2 :TX_PROXY DEBUG: [ReadTable [2:969:2775] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-04-04T17:26:50.187169Z node 2 :TX_PROXY TRACE: [ReadTable [2:969:2775] TxId# 281474976715662] Sending TEvStreamDataAck to [2:999:2803] ShardId# 72075186224037890 2025-04-04T17:26:50.187209Z node 2 :TX_PROXY INFO: [ReadTable [2:969:2775] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.015707s execute time: 0.162066s total time: 0.177773s 2025-04-04T17:26:50.187311Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-04-04T17:26:50.187350Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-04-04T17:26:50.187605Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:969:2775], Recipient [2:879:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-04-04T17:26:50.187759Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-04T17:26:50.187780Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2025-04-04T17:26:50.187889Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:884:2711], Recipient [2:884:2711]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:50.187906Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:50.187930Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:26:50.187951Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:50.187972Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-04-04T17:26:50.187988Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-04-04T17:26:50.188005Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-04-04T17:26:50.188027Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-04T17:26:50.188043Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-04-04T17:26:50.188059Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-04-04T17:26:50.188076Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-04T17:26:50.188096Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-04-04T17:26:50.188111Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-04-04T17:26:50.188125Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T17:26:50.188139Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-04-04T17:26:50.188166Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-04-04T17:26:50.188180Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T17:26:50.188193Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-04-04T17:26:50.188206Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:50.188221Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-04T17:26:50.188237Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-04T17:26:50.188252Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T17:26:50.188278Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:26:50.188296Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-04-04T17:26:50.188316Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:26:50.188354Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:26:50.188490Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:969:2775], Recipient [2:884:2711]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2025-04-04T17:26:50.188517Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-04-04T17:26:50.188550Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-04-04T17:26:50.188590Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-04-04T17:26:50.188718Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:969:2775], Recipient [2:884:2711]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-04-04T17:26:50.188748Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-04-04T17:26:50.188819Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:969:2775], Recipient [2:884:2711]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 |96.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD]
>> TTabletPipeTest::TestTwoNodes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD]
Test command err:
2025-04-04T17:26:51.158620Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach
2025-04-04T17:26:51.171841Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate
2025-04-04T17:26:51.179194Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate
2025-04-04T17:26:51.181924Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:127:2153]
2025-04-04T17:26:51.181966Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:127:2153]
2025-04-04T17:26:51.182178Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:127:2153]
2025-04-04T17:26:51.182224Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:127:2153]
2025-04-04T17:26:51.182285Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:127:2153]
2025-04-04T17:26:51.182322Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:127:2153]
2025-04-04T17:26:51.182395Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:127:2153]
2025-04-04T17:26:51.182496Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:127:2153] Type# 269877249 Reason# ActorUnknown
2025-04-04T17:26:51.182623Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:130:2155]
2025-04-04T17:26:51.182642Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:130:2155]
2025-04-04T17:26:51.182673Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:130:2155]
2025-04-04T17:26:51.182691Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:130:2155]
2025-04-04T17:26:51.182737Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:130:2155]
2025-04-04T17:26:51.182758Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:130:2155]
2025-04-04T17:26:51.182797Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:130:2155]
2025-04-04T17:26:51.182859Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:130:2155] Type# 269877249 Reason# ActorUnknown
2025-04-04T17:26:51.182921Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:132:2157]
2025-04-04T17:26:51.182934Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:132:2157]
2025-04-04T17:26:51.182965Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:132:2157]
2025-04-04T17:26:51.182986Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:132:2157]
2025-04-04T17:26:51.183027Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:132:2157]
2025-04-04T17:26:51.183044Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:132:2157]
2025-04-04T17:26:51.183077Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:132:2157]
2025-04-04T17:26:51.183173Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:132:2157] Type# 269877249 Reason# ActorUnknown
>> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD]
>> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true
>> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD]
>> DataShardReadTableSnapshots::ReadTableUUID
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD]
Test
command err: 2025-04-04T17:26:43.948352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:43.948790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:43.948845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cf5/r3tmp/tmpMheayB/pdisk_1.dat 2025-04-04T17:26:44.305464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:44.347041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:44.384232Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:26:44.384971Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:26:44.385330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:44.385474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:44.396693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:44.473716Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:26:44.473780Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:26:44.473891Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:26:44.570033Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:26:44.570116Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:26:44.570558Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:26:44.570621Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:26:44.570872Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:26:44.571052Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:26:44.571166Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:26:44.571439Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:26:44.572715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:44.573477Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:26:44.573528Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:26:44.603773Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:26:44.604474Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:26:44.604784Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:26:44.604946Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:44.612796Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:26:44.637108Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:44.637207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:26:44.638441Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:26:44.638504Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:26:44.638553Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:26:44.638842Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:26:44.638943Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:26:44.638997Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:26:44.649692Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:26:44.672963Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:26:44.673152Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:26:44.673260Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:26:44.673304Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:26:44.673347Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:26:44.673390Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:26:44.673591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:26:44.673634Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:44.673898Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:26:44.673997Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:26:44.674099Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:44.674153Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:44.674201Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:26:44.674255Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:44.674286Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:44.674335Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:26:44.674387Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:44.674483Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:44.674520Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:44.674591Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:26:44.674954Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:26:44.674991Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:26:44.675102Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:26:44.675305Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:26:44.675363Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:26:44.675458Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:26:44.675515Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:26:44.675562Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:26:44.675604Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:26:44.675635Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:26:44.675920Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:26:44.675961Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:26:44.675994Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:26:44.676022Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:26:44.676091Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:26:44.676133Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:26:44.676177Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:26:44.676205Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:26:44.676228Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:26:44.677518Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:26:44.677579Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:26:44.688153Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 86224037890 has no attached operations 2025-04-04T17:26:50.577064Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T17:26:50.577118Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:26:50.577478Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:996:2799], Recipient [2:898:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:50.577521Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:50.577674Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:996:2799], Recipient [2:859:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-04T17:26:50.577708Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-04T17:26:50.577744Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-04T17:26:50.577836Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:50.578222Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:50.578390Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:996:2799], Recipient [2:859:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-04-04T17:26:50.578425Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-04T17:26:50.578456Z node 2 :TX_PROXY TRACE: [ReadTable [2:859:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:996:2799] ShardId# 72075186224037890 
2025-04-04T17:26:50.578523Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-04T17:26:50.578601Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:996:2799], Recipient [2:859:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-04T17:26:50.578630Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-04T17:26:50.579001Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:858:2693], Recipient [2:859:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-04T17:26:50.579041Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:50.579074Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-04T17:26:50.579122Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:50.579209Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:50.579357Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:996:2799], Recipient [2:859:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-04-04T17:26:50.579398Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-04-04T17:26:50.579427Z node 2 :TX_PROXY TRACE: [ReadTable [2:859:2693] TxId# 281474976715661] Sending TEvStreamDataAck to [2:996:2799] ShardId# 72075186224037890 2025-04-04T17:26:50.579487Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-04-04T17:26:50.579556Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:996:2799], Recipient [2:859:2693]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-04-04T17:26:50.579583Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-04-04T17:26:50.579859Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:858:2693], Recipient [2:859:2693]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-04T17:26:50.579889Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:50.579914Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-04-04T17:26:50.579952Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2025-04-04T17:26:50.580005Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:50.580175Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:996:2799], Recipient [2:859:2693]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-04-04T17:26:50.580211Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-04-04T17:26:50.580241Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-04-04T17:26:50.580290Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-04T17:26:50.580318Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2025-04-04T17:26:50.580452Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:898:2724], Recipient [2:898:2724]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:50.580487Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:50.580531Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:26:50.580564Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:50.580597Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-04-04T17:26:50.580626Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-04-04T17:26:50.580660Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-04-04T17:26:50.580696Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-04T17:26:50.580727Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-04-04T17:26:50.580756Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-04-04T17:26:50.580785Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-04T17:26:50.580819Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-04-04T17:26:50.580845Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-04-04T17:26:50.580872Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T17:26:50.580902Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-04-04T17:26:50.580942Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-04-04T17:26:50.580968Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T17:26:50.580995Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-04-04T17:26:50.581023Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:50.581048Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-04T17:26:50.581075Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-04T17:26:50.581103Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T17:26:50.581157Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:26:50.581191Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-04-04T17:26:50.581226Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:26:50.581291Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:26:50.581528Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:898:2724], Recipient [2:859:2693]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 352 } } 2025-04-04T17:26:50.581570Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-04-04T17:26:50.581637Z node 2 :TX_PROXY INFO: [ReadTable [2:859:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013792s execute time: 0.302599s total time: 0.316391s 2025-04-04T17:26:50.582052Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:859:2693], Recipient [2:896:2722]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-04T17:26:50.582337Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:859:2693], Recipient [2:665:2569]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-04-04T17:26:50.582555Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:859:2693], Recipient [2:898:2724]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> KqpExtractPredicateLookup::PointJoin [GOOD] >> KqpExtractPredicateLookup::SqlInJoin >> TTabletPipeTest::TestTwoNodes [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestPQReadAhead >> TTabletPipeTest::TestSendWithoutWaitOpen |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> TResourceBroker::TestCounters >> TTabletResolver::TabletResolvePriority [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> TResourceBroker::TestResubmitTask |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> 
TTabletResolver::TabletResolvePriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-04-04T17:26:46.979267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:46.979647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:46.979708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cc5/r3tmp/tmpfj7n3k/pdisk_1.dat 2025-04-04T17:26:47.278235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:47.326146Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:47.363617Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:26:47.364203Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:26:47.364428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:47.364529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:47.375867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:47.452590Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:26:47.452649Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:26:47.452748Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:26:47.540497Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:26:47.540581Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:26:47.540996Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:26:47.541063Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:26:47.541298Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:26:47.541432Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:26:47.541493Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:26:47.541657Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:26:47.542709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:47.543350Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:26:47.543399Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:26:47.567773Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:26:47.568463Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:26:47.568773Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:26:47.568945Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:47.576670Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:26:47.599531Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:47.599620Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:26:47.600935Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:26:47.601020Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:26:47.601075Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:26:47.601375Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:26:47.601497Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:26:47.601557Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:26:47.612090Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:26:47.645916Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:26:47.646078Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:26:47.646164Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:26:47.646193Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:26:47.646219Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:26:47.646282Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:26:47.646437Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:26:47.646469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:47.646675Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:26:47.646747Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:26:47.646837Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:47.646865Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:47.646897Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:26:47.646925Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:47.646948Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:47.646968Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:26:47.647009Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:47.647123Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:47.647148Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:47.647190Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:26:47.647472Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:26:47.647498Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:26:47.647584Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:26:47.647733Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:26:47.647763Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:26:47.647826Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:26:47.647861Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:26:47.647896Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:26:47.647930Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:26:47.647950Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:26:47.648147Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:26:47.648171Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:26:47.648231Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:26:47.648253Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:26:47.648292Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:26:47.648329Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:26:47.648385Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:26:47.648418Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:26:47.648440Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:26:47.649318Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:26:47.649360Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:26:47.659977Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 281474976715659] at 72075186224037888 is Executed 2025-04-04T17:26:52.173860Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2025-04-04T17:26:52.173889Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2025-04-04T17:26:52.173911Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-04T17:26:52.173965Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:744:2625] for [0:281474976715659] at 72075186224037888 2025-04-04T17:26:52.173988Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-04T17:26:52.174026Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:26:52.174147Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:665:2569], Recipient [2:744:2625]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2025-04-04T17:26:52.174181Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2025-04-04T17:26:52.174225Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:665:2569] ShardId# 72075186224037888 2025-04-04T17:26:52.174408Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:744:2625], Recipient [2:665:2569]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2025-04-04T17:26:52.174440Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-04T17:26:52.174513Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:744:2625], Recipient [2:665:2569]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-04-04T17:26:52.174535Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-04T17:26:52.174635Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:665:2569], Recipient 
[2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:52.174652Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:52.174681Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:52.174703Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:52.174727Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:26:52.174751Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-04-04T17:26:52.174775Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-04-04T17:26:52.174799Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-04T17:26:52.174819Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-04T17:26:52.174837Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-04-04T17:26:52.174865Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-04T17:26:52.174982Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-04-04T17:26:52.174997Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:52.175012Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:26:52.175033Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:52.175055Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:52.175100Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:52.175511Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:778:2646], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:52.175536Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:52.175617Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:778:2646], Recipient [2:744:2625]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-04-04T17:26:52.175656Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-04T17:26:52.175874Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:743:2625], Recipient [2:744:2625]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-04T17:26:52.175918Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:52.175943Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-04T17:26:52.175982Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-04T17:26:52.176066Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2025-04-04T17:26:52.176100Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-04-04T17:26:52.176215Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:26:52.176256Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-04-04T17:26:52.176355Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:778:2646], Recipient [2:744:2625]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-04-04T17:26:52.176382Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-04T17:26:52.176406Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-04T17:26:52.176528Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:665:2569], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:52.176559Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:52.176595Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:52.176632Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:52.176665Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-04-04T17:26:52.176692Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-04-04T17:26:52.176723Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-04-04T17:26:52.176765Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-04T17:26:52.176794Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-04-04T17:26:52.176817Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:26:52.176846Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-04T17:26:52.176885Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-04-04T17:26:52.176916Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:26:52.176941Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:26:52.176958Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:26:52.177000Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-04-04T17:26:52.177017Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
2025-04-04T17:26:52.177033Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-04-04T17:26:52.177056Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:52.177088Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:26:52.177111Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:52.177143Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:52.177204Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:52.177227Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-04-04T17:26:52.177256Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-04-04T17:26:52.177290Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-04-04T17:26:52.177352Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:26:52.177632Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:665:2569], Recipient [2:744:2625]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 311 } } 2025-04-04T17:26:52.177705Z node 2 :TX_PROXY DEBUG: [ReadTable [2:744:2625] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-04-04T17:26:52.177768Z node 2 :TX_PROXY ERROR: [ReadTable [2:744:2625] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-04-04T17:26:52.178123Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:744:2625], Recipient [2:665:2569]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658 >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> TResourceBroker::TestChangeTaskType [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> 
TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: 2025-04-04T17:26:47.217067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:47.217518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:47.217575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cb3/r3tmp/tmp6PPdr4/pdisk_1.dat 2025-04-04T17:26:47.537764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:47.567299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:47.603916Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:26:47.604612Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:26:47.604821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:47.604895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:47.616014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:47.694081Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:26:47.694148Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:26:47.694309Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:26:47.804932Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:26:47.805009Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:26:47.805378Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:26:47.805435Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:26:47.805636Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:26:47.805761Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:26:47.805837Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:26:47.806013Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:26:47.807061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:47.807763Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:26:47.807820Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:26:47.833789Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:26:47.834500Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:26:47.834823Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:26:47.835001Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:47.843567Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:26:47.867170Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:47.867253Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:26:47.868453Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:26:47.868525Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:26:47.868571Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:26:47.868824Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:26:47.868911Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:26:47.868966Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:26:47.879700Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:26:47.916349Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:26:47.916494Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:26:47.916576Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:26:47.916614Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:26:47.916644Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:26:47.916677Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:26:47.916836Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:26:47.916869Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:47.917069Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:26:47.917129Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:26:47.917204Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:47.917234Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:47.917278Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:26:47.917308Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:47.917331Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:47.917354Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:26:47.917383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:47.917537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:47.917694Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:47.917775Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:26:47.918197Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:26:47.918261Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:26:47.918363Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:26:47.918515Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:26:47.918547Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:26:47.918608Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:26:47.918640Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:26:47.918674Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:26:47.918707Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:26:47.918730Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:26:47.918915Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:26:47.918935Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:26:47.918958Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:26:47.918985Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:26:47.919025Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:26:47.919051Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:26:47.919087Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:26:47.919108Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:26:47.919123Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:26:47.920064Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:26:47.920100Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:26:47.930748Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:53.006827Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2025-04-04T17:26:53.006863Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-04T17:26:53.006897Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-04T17:26:53.006936Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-04T17:26:53.007272Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1360:3078], Recipient [2:1079:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-04T17:26:53.007305Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-04T17:26:53.007353Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-04T17:26:53.007454Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1360:3078], Recipient [2:1257:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:53.007483Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:53.007544Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-04T17:26:53.007873Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:53.007993Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1360:3078], Recipient [2:1079:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-04-04T17:26:53.008030Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-04-04T17:26:53.008052Z node 2 :TX_PROXY TRACE: [ReadTable [2:1079:2857] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1360:3078] ShardId# 72075186224037896 2025-04-04T17:26:53.008113Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-04-04T17:26:53.008180Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1360:3078], Recipient [2:1079:2857]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-04-04T17:26:53.008223Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-04-04T17:26:53.008601Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1078:2857], Recipient [2:1079:2857]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-04-04T17:26:53.008647Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:53.008673Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-04-04T17:26:53.008713Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-04T17:26:53.008760Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-04-04T17:26:53.008901Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1360:3078], Recipient [2:1079:2857]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-04-04T17:26:53.008926Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-04-04T17:26:53.008949Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-04-04T17:26:53.008994Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2025-04-04T17:26:53.009018Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2025-04-04T17:26:53.009129Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1257:2998], Recipient [2:1257:2998]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:53.009155Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:53.009189Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-04T17:26:53.009213Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:53.009239Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-04-04T17:26:53.009339Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-04-04T17:26:53.009398Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-04-04T17:26:53.009431Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2025-04-04T17:26:53.009455Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-04-04T17:26:53.009477Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-04-04T17:26:53.009500Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-04T17:26:53.009530Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-04-04T17:26:53.009552Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-04-04T17:26:53.009574Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-04-04T17:26:53.009605Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-04-04T17:26:53.009657Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-04-04T17:26:53.009680Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-04-04T17:26:53.009700Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-04-04T17:26:53.009723Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:53.009747Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2025-04-04T17:26:53.009772Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-04-04T17:26:53.009801Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-04-04T17:26:53.009868Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-04T17:26:53.009904Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-04-04T17:26:53.009935Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:26:53.009997Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-04T17:26:53.010211Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1257:2998], Recipient [2:1079:2857]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 289 } } 2025-04-04T17:26:53.010261Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1079:2857] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-04-04T17:26:53.010320Z node 2 :TX_PROXY INFO: [ReadTable [2:1079:2857] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.014838s execute time: 0.543457s total time: 0.558295s 2025-04-04T17:26:53.010696Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1079:2857], Recipient [2:879:2709]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2025-04-04T17:26:53.010897Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1079:2857], Recipient [2:989:2791]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-04T17:26:53.011127Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1079:2857], Recipient [2:994:2793]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-04T17:26:53.011450Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1079:2857], Recipient [2:1254:2996]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-04T17:26:53.011787Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1363:3081], Recipient [2:1144:2914]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:53.011819Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:53.011849Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1361:3079], serverId# [2:1363:3081], sessionId# [0:0:0] 2025-04-04T17:26:53.011912Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1079:2857], Recipient [2:1257:2998]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-04T17:26:53.012063Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1079:2857], Recipient [2:1144:2914]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-04-04T17:26:53.012206Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1364:3082], Recipient [2:1147:2916]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:53.012234Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:53.012260Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1362:3080], serverId# [2:1364:3082], sessionId# [0:0:0] 2025-04-04T17:26:53.012365Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1079:2857], Recipient [2:1147:2916]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> BootstrapperTest::LoneBootstrapper |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> TTabletPipeTest::TestSendAfterOpen >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> 
TTabletPipeTest::TestPipeWithVersionInfo [GOOD]
>> TTabletPipeTest::TestSendAfterOpen [GOOD]
>> TResourceBrokerConfig::UpdateQueues [GOOD]
>> TResourceBrokerConfig::UpdateResourceLimit [GOOD]
>> TFlatMetrics::MaximumValue3 [GOOD]
>> TFlatMetrics::MaximumValue4 [GOOD]
>> DataShardReadTableSnapshots::ReadTableUUID [GOOD]
>> TResourceBroker::TestErrors
>> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD]
>> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true
|96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD]
|96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD]
|96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD]
>> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD]
>> TResourceBroker::TestQueueWithConfigure
>> TResourceBroker::TestErrors [GOOD]
>> TResourceBroker::TestExecutionStat
>> TTabletPipeTest::TestRewriteSameNode
>> TResourceBroker::TestExecutionStat [GOOD]
>> TResourceBroker::TestQueueWithConfigure [GOOD]
>> TResourceBroker::TestOverusageDifferentResources
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD]
Test command err: 2025-04-04T17:26:49.789452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:49.789806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:49.789850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ca7/r3tmp/tmphFhvwQ/pdisk_1.dat 2025-04-04T17:26:50.079829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:50.112372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:50.150132Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:26:50.151102Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:26:50.151506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:50.151654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:50.162974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:50.240867Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:26:50.240921Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:26:50.241015Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:26:50.347301Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:26:50.347408Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:26:50.347817Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:26:50.347880Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:26:50.348081Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:26:50.348206Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:26:50.348271Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:26:50.348457Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:26:50.349501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:50.350200Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:26:50.350283Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:26:50.381242Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:26:50.381986Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:26:50.382362Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:26:50.382542Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:50.391725Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:26:50.418463Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:50.418561Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:26:50.419732Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:26:50.419791Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:26:50.419835Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:26:50.420065Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:26:50.420159Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:26:50.420268Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:26:50.431055Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:26:50.447524Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:26:50.447665Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:26:50.447741Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:26:50.447766Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:26:50.447791Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:26:50.447825Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:26:50.447969Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:26:50.447999Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:50.448222Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:26:50.448294Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:26:50.448375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:50.448406Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:50.448439Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:26:50.448468Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:50.448503Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:50.448526Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:26:50.448556Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:50.448618Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:50.448645Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:26:50.448685Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:26:50.448936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:26:50.448962Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:26:50.449076Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:26:50.449211Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:26:50.449252Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:26:50.449320Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:26:50.449360Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:26:50.449394Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:26:50.449433Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:26:50.449455Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:26:50.449716Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:26:50.449745Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:26:50.449770Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:26:50.449803Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:26:50.449842Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:26:50.449871Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:26:50.449906Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:26:50.449928Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:26:50.449943Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:26:50.451047Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:26:50.451097Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:26:50.461835Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... _DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-04-04T17:26:55.551693Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:859:2693], Recipient [2:665:2569]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715662 Cleared: true 2025-04-04T17:26:55.551731Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-04T17:26:55.551876Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:665:2569], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:55.551909Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:55.551954Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:55.551982Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:55.552025Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-04-04T17:26:55.552060Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit WaitForStreamClearance 2025-04-04T17:26:55.552097Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715662] at 72075186224037888 2025-04-04T17:26:55.552132Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-04T17:26:55.552159Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit WaitForStreamClearance 2025-04-04T17:26:55.552187Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ReadTableScan 2025-04-04T17:26:55.552212Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-04T17:26:55.552402Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2025-04-04T17:26:55.552431Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:55.552455Z node 2 :TX_DATASHARD TRACE: Check 
candidate unit PlanQueue at 72075186224037888 2025-04-04T17:26:55.552481Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:55.552503Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:55.552557Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:55.553026Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:871:2704], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:55.553067Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T17:26:55.553118Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-04T17:26:55.553448Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:55.553496Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-04T17:26:55.553559Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:55.553732Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:55.553815Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-04T17:26:55.553885Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-04T17:26:55.553933Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-04T17:26:55.554309Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:55.554344Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-04T17:26:55.554382Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:55.554443Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:55.554500Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-04T17:26:55.554543Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-04T17:26:55.554578Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-04T17:26:55.554792Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:55.554824Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-04T17:26:55.554858Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:55.554911Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:26:55.554959Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-04-04T17:26:55.554996Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-04-04T17:26:55.555028Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-04-04T17:26:55.555235Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-04-04T17:26:55.555262Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-04-04T17:26:55.555294Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:55.555366Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-04-04T17:26:55.555540Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-04-04T17:26:55.555577Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-04-04T17:26:55.555650Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-04-04T17:26:55.555687Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-04-04T17:26:55.555827Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:665:2569], Recipient [2:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:55.555869Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:26:55.555935Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:26:55.555974Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-04-04T17:26:55.556018Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-04-04T17:26:55.556052Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-04-04T17:26:55.556090Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2025-04-04T17:26:55.556134Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-04T17:26:55.556176Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2025-04-04T17:26:55.556213Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:26:55.556244Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-04T17:26:55.556279Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 
2025-04-04T17:26:55.556305Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:26:55.556336Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:26:55.556367Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:26:55.556411Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-04-04T17:26:55.556432Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:26:55.556459Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-04-04T17:26:55.556496Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:26:55.556532Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:26:55.556600Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:26:55.556641Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:26:55.556713Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:26:55.556751Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-04-04T17:26:55.556789Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:26:55.556864Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:26:55.557022Z node 2 :TX_PROXY DEBUG: [ReadTable [2:859:2693] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2025-04-04T17:26:55.557094Z node 2 :TX_PROXY INFO: [ReadTable [2:859:2693] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013584s execute time: 0.105892s total time: 0.119476s 2025-04-04T17:26:55.557430Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:859:2693], Recipient [2:665:2569]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] |96.5%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log}
|96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD]
>> TResourceBroker::TestOverusageDifferentResources [GOOD]
>> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD]
>> Cdc::ResolvedTimestamps
>> TTabletPipeTest::TestRewriteSameNode [GOOD]
>> TTabletLabeledCountersAggregator::HeavyAggregation
>> TBlockBlobStorageTest::DelayedErrorsNotIgnored
>> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD]
Test command err: 2025-04-04T17:25:13.823884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510341945304540:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:13.823960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001064/r3tmp/tmpaLqysL/pdisk_1.dat 2025-04-04T17:25:14.090794Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:25:14.319582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:14.328624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:14.328741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62838, node 1 2025-04-04T17:25:14.336173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:14.446814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001064/r3tmp/yandexrefKf5.tmp 2025-04-04T17:25:14.446839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001064/r3tmp/yandexrefKf5.tmp 2025-04-04T17:25:14.447016Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001064/r3tmp/yandexrefKf5.tmp 2025-04-04T17:25:14.447122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:25:14.500370Z INFO: TTestServer started on Port 2094 GrpcPort 62838 TClient is connected to server localhost:2094 PQClient connected to localhost:62838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-04T17:25:14.804867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.826106Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.839672Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.843899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:14.996123Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:15.004142Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-04T17:25:17.140362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510359125174524:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.140442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510359125174561:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.140523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:17.144931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:25:17.160749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510359125174563:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:25:17.395879Z node 1 :TX_PROXY ERROR: Actor# [1:7489510359125174627:2450] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:17.422209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.448921Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510359125174635:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:25:17.450698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTRlYTJjMWUtYzJjZjQxYzYtYzBhOGUwZmMtMjA1MmRiN2I=, ActorId: [1:7489510359125174522:2336], ActorState: ExecuteState, TraceId: 01jr0xbs5kcc0a0bmav5aqnaaw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:25:17.453257Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:25:17.455773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:17.522300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489510359125174911:2629] 2025-04-04T17:25:18.824256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510341945304540:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:18.824363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:25:24.041684Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:25:24.059952Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:25:24.061355Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489510389189946297:2808], Recipient [1:7489510346240272292:2207]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:25:24.061397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:25:24.061431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:25:24.061501Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489510389189946293:2805], Recipient [1:7489510346240272292:2207]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:25:24.061517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:25:24.126536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:25:24.127107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:25:24.127465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:25:24.127518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:25:24.1275 ... 
rough write request to KV 2025-04-04T17:26:55.469698Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [5:7489510771145294378:2858], Recipient [5:7489510771145294341:2854]: NKikimrClient.TKeyValueRequest Cookie: 4 CmdWrite { Key: "i0000000002" Value: "\010\000\020\001\030\000(\313\302\335\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000002ctest-consumer" Value: "\010\001\020\001\030\002\"(test-consumer_5_1_9413347517928251404_v1(\0000\002" StorageChannel: INLINE } CmdWrite { Key: "m0000000002utest-consumer" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\002\000\000\000test-consumer_5_1_9413347517928251404_v1" StorageChannel: INLINE } 2025-04-04T17:26:55.469807Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7489510779735229351:2854], Recipient [5:7489510771145294341:2854]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-04T17:26:55.470585Z node 5 :PERSQUEUE TRACE: StateIdle event# 270795264 (NKikimr::TEvKeyValue::TEvResponse), Tablet [5:7489510771145294341:2854], Partition 2, Sender [5:7489510771145294378:2858], Recipient [5:7489510771145294416:2861], Cookie: 0 2025-04-04T17:26:55.470598Z node 5 :PERSQUEUE TRACE: StateFunc, received event# 271450112, Sender [5:7489510771145294378:2858], Recipient [5:7489510685245945360:2077]: NKikimr::NPQ::TEvPqCache::TEvCacheL2Request 2025-04-04T17:26:55.470621Z node 5 :PERSQUEUE TRACE: StateFunc, processing event TEvPqCache::TEvCacheL2Request 2025-04-04T17:26:55.470687Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 270795264, Sender [5:7489510771145294378:2858], Recipient [5:7489510771145294416:2861]: NKikimrClient.TResponse Status: 1 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-04T17:26:55.470706Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvKeyValue::TEvResponse 2025-04-04T17:26:55.470746Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Topic 'test-topic' partition 2 user test-consumer readTimeStamp for offset 1 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2025-04-04T17:26:55.470771Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::HandleWriteResponse. 2025-04-04T17:26:55.470817Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:26:55.470838Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:26:55.470860Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::SyncMemoryStateWithKVState. 2025-04-04T17:26:55.470895Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::CheckScaleStatus splitMergeAvgWriteBytes# 113 writeSpeedUsagePercent# 3.592173258e-05 scaleThresholdSeconds# 300 totalPartitionWriteSpeed# 1048576 sourceIdCount=1 canSplit=0 Topic: "test-topic". Partition: 2 2025-04-04T17:26:55.470932Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:55.470983Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:55.471005Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:55.471025Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:26:55.471089Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188493, Sender [5:7489510771145294416:2861], Recipient [5:7489510771145294341:2854]: NKikimr::TEvPQ::TEvProxyResponse 2025-04-04T17:26:55.471107Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvProxyResponse 2025-04-04T17:26:55.471132Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 2 messageNo: 0 requestId: cookie: 1 2025-04-04T17:26:55.523670Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510771145294341:2854], Partition 2, Sender [0:0:0], Recipient [5:7489510771145294416:2861], Cookie: 0 2025-04-04T17:26:55.523714Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510771145294416:2861]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.523732Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.523770Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:55.523825Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:55.523842Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:55.523861Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:26:55.524082Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510771145294343:2855], Partition 1, Sender [0:0:0], Recipient [5:7489510771145294423:2863], Cookie: 0 2025-04-04T17:26:55.524106Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510771145294423:2863]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.524115Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.524131Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:55.524162Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:55.524191Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:55.524208Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:26:55.528317Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7489510685245945724:2144]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:26:55.528349Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T17:26:55.528408Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7489510685245945724:2144], Recipient [5:7489510685245945724:2144]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:26:55.528426Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T17:26:55.531276Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510728195619846:2467], Partition 0, Sender [0:0:0], Recipient [5:7489510728195619907:2471], Cookie: 0 2025-04-04T17:26:55.531322Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510728195619907:2471]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.531350Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.531395Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:55.531466Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:55.531492Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:55.531521Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:26:55.624017Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510771145294341:2854], Partition 2, Sender [0:0:0], Recipient [5:7489510771145294416:2861], Cookie: 0 2025-04-04T17:26:55.624100Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510771145294416:2861]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.624126Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.624170Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:55.624241Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:55.624270Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:55.624303Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:26:55.624467Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510771145294343:2855], Partition 1, Sender [0:0:0], Recipient [5:7489510771145294423:2863], Cookie: 0 2025-04-04T17:26:55.624519Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510771145294423:2863]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.624542Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.624558Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:55.624582Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:55.624605Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:55.624622Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:26:55.631733Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489510728195619846:2467], Partition 0, Sender [0:0:0], Recipient [5:7489510728195619907:2471], Cookie: 0 2025-04-04T17:26:55.631811Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489510728195619907:2471]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.631835Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:26:55.631873Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:26:55.631927Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:26:55.631951Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:26:55.631989Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-04-04T17:26:56.629195Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-04-04T17:26:56.629442Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2025-04-04T17:26:56.629607Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:26:26.580158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:26.580256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:26.580293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:26.580326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:26.580447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:26.580482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:26.580536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:26.580633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:26.580919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:26.655364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:26.655415Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:26.665521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:26.665738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:26.665900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:26.674762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:26.675078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2025-04-04T17:26:26.675662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:26.676447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:26.680970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:26.682170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:26.682229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:26.682435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:26.682469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:26.682500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:26.682581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.686941Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:26.769305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:26.769448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.769565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:26.769700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:26.769736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.771269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:26.771394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:26.771512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.771543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:26.771565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:26.771589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:26.772780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.772825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:26.772850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:26.773889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.773917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.773941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:26.773994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:26.776278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:26.777315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:26.777431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:26.778062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:26.778140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:26.778169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:26.778371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:26.778416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:26.778532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:26.778587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:26.780133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:26.780179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:26.780292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-04-04T17:26:26.780315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:26.780494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:26.780533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:26.780614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:26.780635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:26.780667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:26.780688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:26.780711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:26.780736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:26.780757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:26.780777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:26.780822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:26.780867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:26.780889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:26.782066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:26.782143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:26.782171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
17:26:56.842584Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 237 } } 2025-04-04T17:26:56.842735Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 237 } } 2025-04-04T17:26:56.842801Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-04-04T17:26:56.842953Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.843008Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-04-04T17:26:56.845477Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.845691Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.845760Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:56.845870Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-04-04T17:26:56.846075Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:56.848242Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:26:56.848415Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-04-04T17:26:56.848938Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:56.849108Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 51539609708 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:56.849190Z node 12 :FLAT_TX_SCHEMESHARD 
INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-04-04T17:26:56.849531Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-04-04T17:26:56.849715Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-04-04T17:26:56.857272Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:56.857345Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:26:56.857625Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:56.857672Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:203:2205], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:26:56.858326Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.858402Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-04-04T17:26:56.858946Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:26:56.859051Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:26:56.859100Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:26:56.859157Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-04-04T17:26:56.859221Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-04-04T17:26:56.859310Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:26:56.861537Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1771 } } 2025-04-04T17:26:56.861587Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:26:56.861732Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1771 } } 
2025-04-04T17:26:56.861884Z node 12 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1771 } } 2025-04-04T17:26:56.863091Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609843 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:26:56.863161Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-04-04T17:26:56.863384Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609843 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:26:56.863457Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-04-04T17:26:56.863576Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 51539609843 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-04-04T17:26:56.863663Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:56.863716Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.863793Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-04-04T17:26:56.863848Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-04-04T17:26:56.867284Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:26:56.868193Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.868826Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.868983Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:26:56.869043Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:26:56.869228Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:26:56.869280Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:26:56.869360Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:26:56.869413Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:26:56.869483Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-04-04T17:26:56.869569Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:332:2311] message: TxId: 102 2025-04-04T17:26:56.869666Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:26:56.869734Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:26:56.869778Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:26:56.869978Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:26:56.872471Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:26:56.872536Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:392:2364] TestWaitNotification: OK eventTxId 102 >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> BootstrapperTest::MultipleBootstrappers [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TResourceBroker::TestOverusage >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2025-04-04T17:26:55.649536Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:55.649595Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:55.649622Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:55.650351Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-04T17:26:55.650394Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-04-04T17:26:55.650578Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-04T17:26:55.650606Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-04-04T17:26:55.650658Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-04-04T17:26:55.650674Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-04-04T17:26:55.651543Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-04T17:26:55.651669Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-04T17:26:55.651694Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-04T17:26:55.651712Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-04T17:26:55.651731Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2025-04-04T17:26:55.651801Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-04T17:26:55.651826Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-04T17:26:55.651937Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-04-04T17:26:55.651952Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2025-04-04T17:26:55.826090Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:55.826612Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] 2025-04-04T17:26:55.826939Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-04T17:26:55.826969Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-04T17:26:55.869202Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:55.869649Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] 2025-04-04T17:26:55.869985Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-04T17:26:55.870011Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2025-04-04T17:26:56.447549Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2025-04-04T17:26:56.447615Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-04-04T17:26:56.447998Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-04T17:26:56.448050Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:56.448142Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-04T17:26:56.448165Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:56.450781Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] 2025-04-04T17:26:56.451241Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] 2025-04-04T17:26:56.452343Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-04T17:26:56.452387Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-04T17:26:56.453025Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-04T17:26:56.453058Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-04-04T17:26:56.963653Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2025-04-04T17:26:56.963713Z node 4 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2025-04-04T17:26:56.963820Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-04T17:26:56.963860Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:56.964095Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-04-04T17:26:56.964123Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:56.965462Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] 2025-04-04T17:26:56.965687Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-04T17:26:56.966307Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-04T17:26:56.966345Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 2025-04-04T17:26:56.966580Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-04T17:26:56.966605Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 2025-04-04T17:26:56.967545Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-04T17:26:56.967677Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-04T17:26:56.967850Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-04-04T17:26:56.967877Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2025-04-04T17:26:56.967973Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-04-04T17:26:56.967993Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-04-04T17:26:57.465599Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-04-04T17:26:57.465660Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:57.465712Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-04-04T17:26:57.465738Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:57.466548Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] 2025-04-04T17:26:57.466739Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:270:2095] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-04-04T17:26:57.467297Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-04T17:26:57.467356Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 13164802727073798053 2025-04-04T17:26:57.467512Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-04-04T17:26:57.467541Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 10171326560769670008 ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-04-04T17:26:57.468116Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-04-04T17:26:57.468169Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2025-04-04T17:26:57.468243Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-04-04T17:26:57.468264Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2025-04-04T17:26:57.468312Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-04-04T17:26:57.468346Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.116418s 2025-04-04T17:26:57.468392Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-04-04T17:26:57.468413Z node 3 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-04-04T17:26:57.471904Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2025-04-04T17:26:57.471963Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:57.475591Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:390:2095] 2025-04-04T17:26:57.488257Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-04T17:26:57.488297Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-04-04T17:26:57.542564Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-04-04T17:26:57.543095Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:390:2095] 2025-04-04T17:26:57.543497Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-04-04T17:26:57.543529Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 2 <-> 0 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:100:2134] ... blocking block result NO_GROUP for [1:101:2134] ... blocking block result NO_GROUP for [1:102:2134] ... 
blocking block result NO_GROUP for [1:103:2134] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } >> TResourceBroker::TestNotifyActorDied [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestPipeConnectToHint >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> TPipeCacheTest::TestIdleRefresh >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 9437184 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] Leader for TabletID 9437184 is [1:107:2139] sender: [1:127:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:162:2057] recipient: [1:160:2167] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:162:2057] recipient: [1:160:2167] Leader for TabletID 9437185 is [1:166:2171] sender: [1:167:2057] recipient: [1:160:2167] Leader for TabletID 9437185 is [1:166:2171] sender: [1:186:2057] recipient: [1:14:2061] >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TPQTest::TestSourceIdDropByUserWrites >> KqpScripting::LimitOnShard >> TPipeCacheTest::TestTabletNode [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpYql::DdlDmlMix >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: 
[1:107:2139] 2025-04-04T17:26:21.950664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:21.950786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:21.950827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:21.950862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:21.950909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:21.950939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:21.950990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:21.951073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:21.951455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:22.005903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:22.005950Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:22.014346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:22.014519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:22.014659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:22.021887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:22.022173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:22.022902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:22.023701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:22.028286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:22.029601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:22.029656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:22.029862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:22.029909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:22.029959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:22.030096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-04-04T17:26:22.036297Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:22.126601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:22.126800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.126951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:22.127109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:22.127160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.128998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:22.129116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:22.129242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.129279Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:22.129309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:22.129334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:22.130720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.130759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:22.130783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:22.131910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.131945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.131976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:22.132011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.134460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:22.135603Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:22.135729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:22.136351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:22.136430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:22.136463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:22.136669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:22.136715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:22.136838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:22.136888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:22.138103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:22.138142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:22.138299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:22.138329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:22.138576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:22.138608Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:22.138680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:22.138710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.138738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:22.138758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.138783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:22.138809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:22.138832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:22.138857Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:22.138897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:22.138936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:22.138963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:22.140129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:22.140200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:22.140224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... rcentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:27:00.990398Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:27:00.990707Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 480us result status StatusSuccess 2025-04-04T17:27:00.991473Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 
110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:27:00.992572Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:27:00.992889Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 327us result status StatusSuccess 2025-04-04T17:27:00.993654Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TPartitionTests::SetOffset
>> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce
>> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD]
>> TSchemeShardTest::ListNotCreatedIndexCase [GOOD]
>> TSchemeShardTest::FindSubDomainPathId
>> KqpYql::InsertCV+useSink
>> TPQTest::TestDirectReadHappyWay
>> TPartitionTests::SetOffset [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD]
Test command err:
Leader for TabletID 9437184 is [0:0:0] sender: [1:157:2058] recipient: [1:155:2136]
IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:157:2058] recipient: [1:155:2136]
Leader for TabletID 9437184 is [1:163:2140] sender: [1:164:2058] recipient: [1:155:2136]
Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:158:2094]
IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:158:2094]
Leader for TabletID 9437185 is [2:179:2097] sender: [2:180:2049] recipient: [2:158:2094]
Leader for TabletID 9437184 is [1:163:2140] sender: [1:207:2058] recipient: [1:15:2062]
Leader for TabletID 9437185 is [2:179:2097] sender: [1:209:2058] recipient: [1:15:2062]
Leader for TabletID 9437185 is [2:179:2097] sender: [2:211:2049] recipient: [2:42:2053]
Leader for TabletID 9437185 is [2:179:2097] sender: [2:212:2049] recipient: [2:152:2093]
Leader for TabletID 9437185 is [2:179:2097] sender: [1:214:2058] recipient: [1:15:2062]
Leader for TabletID 9437185 is [2:179:2097] sender: [2:217:2049] recipient: [2:42:2053]
Leader for TabletID 9437185 is [2:179:2097] sender: [2:218:2049] recipient: [2:216:2110]
Leader for TabletID 9437185 is [2:219:2111] sender: [2:220:2049] recipient: [2:216:2110]
Leader for TabletID 9437185 is [2:219:2111] sender: [1:249:2058] recipient: [1:15:2062]
>> TPartitionTests::OldPlanStep
>> KqpScripting::StreamExecuteYqlScriptScanCancelation
>> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD]
>> TTabletLabeledCountersAggregator::DbAggregation
>> TPartitionTests::OldPlanStep [GOOD]
>> TPartitionTests::ReserveSubDomainOutOfSpace
>> TSchemeShardTest::FindSubDomainPathId [GOOD]
>> TSchemeShardTest::FindSubDomainPathIdActor
>> KqpScripting::StreamScanQuery
>> TTabletLabeledCountersAggregator::DbAggregation [GOOD]
>> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD]
>> TPQTest::TestDirectReadHappyWay [GOOD]
>> TPQTest::TestLowWatermark
>> TPartitionTests::ShadowPartitionCounters
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD]
Test command err:
2025-04-04T17:26:57.549440Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 [1:7:2054]
2025-04-04T17:26:57.549711Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:8:2055] worker 0
2025-04-04T17:26:57.549753Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:9:2056] worker 1
2025-04-04T17:26:57.549769Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:10:2057] worker 2
2025-04-04T17:26:57.549784Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:11:2058] worker 3
2025-04-04T17:26:57.549796Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:12:2059] worker 4
2025-04-04T17:26:57.549823Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:13:2060] worker 5
2025-04-04T17:26:57.549838Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:14:2061] worker 6
2025-04-04T17:26:57.549851Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:15:2062] worker 7
2025-04-04T17:26:57.549864Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:16:2063] worker 8
2025-04-04T17:26:57.549889Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:17:2064] worker 9
Sending message to [1:9:2056] from [1:7:2054] id 1
Sending message to [1:10:2057] from [1:7:2054] id 2
Sending message to [1:11:2058] from [1:7:2054] id 3
Sending message to [1:12:2059] from [1:7:2054] id 4
Sending message to [1:13:2060] from [1:7:2054] id 5
Sending message to [1:14:2061] from [1:7:2054] id 6
Sending message to [1:15:2062] from [1:7:2054] id 7
Sending message to [1:16:2063] from [1:7:2054] id 8
Sending message to [1:17:2064] from [1:7:2054] id 9
Sending message to [1:8:2055] from [1:7:2054] id 10
2025-04-04T17:26:58.069745Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [1:11:2058]
2025-04-04T17:26:58.069805Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [1:12:2059] 2025-04-04T17:26:58.069845Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [1:13:2060] 2025-04-04T17:26:58.069922Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [1:14:2061] 2025-04-04T17:26:58.069977Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [1:15:2062] 2025-04-04T17:26:58.070008Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [1:16:2063] 2025-04-04T17:26:58.070040Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [1:17:2064] 2025-04-04T17:26:58.070323Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [1:8:2055] 2025-04-04T17:26:58.070355Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [1:9:2056] 2025-04-04T17:26:58.070382Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [1:10:2057] 2025-04-04T17:26:58.070464Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [1:8:2055] 2025-04-04T17:26:58.071368Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:8:2055] 2025-04-04T17:26:58.091510Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:7:2054] 2025-04-04T17:26:58.104712Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:9:2056] 2025-04-04T17:26:58.105757Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:9:2056] 2025-04-04T17:26:58.126977Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:9:2056] Initiator [1:7:2054] 2025-04-04T17:26:58.146422Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:10:2057] 2025-04-04T17:26:58.148013Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:10:2057] 2025-04-04T17:26:58.179465Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:7:2054] 2025-04-04T17:26:58.203188Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:11:2058] 2025-04-04T17:26:58.204567Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:11:2058] 2025-04-04T17:26:58.228379Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:7:2054] 2025-04-04T17:26:58.243639Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:12:2059] 2025-04-04T17:26:58.244609Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:12:2059] 2025-04-04T17:26:58.266450Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator [1:7:2054] 2025-04-04T17:26:58.281550Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:13:2060] 2025-04-04T17:26:58.282586Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:13:2060] 2025-04-04T17:26:58.303931Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:7:2054] 2025-04-04T17:26:58.317716Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:14:2061] 2025-04-04T17:26:58.318750Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:14:2061] 2025-04-04T17:26:58.340821Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:7:2054] 2025-04-04T17:26:58.362262Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:15:2062] 2025-04-04T17:26:58.363374Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged 
response node 7 [1:15:2062] 2025-04-04T17:26:58.381225Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:15:2062] Initiator [1:7:2054] 2025-04-04T17:26:58.393880Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:16:2063] 2025-04-04T17:26:58.394875Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:16:2063] 2025-04-04T17:26:58.416119Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:7:2054] 2025-04-04T17:26:58.436794Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:17:2064] 2025-04-04T17:26:58.438152Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:17:2064] 2025-04-04T17:26:58.466986Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:17:2064] Initiator [1:7:2054] 2025-04-04T17:26:58.487922Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:7:2054] 2025-04-04T17:26:58.488054Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [1:7:2054] 2025-04-04T17:26:58.492913Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:7:2054] 2025-04-04T17:26:58.493050Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:7:2054] 2025-04-04T17:26:58.498114Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:7:2054] 2025-04-04T17:26:58.498236Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:7:2054] 2025-04-04T17:26:58.503035Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:7:2054] 2025-04-04T17:26:58.503157Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:7:2054] 2025-04-04T17:26:58.508667Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:7:2054] 2025-04-04T17:26:58.508794Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:7:2054] 2025-04-04T17:26:58.513923Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:7:2054] 2025-04-04T17:26:58.514061Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:7:2054] 2025-04-04T17:26:58.519041Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:7:2054] 2025-04-04T17:26:58.519165Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:7:2054] 2025-04-04T17:26:58.526078Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:7:2054] 2025-04-04T17:26:58.526211Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:7:2054] 2025-04-04T17:26:58.531377Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:7:2054] 2025-04-04T17:26:58.531517Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:7:2054] 2025-04-04T17:26:58.536746Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:7:2054] 2025-04-04T17:26:58.536898Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:7:2054] 2025-04-04T17:26:58.542057Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] TEST 2 10 duration 1.123752s 2025-04-04T17:26:58.807740Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:7:2054] 2025-04-04T17:26:58.808045Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:8:2055] worker 0 2025-04-04T17:26:58.808076Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:9:2056] worker 1 
2025-04-04T17:26:58.808092Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:10:2057] worker 2 2025-04-04T17:26:58.808107Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:11:2058] worker 3 2025-04-04T17:26:58.808123Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:12:2059] worker 4 2025-04-04T17:26:58.808136Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:13:2060] worker 5 2025-04-04T17:26:58.808148Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:14:2061] worker 6 2025-04-04T17:26:58.808183Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:15:2062] worker 7 2025-04-04T17:26:58.808212Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:16:2063] worker 8 2025-04-04T17:26:58.808226Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:17:2064] worker 9 2025-04-04T17:26:58.808249Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:18:2065] worker 10 2025-04-04T17:26:58.808264Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:19:2066] worker 11 2025-04-04T17:26:58.808277Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:20:2067] worker 12 2025-04-04T17:26:58.808289Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:21:2068] worker 13 2025-04-04T17:26:58.808303Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:22:2069] worker 14 2025-04-04T17:26:58.808323Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:23:2070] worker 15 2025-04-04T17:26:58.808352Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:24:2071] worker 16 2025-04-04T17:26:58.808366Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:25:2072] worker 17 2025-04-04T17:26:58.808379Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:26:2073] worker 18 2025-04-04T17:26:58.808392Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:27:2074] worker 19 Sending message to [2:9:2056] from [2:7:2054] id 1 Sending message to [2:10:2057] from [2:7:2054] id 2 Sending message to [2:11:2058] from [2:7:2054] id 3 Sending message to [2:12:2059] from [2:7:2054] id 4 Sending message to [2:13:2060] from [2:7:2054] id 5 Sending message to [2:14:2061] from [2:7:2054] id 6 Sending message to [2:15:2062] from [2:7:2054] id 7 Sending message to [2:16:2063] from [2:7:2054] id 8 Sending message to [2:17:2064] from [2:7: ... 
response node 15 [2:7:2054] 2025-04-04T17:26:59.696605Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 16 [2:7:2054] 2025-04-04T17:26:59.696619Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 16 [2:7:2054] 2025-04-04T17:26:59.696634Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 17 [2:7:2054] 2025-04-04T17:26:59.696645Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 17 [2:7:2054] 2025-04-04T17:26:59.696673Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 18 [2:7:2054] 2025-04-04T17:26:59.696697Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 18 [2:7:2054] 2025-04-04T17:26:59.696735Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 19 [2:7:2054] 2025-04-04T17:26:59.696750Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 19 [2:7:2054] 2025-04-04T17:26:59.696786Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [2:7:2054] 2025-04-04T17:26:59.696797Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [2:7:2054] 2025-04-04T17:26:59.696810Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [2:7:2054] 2025-04-04T17:26:59.696867Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:7:2054] 2025-04-04T17:26:59.700166Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:7:2054] 2025-04-04T17:26:59.700274Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:7:2054] 2025-04-04T17:26:59.703587Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:7:2054] 2025-04-04T17:26:59.703678Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:7:2054] 2025-04-04T17:26:59.706633Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:7:2054] 2025-04-04T17:26:59.706710Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:7:2054] 2025-04-04T17:26:59.710210Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:7:2054] 2025-04-04T17:26:59.710308Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:7:2054] 2025-04-04T17:26:59.714036Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:7:2054] 2025-04-04T17:26:59.714133Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:7:2054] 2025-04-04T17:26:59.717474Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:7:2054] 2025-04-04T17:26:59.717554Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:7:2054] 2025-04-04T17:26:59.722677Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [2:7:2054] 2025-04-04T17:26:59.722776Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:7:2054] 2025-04-04T17:26:59.726037Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:7:2054] 2025-04-04T17:26:59.726134Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:7:2054] 2025-04-04T17:26:59.729558Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [2:7:2054] 2025-04-04T17:26:59.729665Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:7:2054] 2025-04-04T17:26:59.733554Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:7:2054] Initiator [2:6:2053] TEST 2 20 duration 1.026325s 2025-04-04T17:26:59.873788Z node 3 :TABLET_AGGREGATOR INFO: aggregator new 
request V2 [3:7:2054] 2025-04-04T17:26:59.873930Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:7:2054] self [3:8:2055] worker 0 Sending message to [3:8:2055] from [3:7:2054] id 1 Sending message to [3:8:2055] from [3:7:2054] id 2 Sending message to [3:8:2055] from [3:7:2054] id 3 Sending message to [3:8:2055] from [3:7:2054] id 4 Sending message to [3:8:2055] from [3:7:2054] id 5 Sending message to [3:8:2055] from [3:7:2054] id 6 Sending message to [3:8:2055] from [3:7:2054] id 7 Sending message to [3:8:2055] from [3:7:2054] id 8 Sending message to [3:8:2055] from [3:7:2054] id 9 Sending message to [3:8:2055] from [3:7:2054] id 10 2025-04-04T17:27:00.778674Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:8:2055] 2025-04-04T17:27:00.778762Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:8:2055] 2025-04-04T17:27:00.778797Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:8:2055] 2025-04-04T17:27:00.778918Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:8:2055] 2025-04-04T17:27:00.779006Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:8:2055] 2025-04-04T17:27:00.779055Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:8:2055] 2025-04-04T17:27:00.779152Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:8:2055] 2025-04-04T17:27:00.779225Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:8:2055] 2025-04-04T17:27:00.779306Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:8:2055] 2025-04-04T17:27:00.779364Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:8:2055] 2025-04-04T17:27:00.779899Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:8:2055] 2025-04-04T17:27:00.781799Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:8:2055] 2025-04-04T17:27:00.825810Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:8:2055] 2025-04-04T17:27:00.827803Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:8:2055] 2025-04-04T17:27:00.872710Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:8:2055] 2025-04-04T17:27:00.874643Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:8:2055] 2025-04-04T17:27:00.922816Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:8:2055] 2025-04-04T17:27:00.924782Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:8:2055] 2025-04-04T17:27:00.967516Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:8:2055] 2025-04-04T17:27:00.969437Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:8:2055] 2025-04-04T17:27:01.024189Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:8:2055] 2025-04-04T17:27:01.026138Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:8:2055] 2025-04-04T17:27:01.067262Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:8:2055] 2025-04-04T17:27:01.069174Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:8:2055] 2025-04-04T17:27:01.113285Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:8:2055] 2025-04-04T17:27:01.115215Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:8:2055] 2025-04-04T17:27:01.157313Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor 
got response node 9 [3:8:2055] 2025-04-04T17:27:01.159250Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:8:2055] 2025-04-04T17:27:01.201838Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:8:2055] 2025-04-04T17:27:01.203811Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:8:2055] 2025-04-04T17:27:01.272151Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:8:2055] Initiator [3:7:2054] 2025-04-04T17:27:01.709102Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:7:2054] 2025-04-04T17:27:01.710328Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:7:2054] 2025-04-04T17:27:01.788969Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] TEST 2 1 duration 2.140074s 2025-04-04T17:27:02.193786Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:6:2053] self [4:7:2054] worker 0 Sending message to [4:7:2054] from [4:7:2054] id 1 Sending message to [4:7:2054] from [4:7:2054] id 2 Sending message to [4:7:2054] from [4:7:2054] id 3 Sending message to [4:7:2054] from [4:7:2054] id 4 Sending message to [4:7:2054] from [4:7:2054] id 5 Sending message to [4:7:2054] from [4:7:2054] id 6 Sending message to [4:7:2054] from [4:7:2054] id 7 Sending message to [4:7:2054] from [4:7:2054] id 8 Sending message to [4:7:2054] from [4:7:2054] id 9 Sending message to [4:7:2054] from [4:7:2054] id 10 2025-04-04T17:27:02.782538Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:7:2054] 2025-04-04T17:27:02.782595Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:7:2054] 2025-04-04T17:27:02.782614Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:7:2054] 2025-04-04T17:27:02.782631Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:7:2054] 2025-04-04T17:27:02.782702Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:7:2054] 2025-04-04T17:27:02.782732Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:7:2054] 2025-04-04T17:27:02.782756Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:7:2054] 2025-04-04T17:27:02.782783Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:7:2054] 2025-04-04T17:27:02.782822Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:7:2054] 2025-04-04T17:27:02.782852Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:7:2054] 2025-04-04T17:27:02.783119Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:7:2054] 2025-04-04T17:27:02.784591Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:7:2054] 2025-04-04T17:27:02.811963Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:7:2054] 2025-04-04T17:27:02.813322Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:7:2054] 2025-04-04T17:27:02.840877Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:7:2054] 2025-04-04T17:27:02.842111Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [4:7:2054] 2025-04-04T17:27:02.881261Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:7:2054] 2025-04-04T17:27:02.882706Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:7:2054] 2025-04-04T17:27:02.913160Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:7:2054] 
2025-04-04T17:27:02.914484Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:7:2054]
2025-04-04T17:27:02.958222Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:7:2054]
2025-04-04T17:27:02.959650Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:7:2054]
2025-04-04T17:27:02.987790Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:7:2054]
2025-04-04T17:27:02.989704Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:7:2054]
2025-04-04T17:27:03.028699Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:7:2054]
2025-04-04T17:27:03.030398Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:7:2054]
2025-04-04T17:27:03.068221Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:7:2054]
2025-04-04T17:27:03.070313Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:7:2054]
2025-04-04T17:27:03.112759Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:7:2054]
2025-04-04T17:27:03.114693Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:7:2054]
2025-04-04T17:27:03.183427Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:7:2054] Initiator [4:6:2053]
TEST 2 1 duration 1.376281s
>> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK
>> TSchemeShardTest::FindSubDomainPathIdActor [GOOD]
>> TSchemeShardTest::FindSubDomainPathIdActorAsync
>> Cdc::RacyCreateAndSend [GOOD]
>> Cdc::RacySplitAndDropTable
|96.6%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|96.6%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> KqpScripting::LimitOnShard [GOOD]
>> KqpScripting::NoAstSizeLimit
>> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD]
>> KqpYql::DdlDmlMix [GOOD]
>> KqpYql::CreateUseTable
>> YdbIndexTable::MultiShardTableOneIndex [GOOD]
>> YdbIndexTable::MultiShardTableOneIndexDataColumn
>> Cdc::ResolvedTimestamps [GOOD]
>> Cdc::ResolvedTimestampsMultiplePartitions
>> KqpYql::InsertCV+useSink [GOOD]
>> KqpYql::InsertCV-useSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139]
Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139]
2025-04-04T17:26:21.709371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-04-04T17:26:21.709493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-04-04T17:26:21.709540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-04-04T17:26:21.709576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-04-04T17:26:21.710622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type
TxMergeTablePartition, limit 10000 2025-04-04T17:26:21.710687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:21.710801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:21.710922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:21.712076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:21.775729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:21.775777Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:21.784160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:21.784338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:21.784470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:21.793930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:21.795688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:21.797912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.800055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:21.807376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:21.814955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:21.814997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:21.815094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.820198Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:21.921506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:21.922035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.923511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-04-04T17:26:21.924865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:21.924971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.927421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.927545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:21.927697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.927739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:21.927769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:21.927795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:21.929185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.929223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:21.929253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:21.930456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.930489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.930520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.930556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.934161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:21.935844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:21.936024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:21.937821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.937936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 
4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:21.937978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.939219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:21.939265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.939454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:21.939541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:21.941983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:21.942166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:21.942456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:21.942642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:21.942669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:21.942732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:21.942803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:21.942860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:21.942918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:21.942956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:21.942981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:21.944273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:21.944365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:21.944392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-04-04T17:27:05.648514Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:27:05.648545Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:27:05.648581Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-04-04T17:27:05.649344Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-04-04T17:27:05.649415Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-04-04T17:27:05.649621Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:27:05.649676Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:27:05.649733Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-04-04T17:27:05.649779Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:27:05.649833Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-04-04T17:27:05.649895Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-04-04T17:27:05.649950Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:27:05.650015Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:27:05.650218Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-04-04T17:27:05.650308Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-04-04T17:27:05.650360Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-04-04T17:27:05.650408Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-04-04T17:27:05.651638Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:27:05.651715Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:27:05.651751Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:27:05.651803Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:27:05.651860Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-04-04T17:27:05.652791Z 
node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:27:05.652853Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:27:05.652875Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:27:05.652896Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-04-04T17:27:05.652920Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-04-04T17:27:05.652983Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-04-04T17:27:05.660160Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-04-04T17:27:05.660438Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:27:05.665794Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:27:05.665862Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:27:05.666376Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:27:05.666493Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:27:05.666541Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:508:2459] TestWaitNotification: OK eventTxId 102 2025-04-04T17:27:05.667059Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:27:05.667391Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 370us result status StatusSuccess 2025-04-04T17:27:05.667952Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: 
EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:27:05.668751Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T17:27:05.669021Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 302us result status StatusSuccess 2025-04-04T17:27:05.669509Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:27:06.117985Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:27:06.118357Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 421us result status StatusSuccess 2025-04-04T17:27:06.118972Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:27:06.264118Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 >> TPartitionTests::ShadowPartitionCounters [GOOD] >> KqpExtractPredicateLookup::ComplexRange [GOOD] >> TPartitionTests::NonConflictingCommitsBatch >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> ColumnStatistics::CountMinSketchStatistics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::ComplexRange [GOOD] Test command err: Trying to start YDB, gRPC: 26324, MsgBus: 28326 2025-04-04T17:24:59.394063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510281332573796:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:24:59.394119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00143f/r3tmp/tmpySAavY/pdisk_1.dat 2025-04-04T17:24:59.811233Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:24:59.819277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:24:59.819399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:24:59.823118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26324, node 1 2025-04-04T17:24:59.919033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:24:59.919065Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:24:59.919080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:24:59.919246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:28326 TClient is connected to server localhost:28326 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:00.575549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.599617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:25:00.618748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.772962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:00.975122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:01.048691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:02.878886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510294217477462:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:02.879031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.192385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.226954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.252782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.287323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.318174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.366063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:03.417606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510298512445272:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.417674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.418129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510298512445277:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:03.422439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:03.436188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510298512445279:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:03.503131Z node 1 :TX_PROXY ERROR: Actor# [1:7489510298512445334:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:04.396558Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510281332573796:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:04.396624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64443, MsgBus: 62049 2025-04-04T17:25:06.466907Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510311454730023:2069];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:06.466965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00143f/r3tmp/tmpwbgi8J/pdisk_1.dat 2025-04-04T17:25:06.624656Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:06.645510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:06.645597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:06.646921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64443, node 2 2025-04-04T17:25:06.718954Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:06.718981Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:06.718989Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:06.719122Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62049 TClient is connected to server localhost:62049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:25:07.176247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.192285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.266753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.428285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:07.510544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... e, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:26:53.103001Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:26:53.150675Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:26:53.223548Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:26:53.270820Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:26:53.316533Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:26:53.364957Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:26:53.412572Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14970, MsgBus: 62983 2025-04-04T17:26:56.783721Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7489510784756374464:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:56.783815Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00143f/r3tmp/tmps7tCne/pdisk_1.dat 2025-04-04T17:26:56.962235Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:56.978021Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-04-04T17:26:56.978166Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:56.979922Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14970, node 13 2025-04-04T17:26:57.047687Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:57.047726Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:57.047736Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:57.047941Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62983 TClient is connected to server localhost:62983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:57.833853Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:57.851193Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:57.929174Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:58.242723Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:58.351730Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:01.667379Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489510806231212717:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:01.667506Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:01.723932Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:01.783994Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7489510784756374464:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:01.784087Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:01.798791Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:01.847354Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:01.895247Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:01.939509Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:02.053945Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:02.110761Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489510810526180537:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:02.110875Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:02.110902Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7489510810526180542:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:02.116075Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:02.128892Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7489510810526180544:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:02.216480Z node 13 :TX_PROXY ERROR: Actor# [13:7489510810526180599:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:03.859210Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:27:03.972266Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.021723Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.075172Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.127051Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.168187Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.218514Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.269212Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.322775Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.372972Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 >> KqpScripting::NoAstSizeLimit [GOOD] >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 21008, MsgBus: 16112 2025-04-04T17:27:01.611045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510804193241585:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:01.611162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001964/r3tmp/tmpnCS1tY/pdisk_1.dat 2025-04-04T17:27:01.861997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:01.862182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:01.864291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:01.903146Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21008, node 1 2025-04-04T17:27:01.912840Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:01.945315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:01.945351Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:01.945360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:01.945501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16112 TClient is connected to server localhost:16112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:02.368593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.395779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.553339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.710452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.774422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:27:04.229692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510817078145239:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.229766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.406888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.433687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.459661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.490544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.518100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.547298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.603374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510817078145749:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.603429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.603460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510817078145754:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.606947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:04.617494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510817078145756:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:04.677280Z node 1 :TX_PROXY ERROR: Actor# [1:7489510817078145809:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:05.537665Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787625573, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12486, MsgBus: 62482 2025-04-04T17:27:06.116200Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510827694866879:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:06.116267Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001964/r3tmp/tmpMgrHVV/pdisk_1.dat 2025-04-04T17:27:06.210359Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12486, node 2 2025-04-04T17:27:06.258348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:06.258435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:06.259933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:06.265272Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:06.265290Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:06.265295Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:06.265410Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62482 TClient is connected to server localhost:62482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:06.648637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:27:08.433244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510836284802108:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:08.433318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:08.448973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:27:08.486722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510836284802226:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:08.486812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:08.541730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510836284802237:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:08.541807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:08.541939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510836284802242:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:08.544816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:27:08.551466Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510836284802244:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:27:08.652201Z node 2 :TX_PROXY ERROR: Actor# [2:7489510836284802296:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> CacheEviction::DeleteKeys [GOOD] >> PQCountersLabeled::Partition >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> KqpYql::CreateUseTable [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> KqpYql::InsertCVList+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] Test command err: 2025-04-04T17:27:02.706232Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:02.706350Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:02.722744Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:02.724068Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:27:02.000000Z 2025-04-04T17:27:02.724127Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\360\374\335\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\360\374\335\216\3402" StorageChannel: INLINE } CmdWrite { Key: 
"m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\360\374\335\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-04T17:27:03.092150Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\360\374\335\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-04-04T17:27:03.481956Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:03.482029Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:03.494858Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:03.496566Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:27:03.000000Z 2025-04-04T17:27:03.496631Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:04.074314Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:04.074384Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:04.090430Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:176:2191] 2025-04-04T17:27:04.091370Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:04.092104Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2025-04-04T17:27:04.092203Z node 3 :PERSQUEUE INFO: new Cookie owner1|c493016d-e3b5fd-200d2b4-d34d736_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:04.393002Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-04-04T17:27:04.831507Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:04.831563Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:04.843239Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:04.846125Z node 4 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-04T17:27:04.846178Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:05.146290Z node 4 :PERSQUEUE INFO: new Cookie owner1|6b874698-6dc716a-d07ba43f-6696d4d_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 2 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPa ... 
to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 6 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 7 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send write: 8 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Send write: 9 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Got write info response. 
Body keys: 0, head: 10, src id info: 1 2025-04-04T17:27:08.255692Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:08.255772Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:27:08.268232Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:08.270354Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:27:08.000000Z 2025-04-04T17:27:08.270405Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got batch complete: 1 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\340\253\336\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 6 Wait batch completion Wait kv request Wait tx committed for tx 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> KqpYql::InsertCV-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 65216, MsgBus: 21494 2025-04-04T17:27:01.852786Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510805769847124:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:01.852878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00195d/r3tmp/tmpsjar4t/pdisk_1.dat 2025-04-04T17:27:02.129838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:02.133689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:02.133806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:02.136261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65216, node 1 2025-04-04T17:27:02.198005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:02.198031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:02.198039Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:02.198163Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21494 TClient is connected to server
localhost:21494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:02.646709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.669697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.766580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.885323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:02.934603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.546332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510818654750786:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.546449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.767203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.795038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.822680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.850705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.884406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.914339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:04.988397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510818654751304:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.988489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.988548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510818654751309:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:04.991975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:05.001427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510818654751311:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:05.058570Z node 1 :TX_PROXY ERROR: Actor# [1:7489510822949718660:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 14858, MsgBus: 17793 2025-04-04T17:27:06.663567Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510825564747807:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:06.663634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00195d/r3tmp/tmpFIi02i/pdisk_1.dat 2025-04-04T17:27:06.782484Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:06.813366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:06.813510Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:06.815493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14858, node 2 2025-04-04T17:27:06.860821Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:06.860843Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:06.860849Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:06.860937Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17793 TClient is connected to server localhost:17793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:07.235214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:07.251577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:07.320869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
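The code 2009 rejection above appears to be the scenario KqpYql::CreateUseTable deliberately provokes: a single data query may not mix scheme and data operations. A minimal YQL sketch of the pattern, assuming a hypothetical `/Root/Demo` table (the real test uses its own fixture):

    -- Rejected with code 2009 when submitted as one data query:
    CREATE TABLE `/Root/Demo` (Key Uint64, Value Utf8, PRIMARY KEY (Key));
    UPSERT INTO `/Root/Demo` (Key, Value) VALUES (1u, "one");

Running the CREATE TABLE and the UPSERT as two separate queries, as the error message itself suggests, avoids the failure.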
2025-04-04T17:27:07.501002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:07.573867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:09.099834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510838449651466:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:09.099947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:09.128989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:09.151411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:09.175588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:09.199614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:09.223095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:09.251578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:09.285556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510838449651972:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:09.285651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:09.285750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510838449651977:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:09.288630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:09.296233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510838449651979:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:09.388096Z node 2 :TX_PROXY ERROR: Actor# [2:7489510838449652033:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:10.052285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:27:10.223979Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787630263, txId: 281474976715673] shutting down >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCV-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 5612, MsgBus: 4110 2025-04-04T17:27:03.162306Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510815682833541:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:03.163294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001954/r3tmp/tmpdQ4kLR/pdisk_1.dat 2025-04-04T17:27:03.439968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:03.446984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:03.447091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:03.450516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5612, node 1 2025-04-04T17:27:03.505386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:03.505417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:03.505424Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:03.505556Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4110 TClient is connected to server localhost:4110 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:03.921754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:03.947274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.075115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.226359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.289416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:05.847423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510824272769911:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:05.847526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.045798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.071011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.096632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.122652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.150490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.201324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.232288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510828567737717:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.232355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.232459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510828567737722:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.235922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:06.244777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510828567737724:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:06.345070Z node 1 :TX_PROXY ERROR: Actor# [1:7489510828567737779:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:06.984853Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-04T17:27:06.991867Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:27:06.992016Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:27:06.992257Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510828567738091:2500], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7489510828567738073:2500]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7489510828567738091:2500].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:27:06.992898Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510828567738083:2500], SessionActorId: [1:7489510828567738073:2500], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489510828567738073:2500]. isRollback=0 2025-04-04T17:27:06.993155Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGViMzUxODMtN2UwMTM4OGYtYmIxYjZmZjYtNDY0Y2Y2Yjc=, ActorId: [1:7489510828567738073:2500], ActorState: ExecuteState, TraceId: 01jr0xf4cz24qjstn4y599h0en, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489510828567738084:2500] from: [1:7489510828567738083:2500] 2025-04-04T17:27:06.993261Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489510828567738084:2500] TxId: 281474976710671. Ctx: { TraceId: 01jr0xf4cz24qjstn4y599h0en, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGViMzUxODMtN2UwMTM4OGYtYmIxYjZmZjYtNDY0Y2Y2Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:27:06.994173Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGViMzUxODMtN2UwMTM4OGYtYmIxYjZmZjYtNDY0Y2Y2Yjc=, ActorId: [1:7489510828567738073:2500], ActorState: ExecuteState, TraceId: 01jr0xf4cz24qjstn4y599h0en, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 19556, MsgBus: 12564 2025-04-04T17:27:07.710069Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510833091891073:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:07.710132Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001954/r3tmp/tmp8XTjKL/pdisk_1.dat 2025-04-04T17:27:07.807713Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:07.833973Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:07.834033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19556, node 2 2025-04-04T17:27:07.835295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:07.866891Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:07.866908Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:07.866913Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:07.866989Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12564 TClient is connected to server localhost:12564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:08.225798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:08.240939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:08.311425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
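The PRECONDITION_FAILED chain above shows a primary-key constraint violation end to end: the datashard rejects the prepare with STATUS_CONSTRAINT_VIOLATION, the compute actor surfaces it as KIKIMR_CONSTRAINT_VIOLATION, and the session returns code 2012 to the client. A minimal YQL sketch of the trigger, with hypothetical column names (the actual `/Root/Test` schema comes from the test fixture):

    -- The second INSERT reuses an existing primary key and fails with a
    -- code 2012 constraint violation (reported in this run as
    -- "Duplicate keys have been found."):
    INSERT INTO `/Root/Test` (Key, Value) VALUES (1u, "first");
    INSERT INTO `/Root/Test` (Key, Value) VALUES (1u, "second");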
2025-04-04T17:27:08.462199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:08.531665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:10.173742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510845976794749:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:10.173826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:10.201446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:10.224137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:10.246188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:10.286002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:10.309709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:10.337530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:10.374093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510845976795258:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:10.374172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:10.374278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510845976795263:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:10.377086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:10.384574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510845976795265:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:10.455115Z node 2 :TX_PROXY ERROR: Actor# [2:7489510845976795319:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:11.266299Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510850271762922:2501], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jr0xf8f11kkqfvcp5zzq5sm7. SessionId : ydb://session/3?node_id=2&id=YjQxNmYxNTAtM2JiZjg1NDUtY2I3ZTEyNTAtZGZkMjEwMjA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-04-04T17:27:11.266653Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510850271762924:2502], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjQxNmYxNTAtM2JiZjg1NDUtY2I3ZTEyNTAtZGZkMjEwMjA=. CustomerSuppliedId : . TraceId : 01jr0xf8f11kkqfvcp5zzq5sm7. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489510850271762919:2491], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:11.266964Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjQxNmYxNTAtM2JiZjg1NDUtY2I3ZTEyNTAtZGZkMjEwMjA=, ActorId: [2:7489510850271762871:2491], ActorState: ExecuteState, TraceId: 01jr0xf8f11kkqfvcp5zzq5sm7, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpYql::TestUuidDefaultColumn >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 12301, MsgBus: 9915 2025-04-04T17:27:04.063764Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510819751336929:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:04.063899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001952/r3tmp/tmpCXHy40/pdisk_1.dat 2025-04-04T17:27:04.344079Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12301, node 1 2025-04-04T17:27:04.418652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:04.418903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:04.418916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:04.419069Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:04.419990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:04.420134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:04.421887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9915 TClient is connected to server localhost:9915 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:04.834065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.854670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
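With the sink write path enabled (the -useSink variant above), the same class of violation is reported as "Conflict with existing key." rather than "Duplicate keys have been found.", but the user-visible contract is identical: INSERT refuses to overwrite an existing row and the query fails with code 2012. Where overwriting is the intent, a blind write avoids the error; a sketch with the same hypothetical columns:

    -- UPSERT (or REPLACE) succeeds where the conflicting INSERT fails:
    UPSERT INTO `/Root/Test` (Key, Value) VALUES (1u, "second");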
2025-04-04T17:27:04.967780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:05.135413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:05.198675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:06.484538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510828341273297:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.484655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.696353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.720774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.747238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.775145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.804257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.858322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.890465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510828341273805:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.890551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.890656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510828341273810:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.893523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:06.901922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510828341273812:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:06.998886Z node 1 :TX_PROXY ERROR: Actor# [1:7489510828341273866:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:07.970646Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787627995, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 65173, MsgBus: 11550 2025-04-04T17:27:08.722744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510835800347243:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:08.722792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001952/r3tmp/tmphqUscb/pdisk_1.dat 2025-04-04T17:27:08.815236Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65173, node 2 2025-04-04T17:27:08.855357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:08.855430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:08.856954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:08.882476Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:08.882497Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:08.882505Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:08.882616Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11550 TClient is connected to server localhost:11550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:09.227698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:27:09.244190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:09.313937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:09.435021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:09.487039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:11.287764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510848685250904:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:11.287852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:11.329983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.355382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.380019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.403950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.430516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.470652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.512954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510848685251412:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:11.513037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510848685251417:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:11.513053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:11.515665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:11.523478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510848685251419:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:11.581991Z node 2 :TX_PROXY ERROR: Actor# [2:7489510848685251473:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::UpdateBadType >> TColumnShardTestSchema::ForgetAfterFail [GOOD] >> KqpYql::UpdatePk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143788008.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123788008.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123786808.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-04-04T17:23:30.360569Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:30.463348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:30.493144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:30.493522Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:30.503431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:30.503681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:30.503954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:30.504090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:30.504220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:30.504404Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:30.504548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:30.504682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:30.504828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:30.504953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.505076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:30.505195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:30.546507Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:30.547024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:30.547236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:30.547721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.548055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:30.548245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:30.548355Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:30.548554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:30.548720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:30.548802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:30.548840Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:30.549337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:30.549456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:30.549516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:30.549572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:30.549766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:30.549895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:30.549974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:30.550013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:30.550117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:30.550167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:30.550211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:30.550293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:30.550352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:30.550410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:30.551355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=165; 2025-04-04T17:23:30.551602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=139; 2025-04-04T17:23:30.551814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=92; 2025-04-04T17:23:30.551966Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=67; 2025-04-04T17:23:30.552277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:30.552436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:30.552487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:30.552793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:23:30.552869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.552925Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:30.553152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:30.553214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:30.553259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:30.553531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;descriptio ... 
mn_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.356615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.356636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:27:14.356657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:27:14.356729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:27:14.356786Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.356806Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-04-04T17:27:14.356857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:213;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-04-04T17:27:14.356886Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:233;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-04-04T17:27:14.357016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1889:3863];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData 
[1:1890:3864]->[1:1889:3863] 2025-04-04T17:27:14.357101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:251;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.357167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.357234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.357315Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:104;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-04-04T17:27:14.357376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.357460Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:173;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.357500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1890:3864] finished for tablet 9437184 2025-04-04T17:27:14.357969Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:398;event=scan_finish;compute_actor_id=[1:1889:3863];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.01},{"events":["f_ack","l_task_result"],"t":0.707},{"events":["l_ProduceResults","f_Finish"],"t":0.711},{"events":["l_ack","l_processing","l_Finish"],"t":0.712}],"full":{"a":1743787633645498,"name":"_full_task","f":1743787633645498,"d_finished":0,"c":0,"l":1743787634357556,"d":712058},"events":[{"name":"bootstrap","f":1743787633645670,"d_finished":10529,"c":1,"l":1743787633656199,"d":10529},{"a":1743787634357302,"name":"ack","f":1743787634352966,"d_finished":3958,"c":7,"l":1743787634357250,"d":4212},{"a":1743787634357295,"name":"processing","f":1743787633656295,"d_finished":310964,"c":56,"l":1743787634357251,"d":311225},{"name":"ProduceResults","f":1743787633649674,"d_finished":12106,"c":65,"l":1743787634357481,"d":12106},{"a":1743787634357484,"name":"Finish","f":1743787634357484,"d_finished":0,"c":0,"l":1743787634357556,"d":72},{"name":"task_result","f":1743787633656318,"d_finished":306016,"c":49,"l":1743787634352842,"d":306016}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-04-04T17:27:14.358053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:350;event=send_data;compute_actor_id=[1:1889:3863];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-04-04T17:27:14.358490Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:355;event=scan_finished;compute_actor_id=[1:1889:3863];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.01},{"events":["f_ack","l_task_result"],"t":0.707},{"events":["l_ProduceResults","f_Finish"],"t":0.711},{"events":["l_ack","l_processing","l_Finish"],"t":0.712}],"full":{"a":1743787633645498,"name":"_full_task","f":1743787633645498,"d_finished":0,"c":0,"l":1743787634358094,"d":712596},"events":[{"name":"bootstrap","f":1743787633645670,"d_finished":10529,"c":1,"l":1743787633656199,"d":10529},{"a":1743787634357302,"name":"ack","f":1743787634352966,"d_finished":3958,"c":7,"l":1743787634357250,"d":4750},{"a":1743787634357295,"name":"processing","f":1743787633656295,"d_finished":310964,"c":56,"l":1743787634357251,"d":311763},{"name":"ProduceResults","f":1743787633649674,"d_finished":12106,"c":65,"l":1743787634357481,"d":12106},{"a":1743787634357484,"name":"Finish","f":1743787634357484,"d_finished":0,"c":0,"l":1743787634358094,"d":610},{"name":"task_result","f":1743787633656318,"d_finished":306016,"c":49,"l":1743787634352842,"d":306016}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1890:3864]->[1:1889:3863] 2025-04-04T17:27:14.358560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-04-04T17:27:13.645027Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-04-04T17:27:14.358586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-04-04T17:27:14.358765Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1890:3864];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:82;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 >> KqpYql::InsertCVList+useSink [GOOD] >> KqpYql::InsertCVList-useSink >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::YellowGroupIssueOnYellowSpace >> KqpYql::TestUuidDefaultColumn [GOOD] >> HttpRequest::Status >> 
KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23103, MsgBus: 27533 2025-04-04T17:27:13.094930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510855538224886:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:13.095055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001949/r3tmp/tmpTpP0t3/pdisk_1.dat 2025-04-04T17:27:13.341112Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23103, node 1 2025-04-04T17:27:13.412924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:13.412947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:13.412957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:13.413116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:13.438538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:13.438654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:13.440224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27533 TClient is connected to server localhost:27533 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:13.853779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:15.330289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510864128160144:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:15.330424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:15.554597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:27:15.661170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510864128160246:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:15.661266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:15.661296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510864128160251:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:15.664776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:27:15.672978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510864128160253:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:27:15.770022Z node 1 :TX_PROXY ERROR: Actor# [1:7489510864128160304:2392] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TPQTest::TestPQReadAhead [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 10743, MsgBus: 10953 2025-04-04T17:27:03.518986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510815103696043:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:03.519053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001953/r3tmp/tmpSjf1Y1/pdisk_1.dat 2025-04-04T17:27:03.801411Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10743, node 1 2025-04-04T17:27:03.879781Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:03.879810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:03.879816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:03.879984Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:03.910305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:03.910410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:03.912172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10953 TClient is connected to server localhost:10953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:04.325717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:27:04.347487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.482337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.627190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.702674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:05.864945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510823693632426:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:05.865062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.075904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.101683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.128955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.155957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.181948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.248931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.322512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510827988600239:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.322569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510827988600244:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.322580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.325434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:06.332741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510827988600246:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:06.389400Z node 1 :TX_PROXY ERROR: Actor# [1:7489510827988600298:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:07.985141Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510827988600546:2486] 2025-04-04T17:27:07.985925Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489510832283567907:2492] TxId: 281474976710672. Ctx: { TraceId: 01jr0xf4f8dek0vf91h91qhm4n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:27:07.986150Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=, ActorId: [1:7489510832283567856:2492], ActorState: ExecuteState, TraceId: 01jr0xf4f8dek0vf91h91qhm4n, Create QueryResponse for error on request, msg: 2025-04-04T17:27:07.992074Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787627183, txId: 281474976710671] shutting down 2025-04-04T17:27:07.992141Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510832283567912:2496], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=. CustomerSuppliedId : . TraceId : 01jr0xf4f8dek0vf91h91qhm4n. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510832283567907:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:07.992171Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510832283567918:2501], TxId: 281474976710672, task: 6. Ctx: { SessionId : ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=. TraceId : 01jr0xf4f8dek0vf91h91qhm4n. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489510832283567907:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:07.992483Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510832283567919:2502], TxId: 281474976710672, task: 7. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=. TraceId : 01jr0xf4f8dek0vf91h91qhm4n. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510832283567907:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:07.992523Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510832283567913:2497], TxId: 281474976710672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0xf4f8dek0vf91h91qhm4n. SessionId : ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510832283567907:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:07.992598Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510832283567920:2503], TxId: 281474976710672, task: 8. Ctx: { CustomerSuppliedId : . TraceId : 01jr0xf4f8dek0vf91h91qhm4n. SessionId : ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510832283567907:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:07.992701Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510832283567921:2504], TxId: 281474976710672, task: 9. Ctx: { TraceId : 01jr0xf4f8dek0vf91h91qhm4n. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510832283567907:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:07.992848Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510832283567915:2498], TxId: 281474976710672, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2RkNTM1YTYtMzE2NDhlOTEtNTJjMjQyNTUtY2U4Nzc2YzM=. TraceId : 01jr0xf4f8dek0vf91h91qhm4n. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510832283567907:24 ... 63Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:11.952779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.975574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.998740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:12.022790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:12.047022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:12.113108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:12.185376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510853282579599:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:12.185456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510853282579605:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:12.185499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:12.188024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:12.195211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510853282579607:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:12.294863Z node 2 :TX_PROXY ERROR: Actor# [2:7489510853282579661:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:12.942699Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510853282579907:2486] 2025-04-04T17:27:12.948298Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510853282579913:2489] 2025-04-04T17:27:12.960309Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510853282579930:2495] 2025-04-04T17:27:12.975335Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510853282579941:2499] 2025-04-04T17:27:13.008097Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510853282579951:2503] 2025-04-04T17:27:13.017512Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547262:2509] 2025-04-04T17:27:13.039355Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547276:2515] 2025-04-04T17:27:13.067182Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547289:2519] 2025-04-04T17:27:13.097330Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547315:2525] 2025-04-04T17:27:13.130403Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547329:2531] 2025-04-04T17:27:13.166459Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547345:2537] 2025-04-04T17:27:13.206353Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547360:2543] 2025-04-04T17:27:13.248396Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547378:2549] 2025-04-04T17:27:13.293323Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547409:2557] 2025-04-04T17:27:13.342568Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547425:2563] 2025-04-04T17:27:13.393318Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547438:2567] 2025-04-04T17:27:13.450513Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547459:2575] 2025-04-04T17:27:13.505281Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547493:2584] 2025-04-04T17:27:13.565002Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547508:2590] 2025-04-04T17:27:13.628441Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547545:2599] 2025-04-04T17:27:13.694427Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547561:2605] 2025-04-04T17:27:13.763794Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547588:2615] 2025-04-04T17:27:13.836441Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547615:2621] 2025-04-04T17:27:13.913556Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547638:2630] 2025-04-04T17:27:13.914379Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489510857577547683:2635] TxId: 281474976715672. Ctx: { TraceId: 01jr0xfb5gfhfw1s281gnjc9jy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjJkMGYzMTEtMzI0OWUxNDktZDBjN2RmMDYtZDFkNzg1MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:27:13.915842Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjJkMGYzMTEtMzI0OWUxNDktZDBjN2RmMDYtZDFkNzg1MDk=, ActorId: [2:7489510857577547653:2635], ActorState: ExecuteState, TraceId: 01jr0xfb5gfhfw1s281gnjc9jy, Create QueryResponse for error on request, msg: 2025-04-04T17:27:13.916458Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787633952, txId: 281474976715671] shutting down 2025-04-04T17:27:13.916873Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510857577547692:2640], TxId: 281474976715672, task: 4. Ctx: { TraceId : 01jr0xfb5gfhfw1s281gnjc9jy. SessionId : ydb://session/3?node_id=2&id=ZjJkMGYzMTEtMzI0OWUxNDktZDBjN2RmMDYtZDFkNzg1MDk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489510857577547683:2635], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:13.917349Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510857577547690:2638], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0xfb5gfhfw1s281gnjc9jy. SessionId : ydb://session/3?node_id=2&id=ZjJkMGYzMTEtMzI0OWUxNDktZDBjN2RmMDYtZDFkNzg1MDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489510857577547683:2635], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:13.917560Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510857577547693:2641], TxId: 281474976715672, task: 5. Ctx: { TraceId : 01jr0xfb5gfhfw1s281gnjc9jy. SessionId : ydb://session/3?node_id=2&id=ZjJkMGYzMTEtMzI0OWUxNDktZDBjN2RmMDYtZDFkNzg1MDk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489510857577547683:2635], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:13.918384Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510857577547691:2639], TxId: 281474976715672, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jr0xfb5gfhfw1s281gnjc9jy. SessionId : ydb://session/3?node_id=2&id=ZjJkMGYzMTEtMzI0OWUxNDktZDBjN2RmMDYtZDFkNzg1MDk=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7489510857577547683:2635], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:13.991381Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547746:2646] 2025-04-04T17:27:14.064655Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787634099, txId: 281474976715674] shutting down 2025-04-04T17:27:14.072182Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510857577547778:2655] 2025-04-04T17:27:14.157277Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510861872515175:2670] 2025-04-04T17:27:14.245423Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510861872515197:2679] 2025-04-04T17:27:14.324224Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787634358, txId: 281474976715676] shutting down 2025-04-04T17:27:14.430484Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510861872515327:2703] 2025-04-04T17:27:14.527430Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510861872515348:2712] 2025-04-04T17:27:14.628815Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510861872515362:2718] 2025-04-04T17:27:14.677917Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510840397675429:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:14.677992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:14.707628Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787634743, txId: 281474976715678] shutting down 2025-04-04T17:27:14.830400Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510861872515488:2743] 2025-04-04T17:27:14.939281Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510861872515510:2752] 2025-04-04T17:27:15.031752Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787635065, txId: 281474976715680] shutting down 2025-04-04T17:27:15.160517Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167482932:2777] 2025-04-04T17:27:15.278814Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167482959:2789] 2025-04-04T17:27:15.399464Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167482988:2798] 2025-04-04T17:27:15.523694Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167483012:2807] 2025-04-04T17:27:15.650534Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167483043:2816] 2025-04-04T17:27:15.780852Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167483064:2825] 2025-04-04T17:27:15.913464Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167483089:2836] 2025-04-04T17:27:16.049453Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510866167483112:2846] 2025-04-04T17:27:16.188604Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510870462450430:2855] 2025-04-04T17:27:16.330682Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7489510870462450451:2864] 2025-04-04T17:27:16.404528Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787636437, txId: 281474976715682] shutting down >> Cdc::RenameTable [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> 
TPQTest::TestPQReadAhead [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:25:07.920384Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:07.920496Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:07.943096Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:07.964706Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-04-04T17:25:07.965807Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:25:07.968394Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:07.980113Z node 1 :PERSQUEUE INFO: new Cookie default|b00ca268-41a095a7-22504944-6995c419_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:176:2191] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR 
Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:176:2191] Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured 
TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEve ... to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [61:174:2189] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [61:251:2252] sender: [61:356:2057] recipient: [61:14:2061] 2025-04-04T17:27:15.521553Z node 61 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 12 partno 2 count 8 parts 15 size 7877895 Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:102:2057] recipient: [62:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:102:2057] recipient: [62:100:2134] Leader for TabletID 72057594037927937 is [62:106:2138] sender: [62:107:2057] recipient: [62:100:2134] 2025-04-04T17:27:16.050386Z node 62 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:16.050464Z node 62 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [62:148:2057] recipient: [62:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [62:148:2057] recipient: [62:146:2169] Leader for TabletID 72057594037927938 is [62:152:2173] sender: [62:153:2057] recipient: [62:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [62:106:2138] sender: [62:178:2057] recipient: [62:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:16.076748Z node 62 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:16.077647Z node 62 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 62 actor [62:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 62 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 62 ReadRuleGenerations: 62 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 62 Important: false } Consumers { Name: "aaa" Generation: 62 Important: true } 2025-04-04T17:27:16.078319Z node 62 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [62:184:2197] 2025-04-04T17:27:16.080462Z node 62 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [62:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:16.082592Z node 62 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [62:185:2198] 2025-04-04T17:27:16.084047Z node 62 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [62:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:16.118580Z node 62 :PERSQUEUE INFO: new Cookie default|c5142cbe-f5b81c8f-2cdb790-7f1595dd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [62:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [62:176:2191] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:102:2057] recipient: [63:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:102:2057] recipient: [63:100:2134] Leader for TabletID 72057594037927937 is [63:106:2138] sender: [63:107:2057] recipient: [63:100:2134] 2025-04-04T17:27:17.096119Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:17.096182Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [63:148:2057] recipient: [63:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [63:148:2057] recipient: [63:146:2169] Leader for TabletID 72057594037927938 is [63:152:2173] sender: [63:153:2057] recipient: [63:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [63:106:2138] sender: [63:178:2057] recipient: [63:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:17.112461Z node 63 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:17.113181Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 63 actor [63:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 
} ReadRuleGenerations: 63 ReadRuleGenerations: 63 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 63 Important: false } Consumers { Name: "aaa" Generation: 63 Important: true } 2025-04-04T17:27:17.113698Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [63:184:2197] 2025-04-04T17:27:17.115918Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [63:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:17.118319Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [63:185:2198] 2025-04-04T17:27:17.119946Z node 63 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [63:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:17.153073Z node 63 :PERSQUEUE INFO: new Cookie default|f48c5441-beec56d5-28b03eb5-949e88d1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 4 Count: 10 Bytes: 104857600 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 1 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 2 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [63:176:2191] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 3 Count: 2147483647 Bytes: 102400 } Cookie: 123 } via pipe: [63:176:2191] >> KqpYql::UpdateBadType [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> 
TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 21978, MsgBus: 1225 2025-04-04T17:27:02.962573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510809233969294:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:02.962660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00195c/r3tmp/tmp4DNTLK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21978, node 1 2025-04-04T17:27:03.246172Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:03.246199Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:03.256139Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:03.281071Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:03.281100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:03.281108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:03.281236Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:03.336919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:03.337040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:03.338956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1225 TClient is connected to server localhost:1225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:03.702791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:03.723127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
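A note on the repeated ESchemeOpCreateTable proposals in these startup sequences: the KqpYql suites create their fixture tables through YQL DDL before running the case under test. The exact fixture DDL is not captured in this log; the sketch below is a hypothetical reconstruction of the `/Root/Test` fixture, with column types inferred from the UpdateBadType and UpdatePk traces further down (`Amount` is expected to convert to `Uint64`, and `Group` is rejected as a primary key column).

```yql
-- Hypothetical reconstruction of the /Root/Test fixture; column types are
-- inferred from the error traces later in this log, not from the actual
-- fixture DDL, which is not shown here.
CREATE TABLE `/Root/Test` (
    Group Uint32,
    Name String,
    Amount Uint64,
    Comment String,
    PRIMARY KEY (Group, Name)
);
```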
2025-04-04T17:27:03.865344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.009038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:04.067891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:05.676431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510822118872972:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:05.676553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:05.897000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:05.922903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:05.950303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:05.980186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.009085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.037612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:06.073436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510826413840778:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.073504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.073506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510826413840783:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:06.076741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:06.084225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510826413840785:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:06.152415Z node 1 :TX_PROXY ERROR: Actor# [1:7489510826413840839:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:06.798489Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841085:2486] 2025-04-04T17:27:06.815080Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841097:2491] 2025-04-04T17:27:06.827500Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841108:2495] 2025-04-04T17:27:06.843334Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841120:2499] 2025-04-04T17:27:06.872076Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841132:2503] 2025-04-04T17:27:06.885468Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841144:2508] 2025-04-04T17:27:06.909360Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841164:2515] 2025-04-04T17:27:06.937041Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841177:2519] 2025-04-04T17:27:06.967405Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841204:2525] 2025-04-04T17:27:07.001270Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510826413841221:2531] 2025-04-04T17:27:07.037280Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808534:2538] 2025-04-04T17:27:07.076278Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808553:2544] 2025-04-04T17:27:07.118430Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808570:2550] 2025-04-04T17:27:07.163198Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808599:2557] 2025-04-04T17:27:07.211242Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808618:2564] 2025-04-04T17:27:07.262216Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808632:2568] 2025-04-04T17:27:07.316115Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808673:2579] 2025-04-04T17:27:07.373233Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808689:2585] 2025-04-04T17:27:07.433522Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808715:2594] 2025-04-04T17:27:07.497472Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808744:2600] 2025-04-04T17:27:07.564505Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808769:2609] 2025-04-04T17:27:07.634372Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808781:2613] 2025-04-04T17:27:07.706335Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808822:2624] 2025-04-04T17:27:07.781397Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808851:2634] 2025-04-04T17:27:07.860299Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808880:2640] 2025-04-04T17:27:07.941349Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808902:2649] 2025-04-04T17:27:07.982403Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510809233969294:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:07.984141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:08.025860Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510830708808924:2658] 2025-04-04T17:27:08.026054Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489510835003776262:2664] TxId: 281474976710672. Ctx: { TraceId: 01jr0xf5d9fjfdtb219y20k39k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVmMGQ4MmUtYTczMThjNS0yOWVhNWY4LTdjODY0NzMw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:27:08.027386Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510835003776272:2669], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jr0xf5d9fjfdtb219y20k39k. SessionId : ydb://session/3?node_id=1&id=ZjVmMGQ4MmUtYTczMThjNS0yOWVhNWY4LTdjODY0NzMw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510835003776262:2664], status: ABORTED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:08.027709Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjVmMGQ4MmUtYTczMThjNS0 ... 9675, txId: 281474976710681] shutting down 2025-04-04T17:27:09.644157Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510839298744258:2827] 2025-04-04T17:27:09.777281Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7489510839298744368:2845] 2025-04-04T17:27:09.895102Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787629934, txId: 281474976710683] shutting down 2025-04-04T17:27:10.029784Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787630067, txId: 281474976710685] shutting down 2025-04-04T17:27:10.159700Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787630200, txId: 281474976710687] shutting down Trying to start YDB, gRPC: 14804, MsgBus: 64604 2025-04-04T17:27:10.839103Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510843233739477:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:10.839166Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00195c/r3tmp/tmpwdqE6d/pdisk_1.dat 2025-04-04T17:27:10.897694Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14804, node 2 2025-04-04T17:27:10.948017Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:10.948040Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:10.948044Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:10.948147Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:10.958460Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:10.958567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:10.960337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64604 TClient is connected to server localhost:64604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:27:11.214160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:11.222903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:11.290166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:11.403193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:11.472904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:13.038207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510856118643145:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:13.038290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:13.053266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:13.117561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:13.144675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:13.170822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:13.198156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:13.265276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:13.302372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510856118643659:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:13.302473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:13.302475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510856118643664:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:13.305585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:13.314291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510856118643666:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:13.392813Z node 2 :TX_PROXY ERROR: Actor# [2:7489510856118643720:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:14.237513Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTAzZTEyZjUtNGNmMTFlOTAtMzYzNGViYTQtY2YxNzAyZDQ=, ActorId: [2:7489510860413611279:2490], ActorState: ExecuteState, TraceId: 01jr0xfbhmb1ba3zmcx6dv4ybc, Create QueryResponse for error on request, msg: 2025-04-04T17:27:14.302213Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWU4NWQ4NWYtNjg0MmY5OTMtNmFjZDkwMjctODFmOTFjMTU=, ActorId: [2:7489510860413611316:2504], ActorState: ExecuteState, TraceId: 01jr0xfbjsdba089nhhgfq3f76, Create QueryResponse for error on request, msg: 2025-04-04T17:27:14.388662Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTBlNjNmMDktM2Y5N2YwZjItNTRmNDcwZjMtOTMzMmQ1M2M=, ActorId: [2:7489510860413611360:2522], ActorState: ExecuteState, TraceId: 01jr0xfbnd99cefym2jrykrf3h, Create QueryResponse for error on request, msg: 2025-04-04T17:27:14.477425Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTZkMTYyMGEtYTg2OTE1YzktZDZlYjAxZDEtNzE4MDc5YmM=, ActorId: [2:7489510860413611416:2540], ActorState: ExecuteState, TraceId: 01jr0xfbr92g12yp8am7g8z610, Create QueryResponse for error on request, msg: 2025-04-04T17:27:15.143630Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7489510864708579025:2635] TxId: 281474976715672. Ctx: { TraceId: 01jr0xfcc33mj5mn1m3wy3nxhv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmE2NzFiMWEtYzI5ZDM0ZjQtNzJiY2FlMTMtNzExMWYxYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-04-04T17:27:15.143855Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmE2NzFiMWEtYzI5ZDM0ZjQtNzJiY2FlMTMtNzExMWYxYzE=, ActorId: [2:7489510864708578997:2635], ActorState: ExecuteState, TraceId: 01jr0xfcc33mj5mn1m3wy3nxhv, Create QueryResponse for error on request, msg: 2025-04-04T17:27:15.144083Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787635184, txId: 281474976715671] shutting down 2025-04-04T17:27:15.369220Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787635408, txId: 281474976715674] shutting down 2025-04-04T17:27:15.548340Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787635576, txId: 281474976715676] shutting down 2025-04-04T17:27:15.837649Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787635870, txId: 281474976715678] shutting down 2025-04-04T17:27:15.839486Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510843233739477:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:15.839537Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:16.055344Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGJmNDJiNTYtNTUwMTRiZGYtYmM4OGI0MjUtMmE4NTgwYTM=, ActorId: [2:7489510864708579498:2737], ActorState: ExecuteState, TraceId: 01jr0xfd7k86ykem51x4ssgnbw, Create QueryResponse for error on request, msg: 2025-04-04T17:27:16.330885Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787636367, txId: 281474976715680] shutting down 2025-04-04T17:27:17.438803Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787637473, txId: 281474976715682] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 31516, MsgBus: 63950 2025-04-04T17:27:14.222126Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510863399511811:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:14.222190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001941/r3tmp/tmpgZrIpi/pdisk_1.dat 2025-04-04T17:27:14.455509Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31516, node 1 2025-04-04T17:27:14.518718Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:14.518743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:14.518751Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:14.518929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:14.571384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:14.571494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:14.573250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63950 TClient is connected to server localhost:63950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:14.889757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:14.903930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:14.996702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:15.113673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:15.170877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:16.804432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510871989448190:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:16.804537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:16.977592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:17.007558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:17.035213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:17.063560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:17.090939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:17.123856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:17.167161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510876284415996:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:17.167218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510876284416001:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:17.167266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:17.170703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:17.180232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510876284416003:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:17.273818Z node 1 :TX_PROXY ERROR: Actor# [1:7489510876284416058:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional<String> to Optional<Uint64>
:3:20: Error: Row type mismatch for table: db.[/Root/Test] >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpYql::UpdatePk [GOOD] >> KqpYql::FlexibleTypes >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] >> KqpYql::InsertCVList-useSink [GOOD] >> TPQTest::TestPartitionWriteQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 19864, MsgBus: 9316 2025-04-04T17:27:15.556671Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510865544094625:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:15.556794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00193c/r3tmp/tmpAJ7f1r/pdisk_1.dat 2025-04-04T17:27:15.778994Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19864, node 1 2025-04-04T17:27:15.851136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:15.851159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:15.851168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:15.851253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:15.894741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:15.894870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:15.897045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9316 TClient is connected to server localhost:9316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:16.298977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
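On the KqpYql::UpdateBadType trace above: the test passes precisely because KiUpdateTable! rejects a value whose type cannot be converted to the column type. The statement below is a hypothetical reconstruction (the test's actual query text is not in this log) of an UPDATE that yields that exact conversion chain against `/Root/Test`, assuming `Amount` is declared `Uint64`:

```yql
-- Hypothetical reconstruction: the String? literal cannot convert to the
-- Uint64? column, producing "Failed to convert type:
-- Struct<'Amount':String?> to Struct<'Amount':Uint64?>" and the row type
-- mismatch reported in the trace above.
UPDATE `/Root/Test`
SET Amount = "one hundred"
WHERE Group = 1;
```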
2025-04-04T17:27:16.325391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:16.481387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:16.610936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:16.673312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:17.843260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510874134031006:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:17.843381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.031813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.055808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.078271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.104068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.125660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.192227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.229433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510878428998815:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.229505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.229737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510878428998820:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.233471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:18.241592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510878428998822:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:18.341282Z node 1 :TX_PROXY ERROR: Actor# [1:7489510878428998876:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:20: Warning: At function: AsStruct
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional<Uint32> and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group >> KqpYql::NonStrictDml ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 7250, MsgBus: 16302 2025-04-04T17:27:11.667608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510847004134610:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:11.667654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00194b/r3tmp/tmptIQmaE/pdisk_1.dat 2025-04-04T17:27:11.900351Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7250, node 1 2025-04-04T17:27:11.953473Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:11.953508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:11.953516Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:11.953629Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:11.995427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:11.995520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:11.997149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16302 TClient is connected to server localhost:16302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:12.333294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:12.355021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:12.474597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
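On the KqpYql::UpdatePk trace above: KiUpdateTable! refuses to assign to any column of the primary key, and arithmetic over an optional unsigned column additionally triggers the implicit-bitcast warning seen in the trace. A minimal hypothetical reconstruction (again, the test's actual query is not shown in this log):

```yql
-- Hypothetical reconstruction: Group is assumed to be part of the primary
-- key of /Root/Test, so the assignment fails with "Cannot update primary
-- key column: Group"; Group + 1 mixes an optional unsigned column with an
-- Int32 literal, which produces the implicit bitcast warning (code 1107).
UPDATE `/Root/Test`
SET Group = Group + 1
WHERE Name = "Anna";
```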
2025-04-04T17:27:12.604969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:12.653157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:14.024517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510859889038269:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:14.024662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:14.271182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:14.299407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:14.326468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:14.353818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:14.380871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:14.447756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:14.481824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510859889038783:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:14.481892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:14.481922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510859889038788:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:14.485263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:14.495166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510859889038790:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:14.552505Z node 1 :TX_PROXY ERROR: Actor# [1:7489510859889038844:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:15.360693Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-04-04T17:27:15.368199Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:27:15.368364Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-04-04T17:27:15.368553Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510864184006460:2500], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7489510864184006442:2500]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7489510864184006460:2500].{
: Error: Duplicate keys have been found., code: 2012 } 2025-04-04T17:27:15.369022Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510864184006453:2500], SessionActorId: [1:7489510864184006442:2500], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7489510864184006442:2500]. isRollback=0 2025-04-04T17:27:15.369204Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTA3OTk5MTMtMTU4YzE5MDYtY2M0MTc0YTQtYWMyZTQ1Njk=, ActorId: [1:7489510864184006442:2500], ActorState: ExecuteState, TraceId: 01jr0xfcjrb0gv0rzxkscan843, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7489510864184006454:2500] from: [1:7489510864184006453:2500] 2025-04-04T17:27:15.369268Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489510864184006454:2500] TxId: 281474976715671. Ctx: { TraceId: 01jr0xfcjrb0gv0rzxkscan843, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA3OTk5MTMtMTU4YzE5MDYtY2M0MTc0YTQtYWMyZTQ1Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-04-04T17:27:15.370040Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTA3OTk5MTMtMTU4YzE5MDYtY2M0MTc0YTQtYWMyZTQ1Njk=, ActorId: [1:7489510864184006442:2500], ActorState: ExecuteState, TraceId: 01jr0xfcjrb0gv0rzxkscan843, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 21118, MsgBus: 11614 2025-04-04T17:27:15.922213Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510867380332694:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:15.922369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00194b/r3tmp/tmpFcFykD/pdisk_1.dat 2025-04-04T17:27:15.997912Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21118, node 2 2025-04-04T17:27:16.048035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:16.048116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:16.049573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:16.059511Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:16.059542Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:16.059551Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:16.059657Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11614 TClient is connected to server localhost:11614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:16.438426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:16.455203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:16.528112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
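The PRECONDITION_FAILED chain above is the write sink reporting issue code 2012 (a constraint violation) when a single statement supplies the same primary key twice. A sketch under the same assumed `/Root/Test` schema as in the earlier note:

    -- Both rows share the primary key (1, "Anna"):
    INSERT INTO `/Root/Test` (`Group`, Name, Amount) VALUES
        (1u, "Anna", 3500ul),
        (1u, "Anna", 300ul);
    -- Fails with: Constraint violated. Table: `/Root/Test`.
    --             Duplicate keys have been found., code: 2012

Note the wording difference further down: the second node's run goes through the non-sink compute path, which phrases the same violation as "Duplicated keys found." An UPSERT of the same rows would typically merge rather than fail, which is presumably why this test uses INSERT.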
2025-04-04T17:27:16.673074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:16.743875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:18.453565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510880265236353:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.453643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.487069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.511450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.536489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.559872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.607162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.674107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:18.714439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510880265236868:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.714517Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510880265236873:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.714530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:18.718125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:18.726090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510880265236875:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:18.810043Z node 2 :TX_PROXY ERROR: Actor# [2:7489510880265236928:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:19.736741Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510884560204541:2501], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NTAyMzFhYWMtMWRlZDk2OTMtYjZjZDEyMDMtZTc2YWMxM2M=. CustomerSuppliedId : . TraceId : 01jr0xfgpecba7eqn7gt36xtek. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-04-04T17:27:19.736987Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7489510884560204543:2502], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jr0xfgpecba7eqn7gt36xtek. SessionId : ydb://session/3?node_id=2&id=NTAyMzFhYWMtMWRlZDk2OTMtYjZjZDEyMDMtZTc2YWMxM2M=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7489510884560204538:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-04-04T17:27:19.737278Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTAyMzFhYWMtMWRlZDk2OTMtYjZjZDEyMDMtZTc2YWMxM2M=, ActorId: [2:7489510884560204488:2492], ActorState: ExecuteState, TraceId: 01jr0xfgpecba7eqn7gt36xtek, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> KqpYql::ColumnNameConflict >> THealthCheckTest::Issues100GroupsListing >> THealthCheckTest::YellowGroupIssueOnYellowSpace [GOOD] >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::FromBytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 14914, MsgBus: 21998 2025-04-04T17:27:19.757082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510881796521445:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:19.757180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001938/r3tmp/tmpGb1aYy/pdisk_1.dat 2025-04-04T17:27:20.003916Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14914, node 1 2025-04-04T17:27:20.069200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:20.069243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:20.069252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:20.069353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:20.103737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:20.103868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:20.105516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21998 TClient is connected to server localhost:21998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
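The block above starts KqpYql::TestUuidPrimaryKeyPrefixSearch, which exercises lookups on a Uuid primary key. A sketch of the pattern; the table name and literal value are hypothetical:

    CREATE TABLE `/Root/test_uuid_pk` (
        Key Uuid,
        Value Int32,
        PRIMARY KEY (Key)
    );
    COMMIT;
    -- Equality on the leading PK component resolves to a narrow range read.
    SELECT Value FROM `/Root/test_uuid_pk`
    WHERE Key = Uuid("5b99a330-04ef-4f1a-9b64-ba6d5f44ea01");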
2025-04-04T17:27:20.445997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:22.220491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510894681423999:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.220591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.420968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:27:22.561107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510894681424101:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.561188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510894681424106:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.561198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.564853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:27:22.574178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510894681424108:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:27:22.642028Z node 1 :TX_PROXY ERROR: Actor# [1:7489510894681424159:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> KqpYql::NonStrictDml [GOOD] >> KqpYql::JsonNumberPrecision >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch >> KqpYql::RefSelect >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> KqpYql::FromBytes [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 15642, MsgBus: 7525 2025-04-04T17:27:20.064791Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510885703725914:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:20.064859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00192e/r3tmp/tmpa2CUZf/pdisk_1.dat 2025-04-04T17:27:20.323926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15642, node 1 2025-04-04T17:27:20.388597Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:20.388628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:20.388641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:20.388763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:20.388942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:20.389088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:20.390663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7525 TClient is connected to server localhost:7525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:20.858687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:20.878015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:20.974110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:21.102743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:21.176247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:22.395826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510894293662304:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.395951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.635336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:22.662537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:22.690936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:22.717917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:22.746339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:22.833285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:22.869356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510894293662821:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.869454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.869522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510894293662826:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:22.872588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:22.880622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510894293662828:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:22.944828Z node 1 :TX_PROXY ERROR: Actor# [1:7489510894293662881:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3827, MsgBus: 12002 2025-04-04T17:27:24.309537Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510904998747972:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:24.309588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00192e/r3tmp/tmpjLWGaS/pdisk_1.dat 2025-04-04T17:27:24.372937Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3827, node 2 2025-04-04T17:27:24.411655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:24.411733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:24.413428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:24.436675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:24.436702Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:24.436725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:24.436827Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12002 TClient is connected to server localhost:12002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:24.785195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:24.802736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
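This block is the two-node run of KqpYql::FromBytes. The YQL builtin of the same name reinterprets a little-endian byte string as the requested integer type; the test's exact query is not visible in the log, so the following is only a sketch of the expression class involved:

    SELECT FromBytes("\xd2\x02\x96\x49\x00\x00\x00\x00", Uint64);  -- 1234567890
    SELECT FromBytes("\x01\x00", Uint16);                          -- 1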
2025-04-04T17:27:24.913019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:25.050590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:25.116738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:26.718907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510913588684321:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:26.719002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:26.751545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:26.775440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:26.799976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:26.826179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:26.849224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:26.878847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:26.914079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510913588684829:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:26.914128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:26.914172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510913588684834:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:26.917361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:26.924663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510913588684836:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:26.991930Z node 2 :TX_PROXY ERROR: Actor# [2:7489510913588684890:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks [GOOD] >> THealthCheckTest::TestTabletIsDead >> KqpYql::JsonNumberPrecision [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan >> KqpYql::ColumnTypeMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 22135, MsgBus: 17071 2025-04-04T17:27:21.017668Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510890731529577:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:21.017787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001928/r3tmp/tmpJQg81X/pdisk_1.dat 2025-04-04T17:27:21.308075Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22135, node 1 2025-04-04T17:27:21.375792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:21.375814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:21.375822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:21.375960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:21.394264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:21.394348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:21.396186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17071 TClient is connected to server localhost:17071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:27:21.783717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:21.808614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:21.931487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:22.058331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:22.110779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:23.243648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510899321465942:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:23.243759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:23.463904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:23.488772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:23.516413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:23.546806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:23.572518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:23.603716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:23.642012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510899321466449:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:23.642075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510899321466454:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:23.642075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:23.645054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:23.652877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510899321466456:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:23.716021Z node 1 :TX_PROXY ERROR: Actor# [1:7489510899321466510:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9393, MsgBus: 14808 2025-04-04T17:27:25.551954Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510908819629227:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:25.552054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001928/r3tmp/tmpolq7YH/pdisk_1.dat 2025-04-04T17:27:25.626546Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9393, node 2 2025-04-04T17:27:25.673854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:25.673936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:25.675597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:25.685499Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:25.685516Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:25.685521Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:25.685607Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14808 TClient is connected to server localhost:14808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:26.085743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:26.094382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
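The run above belongs to KqpYql::JsonNumberPrecision. The name points at a well-known JSON pitfall: numbers that travel through Double lose exactness past 2^53, so a large integer may not round-trip; what the test actually asserts is an assumption here. An illustrative probe:

    -- 1234567890123456789 exceeds 2^53; if the engine re-serializes the
    -- value through Double, the extracted string comes back rounded.
    SELECT JSON_VALUE(Json(@@{"n": 1234567890123456789}@@), "$.n");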
2025-04-04T17:27:26.164983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:26.300768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:26.374087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:28.031874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510921704532891:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.032025Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.047020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.071092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.094407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.117794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.141280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.169582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.208267Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510921704533397:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.208390Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.208420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510921704533402:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.212099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:28.221335Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510921704533404:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:28.299341Z node 2 :TX_PROXY ERROR: Actor# [2:7489510921704533458:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::RefSelect [GOOD] >> KqpYql::PgIntPrimaryKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 18333, MsgBus: 23115 2025-04-04T17:27:21.949021Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510892085573750:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:21.949462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001926/r3tmp/tmpcaYQr9/pdisk_1.dat 2025-04-04T17:27:22.231981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18333, node 1 2025-04-04T17:27:22.297078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:22.297102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:22.297111Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:22.297230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:22.318821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:22.318998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:22.320995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23115 TClient is connected to server localhost:23115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:22.727311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:27:22.760165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:22.906552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:23.039730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:23.094409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:24.345469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510904970477415:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:24.345560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:24.626117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:24.652552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:24.678074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:24.703058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:24.729943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:24.797539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:24.848945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510904970477930:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:24.849020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:24.849033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510904970477935:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:24.852454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:24.861568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510904970477937:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:24.945808Z node 1 :TX_PROXY ERROR: Actor# [1:7489510904970477990:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 1562, MsgBus: 10039 2025-04-04T17:27:26.355755Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510913008592152:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:26.355829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001926/r3tmp/tmpwAoZnb/pdisk_1.dat 2025-04-04T17:27:26.440703Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1562, node 2 2025-04-04T17:27:26.490784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:26.490873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:26.492623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:26.495373Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:26.495390Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:26.495396Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:26.495486Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10039 TClient is connected to server localhost:10039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:26.897978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:26.911648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:26.958288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:27.118032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
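
The ":7:30: Error: Duplicate column: Value" diagnostic above is part of this test's expected stderr: KiCreateTable rejects a table definition that names the same column twice. A minimal YQL sketch (hypothetical table name, not the test's actual DDL) that produces the same error:

    -- Hypothetical sketch; the repeated 'Value' column triggers "Duplicate column: Value".
    CREATE TABLE `/Root/BadTable` (
        Key Uint64,
        Value String,
        Value String,
        PRIMARY KEY (Key)
    );
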
2025-04-04T17:27:27.195218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:28.873571Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510921598528526:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.873655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.889627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.915044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.940649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.965737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:28.992910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.058946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.096290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510925893496335:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.096347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.096467Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510925893496340:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.098915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:29.105387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510925893496342:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:29.187644Z node 2 :TX_PROXY ERROR: Actor# [2:7489510925893496396:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:29.930339Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489510925893496656:2492], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2025-04-04T17:27:29.930614Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjgwNzc5M2EtNDNlMjVhODQtZWRiMzRlYzYtODlkNjU3NDQ=, ActorId: [2:7489510925893496647:2487], ActorState: ExecuteState, TraceId: 01jr0xftvvbzmvvcasnrxmkz5r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
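
The conversion errors above (echoed once more just below) say that the written row type Struct<'Key':Uint64,'Value':Uint64> cannot be converted to the table's column types Struct<'Key':Uint64?,'Value':String?>: the 'Value' literal is a Uint64 while the column expects an optional String. A minimal YQL sketch, with a hypothetical table, that ends in the same "code: 2031" issue chain:

    -- Hypothetical sketch; 'Value' is declared String, but the literal is Uint64.
    CREATE TABLE `/Root/Mismatch` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );
    UPSERT INTO `/Root/Mismatch` (Key, Value) VALUES
        (1u, 2u);   -- Uint64 is not convertible to Optional<String>
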
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanUpdatedRows |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] >> KqpScripting::SelectNullType >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2025-04-04T17:27:19.580860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:19.581062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:19.581143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000781/r3tmp/tmpXU5ahk/pdisk_1.dat 2025-04-04T17:27:19.937198Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19919, node 1 2025-04-04T17:27:20.149270Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:20.149320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:20.149352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:20.149805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:20.152125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:27:20.237261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:20.237389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:20.251132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6103 2025-04-04T17:27:20.751910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:23.314857Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:27:23.340155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:23.340240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:23.367591Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:23.369133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:23.585102Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.585622Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.586021Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.586139Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.586204Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.586358Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.586407Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.586480Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.586531Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:23.764874Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:23.764968Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:23.778437Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:23.927663Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:23.976817Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:27:23.976922Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:27:24.006917Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:27:24.007146Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:27:24.007403Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:27:24.007463Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:27:24.007542Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:27:24.007602Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:27:24.007671Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:27:24.007730Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:27:24.008868Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:27:24.037085Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:27:24.037229Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:27:24.043550Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:27:24.049064Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:27:24.049959Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:27:24.055033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:27:24.073637Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:27:24.073696Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:27:24.073770Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:27:24.087129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:27:24.095329Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:27:24.095534Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:27:24.310284Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:27:24.491062Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:27:24.582302Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:27:25.627572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:25.627765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:25.649270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:27:25.936908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:27:25.937215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:27:25.937536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:27:25.937624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:27:25.937702Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:27:25.937848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:27:25.937971Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:27:25.938061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:27:25.938140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:27:25.938229Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:27:25.938362Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:27:25.938436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:27:25.984617Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:27:25.984726Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process=T ... storeV1Chunks_V2;id=15; 2025-04-04T17:27:26.267300Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:27:26.267448Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:27:26.267492Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:27:26.759586Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.763897Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.770867Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.775467Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.780564Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.785067Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.789180Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.794192Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.798279Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:26.802962Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:28.012788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3177:3172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.023417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:28.027258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-04T17:27:28.603307Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.603878Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.604328Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.604852Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.605944Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.606459Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.606938Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.608017Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.608672Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:28.609453Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:29.395459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3822:3234], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.395652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.406798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-04T17:27:29.452693Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.453131Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.453589Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.454113Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.454599Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.455489Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.455844Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.456981Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.457335Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:29.457626Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000015s 2025-04-04T17:27:31.360536Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4117:4267] 2025-04-04T17:27:31.363026Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> TPQRBDescribes::PartitionLocations [GOOD] >> KqpYql::PgIntPrimaryKey [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket:
500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-04-04T17:27:24.499298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510903280504268:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:24.499351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:24.517432Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510904546485029:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:24.518176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:24.676930Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:24.686745Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001442/r3tmp/tmpkPdrcb/pdisk_1.dat 2025-04-04T17:27:24.834660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:24.834754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:24.837926Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:24.839183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:24.866557Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3994, node 1 2025-04-04T17:27:24.881137Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:24.881293Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:24.905018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:24.905157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:24.907969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:24.910514Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001442/r3tmp/yandexZJa2Rm.tmp 2025-04-04T17:27:24.910536Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001442/r3tmp/yandexZJa2Rm.tmp 2025-04-04T17:27:24.910752Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001442/r3tmp/yandexZJa2Rm.tmp 2025-04-04T17:27:24.910886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:24.957527Z INFO: TTestServer started on Port 12959 GrpcPort 3994 TClient is connected to server localhost:12959 PQClient connected to localhost:3994 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:25.146140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:27:25.184357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:27:27.228338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510916165407293:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:27.228446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510916165407312:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:27.228472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:27.232499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:27:27.237175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510916165407355:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:27.237353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:27.251270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510916165407322:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:27:27.426861Z node 1 :TX_PROXY ERROR: Actor# [1:7489510916165407404:2803] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:27.454648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:27.465354Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510916165407422:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:27.465695Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWUxODM1OTEtNmE5NjYxNzAtMzE0MTM4Ni00MzIxMmU4MQ==, ActorId: [1:7489510916165407290:2340], ActorState: ExecuteState, TraceId: 01jr0xfr7b683f3cn461yjyjsx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:27.467911Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:27.544579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:27.619420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:27:27.845259Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0xfrp69wsd98m8xacayb9d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGJhNmRjODctMmNiNTRiMWUtOTYzNTBkOWQtMjQwZGYxM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489510916165407779:3079] 2025-04-04T17:27:29.499264Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510903280504268:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:29.499320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:29.516644Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510904546485029:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:29.516698Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok PQ Client: create topic: rt3.dc1--topic with 5 partitions CallPersQueueGRPC request to localhost:3994 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-04T17:27:33.729209Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3994 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710679 SchemeShardTabletId: 72057594046644480 ... node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, NewState DELETING 2025-04-04T17:27:33.863398Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710679 2025-04-04T17:27:33.863449Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:27:33.864960Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:27:33.865005Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-04-04T17:27:33.865016Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, State DELETING 2025-04-04T17:27:33.865032Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710679 ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743787653867 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... 
(TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743787653867 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 3 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 4 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 5 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic" name rt3.dc1--topic version1 CallPersQueueGRPC request to localhost:3994 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-04T17:27:33.873485Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3994 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-04-04T17:27:34.378352Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 
13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743787653867 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... (TRUNCATED) 2025-04-04T17:27:34.383391Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7489510946230179620:3533] connected; active server actors: 1 2025-04-04T17:27:34.383653Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 1 2025-04-04T17:27:34.383681Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 1, Generation 1 2025-04-04T17:27:34.383695Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 1, Generation 1 2025-04-04T17:27:34.383709Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-04-04T17:27:34.383721Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 1, Generation 1 2025-04-04T17:27:34.384243Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7489510946230179621:3534] connected; active server actors: 1 2025-04-04T17:27:34.384374Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 1 2025-04-04T17:27:34.384399Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 1, Generation 1 2025-04-04T17:27:34.384415Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 1, Generation 1 2025-04-04T17:27:34.384430Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-04-04T17:27:34.384445Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 1, Generation 1 response: Status: true Locations { PartitionId: 0 NodeId: 1 Generation: 1 } Locations { PartitionId: 1 NodeId: 1 Generation: 1 } Locations { PartitionId: 2 NodeId: 1 Generation: 1 } Locations { PartitionId: 3 NodeId: 1 Generation: 1 } Locations { PartitionId: 4 NodeId: 1 Generation: 1 } 2025-04-04T17:27:34.385101Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7489510946230179622:3535] connected; active server actors: 1 2025-04-04T17:27:34.385349Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 response: Status: true Locations { PartitionId: 3 NodeId: 1 Generation: 1 } 2025-04-04T17:27:34.385872Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7489510946230179623:3536] connected; active server actors: 1 response: Status: false 2025-04-04T17:27:34.955349Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489510946230179671:2517] TxId: 281474976710684. Ctx: { TraceId: 01jr0xfzhy3b3b3vcqvfyrsykn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc4MTNmNDYtYjQ3ZTU2YzktZjQ0MGQ1NWQtZjc3MDUxMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-04-04T17:27:34.956119Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510946230179677:2524], TxId: 281474976710684, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODc4MTNmNDYtYjQ3ZTU2YzktZjQ0MGQ1NWQtZjc3MDUxMDc=. TraceId : 01jr0xfzhy3b3b3vcqvfyrsykn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7489510946230179671:2517], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:27:34.956162Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489510946230179678:2525], TxId: 281474976710684, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODc4MTNmNDYtYjQ3ZTU2YzktZjQ0MGQ1NWQtZjc3MDUxMDc=. CustomerSuppliedId : . TraceId : 01jr0xfzhy3b3b3vcqvfyrsykn. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7489510946230179671:2517], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 22172, MsgBus: 13207 2025-04-04T17:27:26.478513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510912149402249:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:26.478647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00191d/r3tmp/tmp0RuZCQ/pdisk_1.dat 2025-04-04T17:27:26.765635Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22172, node 1 2025-04-04T17:27:26.834309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:26.834436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:26.836054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:26.841744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:26.841782Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:26.841804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:26.841947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13207 TClient is connected to server localhost:13207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:27.263203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:27.286282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:27.417966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:27:27.570033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:27.630994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:29.188310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510925034305922:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.188426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.382315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.407054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.430088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.453451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.477366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.542561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:29.575798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510925034306438:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.575860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.575896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510925034306443:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:29.578770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:29.586104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510925034306445:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:29.654098Z node 1 :TX_PROXY ERROR: Actor# [1:7489510925034306499:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 4787, MsgBus: 14515 2025-04-04T17:27:30.943926Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510930691563035:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:30.943977Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00191d/r3tmp/tmpUID7wa/pdisk_1.dat 2025-04-04T17:27:31.015722Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4787, node 2 2025-04-04T17:27:31.067310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:31.067404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:31.069252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:31.069513Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:31.069527Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:31.069531Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:31.069607Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14515 TClient is connected to server localhost:14515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:31.405260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:33.114769Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510943576465577:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.114851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.128595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:27:33.161536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510943576465678:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.161627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.161687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510943576465683:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.164976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:27:33.201327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510943576465685:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:27:33.256993Z node 2 :TX_PROXY ERROR: Actor# [2:7489510943576465736:2390] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> HttpRequest::Analyze >> KqpPragma::OrderedColumns >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 29471, MsgBus: 24173 2025-04-04T17:27:30.617645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510931698216976:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:30.617701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001912/r3tmp/tmpNucFSd/pdisk_1.dat 2025-04-04T17:27:30.869144Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29471, node 1 2025-04-04T17:27:30.927303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:30.927335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:30.927345Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:30.927463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:30.951886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:30.952005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:30.953673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24173 TClient is connected to server localhost:24173 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:31.404350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:31.426968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:31.548800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:31.703018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:31.766278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:32.987905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510940288153343:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:32.987993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.152545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:33.178009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:33.202121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:33.230083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:33.257742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:33.285358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:33.321083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510944583121146:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.321175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.321308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510944583121151:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:33.324791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:33.331954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510944583121153:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:33.402012Z node 1 :TX_PROXY ERROR: Actor# [1:7489510944583121207:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:34.446336Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787654469, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 24224, MsgBus: 9966 2025-04-04T17:27:35.077868Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510951943984356:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:35.077907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001912/r3tmp/tmpgYEtrY/pdisk_1.dat 2025-04-04T17:27:35.152374Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24224, node 2 2025-04-04T17:27:35.211294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:35.211431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:35.213057Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:35.234926Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:35.234948Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:35.234955Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:35.235052Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9966 TClient is connected to server localhost:9966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:35.573755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:27:35.590548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:35.632271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:35.764517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:35.822534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:37.585341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510960533920722:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:37.585415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:37.614630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:37.640071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:37.670776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:37.700954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:37.732540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:37.764954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:37.859917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510960533921235:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:37.859986Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:37.860079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510960533921240:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:37.863363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:37.871193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510960533921242:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:37.948295Z node 2 :TX_PROXY ERROR: Actor# [2:7489510960533921297:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:38.909436Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787658942, txId: 281474976715671] shutting down 2025-04-04T17:27:39.094840Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787659131, txId: 281474976715673] shutting down 2025-04-04T17:27:39.278563Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787659313, txId: 281474976715675] shutting down >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> PQCountersSimple::Partition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] Test command err: 2025-04-04T17:27:14.213619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:14.214335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:14.214561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:14.216568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:14.216794Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:14.217096Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpj3VwtQ/pdisk_1.dat 2025-04-04T17:27:14.676271Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18973, node 1 TClient is connected to server localhost:19902 2025-04-04T17:27:15.287727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:15.287800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:15.287835Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:15.288052Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:20.786912Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:20.787236Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:20.787494Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:20.788092Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:20.788146Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:20.788332Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpN18GTg/pdisk_1.dat 2025-04-04T17:27:21.060899Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18192, node 3 TClient is connected to server localhost:20761 2025-04-04T17:27:21.372407Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:21.372454Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:21.372481Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:21.372695Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-be81" status: YELLOW message: "Database has storage issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" status: YELLOW message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "YELLOW-e463-3-3-42" reason: "YELLOW-e463-3-3-43" reason: "YELLOW-e463-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpN18GTg/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpN18GTg/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpN18GTg/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "YELLOW-9a33-99d2-3-2147483648-3-55-0-55" type: 
"STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-04-04T17:27:27.788396Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:27.788826Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:27.789230Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:27.790136Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:27.790479Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:27.790742Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpEVik3D/pdisk_1.dat 2025-04-04T17:27:28.048252Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13968, node 5 TClient is connected to server localhost:29510 2025-04-04T17:27:28.421210Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:28.421264Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:28.421291Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:28.421699Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-be81" status: YELLOW message: "Database has storage issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-ab18" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-ab18" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-a594-5-5-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpEVik3D/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: "/home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpEVik3D/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmpEVik3D/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-9a33-ef3e-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } type: "STORAGE_GROUP" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-04-04T17:27:35.048697Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:765:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:35.049330Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:35.049561Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:35.049936Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:762:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:35.050189Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:35.050285Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001681/r3tmp/tmp6WiYz4/pdisk_1.dat 2025-04-04T17:27:35.448513Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23931, node 7 TClient is connected to server localhost:8766 2025-04-04T17:27:39.060561Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:39.060620Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:39.060663Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:39.061581Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:39.079721Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:39.080511Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:39.126085Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-04-04T17:27:39.126699Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:39.282395Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-04-04T17:27:39.282912Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 7 host: "::1" port: 12001 } >> PQCountersSimple::Partition [GOOD] >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> KqpScripting::StreamDdlAndDml [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::Partition [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:125:2057] recipient: [1:123:2157] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:125:2057] recipient: [1:123:2157] Leader for TabletID 72057594037927937 is [1:129:2161] sender: [1:130:2057] recipient: [1:123:2157] 2025-04-04T17:27:10.810534Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2025-04-04T17:27:10.810584Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:171:2057] recipient: [1:169:2192] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:171:2057] recipient: [1:169:2192] Leader for TabletID 72057594037927938 is [1:175:2196] sender: [1:176:2057] recipient: [1:169:2192] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:129:2161] sender: [1:199:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:10.824065Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:10.833742Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:197:2212] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:27:10.834479Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:205:2218] 2025-04-04T17:27:10.836107Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:205:2218] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:10.837538Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:206:2219] 2025-04-04T17:27:10.838587Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:206:2219] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR 2025-04-04T17:27:10.844049Z node 1 :PERSQUEUE INFO: new Cookie default|d79ebce8-cd28ab16-f36a8565-deb615dd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:10.849367Z node 1 :PERSQUEUE INFO: new Cookie default|bdf98606-e53678ae-859fb6f3-67ab262f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:10.853803Z node 1 :PERSQUEUE INFO: new Cookie default|f813b2c6-f014ac16-95218143-1fffe615_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to 
SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to TABLET_COUNTERS_AGGREGATOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Expected: { "sensors": [ { "kind": "GAUGE", "labels": { 
"user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", ... 
Q::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER (followed by hundreds of repeated Captured TEvents::TSystem::Wakeup entries cycling over TABLET_COUNTERS_AGGREGATOR, EXT_COUNTERS_UPDATER_ACTOR, DS_PROXY_NODE_MON_ACTOR, BS_GROUP_PROXY_MON, PERSQUEUE_PARTITION_ACTOR, PDISK_ACTOR, BS_DISK_SPACE_TRACKER, BS_ASYNC_LEVEL_INDEX, NKikimr::NPQ::TPartitionQuoterBase, SS_PROXY_REQUEST, BS_SYNCER_COMMITTER, BS_SYNCER_GUID_PROPAGATOR, NKikimr::TRequestReportingThrottler, BS_SKELETON_FRONT, PERSQUEUE_READ_BALANCER_ACTOR, PERSQUEUE_ANS_ACTOR, BS_PROXY_PUT_ACTOR, BS_QUEUE_ACTOR; duplicate capture lines elided) 2025-04-04T17:27:41.631654Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:41.631746Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:41.649405Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:41.650218Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:197:2212] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2025-04-04T17:27:41.650803Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:205:2218] 2025-04-04T17:27:41.652900Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [5:205:2218] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:27:41.654829Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [5:206:2219] 2025-04-04T17:27:41.656470Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [5:206:2219] Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:41.663639Z node 5 :PERSQUEUE INFO: new Cookie default|827b31d1-5505a89b-8557350e-e91cdcac_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:41.669957Z node 5 :PERSQUEUE INFO: new Cookie default|d30069ec-8b4d8862-9ee63718-3e6dc261_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:41.674654Z node 5 :PERSQUEUE INFO: new Cookie default|f11a6103-9a75a17d-13b93cc7-10261479_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:41.682633Z node 5 :PERSQUEUE INFO: new Cookie default|b938e436-85ae7900-c1630c42-eef433b6_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-04T17:27:41.683940Z node 5 :PERSQUEUE INFO: new Cookie default|803952a6-a172fee2-b6f54921-ea5064a4_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 1954, MsgBus: 23906 2025-04-04T17:27:32.987047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510939897637204:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:32.987166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00190d/r3tmp/tmp0qeSuH/pdisk_1.dat 2025-04-04T17:27:33.208329Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1954, node 1 2025-04-04T17:27:33.264973Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:33.264998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:33.265011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:33.265130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:33.338314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:33.338415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:33.340056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23906 TClient is connected to server localhost:23906 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:33.661511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:33.685012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:33.819839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:33.937223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:33.997439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:35.520108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510952782540880:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:35.520213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:35.688634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:35.713459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:35.740112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:35.765464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:35.792520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:35.819989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:35.867820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510952782541388:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:35.867877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:35.867928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510952782541393:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:35.871559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:35.879327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510952782541395:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:35.958923Z node 1 :TX_PROXY ERROR: Actor# [1:7489510952782541450:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:36.668314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:27:37.002936Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787657038, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 7851, MsgBus: 32377 2025-04-04T17:27:37.694604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510959853192195:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:37.694662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00190d/r3tmp/tmpKXGW7p/pdisk_1.dat 2025-04-04T17:27:37.779689Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7851, node 2 2025-04-04T17:27:37.824303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:37.824376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:37.826185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:37.855634Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:37.855664Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:37.855673Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:37.855789Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32377 TClient is connected to server localhost:32377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
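The same warning shape recurs across suites: a NOT_FOUND on the default resource pool while it is still being created, a scheduled retry ("completed, doublechecking"), then a TX_PROXY "path exist, request accepts it" once the concurrent create lands. A quick way to gauge how noisy a run was is to tally WARN/ERROR records per component; a sketch, assuming a local plain-text copy of the run log saved as ya_log.txt (hypothetical name):

import re
from collections import Counter

# Matches records like "node 1 :KQP_WORKLOAD_SERVICE WARN:" in the log above.
pattern = re.compile(r"node \d+ :(\w+) (WARN|ERROR):")

counts = Counter()
with open("ya_log.txt") as f:  # assumed local copy of the run log
    for line in f:
        for component, level in pattern.findall(line):
            counts[(component, level)] += 1

for (component, level), n in counts.most_common():
    print(f"{component:25} {level:5} {n}")
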
2025-04-04T17:27:38.284122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:38.299774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:38.354523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:38.512672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:38.587899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:40.242788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510972738095849:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:40.242939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:40.280967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:40.307693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:40.331009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:40.354129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:40.380452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:40.411558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:40.447988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510972738096356:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:40.448050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:40.448125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510972738096361:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:40.450967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:40.459518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510972738096363:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:40.516057Z node 2 :TX_PROXY ERROR: Actor# [2:7489510972738096416:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:41.267468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.724912Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787661763, txId: 281474976715673] shutting down >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestSetClientOffset >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> BasicUsage::PropagateSessionClosed >> BasicUsage::WaitEventBlocksBeforeDiscovery >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 24437, MsgBus: 23405 2025-04-04T17:27:33.560384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510942668216208:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:33.560524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018f9/r3tmp/tmp8JOzov/pdisk_1.dat 2025-04-04T17:27:33.849234Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24437, node 1 2025-04-04T17:27:33.897744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:33.897768Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:33.897776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:33.897919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:33.929813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:33.929922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:33.931817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23405 TClient is connected to server localhost:23405 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:34.325680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:34.348650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:34.440485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:34.554165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:34.623313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:36.049257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510955553119875:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:36.049395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:36.311229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:36.336896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:36.360023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:36.384150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:36.408480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:36.475312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:36.526890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510955553120388:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:36.526966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:36.527002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510955553120393:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:36.530391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:36.539396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510955553120395:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:27:36.625692Z node 1 :TX_PROXY ERROR: Actor# [1:7489510955553120450:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:37.269171Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTk1MjFhYWEtMWQzYzg3NC0zYWE0NjRjMC1iOWIzNjU2NA==, ActorId: [1:7489510959848087994:2487], ActorState: ExecuteState, TraceId: 01jr0xg2194d9myjcrp051a13w, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.284052Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjNjYzk1MDgtOTRlNTU2ZjEtYWJkYTYxYmMtOWY5MGRmMzk=, ActorId: [1:7489510959848088005:2492], ActorState: ExecuteState, TraceId: 01jr0xg21w8m93t0f5yh9p704z, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.298404Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTYzNmE5OGItYjFiZDhjNDUtMmZiNjRjMGUtNmI1MmY3MDg=, ActorId: [1:7489510959848088014:2496], ActorState: ExecuteState, TraceId: 01jr0xg22a2k5zk428ehh2xntc, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.369748Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjdiZTRjZmYtNTVmZDM0NjYtMzE4NTk4YmYtZGFjM2UzMTc=, ActorId: [1:7489510959848088035:2505], ActorState: ExecuteState, TraceId: 01jr0xg23mb8kt4ydqgzc1fv0m, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.413814Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhkMWFhMC1lNTI1YjI1OS04OTQ2NGVmZS1lMzU5NTc5NQ==, ActorId: [1:7489510959848088051:2511], ActorState: ExecuteState, TraceId: 01jr0xg2509m1zp94av5885zev, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.465699Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2JhMzY1YzAtYjYzODc5N2EtNTBjMTVkNzktY2QyZGY0NDU=, ActorId: [1:7489510959848088076:2517], ActorState: ExecuteState, TraceId: 01jr0xg26e684kj6et0vsvx55x, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.495829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787657507, txId: 281474976710673] shutting down 2025-04-04T17:27:37.496240Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787657507, txId: 281474976710671] shutting down 2025-04-04T17:27:37.497850Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787657507, txId: 281474976710672] shutting down 2025-04-04T17:27:37.498786Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjBlYzI2YjUtMjAyOWRjNTEtNGI0NDgzNzYtNzg1OTM1, ActorId: [1:7489510959848088246:2553], ActorState: ExecuteState, TraceId: 01jr0xg280ae7mqa8pcxn2ftr0, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.542045Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ3NDkzZDEtZTA3YTMzOTAtZjY4YTUyMmYtZmQ3NTM4NmM=, ActorId: [1:7489510959848088323:2557], ActorState: ExecuteState, TraceId: 01jr0xg2911wxnndczp3cm2r9m, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.603338Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTczMjYxMTQtMzNkZTU4YWQtNzdiNjMxMTMtMzg1ZmQ3ZTk=, ActorId: [1:7489510959848088340:2563], ActorState: ExecuteState, TraceId: 01jr0xg2ade5qvhdxjsyzczbjh, 
Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.612380Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787657619, txId: 281474976710677] shutting down 2025-04-04T17:27:37.612576Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787657619, txId: 281474976710678] shutting down 2025-04-04T17:27:37.646427Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGUzMDY2NDktNDMwY2I2MmYtZjkxOTBlMDQtY2E0MWNiODY=, ActorId: [1:7489510959848088508:2591], ActorState: ExecuteState, TraceId: 01jr0xg2cbah5tcyg0qhhn5era, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.688644Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzgwNTBlYS0yNTZhMjc0MS00ZmQ1NjI1Yy1jNjJjMjk5NA==, ActorId: [1:7489510959848088532:2597], ActorState: ExecuteState, TraceId: 01jr0xg2dp88kzpzsrxzqg7j5f, Create QueryResponse for error on request, msg: 2025-04-04T17:27:37.737646Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGMzNjljMTYtZjdjMWJkYTYtMmU2Mzk1MzEtZGRhYmQyMDE=, ActorId: [1:7489510959848088544:2603], ActorState: ExecuteState, TraceId: 01jr0xg2ez50cxjmc05bbdp3np, Create QueryResponse for error o ... ager: discarding snapshot; our snapshot: [step: 1743787660020, txId: 281474976710738] shutting down 2025-04-04T17:27:39.986766Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660020, txId: 281474976710737] shutting down 2025-04-04T17:27:40.122155Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2QyNjkzZWEtYWIxZjBkNjItNWQzYjlkMGItNWZkNWM5NWQ=, ActorId: [1:7489510972732993345:3088], ActorState: ExecuteState, TraceId: 01jr0xg4q57sdrkbq5vs2hr8kf, Create QueryResponse for error on request, msg: 2025-04-04T17:27:40.170376Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660202, txId: 281474976710741] shutting down 2025-04-04T17:27:40.248562Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDM3OGUwZjEtNzFkMjNlOTMtZGQ3MGM0ZGQtZjgzMzIwMQ==, ActorId: [1:7489510972732993366:3098], ActorState: ExecuteState, TraceId: 01jr0xg4v1cy1n7g542cx20xnp, Create QueryResponse for error on request, msg: 2025-04-04T17:27:40.292136Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660321, txId: 281474976710743] shutting down 2025-04-04T17:27:40.378518Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNjNjQ2MWItZDBlZjZkNjEtZDM3YjAzNzQtYWM3YWI2ODU=, ActorId: [1:7489510972732993481:3116], ActorState: ExecuteState, TraceId: 01jr0xg4yz19wfxyc5te4079cm, Create QueryResponse for error on request, msg: 2025-04-04T17:27:40.381708Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660419, txId: 281474976710745] shutting down 2025-04-04T17:27:40.512312Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI0MWU2N2YtMjE3NGM1MzEtMTY5MjllZTAtOTY1ODYxODE=, ActorId: [1:7489510972732993678:3143], ActorState: ExecuteState, TraceId: 01jr0xg5311jyw0ftaq7yqg4xy, Create QueryResponse for error on request, msg: 2025-04-04T17:27:40.554815Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660580, txId: 281474976710747] shutting down 2025-04-04T17:27:40.648982Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=OTc3ZTk5NTAtZWExNzk3ZmEtYjgxMjUyM2ItYTk5NzVmNDA=, ActorId: [1:7489510972732993697:3152], ActorState: ExecuteState, TraceId: 01jr0xg5774nt44pzda4077h2s, Create QueryResponse for error on request, msg: 2025-04-04T17:27:40.686893Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660720, txId: 281474976710749] shutting down 2025-04-04T17:27:40.791844Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YThhNTY3ZGYtMjM2YmVlM2MtNTg2NTY0YmQtNDNiNzU1MA==, ActorId: [1:7489510972732993815:3172], ActorState: ExecuteState, TraceId: 01jr0xg5bg832tcna7n10t3y6h, Create QueryResponse for error on request, msg: 2025-04-04T17:27:40.844075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660874, txId: 281474976710751] shutting down 2025-04-04T17:27:40.932621Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjZiMDEyZTktMzgyN2M5ZjQtYmJhN2I1ZGEtMWJlNjVhM2Q=, ActorId: [1:7489510972732993923:3190], ActorState: ExecuteState, TraceId: 01jr0xg5fx1ta3txx9ynk2gf4v, Create QueryResponse for error on request, msg: 2025-04-04T17:27:40.964135Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787660993, txId: 281474976710753] shutting down 2025-04-04T17:27:41.076946Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzAxOGQyMWMtYjI5YjA2ODctNWNjOWFjODAtNjAxZmEzZGU=, ActorId: [1:7489510972732994031:3208], ActorState: ExecuteState, TraceId: 01jr0xg5mbdza6hq5vqa17rcax, Create QueryResponse for error on request, msg: 2025-04-04T17:27:41.083429Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787661119, txId: 281474976710755] shutting down 2025-04-04T17:27:41.212191Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787661252, txId: 281474976710757] shutting down 2025-04-04T17:27:41.357538Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787661392, txId: 281474976710759] shutting down Trying to start YDB, gRPC: 27090, MsgBus: 3240 2025-04-04T17:27:42.112032Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510981395227026:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:42.112098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018f9/r3tmp/tmp4na7S8/pdisk_1.dat 2025-04-04T17:27:42.207685Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27090, node 2 2025-04-04T17:27:42.238769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:42.239019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:42.240615Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:42.272471Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:42.272495Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:42.272502Z node 2 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:42.272612Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3240 TClient is connected to server localhost:3240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:42.654033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:42.671906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:42.718636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:42.864612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:42.937078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:44.639635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510989985163394:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:44.639725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:44.677013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.703078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.730341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.766626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.791506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.818433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.854210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510989985163900:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:44.854293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:44.854334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510989985163905:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:44.857414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:44.866358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510989985163907:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:44.929637Z node 2 :TX_PROXY ERROR: Actor# [2:7489510989985163961:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 22100, MsgBus: 28285 2025-04-04T17:26:38.754876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510707489297138:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:38.755014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000cae/r3tmp/tmppFPlu3/pdisk_1.dat 2025-04-04T17:26:38.975623Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22100, node 1 2025-04-04T17:26:39.032936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:39.032960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:39.032971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:39.033136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:39.094044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:39.094176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:39.095805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28285 TClient is connected to server localhost:28285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
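The idx_test output further down in this block is dominated by KQP_EXECUTER trace records of the form "TxId: N. Ctx: { TraceId: ..., SessionId: ... }". A sketch for grouping them by session to see how many transactions each session ran (again assuming a local ya_log.txt copy; the field layout is taken from the records below, and records split across wrapped lines may be missed):

import re
from collections import defaultdict

# Matches "TxId: 281474976715672. Ctx: { TraceId: ..., SessionId: ydb://..., "
pattern = re.compile(r"TxId: (\d+)\. Ctx: \{ TraceId: (\S+), .*?SessionId: (\S+?),")

txs_by_session = defaultdict(list)
text = open("ya_log.txt").read()  # assumed local copy of the run log
for txid, trace_id, session in pattern.findall(text):
    txs_by_session[session].append(txid)

for session, txids in sorted(txs_by_session.items(), key=lambda kv: -len(kv[1])):
    print(f"{len(txids):4} transactions in session {session}")
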
2025-04-04T17:26:39.425743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:39.448638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:39.559847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:39.675939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:39.727885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:41.266791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510720374200824:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:41.266871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:41.446273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:26:41.470050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:26:41.495743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:26:41.522643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:26:41.551449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:26:41.631879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:26:41.664562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510720374201340:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:41.664645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:41.664685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510720374201345:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:41.667716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:26:41.675299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510720374201347:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:26:41.770361Z node 1 :TX_PROXY ERROR: Actor# [1:7489510720374201400:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:42.398677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:26:43.027131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jr0xed2bb0mkjww0nejyddt0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRhNTQ0MzUtMzllYTVlNTMtNDUxY2QxNDctMzMzNTliYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.027768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jr0xed2d8nw6xmktwqz47603, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYwNzNhMmYtZTk0ZmZkMTctZTFiOGQzNzctZDBkOWVkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.028256Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jr0xed2d0dmdkwvseqpjv1xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg5NGI1NDYtNTEzYzY1MTgtMzFmOWNjNGUtZjBiYzMwOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.040175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jr0xed2ffgrm1q24043htwhy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUyMGU0NzUtM2FmNTZmMGMtOGJhOGRmNDktZWE3M2NkZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.043440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jr0xed2f3p1vf099002vc0sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUzZGVlZTMtYWUzYTFiNGItMTE4NDM2MDYtNmJlNTRhYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.043954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jr0xed2f331k46dyhcnacznh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzA0YWY0YjAtZGQ0ZWNhM2EtNDUxNTRiOWYtNzNjYWFhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.045272Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jr0xed2ha83x3cjaxsdh2bqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY5MzljOTUtYzQwNmNhNC1kMmUzNjUxZC1mMWI3ZDJkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.046296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jr0xed2g2gzskdmmtc4j5m1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAzN2UwYWYtMWQ4ZWZhZWEtZjhmYjMyYzUtMTZmYWQwNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.049250Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. 
Ctx: { TraceId: 01jr0xed2g5sgej3btw9054aw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRlYzkzOTktMmRkZDM1YzAtY2ExOTg4ODItYjM1YjM4ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.050072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jr0xed2d8nw6xmktwqz47603, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYwNzNhMmYtZTk0ZmZkMTctZTFiOGQzNzctZDBkOWVkZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.050994Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jr0xed2bb0mkjww0nejyddt0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRhNTQ0MzUtMzllYTVlNTMtNDUxY2QxNDctMzMzNTliYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.055157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jr0xed2d0dmdkwvseqpjv1xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg5NGI1NDYtNTEzYzY1MTgtMzFmOWNjNGUtZjBiYzMwOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:43.059706Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jr0xed2g1e30691at8h1h2as, Database: , DatabaseId: /Root, SessionId: ydb:/ ... sion/3?node_id=2&id=ZjM5MDRkMTgtYmQyOWE5YTQtNGMyMTYxYTYtMTI4MWQ0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.611673Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721626. Ctx: { TraceId: 01jr0xg95q08ck7wrq2d60zpm9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGI1OWVjZjctOTE4MzdmMmUtNTI1YTI1M2QtYThiOTY3MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.613581Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721627. Ctx: { TraceId: 01jr0xg966cpxepsy0e6101nf7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZkNjg0NTctOWIwY2RhMjItZTY0ODNjZTItMmI3ZDE1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.614556Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721628. Ctx: { TraceId: 01jr0xg96x0skz61jstf7qt3hp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM5MDRkMTgtYmQyOWE5YTQtNGMyMTYxYTYtMTI4MWQ0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.617546Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721629. Ctx: { TraceId: 01jr0xg9722v1p5zj64exfyj9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYyODllNTUtMjMxYjkyNTAtNzk5NDc3OTQtNDE1MzhlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.618662Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721630. Ctx: { TraceId: 01jr0xg966cpxepsy0e6101nf7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZkNjg0NTctOWIwY2RhMjItZTY0ODNjZTItMmI3ZDE1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.619814Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721631. 
Ctx: { TraceId: 01jr0xg97868tp5d89j3hsqq5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzVjNGQwZjEtMTc2OWRlMTAtMjRjY2Q0YzUtNmZiMTgxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.625526Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721632. Ctx: { TraceId: 01jr0xg9722v1p5zj64exfyj9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYyODllNTUtMjMxYjkyNTAtNzk5NDc3OTQtNDE1MzhlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.626002Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721633. Ctx: { TraceId: 01jr0xg97868tp5d89j3hsqq5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzVjNGQwZjEtMTc2OWRlMTAtMjRjY2Q0YzUtNmZiMTgxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.628419Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721634. Ctx: { TraceId: 01jr0xg9722v1p5zj64exfyj9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYyODllNTUtMjMxYjkyNTAtNzk5NDc3OTQtNDE1MzhlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.629223Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jr0xg97h4rc1mpdw0bgke594, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGI1OWVjZjctOTE4MzdmMmUtNTI1YTI1M2QtYThiOTY3MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.632237Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. Ctx: { TraceId: 01jr0xg97h4rc1mpdw0bgke594, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGI1OWVjZjctOTE4MzdmMmUtNTI1YTI1M2QtYThiOTY3MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.632867Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jr0xg9722v1p5zj64exfyj9y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYyODllNTUtMjMxYjkyNTAtNzk5NDc3OTQtNDE1MzhlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.634960Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. Ctx: { TraceId: 01jr0xg97hegq6npdg26vkf6ag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGEwOWUwMGYtZDlmMmQxMDMtYjQ4YTdlOTgtYjMwYTdhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.638884Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. Ctx: { TraceId: 01jr0xg97hegq6npdg26vkf6ag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGEwOWUwMGYtZDlmMmQxMDMtYjQ4YTdlOTgtYjMwYTdhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.640017Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jr0xg97v9j1bye1sv8npswkb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM5MDRkMTgtYmQyOWE5YTQtNGMyMTYxYTYtMTI4MWQ0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.641694Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. 
Ctx: { TraceId: 01jr0xg97hegq6npdg26vkf6ag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGEwOWUwMGYtZDlmMmQxMDMtYjQ4YTdlOTgtYjMwYTdhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.641883Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jr0xg97wbcxv2nyd5fceqcha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZkNjg0NTctOWIwY2RhMjItZTY0ODNjZTItMmI3ZDE1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.647791Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jr0xg9813xcwzb1hppgr20d5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGI1OWVjZjctOTE4MzdmMmUtNTI1YTI1M2QtYThiOTY3MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.648563Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jr0xg97hegq6npdg26vkf6ag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGEwOWUwMGYtZDlmMmQxMDMtYjQ4YTdlOTgtYjMwYTdhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.649132Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jr0xg97v9j1bye1sv8npswkb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM5MDRkMTgtYmQyOWE5YTQtNGMyMTYxYTYtMTI4MWQ0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.650969Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jr0xg97wbcxv2nyd5fceqcha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZkNjg0NTctOWIwY2RhMjItZTY0ODNjZTItMmI3ZDE1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.654425Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jr0xg97v9j1bye1sv8npswkb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM5MDRkMTgtYmQyOWE5YTQtNGMyMTYxYTYtMTI4MWQ0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.654661Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jr0xg9818de1trq84xab8t80, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzVjNGQwZjEtMTc2OWRlMTAtMjRjY2Q0YzUtNmZiMTgxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.659179Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. Ctx: { TraceId: 01jr0xg97wbcxv2nyd5fceqcha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmZkNjg0NTctOWIwY2RhMjItZTY0ODNjZTItMmI3ZDE1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.659722Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jr0xg9813xcwzb1hppgr20d5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGI1OWVjZjctOTE4MzdmMmUtNTI1YTI1M2QtYThiOTY3MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.662703Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. 
Ctx: { TraceId: 01jr0xg9818de1trq84xab8t80, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzVjNGQwZjEtMTc2OWRlMTAtMjRjY2Q0YzUtNmZiMTgxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:27:44.667519Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jr0xg9818de1trq84xab8t80, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzVjNGQwZjEtMTc2OWRlMTAtMjRjY2Q0YzUtNmZiMTgxYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:27:44.673563Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jr0xg98y72fxm4stwd5wr1gt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGEwOWUwMGYtZDlmMmQxMDMtYjQ4YTdlOTgtYjMwYTdhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.673612Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. Ctx: { TraceId: 01jr0xg98z8zprm39vjp1wznq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM5MDRkMTgtYmQyOWE5YTQtNGMyMTYxYTYtMTI4MWQ0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.677716Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jr0xg98z8zprm39vjp1wznq3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjM5MDRkMTgtYmQyOWE5YTQtNGMyMTYxYTYtMTI4MWQ0ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.678714Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. Ctx: { TraceId: 01jr0xg98y72fxm4stwd5wr1gt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGEwOWUwMGYtZDlmMmQxMDMtYjQ4YTdlOTgtYjMwYTdhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:27:44.682867Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. Ctx: { TraceId: 01jr0xg98y72fxm4stwd5wr1gt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGEwOWUwMGYtZDlmMmQxMDMtYjQ4YTdlOTgtYjMwYTdhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:27:44.692108Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jr0xg99h7pnn7xfeep6ehbj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYyODllNTUtMjMxYjkyNTAtNzk5NDc3OTQtNDE1MzhlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.696235Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. Ctx: { TraceId: 01jr0xg99h7pnn7xfeep6ehbj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYyODllNTUtMjMxYjkyNTAtNzk5NDc3OTQtNDE1MzhlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:27:44.699993Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jr0xg99h7pnn7xfeep6ehbj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYyODllNTUtMjMxYjkyNTAtNzk5NDc3OTQtNDE1MzhlOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> TopicAutoscaling::PartitionSplit_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 14521, MsgBus: 3920 2025-04-04T17:27:38.432449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510963073290154:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:38.432609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018ea/r3tmp/tmptzRkny/pdisk_1.dat 2025-04-04T17:27:38.677507Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14521, node 1 2025-04-04T17:27:38.757217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:38.757250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:38.757272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:38.757406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:38.803536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:38.803663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:38.805318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3920 TClient is connected to server localhost:3920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:39.222954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:39.245876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:39.337160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
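The repeated KQP_EXECUTER messages above all carry an empty Database field and fall back to the cluster root. Below is a minimal sketch of that fallback rule, assuming the behavior is exactly what the "Database not set, use /Root" message states; this is an illustration, not YDB source:

```cpp
#include <string>

// Minimal sketch (not YDB source): the fallback implied by the repeated
// "Database not set, use /Root" messages. A request that arrives without
// a database path is resolved against the cluster root instead.
std::string ResolveDatabase(const std::string& requested,
                            const std::string& root = "/Root") {
    return requested.empty() ? root : requested;
}
```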
2025-04-04T17:27:39.464519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:39.517424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:40.921714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510971663226520:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:40.921798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:41.141285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.164933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.191049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.215331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.240274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.297022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.327070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510975958194324:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:41.327118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510975958194329:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:41.327123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:41.330159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:41.337879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510975958194331:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:41.421961Z node 1 :TX_PROXY ERROR: Actor# [1:7489510975958194386:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:42.192405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.495337Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787662533, txId: 281474976715675] shutting down Trying to start YDB, gRPC: 12663, MsgBus: 18881 2025-04-04T17:27:43.223689Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510983964267225:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:43.223759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018ea/r3tmp/tmpVfxDty/pdisk_1.dat 2025-04-04T17:27:43.339897Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:43.369900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:43.369996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:43.372019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12663, node 2 2025-04-04T17:27:43.410513Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:43.410535Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:43.410541Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:43.410706Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18881 TClient is connected to server localhost:18881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
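The NOT_FOUND / "doublechecking" / "path exist, request accepts it" sequence above is a benign first-use race: several sessions try to create the default resource pool under `.metadata/workload_manager`, and the losers accept the already-existing path as success. A hedged sketch of that idempotent create follows, with assumed status names rather than the actual YDB enums:

```cpp
#include <functional>
#include <string>

// Illustrative sketch with assumed status names (not the actual YDB enums):
// concurrent sessions race to create the default resource pool, and a
// "path exist, request accepts it" outcome is treated the same as winning
// the race, which is why the ERROR/WARN lines above are benign.
enum class ECreateStatus { Created, AlreadyExists, Error };

bool EnsureDefaultPool(const std::function<ECreateStatus(const std::string&)>& create) {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    switch (create(path)) {
        case ECreateStatus::Created:       // this session won the race
        case ECreateStatus::AlreadyExists: // another session won; same outcome
            return true;
        case ECreateStatus::Error:
            return false;                  // caller schedules a retry, as in the log
    }
    return false;
}
```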
2025-04-04T17:27:43.787556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:43.803601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:43.841462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:43.954785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:44.009545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:27:45.800024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510992554203597:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:45.800113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:45.834311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-04-04T17:27:45.856984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:45.880552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:45.907494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:45.932951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:45.962834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-04-04T17:27:45.997953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510992554204104:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:45.998023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:45.998026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489510992554204109:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:46.000277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-04-04T17:27:46.006985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489510992554204111:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-04-04T17:27:46.073267Z node 2 :TX_PROXY ERROR: Actor# [2:7489510996849171461:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:46.850861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-04-04T17:27:47.495997Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787667531, txId: 281474976715675] shutting down |96.8%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::StorageLimit95 >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:27:41.493137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510978089614933:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:41.493246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:41.534335Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510979132490771:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:41.536202Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:41.669032Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:41.680700Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cec/r3tmp/tmpgBlrRC/pdisk_1.dat 2025-04-04T17:27:41.905807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:41.906230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:41.908212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:41.908285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:41.909910Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:41.910929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
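The HIVE lines above walk each node through Unknown -> Disconnected -> Connecting -> Connected. A simplified sketch of that progression, assuming the linear ordering the log shows (the real NKikimr state machine may allow more transitions):

```cpp
// Simplified sketch of the node state progression Hive logs above
// (Unknown -> Disconnected -> Connecting -> Connected); the linear
// ordering is assumed from the log, not taken from the NKikimr sources.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

EVolatileState NextState(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected;
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s;
}
```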
2025-04-04T17:27:41.912284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:41.938618Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5923, node 1 2025-04-04T17:27:41.969712Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:41.969741Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:42.084829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001cec/r3tmp/yandexeC3Zz3.tmp 2025-04-04T17:27:42.084857Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001cec/r3tmp/yandexeC3Zz3.tmp 2025-04-04T17:27:42.085105Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001cec/r3tmp/yandexeC3Zz3.tmp 2025-04-04T17:27:42.085267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:42.280064Z INFO: TTestServer started on Port 9594 GrpcPort 5923 TClient is connected to server localhost:9594 PQClient connected to localhost:5923 === TenantModeEnabled() = 1 === Init PQ - start server on port 5923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
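The "WaitRootIsUp 'Root'" exchange above is a readiness probe: the client repeatedly describes the root path until the response reports SUCCESS. A hypothetical poll loop follows; the predicate stands in for TClient::Ls, and all names here are illustrative:

```cpp
#include <chrono>
#include <functional>
#include <thread>

// Hypothetical helper mirroring the "WaitRootIsUp 'Root'" probe: poll a
// describe-path call until it succeeds or a deadline passes. The predicate
// stands in for TClient::Ls; names and timeouts are illustrative.
bool WaitRootIsUp(const std::function<bool()>& rootDescribesOk,
                  std::chrono::milliseconds timeout = std::chrono::seconds(10),
                  std::chrono::milliseconds step = std::chrono::milliseconds(100)) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (rootDescribesOk()) {
            return true;
        }
        std::this_thread::sleep_for(step);
    }
    return false;
}
```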
2025-04-04T17:27:42.644871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:42.645167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.648383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:27:42.649352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:42.649416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.651649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:42.652974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:27:42.653242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.653307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:27:42.653385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:27:42.653396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
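The numeric transitions logged by schemeshard ("2 -> 3" above, then "3 -> 128" and "128 -> 240" in the lines that follow) track a suboperation through its phases. The mapping below is inferred from the adjacent TCreateParts / TConfigureParts / TPropose / TDone progress messages; the numeric codes come straight from the log, but the labels are an assumption, not taken from source:

```cpp
#include <cstdint>
#include <string>

// Assumed reading of the schemeshard state codes seen in this log:
// each suboperation advances through create-parts, configure-parts,
// propose, and done phases. Labels are inferred, codes are from the log.
std::string SubOperationPhase(uint32_t state) {
    switch (state) {
        case 2:   return "CreateParts";    // allocate shards, if any
        case 3:   return "ConfigureParts"; // push config to the tablets
        case 128: return "Propose";        // coordinate a plan step
        case 240: return "Done";           // publish paths and notify waiters
        default:  return "Unknown";
    }
}
```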
2025-04-04T17:27:42.654924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:42.654968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:27:42.654999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:42.656744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.656806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:27:42.656823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:27:42.658846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.658881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.658902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:42.658932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:42.664727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:27:42.666879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:27:42.667969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:27:42.669678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787662715, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:42.669814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787662715 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:42.669846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:42.671083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:27:42.671157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:42.671356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:27:42.671422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:27:42.674082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:42.674102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:27:42.674307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:42.674354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489510978089615673:2450], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:27:42.674400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:42.674418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:27:42.674501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:42.674519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:42.674539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:42.674562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:42.674573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:27:42.674586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:42.674596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:27:42.674603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 2 ... 
720664 ready parts: 2/2 2025-04-04T17:27:49.150465Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720664:0 2025-04-04T17:27:49.150472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720664:0 2025-04-04T17:27:49.150501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-04-04T17:27:49.150511Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720664:1 2025-04-04T17:27:49.150516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720664:1 2025-04-04T17:27:49.150592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 5 2025-04-04T17:27:49.150608Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720664, publications: 2, subscribers: 1 2025-04-04T17:27:49.150617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720664, [OwnerId: 72057594046644480, LocalPathId: 11], 5 2025-04-04T17:27:49.150624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720664, [OwnerId: 72057594046644480, LocalPathId: 12], 2 2025-04-04T17:27:49.151015Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720664 2025-04-04T17:27:49.151100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720664 2025-04-04T17:27:49.151117Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976720664 2025-04-04T17:27:49.151127Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-04-04T17:27:49.151138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2025-04-04T17:27:49.151262Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2025-04-04T17:27:49.151296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2025-04-04T17:27:49.151302Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720664 2025-04-04T17:27:49.151309Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2025-04-04T17:27:49.151316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2025-04-04T17:27:49.151338Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720664, subscribers: 1 
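The ack counting above ("Publication in-flight, count: 2", then "count: 1", then "Publication complete, notify & remove ... subscribers: 1") decrements one in-flight counter per TEvUpdateAck and notifies waiting subscribers at zero. A self-contained sketch of that barrier, with assumed types rather than the actual YDB structures:

```cpp
#include <cstdint>
#include <functional>
#include <map>
#include <utility>
#include <vector>

// Sketch of the ack barrier visible above (assumed shape, not YDB source):
// a transaction publishes N path versions to the scheme board, decrements
// an in-flight counter per TEvUpdateAck, and notifies subscribers at zero.
class TPublicationTracker {
public:
    void Start(uint64_t txId, uint32_t paths, std::function<void()> onDone) {
        auto& pub = Pubs[txId];
        pub.InFlight = paths;
        pub.Subscribers.push_back(std::move(onDone));
    }

    void Ack(uint64_t txId) {  // one TEvUpdateAck for one path version
        auto it = Pubs.find(txId);
        if (it == Pubs.end() || it->second.InFlight == 0) {
            return;
        }
        if (--it->second.InFlight > 0) {
            return;            // still "Publication in-flight"
        }
        for (auto& notify : it->second.Subscribers) {
            notify();          // "Publication complete, notify & remove"
        }
        Pubs.erase(it);
    }

private:
    struct TPublication {
        uint32_t InFlight = 0;
        std::vector<std::function<void()>> Subscribers;
    };
    std::map<uint64_t, TPublication> Pubs;
};
```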
2025-04-04T17:27:49.151348Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7489511010800984111:2369] 2025-04-04T17:27:49.153168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 2025-04-04T17:27:49.153199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-04-04T17:27:49.258510Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:27:49.258544Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-04-04T17:27:49.258884Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-04-04T17:27:49.258973Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:39016 2025-04-04T17:27:49.258996Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:39016 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-04-04T17:27:49.259005Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:27:49.260740Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-04-04T17:27:49.260864Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:27:49.260879Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:27:49.260887Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:27:49.260916Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511010800984357:2380] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:27:49.260933Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:27:49.261435Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-04T17:27:49.261540Z node 3 :PERSQUEUE INFO: new Cookie 12345678|525e3309-cee0cdeb-ee6073d7-e61dfa0a_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-04T17:27:49.261896Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|525e3309-cee0cdeb-ee6073d7-e61dfa0a_0 2025-04-04T17:27:49.262991Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|525e3309-cee0cdeb-ee6073d7-e61dfa0a_0 grpc read done: success: 0 data: 2025-04-04T17:27:49.263015Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|525e3309-cee0cdeb-ee6073d7-e61dfa0a_0 grpc read failed 2025-04-04T17:27:49.263213Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|525e3309-cee0cdeb-ee6073d7-e61dfa0a_0 2025-04-04T17:27:49.263233Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|525e3309-cee0cdeb-ee6073d7-e61dfa0a_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-04-04T17:27:49.263518Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed 2025-04-04T17:27:49.271251Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:27:49.271278Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-04T17:27:49.271647Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-04-04T17:27:49.271746Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:39016 2025-04-04T17:27:49.271765Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:39016 proto=v1 topic=topic1 durationSec=0 2025-04-04T17:27:49.271775Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:27:49.273428Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-04T17:27:49.273557Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:27:49.273573Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:27:49.273581Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic 
AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:27:49.273609Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511010800984377:2389] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:27:49.273626Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:27:49.274073Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-04T17:27:49.274143Z node 3 :PERSQUEUE INFO: new Cookie 12345678|f3eba4bf-ff15e402-75423deb-cb0c323_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-04T17:27:49.274516Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|f3eba4bf-ff15e402-75423deb-cb0c323_0 2025-04-04T17:27:49.276297Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|f3eba4bf-ff15e402-75423deb-cb0c323_0 grpc read done: success: 0 data: 2025-04-04T17:27:49.276320Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|f3eba4bf-ff15e402-75423deb-cb0c323_0 grpc read failed 2025-04-04T17:27:49.276872Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 2 sessionId: 12345678|f3eba4bf-ff15e402-75423deb-cb0c323_0 2025-04-04T17:27:49.276897Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|f3eba4bf-ff15e402-75423deb-cb0c323_0 is DEAD 2025-04-04T17:27:49.277225Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] Test command err: 2025-04-04T17:25:30.995965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510413059574871:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:30.996218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00158c/r3tmp/tmphr7uk0/pdisk_1.dat 2025-04-04T17:25:31.386131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:31.439578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:31.439694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:31.442567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28694, node 1 2025-04-04T17:25:31.615954Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:31.615985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:31.615993Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:31.616127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:25:31.916789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.957432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.003118Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7489510417354542631:2309] 2025-04-04T17:25:32.003377Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:25:32.018805Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:25:32.018897Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:25:32.020861Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:25:32.020912Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:25:32.020961Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:25:32.021329Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:25:32.021373Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:25:32.021407Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7489510421649509941:2309] in generation 1 2025-04-04T17:25:32.022615Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:25:32.068729Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:25:32.069836Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:25:32.069927Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7489510421649509945:2310] 2025-04-04T17:25:32.069942Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:25:32.069951Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:25:32.069960Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.073071Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489510417354542619:2300], serverId# [1:7489510421649509944:2311], sessionId# [0:0:0] 2025-04-04T17:25:32.073120Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:25:32.073189Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:25:32.073283Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:32.073319Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:25:32.073337Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:25:32.073365Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:25:32.073389Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:25:32.073598Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:25:32.073664Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 
at tablet 72075186224037888 2025-04-04T17:25:32.075273Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:25:32.077653Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:25:32.077711Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:25:32.081489Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489510421649509959:2319], serverId# [1:7489510421649509960:2320], sessionId# [0:0:0] 2025-04-04T17:25:32.088041Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743787532123 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787532123 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:25:32.088081Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.088219Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:25:32.088276Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:32.088307Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:25:32.088328Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743787532123:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-04T17:25:32.088559Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787532123:281474976710657 keys extracted: 0 2025-04-04T17:25:32.088686Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:25:32.088771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:32.088803Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:25:32.091807Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:25:32.092193Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:25:32.094595Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743787532122 2025-04-04T17:25:32.094623Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.094664Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787532123} 2025-04-04T17:25:32.094713Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:25:32.094746Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:25:32.094759Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:25:32.094785Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:25:32.094843Z node 1 :TX_DATASHARD DEBUG: Complete [1743787532123 : 281474976710657] from 
72075186224037888 at tablet 72075186224037888 send result to client [1:7489510417354542428:2187], exec latency: 3 ms, propose latency: 6 ms 2025-04-04T17:25:32.094871Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-04T17:25:32.094922Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.096808Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743787532130 2025-04-04T17:25:32.099900Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7489510421649509945:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-04T17:25:32.101462Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-04T17:25:32.101510Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:25:32.118723Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:25:32.119723Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:25:32.120473Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-04T17:25:32.120584Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-04T17:25:32.120649Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-04T17:25:32.124668Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:25:32.184761Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:25:32.185967Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-04T17:25:32.192484Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:25:32.192866Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-04T17:25:32.192892Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:25:32.193681Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-04T17:25:32.193711Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:25:32.193809Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:32.193871Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-04T17:25:32.195608Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7489510421649510046:2312], now have 1 active actors on pipe 2025-04-04T17:25:32.195647Z node 1 :PERSQUEUE DEBUG: [P ... 
one> cookie: 0 2025-04-04T17:27:49.275856Z node 24 :TX_DATASHARD INFO: TTxActivateChangeSenderAck Complete: origin# 72075186224037893, at tablet# 72075186224037891 2025-04-04T17:27:49.276056Z node 24 :TX_DATASHARD INFO: TTxActivateChangeSenderAck Complete: origin# 72075186224037893, at tablet# 72075186224037892 2025-04-04T17:27:49.280452Z node 24 :TX_DATASHARD DEBUG: 72075186224037891 ack split partitioning changed to schemeshard 281474976715660 2025-04-04T17:27:49.280638Z node 24 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:27:49.281452Z node 24 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-04-04T17:27:49.283626Z node 24 :TX_DATASHARD DEBUG: 72075186224037892 ack split partitioning changed to schemeshard 281474976715660 2025-04-04T17:27:49.283698Z node 24 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:27:49.284846Z node 24 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-04-04T17:27:49.286659Z node 24 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:27:49.287259Z node 24 :TX_DATASHARD INFO: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:27:49.288486Z node 24 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-04-04T17:27:49.288823Z node 24 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-04-04T17:27:49.300786Z node 24 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-04-04T17:27:49.302264Z node 24 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-04-04T17:27:49.304759Z node 24 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-04-04T17:27:49.306308Z node 24 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-04-04T17:27:49.309114Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037891 not found 2025-04-04T17:27:49.309655Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037892 not found 2025-04-04T17:27:49.332201Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [24:1264:2644], now have 1 active actors on pipe ... release register requests ... wait for merge tx notification 2025-04-04T17:27:49.353745Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:27:49.353900Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:27:49.354479Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:27:49.356195Z node 24 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... 
wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-04T17:27:49.359670Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:27:49.359810Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:27:49.360862Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2025-04-04T17:27:49.360993Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-04-04T17:27:49.361159Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-04-04T17:27:49.361250Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:27:49.361986Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:27:49.373194Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:27:49.373479Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:27:49.373869Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:27:49.374002Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:27:49.374230Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:27:49.374661Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1315:3040] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|1e52f86b-8bb93e15-a05b2264-ec32433f_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2025-04-04T17:27:49.374884Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1312:3040] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-04T17:27:49.375172Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1315:3040] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:27:49.375589Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:27:49.375640Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:27:49.375762Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-04T17:27:49.375901Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:27:49.375934Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got 
client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:27:49.376030Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2025-04-04T17:27:49.376296Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2025-04-04T17:27:49.376476Z node 24 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-04-04T17:27:49.376757Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-04T17:27:49.448704Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-04-04T17:27:49.450173Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6505 2025-04-04T17:27:49.450700Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:27:49.450933Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 93 2025-04-04T17:27:49.452596Z node 24 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 93 actorID [24:1284:3020] 2025-04-04T17:27:49.452920Z node 24 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 size 93 2025-04-04T17:27:49.463420Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:27:49.463629Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-04T17:27:49.463824Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2025-04-04T17:27:49.464351Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T17:27:49.464778Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1315:3040] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-04-04T17:27:49.464951Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1312:3040] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-04T17:27:49.465176Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2025-04-04T17:27:49.465263Z node 24 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2025-04-04T17:27:49.476536Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-04T17:27:49.867204Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:27:49.867281Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:27:49.867446Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-04-04T17:27:49.867495Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-04-04T17:27:49.867571Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
2025-04-04T17:27:49.867612Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:27:49.867797Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> HttpRequest::Analyze [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestReadRuleVersions >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:27:42.741367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510981301457198:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:42.741447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:42.760274Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510983369950644:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:42.760369Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cdc/r3tmp/tmpFl0NR6/pdisk_1.dat 2025-04-04T17:27:42.926541Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:42.930161Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:43.066330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:43.066408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:43.068234Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:43.068985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:43.087433Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16504, node 1 2025-04-04T17:27:43.103170Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:43.103208Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:43.128515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:43.128633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:43.131602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:43.135348Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001cdc/r3tmp/yandexWY0G5k.tmp 2025-04-04T17:27:43.135389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001cdc/r3tmp/yandexWY0G5k.tmp 2025-04-04T17:27:43.135571Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001cdc/r3tmp/yandexWY0G5k.tmp 2025-04-04T17:27:43.135693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:43.186089Z INFO: TTestServer started on Port 21209 GrpcPort 16504 TClient is connected to server localhost:21209 PQClient connected to localhost:16504 === TenantModeEnabled() = 1 === Init PQ - start server on port 16504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:43.563633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:43.563931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:43.564177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:27:43.564430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:43.564476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:43.566252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:43.566400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:27:43.566582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:43.566637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 
2025-04-04T17:27:43.566664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:27:43.566677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-04-04T17:27:43.567950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:43.568021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:27:43.568076Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:43.568731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:43.568804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:27:43.568823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:27:43.570484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:43.570551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:43.570579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:43.570615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:43.575606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:27:43.577652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:27:43.577891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:27:43.580352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787663625, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:43.580518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787663625 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:43.580560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:43.580824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:27:43.580854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:43.580983Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:27:43.581064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:27:43.582840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:43.582874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:27:43.583073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:43.583096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489510985596425208:2427], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:27:43.583128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:43.583156Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:27:43.583272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:43.583310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:43.583349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:43.583364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:43.583386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:27:43.583399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:43.583418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:27:43.583426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for ... 
XY INFO: session v1 cookie: 4 sessionId: describe result for acl check 2025-04-04T17:27:51.484185Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:27:51.484202Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:27:51.484211Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:27:51.484244Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511021563748099:2426] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:27:51.484268Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:27:51.484696Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2025-04-04T17:27:51.484723Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7489511021563748102:2426], now have 1 active actors on pipe 2025-04-04T17:27:51.484769Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:27:51.484796Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-04T17:27:51.484875Z node 3 :PERSQUEUE INFO: new Cookie 123|59153b32-2502f336-dd9e6e10-30fb1006_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2025-04-04T17:27:51.484954Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-04T17:27:51.485013Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:27:51.485112Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:27:51.485134Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-04T17:27:51.485205Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:27:51.485275Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|59153b32-2502f336-dd9e6e10-30fb1006_0 2025-04-04T17:27:51.485786Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743787671485 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:27:51.485885Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|59153b32-2502f336-dd9e6e10-30fb1006_0" topic: "PQ/account/topic" 2025-04-04T17:27:51.486150Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write 1 messages with Id from 1 to 1 2025-04-04T17:27:51.486269Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session: try to update token 2025-04-04T17:27:51.486309Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Send 1 message(s) (0 left), first sequence number is 3 2025-04-04T17:27:51.486489Z :INFO: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session: close. Timeout = 10000 ms 2025-04-04T17:27:51.486693Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|59153b32-2502f336-dd9e6e10-30fb1006_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:27:51.486952Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T17:27:51.487052Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:27:51.487079Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-04T17:27:51.487152Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-04T17:27:51.487222Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:27:51.487365Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:27:51.487399Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-04-04T17:27:51.487436Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-04-04T17:27:51.487538Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-04-04T17:27:51.487602Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". 
Partition: 0: Cookie: 3 2025-04-04T17:27:51.487728Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-04-04T17:27:51.526536Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-04-04T17:27:51.526963Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1743787671526 2025-04-04T17:27:51.527092Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:27:51.527146Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2025-04-04T17:27:51.529620Z node 3 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 431 actorID [3:7489511021563747800:2401] 2025-04-04T17:27:51.529736Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:27:51.529739Z node 3 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 size 431 2025-04-04T17:27:51.529784Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:27:51.529837Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-04-04T17:27:51.529852Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T17:27:51.529927Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:27:51.530689Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 3 queued_in_partition_duration_ms: 38 } 2025-04-04T17:27:51.530739Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session: acknoledged message 1 2025-04-04T17:27:51.586662Z :INFO: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session will now close 2025-04-04T17:27:51.586732Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session: aborting 2025-04-04T17:27:51.587552Z :INFO: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:27:51.587573Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session is aborting and will not restart 2025-04-04T17:27:51.587649Z :DEBUG: [] MessageGroupId [123] SessionId [123|59153b32-2502f336-dd9e6e10-30fb1006_0] Write session: destroy 2025-04-04T17:27:51.587828Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|59153b32-2502f336-dd9e6e10-30fb1006_0 
grpc read done: success: 0 data: 2025-04-04T17:27:51.587856Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|59153b32-2502f336-dd9e6e10-30fb1006_0 grpc read failed 2025-04-04T17:27:51.587886Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|59153b32-2502f336-dd9e6e10-30fb1006_0 grpc closed 2025-04-04T17:27:51.587918Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|59153b32-2502f336-dd9e6e10-30fb1006_0 is DEAD 2025-04-04T17:27:51.588701Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:27:51.588857Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7489511021563748102:2426] destroyed 2025-04-04T17:27:51.588905Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:27:51.771505Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489511021563748117:2435], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:51.771733Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2U2YjAzODItZWFkZjcwOTUtODYzNDU4LThkMTc1NmJm, ActorId: [3:7489511021563748110:2431], ActorState: ExecuteState, TraceId: 01jr0xgg6b5rk3apzj3m36jjm0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:51.772218Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> KqpExtractPredicateLookup::SqlInJoin [GOOD] >> KqpKv::BulkUpsert >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2025-04-04T17:27:40.663855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:40.664202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:40.664303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000abb/r3tmp/tmptGAay0/pdisk_1.dat 2025-04-04T17:27:40.981677Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17273, node 1 2025-04-04T17:27:41.199122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:41.199163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:41.199188Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:41.199512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:41.201217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:27:41.283946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:41.284069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:41.297699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20386 2025-04-04T17:27:41.780886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.542004Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:27:44.573653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:44.573765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:44.601825Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:44.603390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:44.814227Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.814862Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.815393Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.815570Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.815670Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.815873Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.815951Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.816064Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.816142Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:44.991068Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:44.991174Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:45.004719Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:45.116559Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:45.152075Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:27:45.152164Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:27:45.182619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:27:45.182850Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:27:45.183086Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:27:45.183164Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:27:45.183230Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:27:45.183291Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:27:45.183359Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:27:45.183433Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:27:45.184566Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:27:45.212847Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:27:45.213005Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:27:45.219741Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:27:45.225928Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:27:45.226854Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:27:45.232178Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:27:45.254812Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:27:45.254865Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:27:45.254928Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:27:45.267362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:27:45.273318Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:27:45.273441Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:27:45.421196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:27:45.596852Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:27:45.687046Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:27:46.642797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:46.642924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:46.658681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:27:46.907910Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:27:46.908107Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:27:46.908341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:27:46.908452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:27:46.908552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:27:46.908700Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:27:46.908808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:27:46.908903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:27:46.909022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:27:46.909110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:27:46.909198Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:27:46.909273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:27:46.944001Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:27:46.944086Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process= ... WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:27:47.638298Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.643740Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.650327Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.656067Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.662588Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.667841Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.673183Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.679466Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.684906Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:47.691433Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-04-04T17:27:48.875134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3177:3172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:48.886625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:48.890536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-04-04T17:27:49.454954Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.455548Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.456028Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.456513Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.457587Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.458048Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.458484Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.459432Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.459984Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:49.460713Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-04-04T17:27:50.260499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3822:3234], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:50.260677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:50.275985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-04T17:27:50.332855Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.333501Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.334020Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.334522Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.335000Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.336186Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.336651Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.338480Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.338996Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-04-04T17:27:50.339462Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000018s 2025-04-04T17:27:52.250645Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4117:4267] 2025-04-04T17:27:52.252887Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4114:3320] , Record { OperationId: "\000\000\000\000\026^d\260o\010\237\301u\324\301M" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } } 2025-04-04T17:27:52.252949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=^douM 2025-04-04T17:27:52.252990Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=^douM , PathId [OwnerId: 72075186224037897, LocalPathId: 4] Answer: 'Analyze sent. 
OperationId: 00000005jycjr6y24zr5tx9gad' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:27:43.370281Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510987329816338:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:43.370556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:43.390661Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510987063845720:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:43.391338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:43.522697Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cbf/r3tmp/tmpuw3J0z/pdisk_1.dat 2025-04-04T17:27:43.525003Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:43.686484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:43.686609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:43.689614Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:43.690458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:43.721579Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:43.732769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:43.732843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1539, node 1 2025-04-04T17:27:43.742912Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:43.743122Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:43.752033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:43.765486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001cbf/r3tmp/yandexSrOFPL.tmp 2025-04-04T17:27:43.765521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001cbf/r3tmp/yandexSrOFPL.tmp 2025-04-04T17:27:43.765678Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001cbf/r3tmp/yandexSrOFPL.tmp 2025-04-04T17:27:43.765830Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:43.808192Z INFO: TTestServer started on Port 30408 GrpcPort 1539 TClient is connected to server localhost:30408 PQClient connected to localhost:1539 === TenantModeEnabled() = 1 === Init PQ - start server on port 1539 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:44.161111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:44.161348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.161646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:27:44.161956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:44.162005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.163953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:44.164055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:27:44.164182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.164221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:27:44.164235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:27:44.164244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-04T17:27:44.165244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:44.165268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:27:44.165286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:44.166043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.166097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:27:44.166116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:27:44.167790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.167836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.167856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:44.167886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:44.172295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:27:44.173876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:27:44.173995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:27:44.176399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787664220, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:44.176545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787664220 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:44.176579Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:44.176869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:27:44.176918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:44.177104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:27:44.177213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:27:44.179001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:44.179035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:27:44.179271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:44.179307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489510987329817075:2448], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:27:44.179371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:44.179424Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:27:44.179512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:44.179538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:44.179571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:44.179582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:44.179596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:27:44.179615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:44.179632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:27:44.179642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid ... 
: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-04-04T17:27:51.121108Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:43888 2025-04-04T17:27:51.121123Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:43888 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-04T17:27:51.121131Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:27:51.121783Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-04T17:27:51.121929Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:27:51.121945Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:27:51.121974Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:27:51.122024Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511018178561149:2392] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:27:51.122051Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:27:51.122435Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-04T17:27:51.122507Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|7b4c7044-4e6e786a-d6453246-1e7c27be_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-04T17:27:51.122827Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|7b4c7044-4e6e786a-d6453246-1e7c27be_0 ===Assert streaming op1 ===Assert streaming op2 2025-04-04T17:27:51.123747Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|7b4c7044-4e6e786a-d6453246-1e7c27be_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T17:27:51.123954Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T17:27:51.124197Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T17:27:51.127919Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2025-04-04T17:27:51.135363Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:43924" , at schemeshard: 72057594046644480 2025-04-04T17:27:51.135519Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:51.135623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-04T17:27:51.135650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-04T17:27:51.135771Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:51.135795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-04-04T17:27:51.135861Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-04-04T17:27:51.135877Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-04T17:27:51.135896Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-04-04T17:27:51.135905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-04T17:27:51.135937Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-04T17:27:51.135981Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, 
ready parts: 1/1, is published: false 2025-04-04T17:27:51.136004Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-04T17:27:51.136018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-04-04T17:27:51.136030Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-04-04T17:27:51.136040Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2025-04-04T17:27:51.136051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2025-04-04T17:27:51.137664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:51.137830Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-04-04T17:27:51.137979Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:51.137999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-04T17:27:51.138120Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:51.138139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7489511000998690891:2378], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 10 2025-04-04T17:27:51.138659Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-04-04T17:27:51.138716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-04-04T17:27:51.138731Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2025-04-04T17:27:51.138748Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2025-04-04T17:27:51.138763Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-04T17:27:51.138833Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 ===Wait for session created with token with removed ACE to die2025-04-04T17:27:51.140127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 2025-04-04T17:27:51.830882Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489511018178561186:2401], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:51.831092Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDkwNjc4NzQtN2IzM2E1OTYtZjllMzVlMDQtODg0NjY0Y2I=, ActorId: [3:7489511018178561179:2397], ActorState: ExecuteState, TraceId: 01jr0xgg869qzn50jtpmnzxjfp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:51.831567Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:52.123442Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:27:52.124139Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|7b4c7044-4e6e786a-d6453246-1e7c27be_0 describe result for acl check 2025-04-04T17:27:52.124256Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|7b4c7044-4e6e786a-d6453246-1e7c27be_0 2025-04-04T17:27:52.124522Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|7b4c7044-4e6e786a-d6453246-1e7c27be_0 is DEAD 2025-04-04T17:27:52.124806Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2025-04-04T17:27:52.635970Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489511000998690264:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:52.636065Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2025-04-04T17:26:04.968781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.968983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.969063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019bf/r3tmp/tmpAYz5j3/pdisk_1.dat 2025-04-04T17:26:05.313958Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21710, node 1 2025-04-04T17:26:05.697077Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697179Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.697666Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.824064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18056 2025-04-04T17:26:06.341957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.170852Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.205123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.205216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.233405Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.235221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.459758Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.460306Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.460758Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.460932Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.461044Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.461307Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.461443Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.461676Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.461807Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.638359Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.638478Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.651627Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.782922Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:09.825640Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:09.825721Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:09.851077Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:09.851231Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:09.851424Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:09.851493Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:09.851543Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:09.851599Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:09.851657Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:09.851705Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:09.852546Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:09.877798Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.877939Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.883960Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:09.889564Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:09.890449Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:09.895322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:26:09.915468Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:09.915532Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:09.915604Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:26:09.931471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:09.944384Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:09.944549Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.157841Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.317603Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.405769Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.247070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.001574Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.146678Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:26:12.146752Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.146893Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2586:2943], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.148227Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2588:2945] 2025-04-04T17:26:12.148555Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2588:2945], schemeshard id = 72075186224037899 2025-04-04T17:26:13.442490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2725:3246], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.442746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.474515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-04T17:26:13.745898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:13.746169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:13.746494Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:13.746664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:13.746787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:13.746917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:13.747050Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:13.747200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:13.747368Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2869:3086];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17 ... 025-04-04T17:27:50.461862Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:9570:7225], schemeshard count = 1 2025-04-04T17:27:51.406143Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-04T17:27:51.406226Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 191.000000s, at schemeshard: 72075186224037899 2025-04-04T17:27:51.406522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-04T17:27:51.421147Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:27:51.976323Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:27:51.976383Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:27:51.976425Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-04T17:27:51.976473Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:27:51.985633Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:27:52.002827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:27:52.003286Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:27:52.003373Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:27:52.004208Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:27:52.017793Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:27:52.018099Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:27:52.019268Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9713:7309], server id = [2:9718:7314], tablet id = 72075186224037905, status = OK 2025-04-04T17:27:52.019600Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9713:7309], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.021542Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9714:7310], server id = [2:9719:7315], tablet id = 72075186224037906, status = OK 2025-04-04T17:27:52.021608Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9714:7310], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.022237Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9715:7311], server id = [2:9720:7316], tablet id = 72075186224037907, status = OK 2025-04-04T17:27:52.022305Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9715:7311], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.024368Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9716:7312], server id = [2:9721:7317], tablet id = 72075186224037908, status = OK 2025-04-04T17:27:52.024418Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9716:7312], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.024954Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9717:7313], server id = [2:9722:7318], tablet id = 72075186224037909, status = OK 2025-04-04T17:27:52.025005Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9717:7313], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.030179Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:27:52.030900Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9713:7309], server id = [2:9718:7314], tablet id = 72075186224037905 2025-04-04T17:27:52.030944Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.031492Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:27:52.032048Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9714:7310], server id = [2:9719:7315], tablet id = 72075186224037906 2025-04-04T17:27:52.032077Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.032903Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9737:7329], server id = [2:9738:7330], tablet id = 
72075186224037910, status = OK 2025-04-04T17:27:52.032976Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9737:7329], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.033652Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:27:52.033868Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9739:7331], server id = [2:9741:7332], tablet id = 72075186224037911, status = OK 2025-04-04T17:27:52.033913Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9739:7331], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.034814Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9715:7311], server id = [2:9720:7316], tablet id = 72075186224037907 2025-04-04T17:27:52.034838Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.035071Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:27:52.035914Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9716:7312], server id = [2:9721:7317], tablet id = 72075186224037908 2025-04-04T17:27:52.035939Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.036038Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-04-04T17:27:52.036537Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9743:7334], server id = [2:9745:7336], tablet id = 72075186224037912, status = OK 2025-04-04T17:27:52.036589Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9743:7334], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.037334Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9717:7313], server id = [2:9722:7318], tablet id = 72075186224037909 2025-04-04T17:27:52.037358Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.037597Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9746:7337], server id = [2:9750:7341], tablet id = 72075186224037913, status = OK 2025-04-04T17:27:52.037645Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9746:7337], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.038850Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9748:7339], server id = [2:9752:7342], tablet id = 72075186224037914, status = OK 2025-04-04T17:27:52.038908Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9748:7339], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:27:52.039809Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-04-04T17:27:52.040334Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9737:7329], server id = [2:9738:7330], tablet id = 72075186224037910 2025-04-04T17:27:52.040365Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.042174Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-04T17:27:52.042877Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9739:7331], server id = [2:9741:7332], tablet id = 72075186224037911 2025-04-04T17:27:52.042900Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.043801Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-04T17:27:52.043919Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse 
TabletId: 72075186224037913 2025-04-04T17:27:52.044322Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9743:7334], server id = [2:9745:7336], tablet id = 72075186224037912 2025-04-04T17:27:52.044343Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.044634Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9746:7337], server id = [2:9750:7341], tablet id = 72075186224037913 2025-04-04T17:27:52.044653Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.044715Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-04-04T17:27:52.044744Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:27:52.044885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:27:52.045025Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:27:52.045240Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:27:52.046768Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9748:7339], server id = [2:9752:7342], tablet id = 72075186224037914 2025-04-04T17:27:52.046791Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:27:52.047293Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:27:52.074123Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:9779:7365]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:27:52.074358Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:27:52.074411Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:9779:7365], StatRequests.size() = 1 2025-04-04T17:27:52.228867Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Nzk0OWUwNDUtOWFjYzc1OWQtZGY3Zjk2NjktYjY5YjY4NWU=, TxId: 2025-04-04T17:27:52.228933Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Nzk0OWUwNDUtOWFjYzc1OWQtZGY3Zjk2NjktYjY5YjY4NWU=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T17:27:52.229718Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:9787:7371]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:27:52.230052Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:27:52.230514Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:27:52.230580Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:27:52.234156Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:27:52.234231Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:27:52.234308Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:27:52.240770Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestPartitionedBlobFails >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSubscription >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageNoQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] Test command err: 2025-04-04T17:27:26.899189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:26.899669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:26.899876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:26.901165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:26.901318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:26.901500Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167b/r3tmp/tmpnkb5qa/pdisk_1.dat 2025-04-04T17:27:27.209147Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6554, node 1 TClient is connected to server localhost:19914 2025-04-04T17:27:27.545611Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:27.545658Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:27.545686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:27.545845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:32.945916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:32.946198Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:32.946444Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:32.946980Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:32.947051Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:32.947255Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167b/r3tmp/tmpDqbyBa/pdisk_1.dat 2025-04-04T17:27:33.161673Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16269, node 3 TClient is connected to server localhost:1944 2025-04-04T17:27:33.467518Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:33.467583Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:33.467619Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:33.467880Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:39.185758Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:39.186054Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:39.186398Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:39.187026Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:39.187249Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:39.187429Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167b/r3tmp/tmpld4gzt/pdisk_1.dat 2025-04-04T17:27:39.478911Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8402, node 5 TClient is connected to server localhost:20674 2025-04-04T17:27:39.828634Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:39.828689Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:39.828715Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:39.828997Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:46.592121Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:478:2409], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:46.592501Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:46.592591Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:46.593843Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:46.594023Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:46.594116Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167b/r3tmp/tmpwnrjhl/pdisk_1.dat 2025-04-04T17:27:46.883914Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14273, node 7 TClient is connected to server localhost:12058 2025-04-04T17:27:47.314120Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:47.314196Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:47.314279Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:47.314880Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:53.908884Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:53.909305Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:53.909408Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:53.909886Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:53.910312Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:53.910400Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167b/r3tmp/tmpabgSiP/pdisk_1.dat 2025-04-04T17:27:54.173323Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3981, node 9 TClient is connected to server localhost:13836 2025-04-04T17:27:54.587039Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:54.587098Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:54.587133Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:54.587366Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TPQTest::TestReadSubscription [GOOD] >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:25:28.817713Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:28.817795Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:28.842520Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:28.863886Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:25:28.864895Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:25:28.868079Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:28.870277Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-04T17:25:28.872435Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:28.880094Z node 1 :PERSQUEUE INFO: new Cookie owner1|937a928-1907e990-c0111eb5-fe24ded9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-04T17:25:28.880636Z node 1 :PERSQUEUE INFO: new Cookie owner2|57352f65-15da741f-8256f026-1383df51_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:28.896447Z node 1 :PERSQUEUE INFO: new Cookie owner1|1dbe6310-a25a670e-e284ee12-1c314b24_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:107:2057] recipient: [2:100:2134] 2025-04-04T17:25:29.426167Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:29.426255Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:153:2057] recipient: [2:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:178:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:29.446081Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:29.447089Z node 2 :PERSQUEUE INFO: [PQ: 
72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T17:25:29.447744Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2025-04-04T17:25:29.450015Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:25:29.451947Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2025-04-04T17:25:29.453597Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:29.459648Z node 2 :PERSQUEUE INFO: new Cookie owner1|cc5c9a60-b668813-1b96a5d2-1cd1d6de_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-04T17:25:29.460144Z node 2 :PERSQUEUE INFO: new Cookie owner2|d853fb51-38d91038-816159a-e3efb1bf_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:29.477060Z node 2 :PERSQUEUE INFO: new Cookie owner1|f818bbc9-98193f37-eafe9ede-ca85f2d7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:102:2057] recipient: [3:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:102:2057] recipient: [3:100:2134] Leader for TabletID 72057594037927937 is [3:106:2138] sender: [3:107:2057] recipient: [3:100:2134] 2025-04-04T17:25:29.822232Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:29.822353Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:148:2057] recipient: [3:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:148:2057] recipient: [3:146:2169] Leader for TabletID 72057594037927938 is [3:152:2173] sender: [3:153:2057] recipient: [3:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:106:2138] sender: [3:178:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:29.838789Z node 3 
:PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:29.839659Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-04T17:25:29.840404Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:184:2197] 2025-04-04T17:25:29.842027Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:184:2197] 2025-04-04T17:25:29.843362Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:185:2198] 2025-04-04T17:25:29.844546Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:29.854129Z node 3 :PERSQUEUE INFO: new Cookie owner1|d9e3e78a-94703edd-d819dbbb-9df9e166_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-04-04T17:25:29.854682Z node 3 :PERSQUEUE INFO: new Cookie owner2|ef75122f-2a3363c-ffa59659-8cb58359_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:25:29.873059Z node 3 :PERSQUEUE INFO: new Cookie owner1|4f19fb39-5af5b9b0-aa068492-57bc1327_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:102:2057] recipient: [4:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:102:2057] recipient: [4:100:2134] Leader for TabletID 72057594037927937 is [4:106:2138] sender: [4:107:2057] recipient: [4:100:2134] 2025-04-04T17:25:30.331079Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:30.331178Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:148:2057] recipient: [4:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:148:2057] recipient: [4:146:2169] Leader for TabletID 72057594037927938 is [4:152:2173] sender: ... 
Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:27:58.067502Z node 102 :PERSQUEUE INFO: new Cookie default|c3d6fdb1-abc916ed-3edacae5-7fa27e65_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:58.078773Z node 102 :PERSQUEUE INFO: new Cookie default|ecec375d-9d5d7801-5d3430d7-5cc8ff8f_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [103:102:2057] recipient: [103:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [103:102:2057] recipient: [103:100:2134] Leader for TabletID 72057594037927937 is [103:106:2138] sender: [103:107:2057] recipient: [103:100:2134] 2025-04-04T17:27:58.483724Z node 103 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:58.483788Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [103:148:2057] recipient: [103:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [103:148:2057] recipient: [103:146:2169] Leader for TabletID 72057594037927938 is [103:152:2173] sender: [103:153:2057] recipient: [103:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [103:106:2138] sender: [103:178:2057] recipient: [103:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:58.503012Z node 103 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:58.504733Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 103 actor [103:176:2191] txId 12345 config: CacheSize: 
10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 103 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 103 ReadRuleGenerations: 103 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 103 Important: false } Consumers { Name: "user1" Generation: 103 Important: true } 2025-04-04T17:27:58.505844Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [103:184:2197] 2025-04-04T17:27:58.508077Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [103:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:27:58.510897Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [103:185:2198] 2025-04-04T17:27:58.512501Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [103:185:2198] 2025-04-04T17:27:58.514727Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [103:186:2199] 2025-04-04T17:27:58.516189Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [103:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:58.517941Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [103:187:2200] 2025-04-04T17:27:58.519378Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [103:187:2200] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:27:58.521548Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [103:188:2201] 2025-04-04T17:27:58.523127Z node 103 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [103:188:2201] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:58.534868Z node 103 :PERSQUEUE INFO: new Cookie 
default|7d6d0d2d-32fe3716-6cdaefd7-51ac03b9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to 
DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:27:59.509490Z node 103 :PERSQUEUE INFO: new Cookie default|36514529-f157645b-8db81005-fcb80f82_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:59.518853Z node 103 :PERSQUEUE INFO: new Cookie default|73713f1c-782e773e-27b23bb8-335248e1_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> TBlobStorageProxyTest::TestDoubleEmptyGet >> KqpKv::BulkUpsert [GOOD] >> KqpKv::ReadRows_NonExistentKeys >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:27:53.087149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511029678267363:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:53.087251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:53.104311Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511028580203760:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:53.105081Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:53.219616Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001cad/r3tmp/tmpXOdGqn/pdisk_1.dat 2025-04-04T17:27:53.235408Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:53.407888Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:53.412590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:53.412700Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:53.418973Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:53.422436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:53.423295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:53.423377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:53.428264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4392, node 1 2025-04-04T17:27:53.497145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001cad/r3tmp/yandexep8FkS.tmp 2025-04-04T17:27:53.497187Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001cad/r3tmp/yandexep8FkS.tmp 2025-04-04T17:27:53.497354Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001cad/r3tmp/yandexep8FkS.tmp 2025-04-04T17:27:53.497543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:53.548728Z INFO: TTestServer started on Port 6720 GrpcPort 4392 TClient is connected to server localhost:6720 PQClient connected to localhost:4392 === TenantModeEnabled() = 1 === Init PQ - start server on port 4392 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:27:53.881510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:53.881755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:53.881961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:27:53.882271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:53.882315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:53.884903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:53.885040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:27:53.885213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:53.885262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:27:53.885289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:27:53.885300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-04T17:27:53.886351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:53.886368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:27:53.886383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:53.887633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:53.887697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:27:53.887715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:27:53.891778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:53.891842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:53.891867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:53.891895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:53.896507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:27:53.899074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:27:53.899251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:27:53.902419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787673943, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:53.902601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787673943 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:53.902662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:53.903004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:27:53.903045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:53.903257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:27:53.903347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:27:53.905621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:53.905666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:27:53.905884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:53.905916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489511029678268010:2393], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:27:53.905982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:53.906020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:27:53.906126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:53.906157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:53.906179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:53.906188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:53.906204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:27:53.906236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:53.906283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:27:53.906294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-04T17:27:53.906357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940466444 ... 7:28:02.110889Z :NOTICE: [/Root] [/Root] [f0318b2-b7cbf5c7-dfc41b8b-a208ef5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:02.111227Z :INFO: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] Starting read session 2025-04-04T17:28:02.111291Z :DEBUG: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] Starting session to cluster null (localhost:6304) 2025-04-04T17:28:02.111469Z :DEBUG: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:28:02.111511Z :DEBUG: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:28:02.111557Z :DEBUG: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] [null] Reconnecting session to cluster null in 0.000000s 2025-04-04T17:28:02.114966Z :DEBUG: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] [null] Successfully connected. Initializing session 2025-04-04T17:28:02.115452Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2025-04-04T17:28:02.115486Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2025-04-04T17:28:02.115870Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-04T17:28:02.116016Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 read init: from# ipv6:[::1]:54716, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-04-04T17:28:02.116096Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 auth for : consumer_aba 2025-04-04T17:28:02.116595Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 Handle describe topics response 2025-04-04T17:28:02.116687Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 auth is DEAD 2025-04-04T17:28:02.116720Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 auth ok: topics# 1, initDone# 0 2025-04-04T17:28:02.117791Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 register session: topic# /Root/account1/write_topic 2025-04-04T17:28:02.118149Z :INFO: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] [null] Server session id: consumer_aba_3_2_1541814177975847000_v1 2025-04-04T17:28:02.118450Z :DEBUG: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:28:02.118476Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7489511069094835807:2416] connected; active server actors: 1 2025-04-04T17:28:02.118857Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 grpc read done: success# 1, data# { read { } } 2025-04-04T17:28:02.118927Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7489511069094835807:2416] session consumer_aba_3_2_1541814177975847000_v1 2025-04-04T17:28:02.118979Z node 3 :PQ_READ_PROXY DEBUG: 
session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 got read request: guid# 8c5c4f4d-df772818-bbb7d188-b28da30c 2025-04-04T17:28:02.118992Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-04-04T17:28:02.119062Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-04-04T17:28:02.119113Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_1541814177975847000_v1" (Sender=[3:7489511069094835804:2416], Pipe=[3:7489511069094835807:2416], Partitions=[], ActiveFamilyCount=0) 2025-04-04T17:28:02.119143Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-04-04T17:28:02.119197Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-04-04T17:28:02.119259Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_1541814177975847000_v1" (Sender=[3:7489511069094835804:2416], Pipe=[3:7489511069094835807:2416], Partitions=[], ActiveFamilyCount=0) 2025-04-04T17:28:02.119337Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_1541814177975847000_v1" sender [3:7489511069094835804:2416] lock partition 0 for ReadingSession "consumer_aba_3_2_1541814177975847000_v1" (Sender=[3:7489511069094835804:2416], Pipe=[3:7489511069094835807:2416], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-04-04T17:28:02.119470Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-04-04T17:28:02.119505Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000279s 2025-04-04T17:28:02.122395Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_1541814177975847000_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7489511069094835807 RawX2: 4503612512274800 } Path: "/Root/account1/write_topic" } 2025-04-04T17:28:02.122519Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-04-04T17:28:02.122819Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7489511069094835809:2419] 2025-04-04T17:28:02.122867Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_1541814177975847000_v1:1 with generation 1 2025-04-04T17:28:02.132483Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1743787681997 CreateTimestampMS: 1743787681995 SizeLag: 165 WriteTimestampEstimateMS: 1743787681997 } Cookie: 18446744073709551615 } 2025-04-04T17:28:02.132558Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-04-04T17:28:02.132674Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-04-04T17:28:02.133652Z :INFO: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:02.133712Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-04-04T17:28:02.133754Z :INFO: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] Counters: { Errors: 0 CurrentSessionLifetimeMs: 22 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:02.133878Z :NOTICE: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:28:02.133928Z :DEBUG: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] [null] Abort session to cluster 2025-04-04T17:28:02.134902Z :NOTICE: [/Root] [/Root] [86bece8f-fe02687c-ca169a9c-e6c43965] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:02.135050Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 grpc read done: success# 0, data# { } 2025-04-04T17:28:02.135072Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 grpc read failed 2025-04-04T17:28:02.135115Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 grpc closed 2025-04-04T17:28:02.135150Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_1541814177975847000_v1 is DEAD 2025-04-04T17:28:02.135424Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_1541814177975847000_v1 2025-04-04T17:28:02.136410Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7489511069094835807:2416] disconnected; active server actors: 1 2025-04-04T17:28:02.136450Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7489511069094835807:2416] client consumer_aba disconnected session consumer_aba_3_2_1541814177975847000_v1 2025-04-04T17:28:02.639832Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7489511069094835827:2425], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:02.640078Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDlkZmI1YjYtZjAxMWZjOWMtZmRmYzkwNDItNWI4ZjcwMjg=, ActorId: [3:7489511069094835820:2421], ActorState: ExecuteState, TraceId: 01jr0xgtsvc5rmar5jtzc4xfsw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:02.640594Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:27:54.909538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511034232662470:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:54.909606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:54.934613Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511034087872130:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:54.935371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c9b/r3tmp/tmpZFUSgC/pdisk_1.dat 2025-04-04T17:27:55.113142Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:55.113268Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:55.297518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:55.297665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
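Aside on the PERSQUEUE_READ_BALANCER records near the top of this section: they report familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 just before partition family 1 is locked to the single reading session. Below is a minimal C++ sketch of the arithmetic those four fields imply, with a hypothetical helper name; it illustrates the logged values and is not the actual read-balancer code.

#include <cassert>
#include <cstdint>

// Hypothetical helper mirroring the arithmetic implied by the balancer
// fields logged above: every session targets familyCount / sessionCount
// partition families, and familyCount % sessionCount sessions may hold
// one extra family each.
struct TBalanceTarget {
    uint32_t DesiredFamilyCount;
    uint32_t AllowPlusOne;
};

TBalanceTarget ComputeBalanceTarget(uint32_t familyCount, uint32_t sessionCount) {
    assert(sessionCount > 0);
    return {familyCount / sessionCount, familyCount % sessionCount};
}

int main() {
    // Reproduces the logged values: familyCount=1, sessionCount=1,
    // desiredFamilyCount=1, allowPlusOne=0.
    TBalanceTarget t = ComputeBalanceTarget(1, 1);
    assert(t.DesiredFamilyCount == 1);
    assert(t.AllowPlusOne == 0);
    return 0;
}

With one family and one session the quotient is 1 and the remainder 0, which is why the log shows the lone family locked immediately and the balancing pass completing in well under a millisecond (balancing duration: 0.000279s).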
2025-04-04T17:27:55.299638Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:55.300401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:55.301994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:55.329421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:55.329480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:55.333299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4944, node 1 2025-04-04T17:27:55.374120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001c9b/r3tmp/yandexcYJPuo.tmp 2025-04-04T17:27:55.374147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001c9b/r3tmp/yandexcYJPuo.tmp 2025-04-04T17:27:55.374335Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001c9b/r3tmp/yandexcYJPuo.tmp 2025-04-04T17:27:55.374490Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:55.428742Z INFO: TTestServer started on Port 30184 GrpcPort 4944 TClient is connected to server localhost:30184 PQClient connected to localhost:4944 === TenantModeEnabled() = 1 === Init PQ - start server on port 4944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
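The Hive warnings above walk each node through the same sequence: Unknown -> Disconnected -> Connecting -> Connected. A toy model of that progression follows; the enum labels and transition function are inferred from this log, not taken from the YDB sources.

#include <cassert>

// Toy model of the node-state progression the Hive warnings print:
// Unknown -> Disconnected -> Connecting -> Connected.
enum class ENodeState { Unknown, Disconnected, Connecting, Connected };

ENodeState Next(ENodeState s) {
    switch (s) {
        case ENodeState::Unknown:      return ENodeState::Disconnected;
        case ENodeState::Disconnected: return ENodeState::Connecting;
        case ENodeState::Connecting:   return ENodeState::Connected;
        default:                       return ENodeState::Connected; // terminal
    }
}

int main() {
    ENodeState s = ENodeState::Unknown;
    s = Next(s); // Disconnected
    s = Next(s); // Connecting
    s = Next(s); // Connected
    assert(s == ENodeState::Connected);
    return 0;
}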
2025-04-04T17:27:55.789710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:55.789906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:55.790093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:27:55.790343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:55.790406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:55.792517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:55.792629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:27:55.792812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:55.792863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:27:55.792892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:27:55.792903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
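The schemeshard records in this block print both numeric operation states and phase names, so the mapping can be read off the log itself: state 2 runs TCreateParts, 2 -> 3 hands off to TConfigureParts, 3 -> 128 enters TPropose, and (in the records that follow) 128 -> 240 finishes in TDone. A small sketch of that mapping, inferred from these records rather than copied from YDB sources:

#include <cassert>
#include <cstdint>
#include <string>

// Mapping of the numeric operation states to the phase names that the
// surrounding schemeshard records print alongside them.
std::string PhaseName(uint32_t state) {
    switch (state) {
        case 2:   return "CreateParts";    // TCreateParts ProgressState
        case 3:   return "ConfigureParts"; // NSubDomainState::TConfigureParts
        case 128: return "Propose";        // NSubDomainState::TPropose
        case 240: return "Done";           // TDone ProgressState
        default:  return "Unknown";
    }
}

int main() {
    // The transitions logged for txid 281474976710657:0.
    assert(PhaseName(2) == "CreateParts");
    assert(PhaseName(3) == "ConfigureParts");
    assert(PhaseName(128) == "Propose");
    assert(PhaseName(240) == "Done");
    return 0;
}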
2025-04-04T17:27:55.793810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:55.793838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:27:55.793854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:55.794851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:55.794898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:27:55.794910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:27:55.796198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:55.796228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:55.796252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:55.796275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:55.799296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:27:55.800930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:27:55.801071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:27:55.803226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787675847, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:55.803381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787675847 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:55.803424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:55.803630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:27:55.803661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:55.803781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:27:55.803854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:27:55.805172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:55.805210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:27:55.805374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:55.805428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489511038527630428:2409], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:27:55.805504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:55.805535Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:27:55.805627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:55.805646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:55.805665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:55.805675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:55.805718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:27:55.805740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:55.805754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:27:55.805782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-04T17:27:55.805821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 7205759404664 ... ER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:28:03.432005Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511071405290827:2381] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:28:03.432026Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:28:03.432424Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-04T17:28:03.432583Z node 3 :PERSQUEUE INFO: new Cookie 12345678|e55d528c-79ed52c0-8465525d-5e5f8d00_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-04T17:28:03.432938Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|e55d528c-79ed52c0-8465525d-5e5f8d00_0 2025-04-04T17:28:03.433883Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|e55d528c-79ed52c0-8465525d-5e5f8d00_0 grpc read done: success: 0 data: 2025-04-04T17:28:03.433905Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|e55d528c-79ed52c0-8465525d-5e5f8d00_0 grpc read failed 2025-04-04T17:28:03.434055Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|e55d528c-79ed52c0-8465525d-5e5f8d00_0 2025-04-04T17:28:03.434073Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|e55d528c-79ed52c0-8465525d-5e5f8d00_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-04-04T17:28:03.434404Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-04-04T17:28:03.448608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:51940" , at schemeshard: 72057594046644480 2025-04-04T17:28:03.448751Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:03.448833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-04T17:28:03.448845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-04T17:28:03.448926Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-04T17:28:03.448942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:03.449007Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-04T17:28:03.449020Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-04T17:28:03.449032Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-04T17:28:03.449037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-04T17:28:03.449063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId 
[OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-04T17:28:03.449095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-04-04T17:28:03.449110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-04T17:28:03.449120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-04T17:28:03.449130Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-04T17:28:03.449145Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-04-04T17:28:03.449153Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-04T17:28:03.450737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:28:03.450929Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-04-04T17:28:03.451050Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:28:03.451068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-04T17:28:03.451239Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:28:03.451265Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7489511058520387878:2369], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2025-04-04T17:28:03.451816Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-04T17:28:03.451904Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-04T17:28:03.451924Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-04T17:28:03.451937Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-04T17:28:03.451954Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-04-04T17:28:03.452033Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-04-04T17:28:03.453581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-04-04T17:28:03.453843Z node 3 
:PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:28:03.453893Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-04T17:28:03.454216Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-04-04T17:28:03.454332Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:51912 2025-04-04T17:28:03.454349Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51912 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-04T17:28:03.454357Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:28:03.455027Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-04T17:28:03.455197Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:28:03.455214Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:28:03.455219Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:28:03.455240Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511071405290857:2392] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:28:03.455252Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:28:03.455653Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-04T17:28:03.455697Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|f2deda27-e84576e4-2518eff4-2a3cc43e_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-04-04T17:28:03.455962Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|f2deda27-e84576e4-2518eff4-2a3cc43e_0 2025-04-04T17:28:03.456797Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|f2deda27-e84576e4-2518eff4-2a3cc43e_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-04T17:28:03.457043Z node 3 :PQ_WRITE_PROXY INFO: updating token 2025-04-04T17:28:03.457081Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:28:03.457574Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|f2deda27-e84576e4-2518eff4-2a3cc43e_0 describe result for acl check 2025-04-04T17:28:03.457653Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|f2deda27-e84576e4-2518eff4-2a3cc43e_0 2025-04-04T17:28:03.457898Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|f2deda27-e84576e4-2518eff4-2a3cc43e_0 is DEAD 2025-04-04T17:28:03.458210Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: 2025-04-04T17:26:12.072393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:12.072680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:12.072794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0018ae/r3tmp/tmpyRgIPL/pdisk_1.dat 2025-04-04T17:26:12.449020Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10807, node 1 2025-04-04T17:26:12.668796Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:12.668851Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:12.668895Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:12.669391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:12.672029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.752988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.753104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:12.765882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32243 2025-04-04T17:26:13.234988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:15.864447Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:15.890176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:15.890312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:15.917477Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:15.919003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:16.121704Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.122445Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.123032Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.123224Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.123348Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.123585Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.123733Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.123873Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.123979Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:16.292584Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:16.292687Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:16.305613Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:16.428873Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:16.462211Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:16.462306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:16.490042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:16.491634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:16.491935Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:16.492015Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:16.492136Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:16.492223Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:16.492298Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:16.492384Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:16.492905Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:16.525262Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:16.525407Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:16.533618Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:16.541341Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:16.542491Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:16.549325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:16.576899Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:16.576963Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:16.577060Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:16.593222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:16.601138Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:16.601291Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:16.770214Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:16.939318Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:17.069459Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:17.997551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:17.997651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:18.012504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:18.425744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2539:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:18.425848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:18.426892Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2544:3123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:18.427062Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:18.427145Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2546:3125] 2025-04-04T17:26:18.427206Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2546:3125] 2025-04-04T17:26:18.427661Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2547:2992] 2025-04-04T17:26:18.427876Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2546:3125], server id = [2:2547:2992], tablet id = 72075186224037894, status = OK 2025-04-04T17:26:18.427956Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2547:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:26:18.427998Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:18.428182Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:18.428247Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2544:3123], StatRequests.size() = 1 2025-04-04T17:26:18.440433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2551:3129], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:18.440505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:18.440756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2556:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:18.445063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:26:18.601868Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:26:18.601939Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:26:18.685624Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2546:3125], schemeshard count = 1 2025-04-04T17:26:19.163005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... ry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:27:59.352891Z node 2 :TX_PROXY ERROR: Actor# [2:7207:5032] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:59.378490Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7236:5047]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:27:59.378784Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:27:59.378886Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7238:5049] 2025-04-04T17:27:59.378945Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7238:5049] 2025-04-04T17:27:59.379308Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7238:5049], server id = [2:7239:5050], tablet id = 72075186224037894, status = OK 2025-04-04T17:27:59.379372Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7239:5050] 2025-04-04T17:27:59.379460Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7239:5050], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:27:59.379510Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:27:59.379618Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:27:59.379691Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7236:5047], StatRequests.size() = 1 2025-04-04T17:27:59.516233Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjA4NzA3YzctMWQ2MGEwYmUtNWUxZDg2NDItNmJiOGUxNmI=, TxId: 2025-04-04T17:27:59.516318Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjA4NzA3YzctMWQ2MGEwYmUtNWUxZDg2NDItNmJiOGUxNmI=, TxId: 2025-04-04T17:27:59.516925Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:27:59.530847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T17:27:59.530928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:27:59.563510Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:27:59.563585Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:27:59.617802Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7238:5049], schemeshard count = 1 2025-04-04T17:28:00.413747Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:00.413864Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:28:00.413916Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:01.193726Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:01.215364Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:01.215526Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:28:01.215571Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:01.216003Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:01.218678Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:01.232774Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDY2OGI3YjYtMTYwMzAzNzktM2Y4MzkyNWYtZDU2MzUwZGU=, TxId: 2025-04-04T17:28:01.232841Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDY2OGI3YjYtMTYwMzAzNzktM2Y4MzkyNWYtZDU2MzUwZGU=, TxId: 2025-04-04T17:28:01.233596Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:01.247596Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:01.247674Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3110:3317] 2025-04-04T17:28:02.074518Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:02.074594Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-04T17:28:02.074630Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T17:28:02.784599Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:28:02.786466Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:02.786882Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:02.808738Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:02.808843Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:28:02.808885Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:02.809239Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:02.812039Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:02.824498Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDNmNWM0ZGUtOWJlM2E3YjgtMjA2YTdhMzItMzgwYzBhNjk=, TxId: 2025-04-04T17:28:02.824563Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDNmNWM0ZGUtOWJlM2E3YjgtMjA2YTdhMzItMzgwYzBhNjk=, TxId: 2025-04-04T17:28:02.825005Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:02.839822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:02.839910Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:03.624031Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:03.624102Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:03.624148Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:28:04.382475Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:04.382572Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-04T17:28:04.382596Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T17:28:04.382956Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:04.384706Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:04.394101Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGQ0MjExZWYtMWRmMjk1ZWMtZWMwNDlkYTctOWFkNzQ2NmY=, TxId: 2025-04-04T17:28:04.394163Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGQ0MjExZWYtMWRmMjk1ZWMtZWMwNDlkYTctOWFkNzQ2NmY=, TxId: 2025-04-04T17:28:04.394627Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:04.407862Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T17:28:04.407920Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3110:3317] 2025-04-04T17:28:04.408449Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7558:5235]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:04.411277Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:04.411338Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:04.415174Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:04.415223Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:04.415263Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:04.417530Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-04T17:28:04.417777Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 2025-04-04T17:28:04.418051Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7588:5247]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:04.420373Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:04.420428Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:04.420949Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:04.421013Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:04.421060Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:04.423589Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-04-04T17:28:04.423948Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> BasicStatistics::Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2025-04-04T17:25:30.994685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510415065535353:2208];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:30.995044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001586/r3tmp/tmpa8Nfxn/pdisk_1.dat 2025-04-04T17:25:31.421357Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:31.426163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:31.426744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:31.433671Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1692, node 1 2025-04-04T17:25:31.614589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:31.614620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:31.614627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:31.614927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:25:31.910175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:25:31.955226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:25:32.001493Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7489510419360503114:2309] 2025-04-04T17:25:32.001748Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:25:32.015086Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:25:32.015135Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:25:32.017965Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:25:32.018060Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:25:32.018159Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:25:32.019844Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:25:32.019916Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:25:32.019970Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7489510423655470424:2309] in generation 1 2025-04-04T17:25:32.021608Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:25:32.070741Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:25:32.070860Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:25:32.070903Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7489510423655470428:2310] 2025-04-04T17:25:32.070918Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:25:32.070925Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:25:32.070934Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.071091Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:25:32.071170Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:25:32.071188Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:32.071232Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:25:32.071271Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 
0 2025-04-04T17:25:32.071289Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:25:32.071839Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489510419360503102:2299], serverId# [1:7489510423655470427:2310], sessionId# [0:0:0] 2025-04-04T17:25:32.071980Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:25:32.072309Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:25:32.072402Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-04T17:25:32.073855Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:25:32.073925Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:25:32.073977Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:25:32.077326Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489510423655470442:2318], serverId# [1:7489510423655470444:2320], sessionId# [0:0:0] 2025-04-04T17:25:32.082552Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743787532123 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787532123 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:25:32.082581Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.082703Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:32.082718Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:25:32.082743Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743787532123:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-04T17:25:32.084224Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787532123:281474976710657 keys extracted: 0 2025-04-04T17:25:32.084401Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:25:32.084520Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:25:32.084615Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:25:32.084653Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:25:32.088773Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:25:32.090763Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:25:32.092076Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787532123} 2025-04-04T17:25:32.092105Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
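The CDC writes later in this test land in the partition topic 'Table/Stream/streamImpl', i.e. a changefeed named 'Stream' on the table 'Table', and the test name (Cdc::ResolvedTimestampForDisplacedUpsert) indicates resolved timestamps are enabled. Below is a hedged sketch of the YQL that declares a changefeed of this shape; FORMAT, MODE, VIRTUAL_TIMESTAMPS and RESOLVED_TIMESTAMPS are real changefeed options, but the concrete values, in particular the resolved-timestamp interval, are assumptions rather than the settings this test uses.

#include <cstdio>

// Sketch of a changefeed declaration with resolved timestamps enabled.
// Table and changefeed names follow the topic path seen in this log
// ('Table/Stream/streamImpl'); option values are assumptions.
static const char* kAddChangefeed = R"(
ALTER TABLE `/Root/Table` ADD CHANGEFEED `Stream` WITH (
    FORMAT = 'JSON',
    MODE = 'UPDATES',
    VIRTUAL_TIMESTAMPS = TRUE,
    RESOLVED_TIMESTAMPS = Interval('PT1S')
);
)";

int main() {
    std::puts(kAddChangefeed);
    return 0;
}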
2025-04-04T17:25:32.095158Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743787532130 2025-04-04T17:25:32.095187Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.095214Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:25:32.095308Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:25:32.095332Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:25:32.095361Z node 1 :TX_DATASHARD DEBUG: Complete [1743787532123 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7489510419360502901:2185], exec latency: 6 ms, propose latency: 10 ms 2025-04-04T17:25:32.095388Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-04T17:25:32.095418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:25:32.096796Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743787532130 2025-04-04T17:25:32.098254Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7489510423655470428:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-04-04T17:25:32.102576Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-04T17:25:32.102633Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:25:32.119002Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:25:32.120300Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:25:32.120515Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-04-04T17:25:32.120566Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-04-04T17:25:32.120578Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-04-04T17:25:32.121339Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:25:32.175180Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:25:32.182447Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-04-04T17:25:32.194588Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:25:32.194863Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-04-04T17:25:32.194884Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:25:32.194925Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-04-04T17:25:32.194947Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:25:32.194973Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:25:32.195016Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-04-04T17:25:32.195417Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7489510423655470504:2366], now have 1 active actors on pipe 2025-04-04T17:25:32.195453Z node 1 :PERSQUEUE DEBUG: [P ... tOffset 4,1 HeadOffset 0 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000| size 155 WTime 8969 2025-04-04T17:28:04.591662Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:28:04.591880Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 0 count 1 size 155 2025-04-04T17:28:04.593212Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 4 count 1 size 155 actorID [27:792:2657] 2025-04-04T17:28:04.593500Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 size 155 2025-04-04T17:28:04.604074Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 70 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:04.604293Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:28:04.604482Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-04-04T17:28:04.604971Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 9 requestId: cookie: 5 2025-04-04T17:28:04.605357Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2694] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 5 Offset: 4 WriteTimestampMS: 8969 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 5 } } } 2025-04-04T17:28:04.605563Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2694] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-04T17:28:04.605774Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-04T17:28:04.605855Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037888 2025-04-04T17:28:04.606556Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... unblocking updates ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:28:04.712749Z node 27 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2025-04-04T17:28:04.712926Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:04.713089Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2025-04-04T17:28:04.713355Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-04-04T17:28:04.717166Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-04-04T17:28:04.717308Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-04-04T17:28:04.717401Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:04.717492Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 12000 from mediator time cast 2025-04-04T17:28:04.717708Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:683:2579] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-04-04T17:28:04.717908Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2694] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-04-04T17:28:04.718283Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-04-04T17:28:04.718649Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:852:2694], at tablet# 72075186224037888 2025-04-04T17:28:04.718754Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-04-04T17:28:04.719020Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2694] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:28:04.719422Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2694] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 
72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-04-04T17:28:04.719859Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:28:04.719952Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:28:04.720120Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6 2025-04-04T17:28:04.720319Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:28:04.720357Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:28:04.720457Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2025-04-04T17:28:04.720708Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-04-04T17:28:04.720841Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-04-04T17:28:04.721026Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-04-04T17:28:04.721826Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2025-04-04T17:28:04.723064Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 0 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000| size 93 WTime 8979 2025-04-04T17:28:04.723519Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:28:04.723767Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2025-04-04T17:28:04.725092Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 5 count 1 size 93 actorID [27:792:2657] 2025-04-04T17:28:04.725321Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 size 93 2025-04-04T17:28:04.735987Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:04.736238Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-04-04T17:28:04.736445Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-04-04T17:28:04.736998Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2025-04-04T17:28:04.737503Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2694] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2025-04-04T17:28:04.737780Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:852:2694] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-04-04T17:28:04.738081Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-04-04T17:28:04.738179Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2025-04-04T17:28:04.738925Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-04-04T17:28:04.847715Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-04-04T17:28:04.847862Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-04-04T17:28:04.848149Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2025-04-04T17:28:04.848269Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 6 2025-04-04T17:28:04.848470Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 8. All data is from uncompacted head. 2025-04-04T17:28:04.848563Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T17:28:04.848896Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TBlobStorageProxyTest::TestBlock >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> BasicStatistics::NotFullStatisticsColumnshard |96.9%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:27:57.660704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511047431862677:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:57.660771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:57.692348Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511047481166033:2210];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:57.844797Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c7f/r3tmp/tmpxUcSZp/pdisk_1.dat 2025-04-04T17:27:57.865259Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:57.879829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:58.022255Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:58.054008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:58.054095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:58.057799Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:58.058562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:58.070259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:58.070369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:58.074773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19657, node 1 2025-04-04T17:27:58.114063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001c7f/r3tmp/yandexF5h52L.tmp 2025-04-04T17:27:58.114091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001c7f/r3tmp/yandexF5h52L.tmp 2025-04-04T17:27:58.114199Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001c7f/r3tmp/yandexF5h52L.tmp 2025-04-04T17:27:58.114317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:58.158825Z INFO: TTestServer started on Port 1587 GrpcPort 19657 TClient is connected to server localhost:1587 PQClient connected to localhost:19657 === TenantModeEnabled() = 1 === Init PQ - start server on port 19657 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:58.483365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:58.483627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:58.483900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:27:58.484162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:58.484206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:58.486276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:58.486410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:27:58.486607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:58.486659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:27:58.486681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-04-04T17:27:58.486696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-04T17:27:58.487733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:27:58.487764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-04T17:27:58.487811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:27:58.488792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:58.488847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:27:58.488866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-04T17:27:58.490963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:58.490995Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:58.491032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:27:58.491081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:27:58.495813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:27:58.497896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-04T17:27:58.498067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:27:58.501059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787678542, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:58.501221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787678542 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:58.501259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:27:58.501536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-04T17:27:58.501618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:27:58.501825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:27:58.501912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:27:58.504104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:58.504137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:27:58.504299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:58.504324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489511051726830608:2384], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-04T17:27:58.504385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:58.504403Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-04T17:27:58.504513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-04T17:27:58.504534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:27:58.504557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-04T17:27:58.504566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:27:58.504580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-04T17:27:58.504594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:27:58.504605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-04T17:27:58.504623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-04T17:27:58.504785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046 ... UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:28:05.952221Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511082279289757:2381] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:28:05.952243Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:28:05.952640Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-04T17:28:05.952686Z node 3 :PERSQUEUE INFO: new Cookie 12345678|98c0c9a3-d387ebeb-bfc42d54-175eef57_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-04-04T17:28:05.952980Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|98c0c9a3-d387ebeb-bfc42d54-175eef57_0 2025-04-04T17:28:05.953787Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|98c0c9a3-d387ebeb-bfc42d54-175eef57_0 grpc read done: success: 0 data: 2025-04-04T17:28:05.953805Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|98c0c9a3-d387ebeb-bfc42d54-175eef57_0 grpc read failed 2025-04-04T17:28:05.953949Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|98c0c9a3-d387ebeb-bfc42d54-175eef57_0 2025-04-04T17:28:05.953965Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|98c0c9a3-d387ebeb-bfc42d54-175eef57_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-04-04T17:28:05.954189Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-04-04T17:28:05.969057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:52602" , at schemeshard: 72057594046644480 2025-04-04T17:28:05.969233Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:05.969376Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-04-04T17:28:05.969392Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-04T17:28:05.969525Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-04-04T17:28:05.969545Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:05.969607Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-04-04T17:28:05.969626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-04T17:28:05.969642Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 
2025-04-04T17:28:05.969655Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-04T17:28:05.969689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-04-04T17:28:05.969751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-04-04T17:28:05.969778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-04-04T17:28:05.969790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-04-04T17:28:05.969801Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-04-04T17:28:05.969821Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-04-04T17:28:05.969833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-04-04T17:28:05.971687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:28:05.971932Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-04-04T17:28:05.972103Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:28:05.972125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-04-04T17:28:05.972334Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:28:05.972365Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7489511069394386819:2380], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2025-04-04T17:28:05.972819Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-04T17:28:05.972937Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-04-04T17:28:05.972985Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-04-04T17:28:05.973003Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-04-04T17:28:05.973017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046644480, LocalPathId: 10] was 4 2025-04-04T17:28:05.973098Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-04-04T17:28:05.974100Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-04-04T17:28:05.974118Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-04-04T17:28:05.974359Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-04-04T17:28:05.974440Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:52580 2025-04-04T17:28:05.974459Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:52580 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-04-04T17:28:05.974466Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-04-04T17:28:05.974734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-04-04T17:28:05.975156Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-04-04T17:28:05.975306Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:28:05.975338Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:28:05.975346Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:28:05.975383Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511082279289787:2392] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-04-04T17:28:05.975415Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:28:05.975783Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-04-04T17:28:05.975886Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|b1f5733-f723840e-b3d661a0-6c91123f_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-04-04T17:28:05.976210Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|b1f5733-f723840e-b3d661a0-6c91123f_0 2025-04-04T17:28:05.977060Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|b1f5733-f723840e-b3d661a0-6c91123f_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-04T17:28:05.977171Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|b1f5733-f723840e-b3d661a0-6c91123f_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-04-04T17:28:05.977210Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|b1f5733-f723840e-b3d661a0-6c91123f_0 2025-04-04T17:28:05.977358Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|b1f5733-f723840e-b3d661a0-6c91123f_0 is DEAD 2025-04-04T17:28:05.977556Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_NotFullPK >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ShardsLimit995 >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-04-04T17:27:55.737877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511038479989600:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:55.737950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:55.758037Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511035716174738:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:55.758181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/i5tq/001c90/r3tmp/tmpo9Q2zv/pdisk_1.dat 2025-04-04T17:27:55.929228Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:55.930687Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:56.087078Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:56.109020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:56.109125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:56.113072Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:56.113787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19237, node 1 2025-04-04T17:27:56.148847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:56.148937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:56.152893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:56.179309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001c90/r3tmp/yandexfByng4.tmp 2025-04-04T17:27:56.179345Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001c90/r3tmp/yandexfByng4.tmp 2025-04-04T17:27:56.179509Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001c90/r3tmp/yandexfByng4.tmp 2025-04-04T17:27:56.179650Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:56.222899Z INFO: TTestServer started on Port 6053 GrpcPort 19237 TClient is connected to server localhost:6053 PQClient connected to localhost:19237 === TenantModeEnabled() = 1 === Init PQ - start server on port 19237 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:27:56.564989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:56.565226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.565439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:27:56.565694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:27:56.565731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.568090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:56.568257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:27:56.568446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.568506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:27:56.568523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:27:56.568538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-04-04T17:27:56.569693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:56.569720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:27:56.569774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:27:56.571076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.571151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:27:56.571174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:27:56.573373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.573415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.573433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:56.573462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:56.577866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:27:56.579981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-04-04T17:27:56.580149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:27:56.582535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787676624, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:27:56.582669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787676624 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:27:56.582699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:56.582967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-04-04T17:27:56.583002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:27:56.583164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:27:56.583242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:27:56.584808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:27:56.584834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:27:56.585036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:27:56.585081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7489511042774957529:2380], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-04-04T17:27:56.585131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.585175Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-04-04T17:27:56.585261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:56.585280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:56.585298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-04-04T17:27:56.585310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:56.585325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-04-04T17:27:56.585345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:27:56.585360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-04-04T17:27:56.585368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-04-04T17:27:56.585414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046 ... 
8:08.016717Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-04T17:28:08.016797Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-04T17:28:08.016297Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T17:28:08.017008Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::IEventHandle 2025-04-04T17:28:08.020865Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-04-04T17:28:08.020911Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-04-04T17:28:08.021386Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2025-04-04T17:28:08.021877Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2025-04-04T17:28:08.022079Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2025-04-04T17:28:08.022110Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2025-04-04T17:28:08.022278Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2025-04-04T17:28:08.122944Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511094314567598:2599], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:08.123114Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mjg1Y2UwYzAtMzA5Zjk2ODktN2ZmYTk0MDEtMjg5ZmQ0NTc=, ActorId: [1:7489511094314567596:2598], ActorState: ExecuteState, TraceId: 01jr0xh05a6kk73wmsaag9x2tn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:08.123461Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:08.785957Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0: Cookie: 7 2025-04-04T17:28:08.786134Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2025-04-04T17:28:08.786180Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2025-04-04T17:28:08.786211Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2025-04-04T17:28:08.793787Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2025-04-04T17:28:08.794665Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1743787688794 2025-04-04T17:28:08.795776Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:28:08.795866Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 6 partNo 0 count 1 size 1200275 2025-04-04T17:28:08.802930Z node 2 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 6 count 1 size 1200275 actorID [2:7489511078665848598:2363] 2025-04-04T17:28:08.803024Z node 2 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037899' partition 0 offset 6 partno 0 count 1 parts 2 size 1200275 2025-04-04T17:28:08.803042Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:08.803072Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:28:08.803125Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2025-04-04T17:28:08.803160Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:28:08.803188Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2025-04-04T17:28:08.803229Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T17:28:08.803261Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2025-04-04T17:28:08.803275Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T17:28:08.803478Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::IEventHandle 2025-04-04T17:28:08.804214Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 16 queued_in_partition_duration_ms: 763 throttled_on_partition_duration_ms: 763 } 2025-04-04T17:28:08.804265Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0] Write session: acknoledged message 1 2025-04-04T17:28:08.816692Z :INFO: [] MessageGroupId [1236] SessionId [1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0] Write session will now close 2025-04-04T17:28:08.816755Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0] Write session: aborting 2025-04-04T17:28:08.817462Z :INFO: [] MessageGroupId [1236] SessionId [1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:28:08.817457Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0] Write session is aborting and will not restart 2025-04-04T17:28:08.817542Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0] Write session: destroy 2025-04-04T17:28:08.817769Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0 grpc read done: success: 0 data: 2025-04-04T17:28:08.817795Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0 grpc read failed 2025-04-04T17:28:08.817815Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0 grpc closed 2025-04-04T17:28:08.817826Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 
sessionId: 1236|7e16cf4d-e5800202-e7686b4f-7ce7dffd_0 is DEAD 2025-04-04T17:28:08.818561Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison DURATION 2.960351s 2025-04-04T17:28:08.818929Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7489511094314567595:2596] destroyed 2025-04-04T17:28:08.818975Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:28:09.008961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 8] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-04-04T17:28:09.109462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T17:28:09.109597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 8 shard idx 72057594046644480:1 data size 0 row count 0 2025-04-04T17:28:09.109714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], pathId map=user, is column=0, is olap=0 2025-04-04T17:28:09.109749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 8: RowCount 0, DataSize 0 2025-04-04T17:28:09.109960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T17:28:09.142276Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511098609534922:2607], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:09.142510Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmUyODcxMDctZTRkMDQwODktZTJkOGQ0NDUtZDZiZjUwNmQ=, ActorId: [1:7489511098609534920:2606], ActorState: ExecuteState, TraceId: 01jr0xh1555p00w7f3qc6nmc84, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:09.142866Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: 2025-04-04T17:26:06.208923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:06.209141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:06.209221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001960/r3tmp/tmp8T2MiU/pdisk_1.dat 2025-04-04T17:26:06.507419Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23930, node 1 2025-04-04T17:26:06.706912Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:06.706951Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:06.706986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:06.707448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:06.711789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:06.793246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:06.793406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:06.807193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23413 2025-04-04T17:26:07.334113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:10.352776Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:10.384972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.385088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.412849Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:10.414544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.621247Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.621720Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.622104Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.622216Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.622308Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.622488Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.622587Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.622662Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.622715Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.792480Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.792572Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.805270Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.929284Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.969313Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.969406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.995339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.995489Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.995650Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.995703Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.995758Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.995849Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.995907Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.995944Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.996802Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:11.020799Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:11.020939Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:11.026034Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:11.030345Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:11.031065Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:11.035346Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:11.054436Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:11.054519Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:11.054594Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:11.068119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:11.074204Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:11.074369Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:11.222827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:11.370712Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:11.458452Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:12.499921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.500130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.518957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:12.627145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.627416Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:12.627728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:12.627851Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:12.627987Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:12.628122Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:12.628268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:12.628402Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:12.628556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:12.628695Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:12.628871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:12.629033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:12.657042Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:12.657147Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ble, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-04T17:26:15.197610Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000013s ... waiting for TEvAnalyzeTableResponse 2025-04-04T17:26:17.341817Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2800:3165] 2025-04-04T17:26:17.344296Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2798:3218] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T17:26:17.344347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=operationId 2025-04-04T17:26:17.344382Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=operationId , PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:26:17.371330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T17:26:26.939057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:26:26.939129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:28.156993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:26:28.157053Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:04.454511Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:28:04.454592Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:04.454633Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:04.454671Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:28:05.598572Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:28:05.598676Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 223.000000s, at schemeshard: 72075186224037897 2025-04-04T17:28:05.599115Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-04T17:28:05.613827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:06.439013Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:06.439118Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:06.439163Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:06.439214Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 
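The repeated KQP_WORKLOAD_SERVICE NOT_FOUND warnings and SCHEME_ERROR compile failures in this run are expected startup noise rather than test failures: the default resource pool is created lazily on first use (hence the immediate "path exist, request accepts it" follow-up from TX_PROXY), and the legacy /Root/PQ/Config/V2/Cluster table is probed by the cluster tracker before the test creates it. A minimal triage sketch for separating that noise from genuinely unexpected WARN/ERROR lines follows; the signature list and the log-file argument are illustrative assumptions drawn from this particular log, not part of the test suite.

    #!/usr/bin/env python3
    """Tally recurring WARN/ERROR signatures in a captured test log.

    A sketch for triaging runs like this one; the EXPECTED list below is an
    assumption based on this log, not an official allowlist.
    """
    import sys
    from collections import Counter

    EXPECTED = [
        "Resource pool default not found",  # pool is created lazily on first query
        "Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]'",  # probed before creation
        "Table profiles were not loaded",
    ]

    def tally(path: str) -> Counter:
        counts = Counter()
        with open(path, encoding="utf-8", errors="replace") as fh:
            for line in fh:
                if " WARN: " not in line and " ERROR: " not in line:
                    continue
                for sig in EXPECTED:
                    if sig in line:
                        counts[sig] += 1
                        break
                else:
                    counts["<unexpected>"] += 1
        return counts

    if __name__ == "__main__":
        for sig, n in tally(sys.argv[1]).most_common():
            print(f"{n:6d}  {sig}")

Anything landing in "<unexpected>" is what deserves a closer look when a suite that is [GOOD] here starts failing.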
2025-04-04T17:28:06.439258Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:06.439699Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:06.442857Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:06.446479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6977:5162], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:06.446584Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6986:5167], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:06.446664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:06.460255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:06.515828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6991:5170], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:06.719623Z node 2 :TX_PROXY ERROR: Actor# [2:7087:5216] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:06.745531Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7116:5231]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:06.745781Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:06.745869Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7118:5233] 2025-04-04T17:28:06.745941Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7118:5233] 2025-04-04T17:28:06.746299Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7119:5234] 2025-04-04T17:28:06.746441Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7119:5234], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:06.746504Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:06.746655Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7118:5233], server id = [2:7119:5234], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:06.746731Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:06.746830Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7116:5231], StatRequests.size() = 1 2025-04-04T17:28:06.887280Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzIxNWZlNmYtMTliYWVkNGYtZmM4ZmUyOTItZTY5ZTZmODI=, TxId: 2025-04-04T17:28:06.887368Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzIxNWZlNmYtMTliYWVkNGYtZmM4ZmUyOTItZTY5ZTZmODI=, TxId: 2025-04-04T17:28:06.887989Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:06.901846Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:06.901910Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:06.934500Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:06.934587Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:06.988518Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7118:5233], schemeshard count = 1 2025-04-04T17:28:07.686631Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:07.686739Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:07.690370Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:07.707434Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:07.708042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:07.708136Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T17:28:07.721658Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:07.743862Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-04T17:28:08.151080Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:08.151161Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:08.151201Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:08.151249Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:08.151294Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:08.154917Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:08.168786Z node 2 :STATISTICS ERROR: [72075186224037894] Delete long analyze operation, OperationId=operationId 2025-04-04T17:28:08.463494Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:08.463683Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:10.379276Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:28:10.379384Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:10.379457Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:10.379500Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:28:12.591051Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:28:12.591173Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 199.000000s, at schemeshard: 72075186224037897 2025-04-04T17:28:12.591391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-04T17:28:12.709951Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:12.710224Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeDeadline::Complete. Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:2798:3218] 2025-04-04T17:28:12.710313Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:12.710745Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:12.710809Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:12.711485Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> KqpKv::ReadRows_NotFullPK [GOOD] >> KqpKv::ReadRows_Decimal >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestTimeRetention >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> BasicUsage::ReadMirrored [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] Test command err: 2025-04-04T17:27:05.014016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510823073868094:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:05.014148Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001007/r3tmp/tmpdv4LGZ/pdisk_1.dat 2025-04-04T17:27:05.160238Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:05.282159Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29496, node 1 2025-04-04T17:27:05.344878Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001007/r3tmp/yandexUj0Ujl.tmp 2025-04-04T17:27:05.344909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001007/r3tmp/yandexUj0Ujl.tmp 2025-04-04T17:27:05.345092Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001007/r3tmp/yandexUj0Ujl.tmp 2025-04-04T17:27:05.345250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:05.375218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:05.375388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:05.377590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:05.387290Z INFO: TTestServer started on Port 1510 GrpcPort 29496 TClient is connected to server localhost:1510 PQClient connected to localhost:29496 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:05.597130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:27:05.621340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:27:07.211218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510831663803517:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:07.211221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510831663803495:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:07.211289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:07.214666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:27:07.218015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510831663803555:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:07.218087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:07.222562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510831663803523:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:27:07.371364Z node 1 :TX_PROXY ERROR: Actor# [1:7489510831663803579:2447] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:07.393292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:07.403427Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510831663803595:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:07.404957Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWY4NDFhMzYtNDc4ZTM2N2ItMTMwNjljNzYtYzYzY2Y4Yw==, ActorId: [1:7489510831663803491:2336], ActorState: ExecuteState, TraceId: 01jr0xf4p24mc6smp48ymgq8q7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:07.406658Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:07.419301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:07.472654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489510831663803871:2626] 2025-04-04T17:27:10.014306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510823073868094:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:10.014392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:27:13.589322Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:27:13.601284Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:27:13.602398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489510857433607939:2787], Recipient [1:7489510823073868552:2204]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:27:13.602424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:27:13.602439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:27:13.602472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489510857433607935:2784], Recipient [1:7489510823073868552:2204]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:27:13.602482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:27:13.637887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:13.638266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:27:13.638471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:27:13.638504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:27:13.638526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new s ... PQ::TEvPipeDisconnected 2025-04-04T17:28:16.792283Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
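The write path traced earlier in this log (TEvWriteRequest → "Send write quota request" → blob write → ack) also records where the ~2.96s DURATION went: the acknowledgement carries write_statistics with persist_duration_ms: 16 and queued_in_partition_duration_ms / throttled_on_partition_duration_ms of 763 each, which matches the gap between the quota request at 17:28:08.022278Z and "Got quota" at 17:28:08.785957Z. A minimal sketch for extracting that breakdown from a log follows; the regex mirrors the fields printed above and is an assumption about this log's formatting, not a stable contract of the SDK.

    import re

    # Matches the per-write latency breakdown as it appears in this log.
    STATS = re.compile(
        r"write_statistics \{ persist_duration_ms: (\d+) "
        r"queued_in_partition_duration_ms: (\d+) "
        r"throttled_on_partition_duration_ms: (\d+) \}"
    )

    def latency_breakdown(log_text: str):
        """Yield (persist_ms, queued_ms, throttled_ms) per acknowledged write."""
        for m in STATS.finditer(log_text):
            yield tuple(int(x) for x in m.groups())

    if __name__ == "__main__":
        sample = ("write_statistics { persist_duration_ms: 16 "
                  "queued_in_partition_duration_ms: 763 "
                  "throttled_on_partition_duration_ms: 763 }")
        for persist, queued, throttled in latency_breakdown(sample):
            print(f"persist={persist}ms queued={queued}ms throttled={throttled}ms")

A breakdown dominated by the throttled component, as here, points at partition write quota rather than storage latency.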
2025-04-04T17:28:16.792319Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-04-04T17:28:16.792357Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:28:16.792427Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:28:16.792458Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:28:16.792492Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >>>>> Session-1 Received TSessionClosedEvent message 2025-04-04T17:28:16.792526Z :INFO: [/Root] [/Root] [579c0005-48c9a9df-1c7fdb3-79615198] Closing read session. Close timeout: 0.000000s SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2025-04-04T17:28:16.792588Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:1:1:0:0 2025-04-04T17:28:16.792665Z :INFO: [/Root] [/Root] [579c0005-48c9a9df-1c7fdb3-79615198] Counters: { Errors: 0 CurrentSessionLifetimeMs: 34 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:16.792771Z :NOTICE: [/Root] [/Root] [579c0005-48c9a9df-1c7fdb3-79615198] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:16.792852Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7489511099179339501:2456], Partition 0, Sender [0:0:0], Recipient [6:7489511099179339559:2459], Cookie: 0 2025-04-04T17:28:16.792906Z node 6 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_6_2_10069212452115279342_v1 grpc read done: success# 0, data# { } 2025-04-04T17:28:16.792916Z node 6 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_6_2_10069212452115279342_v1 grpc read failed 2025-04-04T17:28:16.792918Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7489511099179339559:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:28:16.792931Z node 6 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_6_2_10069212452115279342_v1 closed 2025-04-04T17:28:16.792938Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:28:16.792978Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:28:16.793026Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:28:16.793057Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:28:16.793087Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:28:16.793392Z node 6 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_6_2_10069212452115279342_v1 is DEAD 2025-04-04T17:28:16.793579Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7489511129244112051:3545], Recipient [6:7489511120654177080:2734]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:28:16.793614Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:28:16.793639Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:28:16.793662Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_6_2_10069212452115279342_v1 2025-04-04T17:28:16.793693Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [6:7489511129244112050:2827] destroyed 2025-04-04T17:28:16.793698Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_2_10069212452115279342_v1 2025-04-04T17:28:16.793726Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7489511129244112047:2824] disconnected; active server actors: 1 2025-04-04T17:28:16.793741Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [6:7489511129244112047:2824] client test-consumer disconnected session test-consumer_6_2_10069212452115279342_v1 2025-04-04T17:28:16.793791Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer rebalancing was scheduled 2025-04-04T17:28:16.793842Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing. 
Sessions=1, Families=3, UnradableFamilies=2 [2 (1), 1 (0), ], RequireBalancing=0 [] 2025-04-04T17:28:16.793881Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing of the family=2 (Status=Free, Partitions=[1]) failed because there are no suitable reading sessions. 2025-04-04T17:28:16.793905Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing of the family=1 (Status=Free, Partitions=[0]) failed because there are no suitable reading sessions. 2025-04-04T17:28:16.793925Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] consumer test-consumer balancing duration: 0.000057s >>>>> Session-2 Release() >>>>> Session-2 Closing reading session 2025-04-04T17:28:16.794924Z :INFO: [/Root] [/Root] [742430b2-7cd49a80-845edca3-446cd3cb] Closing read session. Close timeout: 5.000000s 2025-04-04T17:28:16.794971Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:2:1:0:0 2025-04-04T17:28:16.795100Z :INFO: [/Root] [/Root] [742430b2-7cd49a80-845edca3-446cd3cb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 18 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } >>>>> Session-2 Received TSessionClosedEvent message 2025-04-04T17:28:16.795933Z :INFO: [/Root] [/Root] [742430b2-7cd49a80-845edca3-446cd3cb] Closing read session. Close timeout: 0.000000s SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2025-04-04T17:28:16.795927Z node 6 :PQ_READ_PROXY DEBUG: session cookie 3 consumer test-consumer session test-consumer_6_3_5485198349460721154_v1 grpc read done: success# 0, data# { } 2025-04-04T17:28:16.795981Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:2:1:0:0 2025-04-04T17:28:16.795947Z node 6 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_6_3_5485198349460721154_v1 grpc read failed 2025-04-04T17:28:16.795983Z node 6 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_6_3_5485198349460721154_v1 grpc closed 2025-04-04T17:28:16.796030Z :INFO: [/Root] [/Root] [742430b2-7cd49a80-845edca3-446cd3cb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 19 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:16.796040Z node 6 :PQ_READ_PROXY INFO: session cookie 3 consumer test-consumer session test-consumer_6_3_5485198349460721154_v1 is DEAD 2025-04-04T17:28:16.796134Z :NOTICE: [/Root] [/Root] [742430b2-7cd49a80-845edca3-446cd3cb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:16.796201Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [6:7489511129244112067:3550], Recipient [6:7489511120654177079:2733]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:28:16.796243Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:28:16.796270Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:28:16.796290Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_6_3_5485198349460721154_v1 2025-04-04T17:28:16.796318Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [6:7489511129244112066:2835] destroyed 2025-04-04T17:28:16.796357Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_6_3_5485198349460721154_v1 2025-04-04T17:28:16.797145Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [6:7489511129244112063:2832] disconnected; active server actors: 1 2025-04-04T17:28:16.797178Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [6:7489511129244112063:2832] client test-consumer disconnected session test-consumer_6_3_5485198349460721154_v1 2025-04-04T17:28:16.798754Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|43ceb461-453b8e46-7e7b7a46-9eaf9603_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:28:16.799685Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7489511120654177079:2733], Partition 2, Sender [0:0:0], Recipient [6:7489511120654177174:2746], Cookie: 0 2025-04-04T17:28:16.799738Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7489511120654177174:2746]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:28:16.799755Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:28:16.799804Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:28:16.799859Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:28:16.799877Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:28:16.799924Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:28:16.806710Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7489511120654177080:2734], Partition 1, Sender [0:0:0], Recipient [6:7489511120654177183:2748], Cookie: 0 2025-04-04T17:28:16.806793Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7489511120654177183:2748]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:28:16.806824Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:28:16.806872Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:28:16.806944Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:28:16.806972Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:28:16.807015Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-04-04T17:27:46.897815Z :PropagateSessionClosed INFO: Random seed for debugging is 1743787666897765 2025-04-04T17:27:47.245352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511004666318809:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:47.245474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:47.280686Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511003006772437:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:47.280791Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:47.389799Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:47.407641Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bef/r3tmp/tmpfO7Ds0/pdisk_1.dat 2025-04-04T17:27:47.604672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:47.604765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:47.607862Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:47.608523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:47.641646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:47.641711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:47.648260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:47.648839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64642, node 1 2025-04-04T17:27:47.658304Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:47.658343Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:47.763309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000bef/r3tmp/yandexx8cPT0.tmp 2025-04-04T17:27:47.763346Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/i5tq/000bef/r3tmp/yandexx8cPT0.tmp 2025-04-04T17:27:47.763569Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000bef/r3tmp/yandexx8cPT0.tmp 2025-04-04T17:27:47.763722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:47.956177Z INFO: TTestServer started on Port 11179 GrpcPort 64642 TClient is connected to server localhost:11179 PQClient connected to localhost:64642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:48.210948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:27:49.780208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511013256254431:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.780226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511013256254405:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.780379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.787264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:27:49.787607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511013256254463:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.787677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.803498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511013256254434:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:27:49.887075Z node 1 :TX_PROXY ERROR: Actor# [1:7489511013256254513:2688] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:50.158805Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511013256254532:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:50.164471Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Nzg1YzQwNzMtZjNiYzdiN2QtYTkyMjY5YjQtMTExZDZkZTM=, ActorId: [1:7489511013256254402:2335], ActorState: ExecuteState, TraceId: 01jr0xge8aeqck0w36mqp856zy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:50.166128Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:50.213107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:50.346313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:50.366600Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489511015891674666:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:50.375592Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmUyYTIyZi03NDljYjcwZS03NzdjNGZkOC00YWZkMWM2, ActorId: [2:7489511015891674626:2308], ActorState: ExecuteState, TraceId: 01jr0xget9ey7g23zrwqwrg2fy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:50.376031Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:50.460454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:64642", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T17:27:50.822729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jr0xgezr7c47f9r90ev7h7n2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTgwZGVhM2QtYWZmNGYwY2EtNTZjODhjNjgtZjM2N2U2ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489511017551222293:3045] 2025-04-04T17:27:52.245543Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511004666318809:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:52.245659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:52.280864Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489511003006772437:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:52.280949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:27:56.610562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is ... cf829-2ab6971e-67edf71b-89991942] [] Got ReadResponse, serverBytesSize = 1568, now ReadSizeBudget = 0, ReadSizeServerDelta = 8387040 2025-04-04T17:28:17.740838Z :DEBUG: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 8387040 2025-04-04T17:28:17.741026Z :DEBUG: [/Root] Decompression task done. 
Partition/PartitionSessionId: 2 (1-4) 2025-04-04T17:28:17.741086Z :DEBUG: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] [] Returning serverBytesSize = 1568 to budget 2025-04-04T17:28:17.741113Z :DEBUG: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] [] In ContinueReadingDataImpl, ReadSizeBudget = 1568, ReadSizeServerDelta = 8387040 2025-04-04T17:28:17.741331Z :DEBUG: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2025-04-04T17:28:17.741396Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (1-1) 2025-04-04T17:28:17.741446Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (2-2) 2025-04-04T17:28:17.741482Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (3-3) 2025-04-04T17:28:17.741516Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 2} (4-4) 2025-04-04T17:28:17.741569Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_14480055741255291162_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1568 } } >>> event from dataHandler: 2025-04-04T17:28:17.741693Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_14480055741255291162_v1 got read request: guid# 440db541-820c152c-bcb27861-3564763f DataReceived { Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..130 bytes.. Information: { Offset: 1 ProducerId: "src_id" SeqNo: 2 CreateTime: 2025-04-04T17:28:17.693000Z WriteTime: 2025-04-04T17:28:17.716000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "_ip": "ipv6:[::1]:57160", "server": "ipv6:[::1]:57160", "logtype": "unknown" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2025-04-04T17:28:17.693000Z WriteTime: 2025-04-04T17:28:17.717000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:57160", "server": "ipv6:[::1]:57160" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2025-04-04T17:28:17.693000Z WriteTime: 2025-04-04T17:28:17.717000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:57160", "server": "ipv6:[::1]:57160" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2025-04-04T17:28:17.694000Z WriteTime: 2025-04-04T17:28:17.717000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:57160", "server": "ipv6:[::1]:57160" } MessageMeta: { } } Partition session id: 2 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2025-04-04T17:28:17.741895Z :DEBUG: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] [] The application data is transferred to the client. 
Number of messages 4, size 1180 bytes 2025-04-04T17:28:17.741937Z :DEBUG: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] [] Returning serverBytesSize = 0 to budget 2025-04-04T17:28:17.794373Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0] Write session will now close 2025-04-04T17:28:17.794452Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0] Write session: aborting 2025-04-04T17:28:17.794978Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-04-04T17:28:17.795025Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0] Write session: destroy 2025-04-04T17:28:17.795184Z :INFO: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] Closing read session. Close timeout: 18446744073709.551615s 2025-04-04T17:28:17.795280Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic-mirrored-from-dc3:0:2:4:0 -:test-topic:0:1:4:0 2025-04-04T17:28:17.795329Z :INFO: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] Counters: { Errors: 0 CurrentSessionLifetimeMs: 405 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:17.796165Z :INFO: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:17.796274Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic-mirrored-from-dc3:0:2:4:0 -:test-topic:0:1:4:0 2025-04-04T17:28:17.796312Z :INFO: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] Counters: { Errors: 0 CurrentSessionLifetimeMs: 406 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:17.796349Z :INFO: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:17.796416Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic-mirrored-from-dc3:0:2:4:0 -:test-topic:0:1:4:0 2025-04-04T17:28:17.796452Z :INFO: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] Counters: { Errors: 0 CurrentSessionLifetimeMs: 406 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:17.796559Z :NOTICE: [/Root] [/Root] [bd1cf829-2ab6971e-67edf71b-89991942] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:17.795788Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0 grpc read done: success: 0 data: 2025-04-04T17:28:17.795816Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0 grpc read failed 2025-04-04T17:28:17.795845Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0 grpc closed 2025-04-04T17:28:17.795859Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|ee891b6a-2ff1c883-1f678372-35e4b246_0 is DEAD 2025-04-04T17:28:17.796713Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:28:17.796845Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_14480055741255291162_v1 grpc read done: success# 0, data# { } 2025-04-04T17:28:17.796866Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14480055741255291162_v1 grpc read failed 2025-04-04T17:28:17.796884Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14480055741255291162_v1 grpc closed 2025-04-04T17:28:17.796944Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14480055741255291162_v1 is DEAD 2025-04-04T17:28:17.797891Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7489511131069680708:2596] disconnected; active server actors: 1 2025-04-04T17:28:17.797926Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7489511131069680708:2596] client user disconnected session shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.797994Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511131069680709:2596] disconnected; active server actors: 1 2025-04-04T17:28:17.798011Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511131069680709:2596] client user disconnected session shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.798053Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7489511131069680710:2596] disconnected; active server actors: 1 2025-04-04T17:28:17.798071Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7489511131069680710:2596] client user disconnected session shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.798370Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7489511131069680851:2617] destroyed 2025-04-04T17:28:17.798454Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 
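For reference, the read-session teardown traced above ("Closing read session. Close timeout: 0.000000s", the ABORTED SessionClosed status, the per-partition read/committed offsets) maps onto the public NYdb::NTopic C++ SDK roughly as in the sketch below. This is a minimal illustration, not the code under test: the endpoint, database, topic path and consumer name are placeholders, and the header path and API shapes (TTopicClient, CreateReadSession, TReadSessionEvent) are assumed from the open-source SDK rather than taken from this log.

    #include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

    using namespace NYdb;
    using namespace NYdb::NTopic;

    int main() {
        TDriver driver(TDriverConfig().SetEndpoint("localhost:2135").SetDatabase("/Root"));
        TTopicClient client(driver);

        auto session = client.CreateReadSession(
            TReadSessionSettings()
                .ConsumerName("user")                          // the log above reads as consumer "shared/user"
                .AppendTopics(TTopicReadSettings("test-topic")));

        // GetEvent(true) blocks until the server pushes an event; data arrives in batches.
        while (auto event = session->GetEvent(/*block=*/true)) {
            if (auto* data = std::get_if<TReadSessionEvent::TDataReceivedEvent>(&*event)) {
                for (const auto& msg : data->GetMessages()) {
                    // msg.GetData() / msg.GetOffset() / msg.GetProducerId() ...
                }
                data->Commit();                                // advances the committed offset shown in the log
            } else if (std::get_if<TSessionClosedEvent>(&*event)) {
                break;                                         // server-side close, as in "Status: ABORTED" above
            }
        }
        session->Close(TDuration::Zero());                     // zero timeout = do not wait for pending commits
        driver.Stop(true);
    }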
2025-04-04T17:28:17.798524Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.798549Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7489511131069680721:2602] destroyed 2025-04-04T17:28:17.798577Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.798599Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489511131069680717:2600] destroyed 2025-04-04T17:28:17.798665Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.798693Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.798720Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:17.798747Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7489511131069680722:2603] destroyed 2025-04-04T17:28:17.798772Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_14480055741255291162_v1 2025-04-04T17:28:18.066579Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489511135364648186:2624], TxId: 281474976710705, task: 1, CA Id [3:7489511135364648184:2624]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-04-04T17:28:18.100001Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489511135364648186:2624], TxId: 281474976710705, task: 1, CA Id [3:7489511135364648184:2624]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:28:18.149525Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489511135364648186:2624], TxId: 281474976710705, task: 1, CA Id [3:7489511135364648184:2624]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-04-04T17:28:18.214111Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7489511135364648186:2624], TxId: 281474976710705, task: 1, CA Id [3:7489511135364648184:2624]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> THealthCheckTest::Issues100Groups100VCardListing |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:27:01.651402Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:01.651479Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:01.668347Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:01.683240Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:27:01.684135Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:27:01.685667Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:01.687161Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, 
Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-04T17:27:01.688632Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:01.693559Z node 1 :PERSQUEUE INFO: new Cookie default|60ede791-ca3b5a17-e60ab2ea-80aabc2b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:27:01.698200Z node 1 :PERSQUEUE INFO: new Cookie default|d666aa92-435fd661-973486fb-3e4c2ae0_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:01.705995Z node 1 :PERSQUEUE INFO: new Cookie default|c0746d6c-e2b4abee-97d803da-ce5571bc_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:27:01.717400Z node 1 :PERSQUEUE INFO: new Cookie default|b2acac27-d1d66689-f52a856c-2a387a83_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:01.720978Z node 1 :PERSQUEUE INFO: new Cookie default|84e85c1e-d863813f-8930cb93-ae438f8f_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:01.725767Z node 1 :PERSQUEUE INFO: new Cookie default|9c1319f5-2bc6f4c6-e8167f68-755f79c6_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:107:2057] recipient: [2:100:2134] 2025-04-04T17:27:02.024277Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:02.024342Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:153:2057] recipient: [2:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:178:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:106:2138]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:180:2057] recipient: [2:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:184:2057] recipient: [2:182:2194] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:185:2195] sender: [2:186:2057] recipient: [2:182:2194] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:02.059467Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:02.059545Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:106:2138]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:106:2138]) tablet resolver refreshed! new actor is[2:185:2195] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured 
TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:185:2195] sender: [2:262:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:03.546381Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:03.547237Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T17:27:03.548029Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, S ... 
er actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.542867Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:637:2631] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.544852Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:642:2636] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.546381Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:647:2641] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.548454Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:652:2646] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.551008Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:657:2651] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.553296Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:662:2656] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.555641Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:667:2661] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.557937Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:672:2666] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.559885Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:677:2671] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.561515Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:682:2676] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.563179Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:687:2681] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.564856Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:692:2686] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.566596Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:697:2691] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.570498Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:702:2696] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.572419Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:707:2701] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.574491Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:712:2706] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.576569Z node 58 :PERSQUEUE_READ_BALANCER INFO: 
[72057594037927938][rt3.dc1--topic] pipe [58:717:2711] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.578336Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:722:2716] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.580326Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:727:2721] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.582372Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:732:2726] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.584502Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:737:2731] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.586565Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:742:2736] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.588861Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:747:2741] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:21.591124Z node 58 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [58:752:2746], now have 1 active actors on pipe Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:28:21.592332Z node 58 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [58:755:2749], now have 1 active actors on pipe Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:28:21.593151Z node 58 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [58:758:2752], now have 1 active actors on pipe Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:28:21.593935Z node 58 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [58:761:2755] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:22.058905Z node 59 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:22.059026Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to 
CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:22.097036Z node 59 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:22.097103Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:22.099922Z node 59 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:22.101086Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 65 actor [59:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 65 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 65 ReadRuleGenerations: 65 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 65 Important: false } Consumers { Name: "aaa" Generation: 65 Important: true } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:22.101803Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [59:244:2244] 2025-04-04T17:28:22.102956Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [59:244:2244] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:22.103922Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [59:246:2246] 2025-04-04T17:28:22.104551Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [59:246:2246] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:22.134288Z node 59 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:22.134350Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:22.134983Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [59:323:2306] 2025-04-04T17:28:22.136018Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [59:325:2308] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:22.139809Z node 59 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:28:22.139871Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [59:323:2306] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:28:22.140128Z node 59 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:28:22.140152Z node 59 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [59:325:2308] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-04-04T17:27:53.430691Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511027922665474:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:53.430752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:53.452141Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511029771100824:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:53.452736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:53.567063Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:53.574876Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001ca3/r3tmp/tmpRENHr5/pdisk_1.dat 2025-04-04T17:27:53.758439Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:53.773815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:53.773954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:53.779969Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:53.780953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:53.785178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:53.785287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-04-04T17:27:53.805992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62900, node 1 2025-04-04T17:27:53.851407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001ca3/r3tmp/yandex2qLl6s.tmp 2025-04-04T17:27:53.851434Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001ca3/r3tmp/yandex2qLl6s.tmp 2025-04-04T17:27:53.851559Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001ca3/r3tmp/yandex2qLl6s.tmp 2025-04-04T17:27:53.851711Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:53.892723Z INFO: TTestServer started on Port 20460 GrpcPort 62900 TClient is connected to server localhost:20460 PQClient connected to localhost:62900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:54.105999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:27:54.143805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:27:56.425365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511040807568519:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:56.425473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:56.425623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511040807568531:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:56.428861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-04T17:27:56.432545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511040807568565:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:56.432666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:56.450603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511040807568533:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:27:56.594721Z node 1 :TX_PROXY ERROR: Actor# [1:7489511040807568615:2803] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:56.620481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.653159Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511040807568633:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:56.653421Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzFiYzdiZjQtY2UzOWRmNWQtNmNhOGFjYi04ZmZjYzY0NQ==, ActorId: [1:7489511040807568516:2340], ActorState: ExecuteState, TraceId: 01jr0xgmqr7mzmdwqt9pkj3n13, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:56.655569Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:56.700228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:56.777543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:27:57.152409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0xgn5bc62ftj7havqze8jv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjc4MzNiYjctMWZlM2UyZDctMzU5NmRlMy01ZDliYmRiYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489511045102536295:3089] 2025-04-04T17:27:58.430542Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511027922665474:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:58.430644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:58.452451Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489511029771100824:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:58.452522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-04-04T17:28:03.219287Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489511027922665762:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:03.219558Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489511027922665762:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-04T17:28:03.219661Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489511027922665762:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489511032217633511:2446] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787674167 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:28:03.219772Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489511027922665762:2151], cacheItem# { Subscriber: { Subscriber: [1:7489511032217633511:2446] DomainOwnerId: 72057 ... 7525862:2871] destroyed 2025-04-04T17:28:20.331234Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_10005883244598027131_v1 2025-04-04T17:28:20.331239Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_10005883244598027131_v1 2025-04-04T17:28:20.331249Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_10005883244598027131_v1 2025-04-04T17:28:20.331252Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_10005883244598027131_v1 2025-04-04T17:28:20.331264Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_10005883244598027131_v1 2025-04-04T17:28:20.331347Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:28:20.331426Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0] Write session will now close 2025-04-04T17:28:20.331489Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0] Write session: aborting 2025-04-04T17:28:20.332310Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:28:20.332323Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0] Write session is aborting and will not restart 2025-04-04T17:28:20.332408Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0] Write session: destroy 2025-04-04T17:28:20.332500Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0 grpc read done: success: 0 data: 2025-04-04T17:28:20.332523Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0 grpc read failed 2025-04-04T17:28:20.332894Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0 2025-04-04T17:28:20.332920Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|47c6fa51-b351e08-ae495eb5-da7a6992_0 is DEAD 2025-04-04T17:28:20.333217Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037902 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:28:20.333495Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7489511125342688098:2571] destroyed 2025-04-04T17:28:20.333517Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037902, Partition: 1, State: StateIdle] TPartition::DropOwner. 
2025-04-04T17:28:20.360519Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7489511081342490005:2105], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:20.360651Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7489511081342490005:2105], cacheItem# { Subscriber: { Subscriber: [4:7489511085637457383:2152] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787686333 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:20.360874Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7489511145767000660:2924], recipient# [4:7489511081342489835:2160], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:20.535863Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7489511082393011507:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:28:20.536017Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7489511082393011507:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511095277914516:2884] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787688881 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:20.536114Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7489511082393011507:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511095277914304:2734] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 
Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787688741 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:20.536369Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511146817525886:5514], recipient# [3:7489511146817525885:2862], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:28:20.537134Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489511082393011507:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:20.537233Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489511082393011507:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511086687979291:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787686333 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:20.537381Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511146817525889:5515], recipient# [3:7489511146817525888:2876], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-04-04T17:28:20.607032Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489511082393011507:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-04-04T17:28:20.607173Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489511082393011507:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511095277914202:2689] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-04-04T17:28:20.607273Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511146817525911:5525], recipient# [3:7489511146817525910:2881], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD]
>> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD]
>> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD]
>> TraverseColumnShard::TraverseColumnTable [GOOD]
>> TBlobStorageProxyTest::TestPartialGetBlock
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD]
Test command err: 2025-04-04T17:28:13.935186Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000e5c/r3tmp/tmppyhuDH//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-04T17:28:13.937691Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:15.436504Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000e5c/r3tmp/tmppyhuDH//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-04T17:28:15.441926Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] 
StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:16.917272Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000e5c/r3tmp/tmppyhuDH//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-04T17:28:16.921011Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 
HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:18.476772Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000e5c/r3tmp/tmppyhuDH//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 4 2025-04-04T17:28:18.490151Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:20.028085Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000e5c/r3tmp/tmppyhuDH//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 5 2025-04-04T17:28:20.032420Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 
0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:21.568010Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000e5c/r3tmp/tmppyhuDH//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 6 2025-04-04T17:28:21.573415Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 
HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR
>> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD]
|97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD]
>> THealthCheckTest::ShardsLimit905 [GOOD]
>> THealthCheckTest::ShardsLimit800
>> KqpKv::ReadRows_Decimal [GOOD]
>> TBlobStorageProxyTest::TestProxySimpleDiscoverNone
>> TBlobStorageProxyTest::TestProxySimpleDiscover
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD]
Test command err: Trying to start YDB, gRPC: 29291, MsgBus: 62556
2025-04-04T17:26:36.311814Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510699588705744:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:26:36.312028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d0d/r3tmp/tmptb4Qso/pdisk_1.dat
2025-04-04T17:26:36.558338Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29291, node 1
2025-04-04T17:26:36.612097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T17:26:36.612114Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T17:26:36.612130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T17:26:36.612245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-04-04T17:26:36.643853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T17:26:36.643952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T17:26:36.645535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:62556
TClient is connected to server localhost:62556
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T17:26:37.065624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:37.085519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:37.179625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:37.288630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:37.361713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T17:26:38.352037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510708178642111:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:38.352206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:38.518229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T17:26:38.540396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-04-04T17:26:38.563459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-04-04T17:26:38.585459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-04-04T17:26:38.612092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-04-04T17:26:38.635721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-04-04T17:26:38.665860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510708178642618:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:38.665905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:38.666006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510708178642623:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:38.669054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-04-04T17:26:38.677358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510708178642625:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:26:38.747529Z node 1 :TX_PROXY ERROR: Actor# [1:7489510708178642678:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:39.439691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:26:40.024790Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0xea4k326tcf74kbzfmj6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVlMmYwZjktYjM2ZmZmY2UtOGVjNGMzZjItZjBmN2M4ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.025172Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0xea4j8ftmw8t0enybrmnf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YxZTI5MWItMTk5ZTBmYWItYzk2Yjk0ZC02YTA2YzJkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.028640Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0xea4m90dwnhf2axtbygf9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA2M2QzMmEtYTY2N2IzMmUtZDhhZWNjMTUtNzk3ZWU5Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.030771Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0xea4kcmnn03stfbwhjgcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM1NWEzYzctZGZmMjU0ZDAtNTUwOTBkMi01N2RmMmQzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.035764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0xea4m3yw07vcca9j25mmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVmZDhjM2ItYmU0ZDY3ODMtNjY3Y2U0MDEtZWJiZGI5YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.044841Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr0xea4m90dwnhf2axtbygf9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA2M2QzMmEtYTY2N2IzMmUtZDhhZWNjMTUtNzk3ZWU5Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.045452Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jr0xea4j8ftmw8t0enybrmnf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YxZTI5MWItMTk5ZTBmYWItYzk2Yjk0ZC02YTA2YzJkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.047010Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0xea4nfk4swfettr431stw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWYzMDNlNjktMWU1Njk1MTYtYmZjMzBmNS1kMmZlNDE5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.048448Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. 
Ctx: { TraceId: 01jr0xea4k326tcf74kbzfmj6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGVlMmYwZjktYjM2ZmZmY2UtOGVjNGMzZjItZjBmN2M4ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.050719Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr0xea4n3mvbn6ds13rt5210, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAzMTMzYjgtODE1MzVjMGEtMjcyZTE3NTItODk0OWJhYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.052285Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr0xea4kcmnn03stfbwhjgcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM1NWEzYzctZGZmMjU0ZDAtNTUwOTBkMi01N2RmMmQzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.058978Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710686. Ctx: { TraceId: 01jr0xea4m3yw07vcca9j25mmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODVmZDhjM2ItYmU0ZDY3ODMtNjY3Y2U0MDEtZWJiZGI5YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:40.060085Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710685. Ctx: { TraceId: 01jr0xea4qe4h655nqzbp51tp7, Database: , DatabaseId: /Root, SessionId: ydb:/ ... sion/3?node_id=3&id=Nzg3MjA3YmEtZTI5ODA5YTgtNDQxMjU5NGEtOWU5ZDIwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.080355Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721519. Ctx: { TraceId: 01jr0xhbv3fdrc41kss7g5nw87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmIxMWU0YzAtNDc1ZDljYTMtMWViMzg1NGItNzg5NDRhZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.080502Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721520. Ctx: { TraceId: 01jr0xhbtt45nxp8dkpx41jnyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjg2MzJkZWEtMTI2NDVmZWQtY2FiNWFmMDEtMzBjZTU3NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.086421Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721521. Ctx: { TraceId: 01jr0xhbtt45nxp8dkpx41jnyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjg2MzJkZWEtMTI2NDVmZWQtY2FiNWFmMDEtMzBjZTU3NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.086970Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721522. Ctx: { TraceId: 01jr0xhbv3fdrc41kss7g5nw87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmIxMWU0YzAtNDc1ZDljYTMtMWViMzg1NGItNzg5NDRhZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.090348Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721523. Ctx: { TraceId: 01jr0xhbv3fdrc41kss7g5nw87, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmIxMWU0YzAtNDc1ZDljYTMtMWViMzg1NGItNzg5NDRhZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.092836Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721524. 
Ctx: { TraceId: 01jr0xhbvt4d6vzzyazkbhfvy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQ4ODA3MDUtYWNlYTc4MjMtYjljMDY0NzAtYWZlNWIwNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.095720Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721525. Ctx: { TraceId: 01jr0xhbvt8vf7xfvnkx5wdvke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDZkYzIzZjItNGFkYTczMTgtMzA0Y2NkMjMtMjAyZTFhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.096961Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721526. Ctx: { TraceId: 01jr0xhbvt4d6vzzyazkbhfvy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQ4ODA3MDUtYWNlYTc4MjMtYjljMDY0NzAtYWZlNWIwNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.100567Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721527. Ctx: { TraceId: 01jr0xhbvwfx9mykhst47z7cpg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE1ODAwNWQtYTgyYmY0NDMtZjgxNDY3OTgtNTkzZTcwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.102303Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721528. Ctx: { TraceId: 01jr0xhbvt8vf7xfvnkx5wdvke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDZkYzIzZjItNGFkYTczMTgtMzA0Y2NkMjMtMjAyZTFhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.103827Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721529. Ctx: { TraceId: 01jr0xhbw08k0m262g0wpn5mab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Nzg3MjA3YmEtZTI5ODA5YTgtNDQxMjU5NGEtOWU5ZDIwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.109450Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721530. Ctx: { TraceId: 01jr0xhbvwfx9mykhst47z7cpg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE1ODAwNWQtYTgyYmY0NDMtZjgxNDY3OTgtNTkzZTcwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.109991Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721531. Ctx: { TraceId: 01jr0xhbvt8vf7xfvnkx5wdvke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDZkYzIzZjItNGFkYTczMTgtMzA0Y2NkMjMtMjAyZTFhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.115512Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721532. Ctx: { TraceId: 01jr0xhbvwfx9mykhst47z7cpg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE1ODAwNWQtYTgyYmY0NDMtZjgxNDY3OTgtNTkzZTcwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.115514Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721533. Ctx: { TraceId: 01jr0xhbw08k0m262g0wpn5mab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Nzg3MjA3YmEtZTI5ODA5YTgtNDQxMjU5NGEtOWU5ZDIwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.119315Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721534. 
Ctx: { TraceId: 01jr0xhbw9akr0e6mvjwwjbjzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjg2MzJkZWEtMTI2NDVmZWQtY2FiNWFmMDEtMzBjZTU3NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.123855Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721536. Ctx: { TraceId: 01jr0xhbw08k0m262g0wpn5mab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Nzg3MjA3YmEtZTI5ODA5YTgtNDQxMjU5NGEtOWU5ZDIwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.126032Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721535. Ctx: { TraceId: 01jr0xhbvwfx9mykhst47z7cpg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE1ODAwNWQtYTgyYmY0NDMtZjgxNDY3OTgtNTkzZTcwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.126324Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721537. Ctx: { TraceId: 01jr0xhbwk5m6ztmak0kky7h5x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmIxMWU0YzAtNDc1ZDljYTMtMWViMzg1NGItNzg5NDRhZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.128047Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721538. Ctx: { TraceId: 01jr0xhbw9akr0e6mvjwwjbjzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjg2MzJkZWEtMTI2NDVmZWQtY2FiNWFmMDEtMzBjZTU3NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.136865Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721540. Ctx: { TraceId: 01jr0xhbx50c797mc5b387bhdy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQ4ODA3MDUtYWNlYTc4MjMtYjljMDY0NzAtYWZlNWIwNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.144071Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721541. Ctx: { TraceId: 01jr0xhbx50c797mc5b387bhdy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQ4ODA3MDUtYWNlYTc4MjMtYjljMDY0NzAtYWZlNWIwNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.146733Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721542. Ctx: { TraceId: 01jr0xhbx50c797mc5b387bhdy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQ4ODA3MDUtYWNlYTc4MjMtYjljMDY0NzAtYWZlNWIwNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.149551Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721539. Ctx: { TraceId: 01jr0xhbwk5m6ztmak0kky7h5x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmIxMWU0YzAtNDc1ZDljYTMtMWViMzg1NGItNzg5NDRhZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.151209Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721543. Ctx: { TraceId: 01jr0xhbxg0e4ess6h9xsageqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjg2MzJkZWEtMTI2NDVmZWQtY2FiNWFmMDEtMzBjZTU3NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.151840Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721544. 
Ctx: { TraceId: 01jr0xhbxmbq2etkc7hfxa4de7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Nzg3MjA3YmEtZTI5ODA5YTgtNDQxMjU5NGEtOWU5ZDIwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.154310Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721545. Ctx: { TraceId: 01jr0xhbxg0e4ess6h9xsageqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mjg2MzJkZWEtMTI2NDVmZWQtY2FiNWFmMDEtMzBjZTU3NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-04T17:28:20.155916Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721546. Ctx: { TraceId: 01jr0xhbxf5arkqn9yx297vm4c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDZkYzIzZjItNGFkYTczMTgtMzA0Y2NkMjMtMjAyZTFhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.157431Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721547. Ctx: { TraceId: 01jr0xhbxmbq2etkc7hfxa4de7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Nzg3MjA3YmEtZTI5ODA5YTgtNDQxMjU5NGEtOWU5ZDIwOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.160683Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721548. Ctx: { TraceId: 01jr0xhbxf5arkqn9yx297vm4c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDZkYzIzZjItNGFkYTczMTgtMzA0Y2NkMjMtMjAyZTFhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:28:20.163218Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721549. Ctx: { TraceId: 01jr0xhbxf5arkqn9yx297vm4c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDZkYzIzZjItNGFkYTczMTgtMzA0Y2NkMjMtMjAyZTFhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.163433Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721550. Ctx: { TraceId: 01jr0xhbxz6ety8sy4z752x5s0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE1ODAwNWQtYTgyYmY0NDMtZjgxNDY3OTgtNTkzZTcwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.167754Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721551. Ctx: { TraceId: 01jr0xhbxf5arkqn9yx297vm4c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDZkYzIzZjItNGFkYTczMTgtMzA0Y2NkMjMtMjAyZTFhMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:20.170540Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721552. Ctx: { TraceId: 01jr0xhbxz6ety8sy4z752x5s0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE1ODAwNWQtYTgyYmY0NDMtZjgxNDY3OTgtNTkzZTcwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:28:20.173917Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721553. Ctx: { TraceId: 01jr0xhbxz6ety8sy4z752x5s0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWE1ODAwNWQtYTgyYmY0NDMtZjgxNDY3OTgtNTkzZTcwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> BasicStatistics::TwoDatabases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2025-04-04T17:26:05.476906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:05.477241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:05.477383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019be/r3tmp/tmpkm6DQz/pdisk_1.dat 2025-04-04T17:26:05.848510Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12623, node 1 2025-04-04T17:26:06.079928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:06.079985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:06.080031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:06.080557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:06.083040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:06.173238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:06.173381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:06.187447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20512 2025-04-04T17:26:06.647921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.633517Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.672935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.673057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.700930Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.702823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.936424Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.936997Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.937390Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.937520Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.937589Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.937842Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.937927Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.938003Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.938071Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.109238Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.109314Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.122081Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.247086Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.288322Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.288420Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.313988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.314183Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.314388Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.314454Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.314493Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.314545Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.314633Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.314681Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.315523Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.340160Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.340276Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.346003Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.350966Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.351887Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.357202Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.374658Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.374739Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.374824Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.389954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.400696Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.400818Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.617271Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.770968Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.860026Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.839661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.839790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.855938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:12.114933Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.115166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:12.115395Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:12.115475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:12.115561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:12.115654Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:12.115754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:12.115843Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:12.115925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:12.116028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:12.116104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:12.116183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:12.155610Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037900;self_id=[2:2387:2883];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.155681Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2387:2883];tablet_id=72075186224037900;process= ... ode 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:19.848641Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:19.902705Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:19.902783Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:19.946874Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8520:6456], schemeshard count = 1 2025-04-04T17:28:22.083162Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:22.083221Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:28:22.083271Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:22.083313Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:22.086352Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:22.102132Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:22.102797Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:22.102910Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:22.103833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:22.117311Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:22.117497Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:22.118362Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8645:6524], server id = [2:8650:6529], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:22.118702Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8645:6524], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.119001Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8646:6525], server id = [2:8651:6530], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:22.119036Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8646:6525], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.119147Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8647:6526], server id = [2:8653:6532], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:22.119194Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8647:6526], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.120539Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8648:6527], server id = [2:8652:6531], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:22.120598Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8648:6527], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.121429Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8649:6528], server id = [2:8654:6533], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:22.121470Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8649:6528], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.125829Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:22.126640Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8645:6524], server id = [2:8650:6529], tablet id = 72075186224037899 2025-04-04T17:28:22.126679Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.127015Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:22.127485Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8646:6525], server id = [2:8651:6530], tablet id = 72075186224037900 2025-04-04T17:28:22.127512Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.128039Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8668:6544], server id = [2:8671:6546], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:22.128098Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8668:6544], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.128898Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:22.129646Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8669:6545], server id = [2:8673:6547], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:22.129694Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8669:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.130352Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:8647:6526], server id = [2:8653:6532], tablet id = 72075186224037901 2025-04-04T17:28:22.130373Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.131113Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:22.131475Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8648:6527], server id = [2:8652:6531], tablet id = 72075186224037902 2025-04-04T17:28:22.131497Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.131785Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:22.132214Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8674:6548], server id = [2:8677:6551], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:22.132258Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8674:6548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.132619Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8649:6528], server id = [2:8654:6533], tablet id = 72075186224037903 2025-04-04T17:28:22.132655Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.133196Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8679:6553], server id = [2:8683:6556], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:22.133236Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8679:6553], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.133382Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8681:6555], server id = [2:8684:6557], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:22.133410Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8681:6555], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:22.135357Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:22.135791Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8668:6544], server id = [2:8671:6546], tablet id = 72075186224037904 2025-04-04T17:28:22.135817Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.137300Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:22.137785Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8669:6545], server id = [2:8673:6547], tablet id = 72075186224037905 2025-04-04T17:28:22.137814Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.139000Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:22.139462Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8674:6548], server id = [2:8677:6551], tablet id = 72075186224037906 2025-04-04T17:28:22.139487Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.139735Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:22.140148Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8679:6553], server id = [2:8683:6556], tablet id = 72075186224037907 2025-04-04T17:28:22.140167Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.140535Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:22.140570Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-04-04T17:28:22.140786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:22.140966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:22.141280Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:22.142975Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8681:6555], server id = [2:8684:6557], tablet id = 72075186224037908 2025-04-04T17:28:22.143000Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:22.143608Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:22.160735Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8711:6580]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:22.160899Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:22.160936Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8711:6580], StatRequests.size() = 1 2025-04-04T17:28:22.300303Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmQ5NzdkNi02MDBjY2JmMC1hODFkYmJhNi1kNmUzYjdiYw==, TxId: 2025-04-04T17:28:22.300368Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmQ5NzdkNi02MDBjY2JmMC1hODFkYmJhNi1kNmUzYjdiYw==, TxId: ...
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T17:28:22.300883Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8719:6586]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:22.301136Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:22.301184Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:22.301326Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:22.304202Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:22.304277Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:22.304329Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:22.309927Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-04-04T17:27:59.632331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511053685488299:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:59.632398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:59.655670Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511053169990375:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:59.655853Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:59.829031Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:59.832065Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c74/r3tmp/tmpaykmIg/pdisk_1.dat 2025-04-04T17:27:59.990735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:59.990864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:59.992535Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:59.993338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:00.023179Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4767, node 1 2025-04-04T17:28:00.029035Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:00.029130Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:00.048361Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:00.048433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:00.051535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:00.060317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001c74/r3tmp/yandexGob2Sv.tmp 2025-04-04T17:28:00.060349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001c74/r3tmp/yandexGob2Sv.tmp 2025-04-04T17:28:00.060538Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001c74/r3tmp/yandexGob2Sv.tmp 2025-04-04T17:28:00.060698Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:00.112071Z INFO: TTestServer started on Port 7532 GrpcPort 4767 TClient is connected to server localhost:7532 PQClient connected to localhost:4767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:00.335008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:28:00.379656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:28:02.791696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511066570391358:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:02.791698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511066570391367:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:02.791804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:02.795095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:28:02.798197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511066570391406:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:02.798296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:02.811137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511066570391372:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:28:02.988234Z node 1 :TX_PROXY ERROR: Actor# [1:7489511066570391450:2820] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:03.014055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:28:03.021598Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511066570391471:2353], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:03.021928Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjEwYjkwODctNjVhOGYzYy0zYjZkNDM1Yi0zYjdlZTllNA==, ActorId: [1:7489511066570391355:2341], ActorState: ExecuteState, TraceId: 01jr0xgtyq9jvh5g0qsqb9w24w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:03.023825Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:03.097901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:03.176501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:28:03.389192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0xgvd51afyprx9w19hhczn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYyNjZkOTItYWUzNDMwMzMtODg4NTA1YmUtODg3NWUyYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489511070865359121:3097] 2025-04-04T17:28:04.632355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511053685488299:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:04.632463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:28:04.655914Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489511053169990375:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:04.655975Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok WaitRootIsUp 'Root'...
TClient::Ls request: Root 2025-04-04T17:28:09.071851Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489511053685488575:2146], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:09.072138Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489511053685488575:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-04T17:28:09.072262Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489511053685488575:2146], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489511057980456341:2458] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787680397 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:28:09.072372Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: F ... ctRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:22.347155Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489511104903836706:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:22.347200Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489511104903836706:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511117788739663:2838] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787694957 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:22.347222Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511152148480750:4951], recipient# [3:7489511104903836687:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:22.347345Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489511104903836706:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:22.347352Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511152148480751:4952], recipient# [3:7489511104903836687:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:22.347445Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489511104903836706:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511109198804487:2441] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787692304 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:22.347558Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511152148480752:4953], recipient# [3:7489511104903836687:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:22.567285Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7489511104903836706:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:28:22.567390Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7489511104903836706:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511117788739663:2838] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 
CreateStep: 1743787694957 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:22.567465Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7489511104903836706:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511117788739505:2736] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787694817 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:22.567740Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511152148480757:4956], recipient# [3:7489511152148480756:2601], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-04-04T17:28:22.568820Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489511104903836706:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:22.568930Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489511104903836706:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511109198804487:2441] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787692304 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, 
props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:22.569081Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511152148480760:4957], recipient# [3:7489511152148480759:2607], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:22.673419Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7489511104903836706:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:22.673580Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7489511104903836706:2127], cacheItem# { Subscriber: { Subscriber: [3:7489511117788739390:2687] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:22.673678Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7489511152148480776:4961], recipient# [3:7489511152148480775:2612], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TBlobStorageProxyTest::TestSingleFailureMirror >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: Trying to start YDB, gRPC: 63869, MsgBus: 7906 2025-04-04T17:25:10.445105Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510330110684245:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:10.445155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001383/r3tmp/tmp0oiWsJ/pdisk_1.dat 2025-04-04T17:25:10.899627Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:25:10.914680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:10.914789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:10.916616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63869, node 1 2025-04-04T17:25:11.042923Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:11.042957Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:11.042966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:11.043095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7906 TClient is connected to server localhost:7906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:25:11.654197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.672661Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:25:11.690101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:25:11.845357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:25:12.012158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:25:12.094319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:25:13.951359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510342995587915:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:13.951494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.226738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.261394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.338560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.382624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.411448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.442354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:25:14.506669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510347290555728:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.506765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.506941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510347290555733:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:25:14.511879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:25:14.522068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510347290555735:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:25:14.592571Z node 1 :TX_PROXY ERROR: Actor# [1:7489510347290555788:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:25:15.445400Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510330110684245:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:15.445488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:25:15.622681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.662444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.743211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.807931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.862693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.899674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.935566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.969505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-04-04T17:25:15.998286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-04-04T17:25:16.064279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8364, MsgBus: 4479 2025-04-04T17:25:18.326824Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510362097701738:2057];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:25:18.326926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001383/r3tmp/tmp5vDt7Y/pdisk_1.dat 2025-04-04T17:25:18.421769Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8364, node 2 2025-04-04T17:25:18.465737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:25:18.465829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:25:18.467612Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:25:18.490779Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:25:18.490803Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:25:18.490811Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:25:18.490925Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4479 TClient is connec ... ted, will use file: (empty maybe) 2025-04-04T17:28:02.111710Z node 18 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:02.111728Z node 18 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:02.111913Z node 18 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4892 TClient is connected to server localhost:4892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:03.120341Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:06.824789Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7489511063748612824:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:06.824912Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:28:07.465313Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:7489511089518417270:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:07.465488Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:07.496125Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS [] IsSuccess(): 1 GetStatus(): SUCCESS 2025-04-04T17:28:07.669566Z node 18 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: no keys are found in request's proto Trying to start YDB, gRPC: 2927, MsgBus: 20489 2025-04-04T17:28:08.912901Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7489511091893398798:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:08.913073Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001383/r3tmp/tmp9GXuWq/pdisk_1.dat 2025-04-04T17:28:09.080559Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:09.128732Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:09.128893Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:09.130497Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2927, node 19 2025-04-04T17:28:09.201502Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:09.201529Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:09.201544Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:09.201715Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20489 TClient is connected to server localhost:20489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:10.156522Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
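The node-18 run above ends with `TReadRowsRPC ReplyWithError: no keys are found in request's proto`: ReadRows rejects a request whose key list is empty before touching any table. Semantically, a valid request is a batched point lookup; a hedged YQL equivalent, where the table and key names are illustrative assumptions:

    DECLARE $keys AS List<Struct<Key: Uint64>>;      -- must contain at least one key tuple
    SELECT *
    FROM test_table                                  -- assumed table name
    WHERE Key IN (SELECT Key FROM AS_TABLE($keys));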
2025-04-04T17:28:13.912918Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7489511091893398798:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:13.913046Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:28:15.248854Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7489511121958170542:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:15.249044Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:15.265078Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:28:15.362195Z node 19 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Missing key columns: Key Trying to start YDB, gRPC: 3010, MsgBus: 27840 2025-04-04T17:28:16.473491Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7489511127577391849:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:16.473603Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001383/r3tmp/tmp4a40P5/pdisk_1.dat 2025-04-04T17:28:16.649578Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:16.688195Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:16.688323Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:16.690266Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3010, node 20 2025-04-04T17:28:16.762114Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:16.762140Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:16.762157Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:16.762352Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27840 TClient is connected to server localhost:27840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:17.798198Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
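Node 19 above fails the next validation step, `Missing key columns: Key`: every primary-key column of the target table must be present in each supplied key tuple. Node 20's run below adds the type check, rejecting Uint64 and Decimal(35,10) values for a key column declared as Decimal(22,9). A sketch of a schema these checks would apply to; all names are illustrative assumptions:

    CREATE TABLE test_table (
        Key Uint64,
        Key22 Decimal(22,9),          -- key values must match this type exactly
        Value Utf8,
        PRIMARY KEY (Key, Key22)
    );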
2025-04-04T17:28:21.473644Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7489511127577391849:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:21.473762Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:28:22.917419Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7489511153347196298:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:22.917584Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:22.941680Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:28:23.088012Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2025-04-04T17:28:23.096845Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2025-04-04T17:26:04.747038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.747496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.747642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001995/r3tmp/tmp59dCzw/pdisk_1.dat 2025-04-04T17:26:05.201084Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12477, node 1 2025-04-04T17:26:05.697779Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697875Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.698690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.707989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.823899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19226 2025-04-04T17:26:06.315921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.280170Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.318663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.318773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.346708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.348588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.572754Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.573405Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.573919Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.574075Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.574151Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.574425Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.574518Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.574629Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.574704Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.748124Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.748234Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.761595Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.877700Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:09.912985Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:09.913073Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:09.940302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:09.940514Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:09.940724Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:09.940837Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:09.940894Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:09.940966Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:09.941018Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:09.941073Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:09.942222Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:09.968462Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.968556Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.973622Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:09.978037Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:09.978668Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:09.982090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:09.999047Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:09.999104Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:09.999174Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.015758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.023470Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.023614Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.180108Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.310820Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.388550Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.399967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.400103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.482787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:11.856251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:11.856577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:11.856908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:11.857059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:11.857270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:11.857512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:11.857746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:11.857985Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:11.858222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:11.858467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:11.858693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:11.858911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:11.928204Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2387:2883];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:11.928325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2387:2883];tablet_id=72075186224037900;process= ... extTraversal 2025-04-04T17:28:22.987689Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:28:22.987733Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:22.987791Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:22.991696Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:23.008318Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:23.008904Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:23.009011Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:23.009969Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:23.022658Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:23.022840Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:23.023593Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8648:6522], server id = [2:8653:6527], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:23.024028Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8648:6522], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.024324Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8649:6523], server id = [2:8654:6528], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:23.024369Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8649:6523], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.024459Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8650:6524], server id = [2:8655:6529], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:23.024500Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8650:6524], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.025883Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8651:6525], server id = [2:8656:6530], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:23.025953Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8651:6525], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.027208Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8652:6526], server id = [2:8657:6531], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:23.027261Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8652:6526], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.031157Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:23.031826Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8648:6522], server id = [2:8653:6527], tablet id = 72075186224037899 2025-04-04T17:28:23.031862Z node 2 :STATISTICS 
DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.032552Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:23.033042Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8649:6523], server id = [2:8654:6528], tablet id = 72075186224037900 2025-04-04T17:28:23.033069Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.033318Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8671:6542], server id = [2:8674:6543], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:23.033377Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8671:6542], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.033585Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:23.033875Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8650:6524], server id = [2:8655:6529], tablet id = 72075186224037901 2025-04-04T17:28:23.033894Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.034614Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8675:6544], server id = [2:8677:6546], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:23.034675Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8675:6544], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.036025Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:23.036313Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8676:6545], server id = [2:8679:6548], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:23.036356Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8676:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.036625Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:23.036843Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8651:6525], server id = [2:8656:6530], tablet id = 72075186224037902 2025-04-04T17:28:23.036864Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.037240Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8652:6526], server id = [2:8657:6531], tablet id = 72075186224037903 2025-04-04T17:28:23.037260Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.037766Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8682:6551], server id = [2:8686:6554], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:23.037808Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8682:6551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.037923Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8684:6553], server id = [2:8687:6555], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:23.037954Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8684:6553], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.040346Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:23.040857Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8671:6542], server id = [2:8674:6543], tablet id = 72075186224037904 2025-04-04T17:28:23.040888Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 
2025-04-04T17:28:23.041283Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:23.041661Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8675:6544], server id = [2:8677:6546], tablet id = 72075186224037905 2025-04-04T17:28:23.041690Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.042488Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:23.042921Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8676:6545], server id = [2:8679:6548], tablet id = 72075186224037906 2025-04-04T17:28:23.042944Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.043992Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:23.044082Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:23.044112Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:23.044263Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:23.044437Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:23.044838Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:23.045108Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8682:6551], server id = [2:8686:6554], tablet id = 72075186224037907 2025-04-04T17:28:23.045142Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.047010Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8684:6553], server id = [2:8687:6555], tablet id = 72075186224037908 2025-04-04T17:28:23.047035Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.047289Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:23.063817Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8714:6578]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:23.064018Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:23.064077Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8714:6578], StatRequests.size() = 1 2025-04-04T17:28:23.180538Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTVlNmU4ZGEtOGUyNGZjMWQtYWI1NmU1ZjgtY2M0N2FiY2Q=, TxId: 2025-04-04T17:28:23.180594Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTVlNmU4ZGEtOGUyNGZjMWQtYWI1NmU1ZjgtY2M0N2FiY2Q=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T17:28:23.181064Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:23.181963Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2383:2880];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:28:23.222529Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:23.222594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:23.277257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8729:6588];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-04T17:28:23.524314Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8839:6683]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:23.524647Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:23.524709Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:23.528066Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:23.528146Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:23.528215Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:23.533166Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] |97.0%| [TA] $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2025-04-04T17:26:04.966033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.966428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.966557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019af/r3tmp/tmpVlOgRc/pdisk_1.dat 2025-04-04T17:26:05.328314Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31729, node 1 2025-04-04T17:26:05.697783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.698463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.824025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22611 2025-04-04T17:26:06.312035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.254457Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.282562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.282654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.313714Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.315999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.545277Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.545856Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.546429Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.546596Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.546684Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.546900Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.546988Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.547085Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.547158Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.725159Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.725275Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.741888Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.891134Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:09.935789Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:09.935885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:09.964953Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:09.965234Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:09.965401Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:09.965454Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:09.965547Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:09.965587Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:09.965629Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:09.965671Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:09.966620Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:09.990979Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.991099Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.997022Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.001282Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.001937Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.006466Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.026560Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.026613Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.026745Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.040329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.046211Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.046344Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.217839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.358600Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.447668Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.416346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.416464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.476622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:11.994182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.994310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.995243Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2546:3121]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:11.995414Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:11.995489Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2548:3123] 2025-04-04T17:26:11.995551Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2548:3123] 2025-04-04T17:26:11.996101Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2549:2995] 2025-04-04T17:26:11.996362Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2548:3123], server id = [2:2549:2995], tablet id = 72075186224037894, status = OK 2025-04-04T17:26:11.996513Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2549:2995], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:26:11.996573Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:11.996800Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:11.996876Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2546:3121], StatRequests.size() = 1 2025-04-04T17:26:12.015548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2553:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.015703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.016102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2558:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.022714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:26:12.207932Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:26:12.207996Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:26:12.292841Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2548:3123], schemeshard count = 1 2025-04-04T17:26:12.752440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 04-04T17:27:23.257949Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:25.673874Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:25.674159Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:27:28.935140Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:31.412657Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:31.413146Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:27:34.655754Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:36.794488Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:36.794764Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:27:40.156981Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:42.322733Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:42.323095Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:27:45.664305Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:48.140255Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:48.140584Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:27:51.406540Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:53.619216Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:53.619545Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:27:56.797008Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:59.226756Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:59.227075Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:02.474444Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:04.891223Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:04.891664Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:08.173714Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:10.460852Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 
2025-04-04T17:28:10.461145Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:13.784340Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:15.963983Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:15.964406Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:19.276833Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:20.447934Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:28:20.448047Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:20.448100Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:20.448139Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:28:21.841396Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:21.841902Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:21.896810Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-04T17:28:21.896950Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 184.000000s, at schemeshard: 72075186224037897 2025-04-04T17:28:21.897379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-04T17:28:21.911890Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:22.986420Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:22.986536Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:22.986590Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:22.986658Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:28:22.986701Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:22.987151Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:22.990581Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:22.993951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6725:4739], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:22.994055Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6734:4744], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:22.994164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:23.006167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:23.058152Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6739:4747], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:23.198512Z node 2 :TX_PROXY ERROR: Actor# [2:6832:4793] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:23.224464Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6861:4808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:23.224706Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:23.224823Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6863:4810] 2025-04-04T17:28:23.224888Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6863:4810] 2025-04-04T17:28:23.225255Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6864:4811] 2025-04-04T17:28:23.225385Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6863:4810], server id = [2:6864:4811], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:23.225481Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6864:4811], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:23.225540Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:23.225662Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:23.225732Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6861:4808], StatRequests.size() = 1 2025-04-04T17:28:23.359168Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2QzMTUzZTUtNzdhMDgzYzctMzRjNjBkN2YtYTcyNjY4OWY=, TxId: 2025-04-04T17:28:23.359244Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2QzMTUzZTUtNzdhMDgzYzctMzRjNjBkN2YtYTcyNjY4OWY=, TxId: 2025-04-04T17:28:23.359765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:23.373785Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:23.373859Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:23.417415Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:23.417533Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:23.470723Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6863:4810], schemeshard count = 1 2025-04-04T17:28:24.460081Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:24.460163Z node 2 :STATISTICS ERROR: [72075186224037894] IsColumnTable. traversal path [OwnerId: 72075186224037897, LocalPathId: 4] is not known to schemeshard 2025-04-04T17:28:24.460402Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:24.462200Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:24.470607Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmVkZjUyOTctZjVlNjVkNjYtMTUwYTYxY2YtZDU2YWEzZWU=, TxId: 2025-04-04T17:28:24.470680Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmVkZjUyOTctZjVlNjVkNjYtMTUwYTYxY2YtZDU2YWEzZWU=, TxId: 2025-04-04T17:28:24.471121Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:24.485151Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:24.485230Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2858:3270] 2025-04-04T17:28:24.486197Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6950:4866]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:24.490387Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:24.490461Z node 2 :STATISTICS ERROR: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-04-04T17:28:24.490530Z node 2 :STATISTICS DEBUG: ReplyFailed(), request id = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2025-04-04T17:26:09.186771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:09.187006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:09.187089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00192d/r3tmp/tmpUuBg4k/pdisk_1.dat 2025-04-04T17:26:09.556558Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29645, node 1 2025-04-04T17:26:09.795029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:09.795086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:09.795133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:09.795662Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:09.798054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.879618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.879748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.893154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15010 2025-04-04T17:26:10.391666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:13.343520Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:13.375540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.375648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.402878Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:13.404534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.622092Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.622647Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.623055Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.623212Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.623303Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.623556Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.623643Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.623739Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.623824Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.791748Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.791827Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.804325Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.929094Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:13.964632Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:13.964703Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:13.991732Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:13.991915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:13.992147Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:13.992212Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:13.992272Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:13.992321Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:13.992367Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:13.992416Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:13.993475Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:14.018630Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:14.018714Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:14.023192Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:14.027165Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:14.027326Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:14.034447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:14.052532Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:14.052656Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:14.052724Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:14.063151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:14.074890Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:14.074998Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:14.256781Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:14.418645Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:14.494649Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:15.439218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.439338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.459967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:15.721238Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.721438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:15.721648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:15.721729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:15.721835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:15.721921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:15.721999Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:15.722080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:15.722163Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:15.722273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:15.722353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:15.722429Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:15.756534Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.756598Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process= ... Execute 2025-04-04T17:28:24.185964Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:24.186041Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-04-04T17:28:24.249316Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8702:6553] 2025-04-04T17:28:24.249576Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8702:6553], schemeshard id = 72075186224037897 2025-04-04T17:28:24.249652Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8656:6524], server id = [2:8703:6554], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:24.249757Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8703:6554] 2025-04-04T17:28:24.249837Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8703:6554], node id = 2, have schemeshards count = 1, need schemeshards count = 0 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-04-04T17:28:24.325556Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:24.325648Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:24.326549Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:24.340225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:24.340387Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:24.341216Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8708:6559], server id = [2:8713:6564], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:24.341549Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8708:6559], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.341830Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8709:6560], server id = [2:8714:6565], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:24.341881Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8709:6560], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.343829Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8710:6561], server id = [2:8716:6567], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:24.343892Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8710:6561], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.344087Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8711:6562], server id = [2:8715:6566], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:24.344130Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8711:6562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.345321Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8712:6563], server id = [2:8717:6568], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:24.345374Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8712:6563], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.349598Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:24.349964Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8708:6559], server id = [2:8713:6564], tablet id = 72075186224037899 2025-04-04T17:28:24.350003Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.351919Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:24.352534Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8731:6579], server id = [2:8734:6580], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:24.352590Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8731:6579], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.352916Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8709:6560], server id = [2:8714:6565], tablet id = 72075186224037900 2025-04-04T17:28:24.352946Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.354063Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:24.354402Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:24.354747Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8710:6561], server id = [2:8716:6567], tablet id = 72075186224037901 2025-04-04T17:28:24.354767Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.355221Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8735:6581], server id = [2:8736:6582], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:24.355290Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8735:6581], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.355731Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8711:6562], server id = [2:8715:6566], tablet id = 72075186224037902 2025-04-04T17:28:24.355762Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.356458Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:24.356684Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8738:6584], server id = [2:8741:6587], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:24.356725Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8738:6584], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.356989Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8739:6585], server id = [2:8742:6588], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:24.357022Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8739:6585], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.357761Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8712:6563], server id = [2:8717:6568], tablet id = 72075186224037903 2025-04-04T17:28:24.357782Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.358757Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8744:6590], server id = [2:8749:6594], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:24.358797Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8744:6590], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.360666Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:24.360833Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8731:6579], server id = [2:8734:6580], tablet id = 72075186224037904 2025-04-04T17:28:24.360854Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.361864Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:24.362103Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8735:6581], server id = [2:8736:6582], tablet id = 72075186224037905 2025-04-04T17:28:24.362122Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.362613Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:24.362955Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8738:6584], server id = [2:8741:6587], tablet id = 72075186224037906 2025-04-04T17:28:24.362977Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.363268Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:24.363509Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8739:6585], server id = [2:8742:6588], tablet id = 72075186224037907 2025-04-04T17:28:24.363536Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.363818Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:24.363870Z node 2 :STATISTICS DEBUG: Send aggregate statistics response 
to node: 2 2025-04-04T17:28:24.364165Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:24.364373Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:24.364682Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:24.366582Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8744:6590], server id = [2:8749:6594], tablet id = 72075186224037908 2025-04-04T17:28:24.366604Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.367047Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:24.384705Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8774:6615]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:24.384912Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:24.384950Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8774:6615], StatRequests.size() = 1 2025-04-04T17:28:24.492734Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTk3OTdhOGMtOTI1NjBiYzYtMWUyZDQxOGItMzI0NDZiZTI=, TxId: 2025-04-04T17:28:24.492802Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTk3OTdhOGMtOTI1NjBiYzYtMWUyZDQxOGItMzI0NDZiZTI=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T17:28:24.493428Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8782:6621]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:24.493841Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:24.494177Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:24.494262Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:24.497876Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:24.497932Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:24.498004Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:24.504009Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2025-04-04T17:26:04.990992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.991288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.991404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00199f/r3tmp/tmpAWSMbi/pdisk_1.dat 2025-04-04T17:26:05.293303Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24349, node 1 2025-04-04T17:26:05.697026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697078Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.697616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.823935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15003 2025-04-04T17:26:06.311544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.540024Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.572834Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.572944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.601377Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.603304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.836126Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.837069Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.837631Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.837791Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.837891Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.838127Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.838236Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.838395Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.838479Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.014721Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.014841Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.028115Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.178207Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.218659Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.218741Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.247201Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.247409Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.247639Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.247701Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.247760Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.247822Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.247880Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.247929Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.248987Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.277431Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.277554Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.282650Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:10.287195Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:10.287402Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:10.295801Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.317406Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.317473Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.317550Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.328291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.336060Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.336201Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.532661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.691893Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.768127Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.801687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.801820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.823513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:12.103332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.103587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:12.103897Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:12.104061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:12.104206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:12.104338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:12.104465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:12.104664Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:12.104840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:12.104978Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:12.105090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:12.105222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:12.148061Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.148131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-04-04T17:28:24.191191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:24.191259Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:24.191373Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:24.192541Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:24.192611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:24.194062Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:24.255356Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:24.255524Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:24.256378Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8698:6552], server id = [2:8703:6557], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:24.256699Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8698:6552], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.256853Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8699:6553], server id = [2:8704:6558], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:24.256900Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8699:6553], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.258569Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8700:6554], server id = [2:8705:6559], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:24.258630Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8700:6554], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.259151Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8701:6555], server id = [2:8706:6560], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:24.259205Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8701:6555], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.259985Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8702:6556], server id = [2:8710:6564], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:24.260042Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8702:6556], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.264782Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:24.265391Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8698:6552], server id = [2:8703:6557], tablet id = 72075186224037899 2025-04-04T17:28:24.265433Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.266099Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:24.266563Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8699:6553], server id = [2:8704:6558], tablet 
id = 72075186224037900 2025-04-04T17:28:24.266591Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.267107Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:24.267762Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8721:6572], server id = [2:8724:6574], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:24.267839Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8721:6572], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.268269Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8700:6554], server id = [2:8705:6559], tablet id = 72075186224037901 2025-04-04T17:28:24.268301Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.269214Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8723:6573], server id = [2:8727:6576], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:24.269261Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8723:6573], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.269481Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:24.270285Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8726:6575], server id = [2:8728:6577], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:24.270329Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8726:6575], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.270612Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8701:6555], server id = [2:8706:6560], tablet id = 72075186224037902 2025-04-04T17:28:24.270629Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.271201Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:24.271611Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8702:6556], server id = [2:8710:6564], tablet id = 72075186224037903 2025-04-04T17:28:24.271632Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.271741Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8730:6579], server id = [2:8734:6583], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:24.271773Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8730:6579], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.273055Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8736:6585], server id = [2:8739:6587], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:24.273098Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8736:6585], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:24.275123Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:24.275598Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8721:6572], server id = [2:8724:6574], tablet id = 72075186224037904 2025-04-04T17:28:24.275620Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.275999Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:24.276438Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8723:6573], server id = [2:8727:6576], tablet id = 72075186224037905 2025-04-04T17:28:24.276459Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.276523Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:24.276763Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8726:6575], server id = [2:8728:6577], tablet id = 72075186224037906 2025-04-04T17:28:24.276780Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.277234Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:24.277464Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8730:6579], server id = [2:8734:6583], tablet id = 72075186224037907 2025-04-04T17:28:24.277483Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.277542Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:24.277568Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:24.277748Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:24.277878Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:24.278042Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:24.279689Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8736:6585], server id = [2:8739:6587], tablet id = 72075186224037908 2025-04-04T17:28:24.279708Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:24.280132Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:24.296941Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8764:6608]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:24.297080Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:24.297115Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8764:6608], StatRequests.size() = 1 2025-04-04T17:28:24.409872Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjI4MTE0MDYtYzFiYThiMDEtOTYxOWU3MGItNjdkMWVjNzY=, TxId: 2025-04-04T17:28:24.409965Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjI4MTE0MDYtYzFiYThiMDEtOTYxOWU3MGItNjdkMWVjNzY=, TxId: 2025-04-04T17:28:24.410570Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:24.433459Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8774:6614] 2025-04-04T17:28:24.433578Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8650:6520], server id = [2:8774:6614], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:24.433692Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8774:6614], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-04T17:28:24.433811Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8775:6615] 
2025-04-04T17:28:24.433922Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8775:6615], schemeshard id = 72075186224037897 2025-04-04T17:28:24.457856Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:24.457919Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:24.544374Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8778:6618]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:24.544667Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:24.544734Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:24.547310Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:24.547356Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:24.547392Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:24.552131Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |97.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2025-04-04T17:26:09.154628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:09.154892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:09.155000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00193d/r3tmp/tmpMnYH3V/pdisk_1.dat 2025-04-04T17:26:09.551718Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63688, node 1 2025-04-04T17:26:09.789311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:09.789357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:09.789416Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:09.789867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:09.791978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.870968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.871096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.884084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20881 2025-04-04T17:26:10.347345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.965186Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:12.996470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.996573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.023978Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:13.025527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.246105Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.246723Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.247224Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.247373Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.247454Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.247695Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.247781Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.247891Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.247961Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.420857Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.421009Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.434017Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.576555Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:13.619893Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:13.619995Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:13.649384Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:13.649517Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:13.649687Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:13.649733Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:13.649773Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:13.649813Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:13.649854Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:13.649893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:13.650667Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:13.676079Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:13.676167Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:13.679888Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:13.683411Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:13.683603Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:13.689673Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:13.708996Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:13.709061Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:13.709129Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:13.720251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:13.727331Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:13.727466Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:13.899173Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:14.085073Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:14.161970Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:15.122721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.122828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.140559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:15.396701Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.396989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:15.397329Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:15.397476Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:15.397639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:15.397778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:15.397905Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:15.398033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:15.398182Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:15.398333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:15.398504Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:15.398635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:15.448824Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.448902Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process= ... = [2:8529:6459], schemeshard count = 1 2025-04-04T17:28:24.123608Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:24.123657Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:28:24.123692Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:24.123753Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:24.126590Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:24.142165Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:24.142735Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:24.142842Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:24.143681Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:24.143739Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets do not exist in Hive anymore; tablet count = 3 2025-04-04T17:28:24.143783Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:25.233451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:25.233555Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:25.233915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:25.247395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:25.247597Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:25.248272Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8692:6546], server id = [2:8697:6551], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:25.248749Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8692:6546], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.249040Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8693:6547], server id = [2:8698:6552], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:25.249080Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8693:6547], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.250354Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8694:6548], server id = [2:8699:6553], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:25.250403Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8694:6548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.251931Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8695:6549], server id = [2:8700:6554], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:25.251974Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8695:6549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.252298Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8696:6550], server id = [2:8703:6557], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:25.252346Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8696:6550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.256395Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:25.256933Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8692:6546], server id = [2:8697:6551], tablet id = 72075186224037899 2025-04-04T17:28:25.256977Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.257972Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8714:6566], server id = [2:8718:6567], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:25.258039Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8714:6566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.259251Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:25.260312Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8693:6547], server id = [2:8698:6552], tablet id = 72075186224037900 2025-04-04T17:28:25.260337Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.260587Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:25.260820Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:25.261434Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8694:6548], server id = [2:8699:6553], tablet id = 72075186224037901 2025-04-04T17:28:25.261467Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.261898Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8720:6569], server id = [2:8721:6570], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:25.262168Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8720:6569], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.262413Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8695:6549], server id = [2:8700:6554], tablet id = 72075186224037902 2025-04-04T17:28:25.262440Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.262608Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:25.263611Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8722:6571], server id = [2:8725:6574], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:25.263677Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8722:6571], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.264357Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8696:6550], server id = [2:8703:6557], tablet id = 72075186224037903 2025-04-04T17:28:25.264379Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.264530Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8723:6572], server id = [2:8726:6575], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:25.264582Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8723:6572], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.265738Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8727:6576], server id = [2:8731:6579], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:25.265780Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8727:6576], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.267280Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:25.267574Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8714:6566], server id = [2:8718:6567], tablet id = 72075186224037904 2025-04-04T17:28:25.267607Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.269124Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:25.269413Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8720:6569], server id = [2:8721:6570], tablet id = 72075186224037905 2025-04-04T17:28:25.269435Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.270661Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:25.270932Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8722:6571], server id = [2:8725:6574], tablet id = 72075186224037906 2025-04-04T17:28:25.270952Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.271364Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:25.271565Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8723:6572], server id = [2:8726:6575], tablet id = 72075186224037907 2025-04-04T17:28:25.271584Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.271916Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:25.271951Z node 2 :STATISTICS DEBUG: Send aggregate statistics response 
to node: 2 2025-04-04T17:28:25.272146Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8727:6576], server id = [2:8731:6579], tablet id = 72075186224037908 2025-04-04T17:28:25.272166Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.272231Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:25.272397Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:25.272719Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:25.274986Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:25.294192Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8758:6602]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:25.294423Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:25.294470Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8758:6602], StatRequests.size() = 1 2025-04-04T17:28:25.450296Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmY2YmYwMTYtZWU5YWFmYWQtOWEzZWY2LWNhM2FjZjA1, TxId: 2025-04-04T17:28:25.450362Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmY2YmYwMTYtZWU5YWFmYWQtOWEzZWY2LWNhM2FjZjA1, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T17:28:25.451061Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8766:6608]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:25.451274Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:25.451764Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:25.451826Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:25.455013Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:25.455083Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:25.455141Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:25.460728Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet >> TBlobStorageProxyTest::TestGetMultipart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2025-04-04T17:26:06.217422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:06.217643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:06.217728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00197b/r3tmp/tmpjY83PC/pdisk_1.dat 2025-04-04T17:26:06.526220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62527, node 1 2025-04-04T17:26:06.730706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:06.730764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:06.730803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:06.731304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:06.737038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:06.817627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:06.817788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:06.831761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3292 2025-04-04T17:26:07.335580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:10.609066Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:10.635960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.636058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.662828Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:10.664363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.871964Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.872626Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.873214Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.873380Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.873496Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.873729Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.873838Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.873957Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.874040Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.046926Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:11.047088Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:11.060105Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:11.178889Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:11.215937Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:11.216016Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:11.242685Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:11.242870Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:11.243099Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:11.243161Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:11.243219Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:11.243276Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:11.243347Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:11.243408Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:11.244387Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:11.268927Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:11.269039Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:11.273898Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:11.279538Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:11.280463Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:11.284620Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:11.304672Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:11.304735Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:11.304818Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:11.321323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:11.329158Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:11.329328Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:11.537199Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:11.658528Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:11.757301Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:12.758972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.759099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.776149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:13.107981Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:13.108247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:13.108555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:13.108703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:13.108850Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:13.109029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:13.109165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:13.109293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:13.109440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:13.109567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:13.109691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:13.109813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2389:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:13.158782Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2394:2887];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:13.158871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2394:2887];tablet_id=72075186224037900;process=T ... 625848Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:25.625959Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:25.627110Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:25.641559Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:25.641810Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:25.642682Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8608:6529], server id = [2:8613:6534], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:25.643084Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8608:6529], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.644829Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8609:6530], server id = [2:8614:6535], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:25.644912Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8609:6530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.645632Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8610:6531], server id = [2:8615:6536], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:25.645695Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8610:6531], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.646698Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8611:6532], server id = [2:8616:6537], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:25.646762Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8611:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.647682Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8612:6533], server id = [2:8619:6540], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:25.647770Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8612:6533], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.652017Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:25.652739Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8608:6529], server id = [2:8613:6534], tablet id = 72075186224037899 2025-04-04T17:28:25.652791Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.653727Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:25.654050Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8631:6549], server id = [2:8633:6550], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:25.654136Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8631:6549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.654805Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8609:6530], server id = [2:8614:6535], tablet id = 72075186224037900 
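The [TQueryBase] RunDataQuery entries in this log embed the YQL statement that persists aggregated column statistics, but the two List declarations appear without their angle-bracketed element types. A plausible reconstruction follows; the element types Uint32 and String are assumptions inferred from the column_tag and data columns being written, not something this log states:

DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $stat_type AS Uint32;
DECLARE $column_tags AS List<Uint32>; -- element type assumed
DECLARE $data AS List<String>;        -- element type assumed
UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
       ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);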
2025-04-04T17:28:25.654842Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.656339Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:25.656984Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8610:6531], server id = [2:8615:6536], tablet id = 72075186224037901 2025-04-04T17:28:25.657018Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.657357Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:25.657794Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8634:6551], server id = [2:8636:6552], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:25.657875Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8634:6551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.658304Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8611:6532], server id = [2:8616:6537], tablet id = 72075186224037902 2025-04-04T17:28:25.658336Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.659511Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:25.660198Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8638:6554], server id = [2:8641:6557], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:25.660278Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8638:6554], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.660672Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8612:6533], server id = [2:8619:6540], tablet id = 72075186224037903 2025-04-04T17:28:25.660705Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.660882Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8640:6556], server id = [2:8643:6558], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:25.660938Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8640:6556], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.662496Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8645:6560], server id = [2:8647:6562], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:25.662559Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8645:6560], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.663271Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:25.663996Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8631:6549], server id = [2:8633:6550], tablet id = 72075186224037904 2025-04-04T17:28:25.664020Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.665437Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:25.666309Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8634:6551], server id = [2:8636:6552], tablet id = 72075186224037905 2025-04-04T17:28:25.666333Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.667044Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:25.667301Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8638:6554], server id = [2:8641:6557], tablet id = 72075186224037906 2025-04-04T17:28:25.667320Z node 2 
:STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.667609Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:25.667924Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8640:6556], server id = [2:8643:6558], tablet id = 72075186224037907 2025-04-04T17:28:25.667944Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.668271Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:25.668308Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:25.668446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:25.668534Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:25.668730Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8645:6560], server id = [2:8647:6562], tablet id = 72075186224037908 2025-04-04T17:28:25.668749Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.669026Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:25.706283Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:25.706514Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:28:25.707100Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8662:6571], server id = [2:8663:6572], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:25.707198Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8662:6571], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:25.708715Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:25.708854Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:25.709012Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:25.709152Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:25.709470Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:25.710952Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8662:6571], server id = [2:8663:6572], tablet id = 72075186224037900 2025-04-04T17:28:25.710979Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:25.711577Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:25.729982Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8681:6590]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:25.730341Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:25.730390Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8681:6590], StatRequests.size() = 1 2025-04-04T17:28:25.869712Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmVmMzY4MzMtZGIyMGFkNjYtMzgzNmNkZGUtNjViZWE2ZjA=, TxId: 2025-04-04T17:28:25.869777Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmVmMzY4MzMtZGIyMGFkNjYtMzgzNmNkZGUtNjViZWE2ZjA=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T17:28:25.870469Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8690:6596]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:25.870669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:25.871133Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:25.871186Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:25.874731Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:25.874796Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:25.874868Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:25.880983Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> TBlobStorageProxyTest::TestDoubleGroups >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending |97.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> TBlobStorageProxyTest::TestPersistence >> TBlobStorageProxyTest::TestNormal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2025-04-04T17:26:05.023499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:05.023744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:05.023832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00198d/r3tmp/tmpbUwe7o/pdisk_1.dat 2025-04-04T17:26:05.400401Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12939, node 1 2025-04-04T17:26:05.697685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697789Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.698330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.794259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.824303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12598 2025-04-04T17:26:06.314727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.128098Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.154388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.154502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.181916Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.183753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.412640Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.413205Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.413731Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.413883Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.413957Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.414140Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.414219Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.414322Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.414374Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.586054Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.586182Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.599687Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.745025Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:09.797473Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:09.797566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:09.834397Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:09.834582Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:09.834783Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:09.834847Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:09.834913Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:09.834956Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:09.834996Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:09.835039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:09.836048Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:09.859827Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.859936Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.863612Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:09.866850Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:09.866977Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:09.873293Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:09.890462Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:09.890533Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:09.890587Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:09.900776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:09.912994Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:09.913146Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.096854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.252075Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.328730Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.416009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.416145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.478389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:11.589670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:11.589931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:11.590263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:11.590458Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:11.590607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:11.590743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:11.590899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:11.591024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:11.591175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:11.591327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:11.591443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:11.591566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:11.620672Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:11.620772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 04-04T17:26:26.994816Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:21.804103Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:28:21.804190Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:21.804237Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:21.804279Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:28:23.267771Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:28:23.267860Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 232.000000s, at schemeshard: 72075186224037897 2025-04-04T17:28:23.268227Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-04T17:28:23.281882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:24.381845Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:24.381931Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:24.381976Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:24.382028Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:28:24.382072Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:24.382484Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:24.385909Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:24.389543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6980:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:24.389644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6990:5169], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:24.389730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:24.403041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:24.456722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6994:5172], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:24.663020Z node 2 :TX_PROXY ERROR: Actor# [2:7090:5218] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:24.690023Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7119:5233]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:24.690444Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:24.690550Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7121:5235] 2025-04-04T17:28:24.690630Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7121:5235] 2025-04-04T17:28:24.690943Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7122:5236] 2025-04-04T17:28:24.691074Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7122:5236], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:24.691133Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:24.691267Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7121:5235], server id = [2:7122:5236], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:24.691318Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:24.691382Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7119:5233], StatRequests.size() = 1 2025-04-04T17:28:24.815630Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzMwMWJmYzQtMjUzNDgyMmMtNjMwZmEyOTUtOTQyZmMwNmM=, TxId: 2025-04-04T17:28:24.815693Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzMwMWJmYzQtMjUzNDgyMmMtNjMwZmEyOTUtOTQyZmMwNmM=, TxId: 2025-04-04T17:28:24.816061Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:24.830033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:24.830098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:24.883810Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:24.883886Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:24.948166Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7121:5235], schemeshard count = 1 2025-04-04T17:28:25.874868Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:25.874961Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:25.878044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:25.894553Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:25.895107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:25.895189Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T17:28:25.908760Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:25.931008Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T17:28:25.931947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T17:28:25.932044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T17:28:25.945339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:28:27.083835Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:27.083960Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:27.084013Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:27.084536Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:27.098334Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:27.098685Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:27.098741Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:27.099398Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:27.113516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:27.113736Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:27.114335Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7248:5313], server id = [2:7249:5314], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:27.114444Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7248:5313], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.118283Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:27.118410Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:27.118749Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7248:5313], server id = [2:7249:5314], tablet id = 72075186224037899 2025-04-04T17:28:27.118794Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.118876Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:27.119059Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:27.119400Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:27.121454Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:27.138234Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7269:5333]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:27.138575Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:27.138616Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7269:5333], StatRequests.size() = 1 2025-04-04T17:28:27.251934Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODViMjc2NjgtY2YyYjUwODAtMjNlZjFmM2ItZDliMDI1Ng==, TxId: 2025-04-04T17:28:27.252001Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODViMjc2NjgtY2YyYjUwODAtMjNlZjFmM2ItZDliMDI1Ng==, TxId: 2025-04-04T17:28:27.252471Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:27.266407Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:27.266484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2800:3219] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-04-04T17:28:25.331778Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000cf5/r3tmp/tmpwuPF0t//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-04T17:28:25.335490Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> AnalyzeColumnshard::AnalyzeStatus [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2025-04-04T17:26:09.405219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:09.405563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:09.405686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001922/r3tmp/tmpjzI8Ag/pdisk_1.dat 2025-04-04T17:26:09.798501Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64546, node 1 2025-04-04T17:26:10.025175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:10.025227Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:10.025271Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:10.025774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:10.031776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:10.113190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.113336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.126740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61033 2025-04-04T17:26:10.625026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:13.361253Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:13.384893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.384969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.411091Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:13.412426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.609982Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.610589Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.611012Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.611144Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.611203Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.611438Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.611508Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.611588Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.611641Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.783745Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.783837Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.796368Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.926356Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:13.965435Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:13.965504Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:13.992125Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:13.992302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:13.992527Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:13.992599Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:13.992681Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:13.992741Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:13.992828Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:13.992891Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:13.994003Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:14.021435Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:14.021556Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:14.026770Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:14.030160Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:14.030327Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:14.037113Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:14.055318Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:14.055372Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:14.055427Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:14.063539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:14.070363Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:14.070479Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:14.232294Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:14.462921Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:14.529648Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:15.619926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.620072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.645121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:15.936085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.936300Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:15.936612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:15.936722Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:15.936863Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:15.936954Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:15.937027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:15.937111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:15.937225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:15.937305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:15.937397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:15.937520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2382:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:15.981256Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.981331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process= ... TEvStatisticsRequest send, client id = [2:8651:6524], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.771794Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8652:6525], server id = [2:8657:6530], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:23.771865Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8652:6525], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.772245Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8653:6526], server id = [2:8658:6531], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:23.772287Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8653:6526], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.773119Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8654:6527], server id = [2:8659:6532], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:23.773172Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8654:6527], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.777355Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:23.777730Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8650:6523], server id = [2:8655:6528], tablet id = 72075186224037899 2025-04-04T17:28:23.777767Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.778632Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8674:6543], server id = [2:8675:6544], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:23.778684Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8674:6543], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.779234Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:23.780006Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8651:6524], server id = [2:8656:6529], tablet id = 72075186224037900 2025-04-04T17:28:23.780028Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.780672Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:23.781082Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:23.781371Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8652:6525], server id = [2:8657:6530], tablet id = 72075186224037901 2025-04-04T17:28:23.781392Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.781575Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8653:6526], server id = [2:8658:6531], tablet id = 72075186224037902 2025-04-04T17:28:23.781600Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.781702Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:23.782015Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8677:6545], server id = [2:8681:6549], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:23.782065Z node 
2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8677:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.782391Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8680:6548], server id = [2:8683:6551], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:23.782428Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8680:6548], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.782621Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8654:6527], server id = [2:8659:6532], tablet id = 72075186224037903 2025-04-04T17:28:23.782649Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.783528Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8682:6550], server id = [2:8686:6553], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:23.783569Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8682:6550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.783776Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8684:6552], server id = [2:8687:6554], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:23.783810Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8684:6552], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:23.784530Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:23.785253Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8674:6543], server id = [2:8675:6544], tablet id = 72075186224037904 2025-04-04T17:28:23.785293Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.787833Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:23.788557Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8677:6545], server id = [2:8681:6549], tablet id = 72075186224037905 2025-04-04T17:28:23.788583Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.789052Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:23.789385Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8680:6548], server id = [2:8683:6551], tablet id = 72075186224037906 2025-04-04T17:28:23.789414Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.790055Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:23.790347Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8682:6550], server id = [2:8686:6553], tablet id = 72075186224037907 2025-04-04T17:28:23.790369Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.790482Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:23.790527Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:23.790675Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:23.791086Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8684:6552], server id = [2:8687:6554], tablet id = 72075186224037908 2025-04-04T17:28:23.791138Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:23.815296Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:23.815561Z node 2 
:STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:28:24.486647Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 3 2025-04-04T17:28:24.486724Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:26.676615Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:26.676849Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:27.244094Z node 2 :STATISTICS INFO: Node 3 is unavailable 2025-04-04T17:28:27.244177Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:27.244317Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-04T17:28:27.244339Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:27.244405Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:27.244459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:27.244894Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:27.258299Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:27.258482Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-04T17:28:27.258888Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8814:6620], server id = [2:8815:6621], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:27.258979Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8814:6620], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.260301Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:27.260387Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:27.260582Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:27.260742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:27.261144Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:27.263000Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8814:6620], server id = [2:8815:6621], tablet id = 72075186224037900 2025-04-04T17:28:27.263028Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.263714Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:27.281685Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8833:6639]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:27.281939Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:27.281973Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8833:6639], StatRequests.size() = 1 2025-04-04T17:28:27.397388Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTY4YTk1OWMtYjUxZmI2NTMtOTdhNmYxYWQtOWJlZjIzYTM=, TxId: 2025-04-04T17:28:27.397439Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTY4YTk1OWMtYjUxZmI2NTMtOTdhNmYxYWQtOWJlZjIzYTM=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-04-04T17:28:27.397931Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8842:6645]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:27.398172Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:27.398213Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:27.398435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:27.401077Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:27.401127Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:27.401175Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:27.407142Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2025-04-04T17:26:05.490138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:05.490533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:05.490684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001988/r3tmp/tmpLCqlIh/pdisk_1.dat 2025-04-04T17:26:05.903453Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6652, node 1 2025-04-04T17:26:06.127306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:06.127377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:06.127438Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:06.128016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:06.130898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:06.217350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:06.217533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:06.235724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12600 2025-04-04T17:26:06.701727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:10.044062Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:10.078175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.078303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.106375Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:10.108257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.323488Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.323992Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.324335Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.324475Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.324528Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.324755Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.324833Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.324922Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.324978Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.500067Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.500185Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.513172Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.625365Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.659699Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.659770Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.688327Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.688522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.688741Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.688822Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.688879Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.688927Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.688997Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.689057Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.690134Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.714423Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.714561Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.719864Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.724660Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.725311Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.729256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.745982Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.746033Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.746089Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.757416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.762961Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.763086Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.977090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:11.109582Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:11.198426Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:12.218910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.219063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.240533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:12.509311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.509573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:12.509902Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:12.510049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:12.510170Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:12.510363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:12.510493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:12.510625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:12.510763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:12.510887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:12.511029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:12.511200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:12.568998Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.569101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process=T ... 4-04T17:28:27.901011Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:27.902125Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:27.902203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:27.903432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:27.957878Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:27.958059Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-04-04T17:28:27.958823Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8595:6520], server id = [2:8602:6527], tablet id = 72075186224037903 2025-04-04T17:28:27.958878Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.959451Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8688:6585], server id = [2:8693:6590], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:27.959580Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8688:6585], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.959802Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8689:6586], server id = [2:8694:6591], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:27.959854Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8689:6586], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.960868Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8690:6587], server id = [2:8695:6592], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:27.960916Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8690:6587], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.961491Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8691:6588], server id = [2:8696:6593], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:27.961539Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8691:6588], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.961840Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8692:6589], server id = [2:8697:6594], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:27.961873Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8692:6589], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.962862Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:27.963750Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8688:6585], server id = [2:8693:6590], tablet id = 72075186224037899 2025-04-04T17:28:27.963783Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.964829Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:27.965050Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:27.965954Z node 
2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8689:6586], server id = [2:8694:6591], tablet id = 72075186224037900 2025-04-04T17:28:27.965995Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.966547Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8691:6588], server id = [2:8696:6593], tablet id = 72075186224037902 2025-04-04T17:28:27.966580Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.966811Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:27.967076Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8702:6599], server id = [2:8703:6600], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:27.967148Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8702:6599], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.967697Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8705:6602], server id = [2:8708:6605], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:27.967757Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8705:6602], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.968800Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8690:6587], server id = [2:8695:6592], tablet id = 72075186224037901 2025-04-04T17:28:27.968855Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.969617Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8706:6603], server id = [2:8709:6606], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:27.969674Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8706:6603], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.970084Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8710:6607], server id = [2:8712:6608], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:27.970154Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8710:6607], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.971694Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:27.972222Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:27.973018Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8702:6599], server id = [2:8703:6600], tablet id = 72075186224037904 2025-04-04T17:28:27.973048Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.973288Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:27.973795Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8705:6602], server id = [2:8708:6605], tablet id = 72075186224037905 2025-04-04T17:28:27.973825Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.974259Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8692:6589], server id = [2:8697:6594], tablet id = 72075186224037903 2025-04-04T17:28:27.974288Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.974357Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:27.974562Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8716:6612], server id = [2:8718:6614], tablet id = 72075186224037908, status = 
OK 2025-04-04T17:28:27.974628Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8716:6612], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:27.974774Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:27.975440Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8706:6603], server id = [2:8709:6606], tablet id = 72075186224037906 2025-04-04T17:28:27.975465Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.975582Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8710:6607], server id = [2:8712:6608], tablet id = 72075186224037907 2025-04-04T17:28:27.975609Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.976006Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:27.976052Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:27.976337Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:27.976533Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:27.976794Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:27.979254Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8716:6612], server id = [2:8718:6614], tablet id = 72075186224037908 2025-04-04T17:28:27.979287Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:27.980055Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:28.001026Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8736:6632]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:28.001216Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:28.001252Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8736:6632], StatRequests.size() = 1 2025-04-04T17:28:28.137876Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzZjNzQwZmUtZjY4YzAzYjgtZWQ0YzA2NWMtMWU4Y2NmMmY=, TxId: 2025-04-04T17:28:28.137949Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzZjNzQwZmUtZjY4YzAzYjgtZWQ0YzA2NWMtMWU4Y2NmMmY=, TxId: 2025-04-04T17:28:28.138611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:28.162591Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8746:6638] 2025-04-04T17:28:28.162747Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8747:6639] 2025-04-04T17:28:28.162836Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8611:6534], server id = [2:8746:6638], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:28.162922Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8746:6638], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-04-04T17:28:28.163031Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8747:6639], schemeshard id = 72075186224037897 2025-04-04T17:28:28.179926Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.180002Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:28.266912Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8750:6642]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:28.267230Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:28.267303Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:28.270154Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:28.270215Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:28.270287Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:28.276635Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2025-04-04T17:27:55.037235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:55.037867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:55.038117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:27:55.040004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:55.040219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:55.040492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001679/r3tmp/tmpBAdJkU/pdisk_1.dat 2025-04-04T17:27:55.440243Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17457, node 1 TClient is connected to server localhost:14149 2025-04-04T17:27:55.791469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:55.791521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:55.791555Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:55.791807Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:02.316817Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:02.317248Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:02.317578Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:02.318484Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:02.318568Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:02.318881Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001679/r3tmp/tmpOYZe7R/pdisk_1.dat 2025-04-04T17:28:02.585423Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13576, node 3 TClient is connected to server localhost:13490 2025-04-04T17:28:02.866766Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:02.866807Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:02.866826Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:02.866973Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:08.960244Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:767:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:08.960676Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:08.961035Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:08.961910Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:764:2353], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:08.962262Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:08.962396Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001679/r3tmp/tmpLfZ3KQ/pdisk_1.dat 2025-04-04T17:28:09.241092Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3404, node 5 TClient is connected to server localhost:22302 2025-04-04T17:28:12.541223Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:12.541292Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:12.541331Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:12.541627Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:12.562603Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:12.562776Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:12.590161Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-04-04T17:28:12.590838Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD location { id: 5 host: "::1" port: 12001 } 2025-04-04T17:28:20.321386Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:849:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:20.321703Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:20.322008Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:20.322874Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:846:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:20.323087Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:20.323318Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001679/r3tmp/tmpuhFQ0E/pdisk_1.dat 2025-04-04T17:28:20.645551Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8218, node 8 TClient is connected to server localhost:13017 2025-04-04T17:28:23.807903Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:23.807974Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:23.808013Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:23.808899Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:23.809391Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1337:2703]) [8:1601:2708] 2025-04-04T17:28:23.809964Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2025-04-04T17:28:23.823805Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2025-04-04T17:28:23.823913Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-04-04T17:28:23.824233Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2025-04-04T17:28:23.824341Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-04-04T17:28:23.824572Z node 8 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2025-04-04T17:28:23.824670Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-04-04T17:28:23.824833Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-04-04T17:28:23.826806Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-04-04T17:28:23.833388Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([10:1583:2365]) [8:1644:2713] 2025-04-04T17:28:23.838538Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [10:1582:2365] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 
} TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Pr ... 8:23.888222Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2025-04-04T17:28:23.888252Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-04T17:28:23.888275Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-04T17:28:23.888297Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 1) 2025-04-04T17:28:23.888325Z node 8 :HIVE TRACE: HIVE#72057594037968897 Tablet 72075186224037888.0 has priority 1.000002048 2025-04-04T17:28:23.888358Z node 8 :HIVE DEBUG: HIVE#72057594037968897 [FBN] Finding best node for tablet PersQueue.72075186224037888.Leader.0 2025-04-04T17:28:23.888400Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 family {PersQueue.72075186224037888.Leader.0 Booting} 2025-04-04T17:28:23.888484Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected usage 0.000003877 of node 10 2025-04-04T17:28:23.888527Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 node 8 is not alive 2025-04-04T17:28:23.888555Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2025-04-04T17:28:23.888630Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2025-04-04T17:28:23.888687Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 10 2025-04-04T17:28:23.888754Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 10) 2025-04-04T17:28:23.888804Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-04-04T17:28:23.888925Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-04T17:28:23.889015Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-04T17:28:23.889080Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-04T17:28:23.889175Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-04-04T17:28:23.889346Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 10 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 
2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:28:23.901984Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-04-04T17:28:23.902059Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-04T17:28:23.902346Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [10:1582:2365] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } TabletType: PersQueue Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 1 } SuggestedGeneration: 1 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-04-04T17:28:23.902831Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 10 Cookie 72075186224037888 2025-04-04T17:28:23.967681Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-04-04T17:28:23.967817Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [10:1582:2365] 2025-04-04T17:28:23.967915Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 10) 2025-04-04T17:28:23.967979Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-04-04T17:28:23.968120Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-04-04T17:28:23.968225Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-04-04T17:28:23.968288Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-04-04T17:28:23.968381Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-04-04T17:28:23.968463Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-04-04T17:28:23.968574Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-04-04T17:28:23.968620Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-04T17:28:23.968857Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2025-04-04T17:28:23.968939Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-04-04T17:28:23.968998Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-04-04T17:28:23.969035Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-04-04T17:28:23.994583Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [8:1336:2702] {EvTabletCreationResult Status: OK TabletID: 72075186224037888}} 2025-04-04T17:28:23.994664Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-04-04T17:28:27.100947Z node 8 :HIVE DEBUG: 
HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 10: Status: 2 2025-04-04T17:28:27.101059Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Execute 2025-04-04T17:28:27.101125Z node 8 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-04-04T17:28:27.101240Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-04-04T17:28:27.101893Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2025-04-04T17:28:27.102023Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 10) 2025-04-04T17:28:27.102086Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-04-04T17:28:27.102194Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-04-04T17:28:27.102314Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-04-04T17:28:27.102395Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 10 2025-04-04T17:28:27.102461Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2025-04-04T17:28:27.102507Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-04T17:28:27.102556Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-04-04T17:28:27.102827Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxKillNode(10)::Execute 2025-04-04T17:28:27.102924Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-04-04T17:28:27.102966Z node 8 :HIVE TRACE: Node(10) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2025-04-04T17:28:27.103028Z node 8 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 10) 2025-04-04T17:28:27.103083Z node 8 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [8:1644:2713] 2025-04-04T17:28:27.103137Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(10): waiting 3600.000000s 2025-04-04T17:28:27.103538Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:1583:2365]) [8:1644:2713] 2025-04-04T17:28:27.106755Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:2042:2735]) [8:2043:2740] 2025-04-04T17:28:27.113967Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([8:2042:2735]) [8:2043:2740] 2025-04-04T17:28:27.117289Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([11:2018:2365]) [8:2077:2743] 2025-04-04T17:28:27.121423Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [11:2017:2365] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: 
Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-04-04T17:28:27.121551Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(11)::Execute 2025-04-04T17:28:27.121697Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:27.121764Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T17:28:27.121815Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-04T17:28:27.121866Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-04-04T17:28:27.121911Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-04-04T17:28:27.122024Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:27.122629Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 11 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 8 host: "::1" port: 12001 } >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2025-04-04T17:26:07.975337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:07.975671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:07.975799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00194c/r3tmp/tmp4NVT8y/pdisk_1.dat 2025-04-04T17:26:08.370901Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24749, node 1 2025-04-04T17:26:08.606180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:08.606265Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:08.606313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:08.606834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:08.613601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:08.702363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:08.702519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:08.718735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28964 2025-04-04T17:26:09.216853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.171959Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:12.198847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.198947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:12.225318Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:12.227694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:12.431892Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.432412Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.433000Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.433135Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.433328Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.433451Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.433548Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.433619Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.433683Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.603380Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.603472Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:12.615876Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:12.726567Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.760547Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:12.760623Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:12.783878Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:12.785006Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:12.785234Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:12.785288Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:12.785341Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:12.785403Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:12.785513Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:12.785572Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:12.785998Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:12.813815Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:12.813959Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:12.818935Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:12.823791Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2618] 2025-04-04T17:26:12.823979Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1905:2618], schemeshard id = 72075186224037897 2025-04-04T17:26:12.828649Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:12.844867Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:12.844908Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:12.844982Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:12.855204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:12.860424Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:12.860526Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:13.043499Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:13.194117Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:13.280215Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:14.057905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2235:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.058043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:14.072120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:14.173721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:14.173934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:14.174260Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:14.174393Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:14.174521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:14.174614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:14.174737Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:14.174845Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:14.174935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:14.175039Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:14.175118Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:14.175192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2319:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:14.195583Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:14.195682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ase 2025-04-04T17:28:25.922628Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:25.926371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6983:5166], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:25.926496Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6993:5171], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:25.926594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:25.939570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:25.989977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6997:5174], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:26.209557Z node 2 :TX_PROXY ERROR: Actor# [2:7093:5220] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:26.238325Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7122:5235]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:26.238654Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:26.238760Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7124:5237] 2025-04-04T17:28:26.238835Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7124:5237] 2025-04-04T17:28:26.239286Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7125:5238] 2025-04-04T17:28:26.239490Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7125:5238], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:26.239560Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:26.239783Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7124:5237], server id = [2:7125:5238], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:26.239876Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:26.239981Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7122:5235], StatRequests.size() = 1 2025-04-04T17:28:26.428199Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmY5ZDMxYzMtZTJhN2FkNWYtODU1ODgyOTQtY2NjZDBiZWQ=, TxId: 2025-04-04T17:28:26.428282Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmY5ZDMxYzMtZTJhN2FkNWYtODU1ODgyOTQtY2NjZDBiZWQ=, TxId: 2025-04-04T17:28:26.428811Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:26.445439Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:26.445503Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:26.489258Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:26.489356Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:26.564913Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7124:5237], schemeshard count = 1 2025-04-04T17:28:27.502196Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:27.502299Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:27.504635Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:27.520837Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:27.521336Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:27.521406Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T17:28:27.535391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:27.546552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-04-04T17:28:27.548554Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7203:5287] 2025-04-04T17:28:27.549206Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_ENQUEUED 2025-04-04T17:28:27.551070Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7205:5288]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 4], [OwnerId: 72075186224037897, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:05Z
NavigateType: Analyze
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 1970-01-01T00:00:00Z
TraversalPathId: 
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
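The block above is a flat key/value dump of the aggregator's in-memory state, where indented lines are list entries belonging to the preceding key. When asserting on such dumps in a harness, it can be convenient to parse them into a dict first; a minimal sketch, assuming the dump is captured as plain text (the helper name and ".items" convention are made up, not a YDB API):

# Parse the flat "Key: Value" aggregator dump into a dict so a test can
# assert on fields such as ForceTraversals or TraversalRound.
def parse_aggregator_dump(text: str) -> dict:
    state, current_key = {}, None
    for raw in text.splitlines():
        if not raw.strip() or raw.lstrip().startswith("----"):
            continue  # skip blanks and the "---- StatisticsAggregator ----" banner
        if raw[:1] in (" ", "\t") and current_key is not None:
            # Indented lines are list items under the last key, e.g. the
            # schemeshard ids under "SchemeShards: 1".
            state.setdefault(current_key + ".items", []).append(raw.strip())
            continue
        key, _, value = raw.partition(":")
        current_key = key.strip()
        state[current_key] = value.strip()
    return state

dump = """\
---- StatisticsAggregator ----
Database: /Root/Database
SchemeShards: 1
    72075186224037897
ForceTraversals: 1
    1970-01-01T00:00:05Z
TraversalRound: 0
"""
state = parse_aggregator_dump(dump)
assert state["Database"] == "/Root/Database"
assert state["SchemeShards.items"] == ["72075186224037897"]
assert state["TraversalRound"] == "0"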
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-04T17:28:27.552183Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T17:28:27.552290Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T17:28:27.565897Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:28:28.734899Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:28.735034Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:28.735092Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.735661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:28.749402Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:28.749845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:28.749928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:28.750867Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:28.764256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:28.764434Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:28.764910Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7255:5317], server id = [2:7256:5318], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:28.764997Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7255:5317], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:28.768402Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:28.768494Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:28.768765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:28.768945Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:28.769118Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7255:5317], server id = [2:7256:5318], tablet id = 72075186224037899 2025-04-04T17:28:28.769158Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:28.769343Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:28.772355Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:28.792651Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7276:5337]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:28.792873Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:28.792929Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7276:5337], StatRequests.size() = 1 2025-04-04T17:28:28.917745Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2RlMTRhOGMtOGQ4MGEyODItNTMyYTkwMzgtNzFlY2YwNTQ=, TxId: 2025-04-04T17:28:28.917827Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2RlMTRhOGMtOGQ4MGEyODItNTMyYTkwMzgtNzFlY2YwNTQ=, TxId: 2025-04-04T17:28:28.918670Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:28.933444Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.933530Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:949:2754] 2025-04-04T17:28:28.934737Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7288:5345] 2025-04-04T17:28:28.935282Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse.
Status STATUS_NO_OPERATION |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> THealthCheckTest::ShardsLimit800 [GOOD] >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] >> TBlobStorageProxyTest::TestVPutVGet >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] Test command err: 2025-04-04T17:27:13.530121Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510857863476466:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:13.530325Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:13.547568Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489510855668278737:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:13.548269Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:13.678721Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:13.694760Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00146b/r3tmp/tmpMtnu3b/pdisk_1.dat 2025-04-04T17:27:13.840666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:13.861878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:13.861970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:13.866655Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:13.867888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23318, node 1 2025-04-04T17:27:13.907693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:13.907763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:13.913382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:13.933385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/00146b/r3tmp/yandex2kKVo7.tmp 2025-04-04T17:27:13.933414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/00146b/r3tmp/yandex2kKVo7.tmp 
2025-04-04T17:27:13.933627Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/00146b/r3tmp/yandex2kKVo7.tmp 2025-04-04T17:27:13.933748Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:13.970624Z INFO: TTestServer started on Port 24463 GrpcPort 23318 TClient is connected to server localhost:24463 PQClient connected to localhost:23318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:14.153901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:27:14.189716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:27:16.151824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510870748379495:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:16.151883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510870748379522:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:16.151964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:16.155095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:27:16.158505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510870748379558:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:16.158574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:16.170985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510870748379524:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:27:16.336897Z node 1 :TX_PROXY ERROR: Actor# [1:7489510870748379602:2805] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:16.361727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:16.362327Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489510870748379620:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:16.362570Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjVlZTc4YjMtNjQ4NTg3NTAtYjExNDU3YTctY2U4MTY0OGY=, ActorId: [1:7489510870748379492:2340], ActorState: ExecuteState, TraceId: 01jr0xfdd73gvw9tsppk1xf7wj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:16.364195Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489510868553180964:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:16.364310Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:16.364517Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWY2YzhhNDctNjRlODYxOTMtY2MwZmU1NzMtZWVkMzJjZDE=, ActorId: [2:7489510868553180939:2307], ActorState: ExecuteState, TraceId: 01jr0xfdhc8n9hv58q7xp8zp01, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:16.364915Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:16.439653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:16.513695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:27:16.734896Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0xfdv3672twry314zgqtw8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTlkZWM5ODUtZTY3NTY4NWUtMTFhNDdiYjUtODlmZTA0OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489510870748379977:3081] 2025-04-04T17:27:18.530229Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510857863476466:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:18.530353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:27:18.547605Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489510855668278737:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:18.547705Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:27:22.586892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCrea ... ed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-04-04T17:28:26.953705Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-04-04T17:28:27.589033Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715701:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (6541068412312944787, "Root", "00415F536F757263655F37", 1743787708530, 1743787708530, 0, 13); 2025-04-04T17:28:28.698720Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715707. Ctx: { TraceId: 01jr0xhm48bgcfy7540mmc40s5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=MTU0YzNkMzktMWUxMjFjNzEtZDBmZTNhMTctMjQzMDA1YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:28:28.711639Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-04-04T17:28:28.711663Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:28:28.711699Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-04-04T17:28:28.711723Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-04-04T17:28:28.711856Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7489511178089696407:4027], Recipient [9:7489511160909826131:3363]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7489511178089696406:4027] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-04T17:28:28.711944Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7489511178089696406:4027], Recipient [9:7489511160909826131:3363]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_7" 2025-04-04T17:28:28.712020Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7489511160909826131:3363], Recipient [9:7489511178089696406:4027]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-04T17:28:28.712052Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_7 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-04-04T17:28:28.712102Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7489511178089696406:4027], Recipient [9:7489511160909826131:3363]: NActors::TEvents::TEvPoison 2025-04-04T17:28:28.712248Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7489511117960151063:2070], Recipient [9:7489511178089696406:4027]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-04-04T17:28:28.712278Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:28:28.715098Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7489511117960151289:2280], Recipient [9:7489511178089696406:4027]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=MmNkNGJiN2YtZWNjYzJhOTctNzQ0ZTE3YTItZTZlNmY1NmY=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-04-04T17:28:28.715138Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:28:28.881145Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7489511117960151289:2280], Recipient [9:7489511178089696406:4027]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=MmNkNGJiN2YtZWNjYzJhOTctNzQ0ZTE3YTItZTZlNmY1NmY=" PreparedQuery: "60b12418-49e8189c-4ccb75a3-7b44613b" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jr0xhmea62ev6899rq75reyh" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1743787708530 } items { uint64_value: 1743787708530 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 102 2025-04-04T17:28:28.881327Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-04-04T17:28:28.881352Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) GetOldSeqNo 2025-04-04T17:28:28.881485Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7489511178089696446:4027], Recipient [9:7489511160909826130:3362]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1000 Status: OK ServerId: [9:7489511178089696406:4027] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-04T17:28:28.881585Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271187968, Sender [9:7489511178089696406:4027], Recipient [9:7489511160909826130:3362]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000A_Source_7" } PipeClient { RawX1: 7489511178089696446 RawX2: 38654709691 } } 2025-04-04T17:28:28.881629Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) OnPartitionChosen 2025-04-04T17:28:28.881730Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7489511178089696447:4027], Recipient [9:7489511160909826131:3363]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7489511178089696406:4027] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-04-04T17:28:28.881749Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7489511178089696406:4027], Recipient [9:7489511160909826130:3362]: NActors::TEvents::TEvPoison 2025-04-04T17:28:28.881889Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7489511178089696406:4027], Recipient [9:7489511160909826131:3363]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-04-04T17:28:28.882023Z node 
9 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [9:7489511160909826131:3363], Recipient [9:7489511178089696406:4027]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-04-04T17:28:28.882099Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) Update the table 2025-04-04T17:28:28.882550Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7489511178089696406:4027], Recipient [9:7489511160909826131:3363]: NActors::TEvents::TEvPoison 2025-04-04T17:28:29.017603Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [9:7489511117960151289:2280], Recipient [9:7489511178089696406:4027]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=MmNkNGJiN2YtZWNjYzJhOTctNzQ0ZTE3YTItZTZlNmY1NmY=" PreparedQuery: "185dda24-8c60ffef-8d51d399-e110f673" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 84 2025-04-04T17:28:29.017661Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS Received TEvChooseResult: 2025-04-04T17:28:29.017706Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 1 2025-04-04T17:28:29.017737Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7489511178089696406:4027] (SourceId=A_Source_7, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 6541068412312944787 AND Topic = "Root" AND ProducerId = "00415F536F757263655F37" 2025-04-04T17:28:29.202330Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715710. Ctx: { TraceId: 01jr0xhmkh63ap8jntd7jrx1f6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NmJlNTk1MGYtYzRmMWU0Yy1hN2NhOWY1MC1mMmI5ZmM0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:29.753945Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:28:29.754118Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:29.788373Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7489511182384663866:2707] TxId: 281474976715711. Ctx: { TraceId: 01jr0xhn3497440ps42y5hwcc3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=NzYyMmM5OWMtYmNmNzM5YTMtZmViNzQ5OTQtM2EzNzFkNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-04-04T17:28:29.788570Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7489511182384663871:2707], TxId: 281474976715711, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=9&id=NzYyMmM5OWMtYmNmNzM5YTMtZmViNzQ5OTQtM2EzNzFkNTM=. 
TraceId : 01jr0xhn3497440ps42y5hwcc3. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [9:7489511182384663866:2707], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> TComputeScheduler::ResourceWeight [GOOD] >> TKqpScanData::UnboxedValueSize |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TKqpScanData::EmptyColumns >> TKqpScanData::FailOnUnsupportedPgType >> TKqpScanData::ArrowToUnboxedValueConverter >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> TKqpScanData::EmptyColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2025-04-04T17:28:04.157405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:04.157819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:04.157876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166f/r3tmp/tmpKj53ro/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20836, node 1 TClient is connected to server localhost:62728 2025-04-04T17:28:07.554397Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:449:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:07.554729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:07.554939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166f/r3tmp/tmpIw3Bo1/pdisk_1.dat 2025-04-04T17:28:07.819024Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25403, node 2 TClient is connected to server localhost:26675 2025-04-04T17:28:08.209410Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:08.209460Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:08.209489Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:08.209933Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:14.851265Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:14.851421Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:14.851465Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:14.852487Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:14.852809Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:14.852866Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166f/r3tmp/tmpyWn4Ve/pdisk_1.dat 2025-04-04T17:28:15.153162Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18241, node 4 TClient is connected to server localhost:25375 2025-04-04T17:28:15.563338Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:15.563422Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:15.563477Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:15.564428Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:22.378954Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:22.379300Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:22.379453Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:22.380626Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:22.380977Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:22.381048Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166f/r3tmp/tmpTTJPxp/pdisk_1.dat 2025-04-04T17:28:22.698952Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28607, node 6 TClient is connected to server localhost:9959 2025-04-04T17:28:22.949306Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:22.949349Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:22.949372Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:22.949816Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:29.825896Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:29.826171Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:29.826421Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:29.826962Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:29.827346Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:29.827441Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166f/r3tmp/tmpYg87ot/pdisk_1.dat 2025-04-04T17:28:30.139717Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15625, node 8 TClient is connected to server localhost:16222 2025-04-04T17:28:30.464662Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:30.464717Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:30.464743Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:30.465562Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2025-04-04T17:26:04.780893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.781205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.781322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00199e/r3tmp/tmpr5Q5Un/pdisk_1.dat 2025-04-04T17:26:05.200639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18013, node 1 2025-04-04T17:26:05.696980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697035Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697079Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.697577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.704130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.797177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.797324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.824096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14892 2025-04-04T17:26:06.328794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:08.988908Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.028469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.028578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.058147Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.059805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.297390Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.300624Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.301235Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.301407Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.301516Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.301808Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.301932Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.302050Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.302134Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.480255Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.480382Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.493459Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.613649Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:09.652858Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:09.652958Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:09.686438Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:09.687114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:09.687355Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:09.687418Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:09.687477Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:09.687529Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:09.687581Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:09.687663Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:09.688362Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:09.721687Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.721798Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:09.728364Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:09.735513Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:09.736386Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:09.743015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:09.765481Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:09.765531Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:09.765636Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:09.789814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:09.800661Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:09.800821Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.043310Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.197619Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.296475Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.400352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.400488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.476218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:11.928887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.929170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.930552Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2547:3124]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:26:11.930748Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:26:11.930824Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2549:3126] 2025-04-04T17:26:11.931544Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2549:3126] 2025-04-04T17:26:11.932177Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2550:2995] 2025-04-04T17:26:11.932418Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2549:3126], server id = [2:2550:2995], tablet id = 72075186224037894, status = OK 2025-04-04T17:26:11.932581Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2550:2995], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:26:11.932639Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:26:11.932881Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:26:11.932962Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2547:3124], StatRequests.size() = 1 2025-04-04T17:26:11.976028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2554:3130], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.976129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.976485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2559:3135], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.982521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:26:12.172526Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:26:12.172608Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:26:12.257561Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2549:3126], schemeshard count = 1 2025-04-04T17:26:12.720102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 24037894] EvPropagateTimeout 2025-04-04T17:27:52.402529Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:52.402883Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:27:55.826024Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:27:58.036463Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:27:58.036751Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:01.261163Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:03.639835Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:03.640096Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:06.935775Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:09.424934Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:09.425262Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:12.816686Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:15.132176Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:15.132523Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:18.582221Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:20.758024Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:20.758305Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:24.111451Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:25.301248Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:28:25.301317Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:25.301350Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:25.301383Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:28:26.701698Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:26.701985Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:26.755270Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path 
count: 2, at schemeshard: 72075186224037897 2025-04-04T17:28:26.755349Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 204.000000s, at schemeshard: 72075186224037897 2025-04-04T17:28:26.755649Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-04T17:28:26.768646Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:27.864026Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:27.864106Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:27.864166Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:27.864223Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:28:27.864265Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:27.864713Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:27.890632Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:27.918668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6635:4682], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:27.918765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6645:4687], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:27.918876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:27.929182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:27.975887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6649:4690], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:28.138119Z node 2 :TX_PROXY ERROR: Actor# [2:6747:4738] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:28.157901Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6776:4753]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:28.158061Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:28.158125Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6778:4755] 2025-04-04T17:28:28.158171Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6778:4755] 2025-04-04T17:28:28.158583Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6779:4756] 2025-04-04T17:28:28.158689Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6779:4756], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:28.158740Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:28.158867Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6778:4755], server id = [2:6779:4756], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:28.158923Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:28.158977Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6776:4753], StatRequests.size() = 1 2025-04-04T17:28:28.293629Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmUxYzdhZTEtODA2ZmRlZTUtNGQ2NThhNzktMzA3MWI2NjU=, TxId: 2025-04-04T17:28:28.293688Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmUxYzdhZTEtODA2ZmRlZTUtNGQ2NThhNzktMzA3MWI2NjU=, TxId: 2025-04-04T17:28:28.294220Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:28.312431Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:28.312506Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:28.366293Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:28.366390Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:28.432334Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6778:4755], schemeshard count = 1 2025-04-04T17:28:29.381310Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:29.381389Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:28:29.381425Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.570798Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:30.593263Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:30.593394Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:28:30.593446Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.593760Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:30.596125Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:30.609932Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjQ3YmMwNWMtNjgxMzc1ZmUtYTBkNzQyMzctZTJiMjk1MGQ=, TxId: 2025-04-04T17:28:30.610001Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjQ3YmMwNWMtNjgxMzc1ZmUtYTBkNzQyMzctZTJiMjk1MGQ=, TxId: 2025-04-04T17:28:30.610575Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:30.624507Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.624592Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2753:3239] 2025-04-04T17:28:30.625139Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6900:4831]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:30.628194Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:30.628257Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:28:30.632572Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:30.632634Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:28:30.632707Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:28:30.634917Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-04-04T17:28:30.635188Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2025-04-04T17:26:10.798598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:10.798825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:10.798909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001910/r3tmp/tmpIP1WQr/pdisk_1.dat 2025-04-04T17:26:11.144789Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4657, node 1 2025-04-04T17:26:11.350016Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:11.350059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:11.350092Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:11.350503Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:11.355431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:11.439118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:11.439271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:11.452590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14344 2025-04-04T17:26:11.953470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:14.583106Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:14.609123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:14.609218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:14.635687Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:14.637081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:14.836691Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.837178Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.837503Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.837663Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.837748Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.837951Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.838014Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.838083Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.838138Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:15.007963Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:15.008083Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:15.021193Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:15.148868Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:15.183236Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:15.183324Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:15.212258Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:15.212943Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:15.213183Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:15.213265Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:15.213336Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:15.213456Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:15.213508Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:15.213566Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:15.214408Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:15.241656Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:15.241770Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:15.247857Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:15.253645Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:15.254562Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:15.259782Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:15.275530Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:15.275578Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:15.275639Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:15.286972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:15.295646Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-04-04T17:26:15.295774Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-04-04T17:26:15.446340Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-04-04T17:26:15.616968Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-04-04T17:26:15.726148Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-04-04T17:26:16.718616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:16.718746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:16.731682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:16.808341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:16.808511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:16.808725Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:16.808814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:16.808930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:16.809059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:16.809142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:16.809230Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:16.809332Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:16.809422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:16.809513Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:16.809588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:16.827753Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:16.827838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:28.304147Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7119:5234], server id = [2:7120:5235], tablet id = 72075186224037894 2025-04-04T17:28:28.304225Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7245:5311] 2025-04-04T17:28:28.304276Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7245:5311] 2025-04-04T17:28:28.354596Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:28.354701Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:28.355330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:28.356249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:28.356675Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-04T17:28:28.356728Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-04T17:28:28.356770Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-04T17:28:28.356809Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-04T17:28:28.356851Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743787708287455 2025-04-04T17:28:28.356890Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-04T17:28:28.356976Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-04T17:28:28.357040Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:28.357160Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-04T17:28:28.357253Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-04T17:28:28.357345Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-04T17:28:28.357410Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:28.357559Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.358903Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:28.358979Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:28.359477Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:28.359583Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:28.360673Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:28.360738Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:28.362850Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:28.428446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:28.428653Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:28.429224Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7292:5342], server id = [2:7293:5343], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:28.429335Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7292:5342], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:28.433309Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:28.433397Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:28.433542Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:28.433710Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:28.433962Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:28.436523Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7292:5342], server id = [2:7293:5343], tablet id = 72075186224037899 2025-04-04T17:28:28.436567Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:28.437149Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:28.461350Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7313:5362]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:28.461814Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:28.461864Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7313:5362], StatRequests.size() = 1 2025-04-04T17:28:28.594830Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzU2NDdhNjEtNzFjODExZmYtNjZlMDhhNTgtNTQzNWIzZDE=, TxId: 2025-04-04T17:28:28.594888Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzU2NDdhNjEtNzFjODExZmYtNjZlMDhhNTgtNTQzNWIzZDE=, TxId: 2025-04-04T17:28:28.595289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:28.608499Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7323:5368] 2025-04-04T17:28:28.608768Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7323:5368], schemeshard id = 72075186224037897 2025-04-04T17:28:28.608926Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7245:5311], server id = [2:7324:5369], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:28.608978Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7324:5369] 2025-04-04T17:28:28.609102Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7324:5369], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
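The [TQueryBase] RunDataQuery entry above carries the YQL statement that persists the collected column statistics, printed on a single line. Reflowed here for readability (a reconstruction, not additional test output; the capture prints the container types as a bare "List", so the List<Uint32>/List<String> element types below are an assumption):

    -- Statement as logged by RunDataQuery above, reflowed; element types assumed.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- logged as bare "List"
    DECLARE $data AS List<String>;         -- logged as bare "List"
    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

The two VALUES tuples apparently write one row per analyzed column of the table at path = { OwnerId: 72075186224037897 LocalId: 4 }; the same statement reappears in each traversal round below.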
2025-04-04T17:28:28.624293Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.624366Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:28.713559Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7335:5372] 2025-04-04T17:28:28.714395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2798:3218] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T17:28:28.714454Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2798:3218] 2025-04-04T17:28:28.714519Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T17:28:29.091190Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:28:29.091300Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:29.693343Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:29.693404Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:29.693435Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:28:30.670786Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:30.670995Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:30.671054Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.671767Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:30.685540Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:30.685963Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:30.686042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:30.686574Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:30.700339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:30.700564Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:28:30.701257Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7407:5415], server id = [2:7408:5416], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:30.701396Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7407:5415], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:30.703034Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:30.703148Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:30.703320Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:30.703546Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:30.703881Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:30.706753Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7407:5415], server id = [2:7408:5416], tablet id = 72075186224037899 2025-04-04T17:28:30.706801Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:30.707392Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:30.728382Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTQyOGU5MzktMTBjNjgyYjItYzVlMmNiNGYtMzE4NmU2Njc=, TxId: 2025-04-04T17:28:30.728469Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTQyOGU5MzktMTBjNjgyYjItYzVlMmNiNGYtMzE4NmU2Njc=, TxId: 2025-04-04T17:28:30.729053Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:30.765323Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.765412Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2798:3218]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::ResourceWeight [GOOD]
Test command err: 510 500 1510 1500 990 1000 1000 1000
>> TBlobStorageProxyTest::TestNormal [GOOD]
>> TBlobStorageProxyTest::TestNormalMirror
>> AnalyzeColumnshard::Analyze [GOOD]
|97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD]
|97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD]
|97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD]
|97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD]
|97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD]
>> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD]
>> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD]
Test command err: 2025-04-04T17:26:05.209877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:05.210168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:05.210348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00199a/r3tmp/tmpOo1nGi/pdisk_1.dat 2025-04-04T17:26:05.532947Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25674, node 1 2025-04-04T17:26:05.741380Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.741429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.741461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.741797Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.743695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.826041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.826142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.839506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63513 2025-04-04T17:26:06.339444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.561303Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.598215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.598365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.627152Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.629229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.865034Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.865517Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.865942Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.866064Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.866123Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.866358Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.866434Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.866528Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.866583Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.037820Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.037993Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.051217Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.161580Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.196941Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.197009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.223238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.223436Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.223640Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.223701Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.223753Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.223800Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.223853Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.223912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.225015Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.253015Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.253125Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.258205Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:10.262813Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:10.263005Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:10.271701Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.293828Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.293911Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.293989Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.304884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.311904Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-04-04T17:26:10.312021Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-04-04T17:26:10.514155Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-04-04T17:26:10.666921Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-04-04T17:26:10.784606Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-04-04T17:26:11.712755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-04-04T17:26:11.712856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.725797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:11.827150Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:11.827410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:11.827689Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:11.827868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:11.828013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:11.828165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:11.828295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:11.828443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:11.828580Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:11.828707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:11.828846Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:11.828991Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:11.858533Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:11.858648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7256:5318] 2025-04-04T17:28:28.211511Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7256:5318] 2025-04-04T17:28:28.247215Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:28.247311Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:28.247922Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:28.248523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:28.248858Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-04-04T17:28:28.248900Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-04-04T17:28:28.248946Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-04-04T17:28:28.248992Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-04-04T17:28:28.249028Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743787708172865 2025-04-04T17:28:28.249061Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-04-04T17:28:28.249093Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-04-04T17:28:28.249167Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-04-04T17:28:28.249230Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:28.249327Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-04-04T17:28:28.249398Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-04-04T17:28:28.249470Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-04-04T17:28:28.249541Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:28.249666Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.250474Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:28.251009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:28.251056Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:28.251152Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:28.252332Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:28.252388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:28.253975Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:28.329027Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:28.329208Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:28:28.329576Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7303:5349], server id = [2:7304:5350], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:28.329657Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7303:5349], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:28.330809Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:28.330891Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:28.331012Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:28.331174Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:28.331441Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:28.333458Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7303:5349], server id = [2:7304:5350], tablet id = 72075186224037899 2025-04-04T17:28:28.333491Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:28.333904Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:28.353692Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7322:5368]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:28.353888Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:28.353932Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7322:5368], StatRequests.size() = 1 2025-04-04T17:28:28.480823Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzdiMjU3MTgtZjg5YmE5Y2MtYTUwN2UxMjItNTYzOWVkYTg=, TxId: 2025-04-04T17:28:28.480906Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzdiMjU3MTgtZjg5YmE5Y2MtYTUwN2UxMjItNTYzOWVkYTg=, TxId: 2025-04-04T17:28:28.481495Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:28.494055Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7332:5374] 2025-04-04T17:28:28.494367Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7332:5374], schemeshard id = 72075186224037897 2025-04-04T17:28:28.494546Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7256:5318], server id = [2:7333:5375], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:28.494589Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7333:5375] 2025-04-04T17:28:28.494679Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7333:5375], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
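For reference, the rows that this UPSERT produces can be inspected with a plain scan of the same system table. A hypothetical readback query, not part of the test output (column names are taken from the UPSERT in the log, the key values from path = { OwnerId: 72075186224037897 LocalId: 4 }):

    -- Hypothetical readback; not executed by this test.
    SELECT owner_id, local_path_id, stat_type, column_tag, data
    FROM `.metadata/_statistics`
    WHERE owner_id = 72075186224037897
      AND local_path_id = 4;

If, as the UPSERT pattern suggests, (owner_id, local_path_id, stat_type, column_tag) is the primary key, repeated traversal rounds overwrite the same two rows rather than appending new ones.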
2025-04-04T17:28:28.507495Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.507558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:28.595886Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7344:5378] 2025-04-04T17:28:28.596491Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2800:3219] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T17:28:28.596532Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2800:3219] 2025-04-04T17:28:28.596576Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T17:28:29.035645Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:28:29.035729Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:29.046523Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-04T17:28:29.046586Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:29.745931Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:29.746004Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:29.746047Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:28:30.916952Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:30.917157Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:30.917212Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.917763Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:30.931451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:30.931765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:30.931831Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:30.932147Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:30.947017Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:30.947237Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-04T17:28:30.947826Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7416:5421], server id = [2:7417:5422], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:30.947944Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7416:5421], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:30.949498Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:30.949602Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:30.949765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:30.949930Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:30.950258Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:30.952880Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7416:5421], server id = [2:7417:5422], tablet id = 72075186224037899 2025-04-04T17:28:30.952923Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:30.953526Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:30.976405Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzM3MTZlN2EtZjI2OTQxOTUtYTA1MjI5NzEtYzI3NTQwNDg=, TxId: 2025-04-04T17:28:30.976502Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzM3MTZlN2EtZjI2OTQxOTUtYTA1MjI5NzEtYzI3NTQwNDg=, TxId: 2025-04-04T17:28:30.977123Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:30.990783Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.990868Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2800:3219] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2025-04-04T17:28:27.841459Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c6c/r3tmp/tmpPfPOyA//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-04T17:28:27.841973Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c6c/r3tmp/tmpPfPOyA//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-04T17:28:27.844976Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# 
true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:27.846283Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, 
errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:31.113536Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c6c/r3tmp/tmpzF8amu//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-04T17:28:31.114542Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c6c/r3tmp/tmpzF8amu//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-04T17:28:31.119281Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:31.119559Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: 2025-04-04T17:26:09.980309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:09.980466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:09.980560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001918/r3tmp/tmpFIDIyL/pdisk_1.dat 2025-04-04T17:26:10.293630Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16181, node 1 2025-04-04T17:26:10.512036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:10.512087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:10.512130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:10.512616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:10.518151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:10.598699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.598868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.611835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8387 2025-04-04T17:26:11.055097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:13.615610Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:13.639333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.639424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.666283Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:13.668169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.866566Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.867127Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.867631Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.867829Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.867919Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.868116Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.868197Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.868299Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.868353Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:14.034714Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:14.034810Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:14.047619Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:14.158548Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:14.191083Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:14.191160Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:14.213618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:14.213768Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:14.213936Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:14.213982Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:14.214018Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:14.214054Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:14.214137Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:14.214175Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:14.215154Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:14.240167Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:14.240265Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:14.245084Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:14.249117Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:14.249290Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:14.256812Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:14.277979Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:14.278048Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:14.278127Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:14.288633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:14.295567Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:14.295692Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:14.465255Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:14.634458Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:14.719593Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:15.661029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.661122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.674813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:15.753951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.754121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:15.754346Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:15.754456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:15.754590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:15.754705Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:15.754830Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:15.754959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:15.755046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:15.755178Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:15.755250Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:15.755354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:15.774043Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:15.774126Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 480 2025-04-04T17:28:26.496502Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:26.496573Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:28:26.496638Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:28:27.994999Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:28:27.995077Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 202.000000s, at schemeshard: 72075186224037897 2025-04-04T17:28:27.995376Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-04T17:28:28.008965Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:29.128407Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:29.128484Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:29.128518Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:29.128567Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:28:29.128623Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:29.128991Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:29.132396Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:29.136344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6982:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:29.136415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6992:5169], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:29.136468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:29.148403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:29.207447Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6996:5172], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:29.418895Z node 2 :TX_PROXY ERROR: Actor# [2:7092:5218] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:29.441599Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7121:5233]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:29.441829Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:29.441903Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7123:5235] 2025-04-04T17:28:29.441966Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7123:5235] 2025-04-04T17:28:29.442356Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7124:5236] 2025-04-04T17:28:29.442473Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7124:5236], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:29.442544Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:29.442665Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7123:5235], server id = [2:7124:5236], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:29.442743Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:29.442825Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7121:5233], StatRequests.size() = 1 2025-04-04T17:28:29.567743Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGU1YzA5MWUtN2I4M2ZmMDgtYjE5NmEyMTQtNTliMTk4NTg=, TxId: 2025-04-04T17:28:29.567806Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGU1YzA5MWUtN2I4M2ZmMDgtYjE5NmEyMTQtNTliMTk4NTg=, TxId: 2025-04-04T17:28:29.568300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:29.583570Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:29.583633Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:29.626396Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:29.626482Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:29.706660Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7123:5235], schemeshard count = 1 2025-04-04T17:28:30.697908Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:30.698028Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:30.701177Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:30.717995Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:30.718590Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:30.718675Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T17:28:30.732396Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:30.754573Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T17:28:30.755684Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T17:28:30.755808Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T17:28:30.771007Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:28:31.957583Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:31.957723Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:31.957787Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:31.958556Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:31.972529Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:31.972987Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:31.973064Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:31.974159Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:31.988638Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:31.988925Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:31.989653Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7250:5313], server id = [2:7251:5314], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:31.989785Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7250:5313], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:31.994159Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:31.994388Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:31.994653Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:31.994884Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:31.995091Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7250:5313], server id = [2:7251:5314], tablet id = 72075186224037899 2025-04-04T17:28:31.995138Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:31.995378Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:31.998462Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:32.021038Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7271:5333]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:32.021344Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:32.021391Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7271:5333], StatRequests.size() = 1 2025-04-04T17:28:32.131036Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-04-04T17:28:30.000000Z 2025-04-04T17:28:32.131182Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTViMGI5ZTctZDk2ZjZlZTgtYTIwMGM1MGEtOTczZmI3MGI=, TxId: 2025-04-04T17:28:32.131222Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTViMGI5ZTctZDk2ZjZlZTgtYTIwMGM1MGEtOTczZmI3MGI=, TxId: 2025-04-04T17:28:32.131900Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:32.145574Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:32.145630Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2800:3219] >> TComputeScheduler::QueryLimits [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: 2025-04-04T17:26:09.186694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:09.187017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:09.187134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001942/r3tmp/tmptnD9pz/pdisk_1.dat 2025-04-04T17:26:09.584686Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23644, node 1 2025-04-04T17:26:09.820839Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:09.820893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:09.820967Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:09.821502Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:09.824054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.907702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.907862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.921667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20597 2025-04-04T17:26:10.388097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.898259Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:12.952538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.952625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:12.980030Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:12.981786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.187407Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.188033Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.188506Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.188676Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.188764Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.189013Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.189104Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.189216Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.189298Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:13.363585Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:13.363663Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:13.376290Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:13.495748Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:13.529841Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:13.529923Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:13.554797Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:13.554945Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:13.555108Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:13.555153Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:13.555189Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:13.555237Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:13.555303Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:13.555354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:13.556149Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:13.579154Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:13.579299Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:13.583891Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:13.588259Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:13.588921Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:13.592562Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:13.608069Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:13.608114Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:13.608183Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:13.619259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:13.625607Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:13.625714Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:13.774514Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:13.954297Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:14.043233Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:15.068024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.068238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:15.085931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:15.188914Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:15.189174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:15.189477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:15.189607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:15.189764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:15.189912Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:15.190052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:15.190180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:15.190357Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:15.190499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:15.190639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:15.190804Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:15.219636Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:15.219752Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... =, TxId: 2025-04-04T17:28:27.573450Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:27.598222Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:27.598306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:27.631569Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:27.631643Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:27.696358Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7118:5235], schemeshard count = 1 2025-04-04T17:28:28.709985Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:28.710062Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:28.712321Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:28.728015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:28.728546Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:28.728621Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T17:28:28.752530Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:29.939018Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:29.939080Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:29.939108Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:29.939142Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:29.939197Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:29.939822Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:29.953565Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T17:28:29.953667Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:29.954164Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:29.954230Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... 
waiting for TEvAnalyzeTableResponse (done) 2025-04-04T17:28:29.955929Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:1155;event=tablet_die; 2025-04-04T17:28:29.959436Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:29.960808Z node 2 :STATISTICS ERROR: [72075186224037894] TEvDeliveryProblem with ColumnShard=72075186224037899 2025-04-04T17:28:29.988022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:29.988257Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:30.069776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:7252:5314];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-04-04T17:28:30.271399Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7248:5312], server id = [2:7363:5409], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:30.271558Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7248:5312], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:30.281635Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:30.281755Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:30.282045Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:30.282269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:30.282502Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7248:5312], server id = [2:7363:5409], tablet id = 72075186224037899 2025-04-04T17:28:30.282540Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:30.282836Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:28:30.286142Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:30.312833Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7383:5428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:30.313178Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:30.313242Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7383:5428], StatRequests.size() = 1 2025-04-04T17:28:30.452996Z node 2 :STATISTICS DEBUG: [72075186224037894] Reset DeliveryProblem to ColumnShard=72075186224037899 2025-04-04T17:28:30.560929Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmJlMTNhNWMtNGNjZmMzMGItZGZlNmQwMWItMWUwOTNmYzE=, TxId: 2025-04-04T17:28:30.561011Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmJlMTNhNWMtNGNjZmMzMGItZGZlNmQwMWItMWUwOTNmYzE=, TxId: 2025-04-04T17:28:30.561565Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:30.575766Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:30.575841Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:30.896485Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:28:30.896570Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:31.421592Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:31.421676Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:31.421720Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:28:31.421899Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T17:28:31.422830Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T17:28:31.422949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T17:28:31.436670Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:28:32.272525Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:32.272788Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:32.294935Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:32.295088Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-04-04T17:28:32.295145Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:32.295833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:32.310099Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:32.310540Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:32.310619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:32.311094Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:28:32.324578Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:32.324856Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:28:32.325534Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7478:5484], server id = [2:7479:5485], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:32.325642Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7478:5484], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:32.327071Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:32.327198Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:32.327553Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:32.327754Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:32.327946Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7478:5484], server id = [2:7479:5485], tablet id = 72075186224037899 2025-04-04T17:28:32.327995Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:32.328244Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:32.331690Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:32.376395Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjNlZTI0YTQtMThhZjRiMGQtOTdjZTdjNTMtN2Y1ZWEwNzE=, TxId: 2025-04-04T17:28:32.376467Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjNlZTI0YTQtMThhZjRiMGQtOTdjZTdjNTMtN2Y1ZWEwNzE=, TxId: 2025-04-04T17:28:32.376974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:32.391379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:32.391453Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2798:3218] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::QueryLimits [GOOD] Test command err: 800 800 800 800 >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TPQTabletTests::Parallel_Transactions_1 >> TPartitionTests::CorrectRange_Multiple_Transactions |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TPQTest::TestDescribeBalancer >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |97.2%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> TPQTabletTests::Parallel_Transactions_2 >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TPQTabletTests::UpdateConfig_1 >> LocalPartitionReader::FeedSlowly >> LocalPartitionReader::Simple >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> LocalPartitionReader::Booting |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPQTest::TestMessageNo |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> LocalPartitionReader::Simple [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> LocalPartitionReader::Booting [GOOD] >> LocalPartitionReader::FeedSlowly [GOOD] >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> TPQTabletTests::UpdateConfig_2 >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TQuotaTracker::TestSmallMessages >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 8002 Create chunk: 0.000251s Read by index: 0.000027s Iterate: 0.000180s Size: 8256 Create chunk: 0.000248s Read by index: 0.000042s Iterate: 0.000091s Size: 8532 Create chunk: 0.000114s Read by index: 0.000049s Iterate: 0.000044s Size: 7769 Create chunk: 0.000145s Read by index: 0.000058s Iterate: 0.000052s Size: 2853 Create chunk: 0.000094s Read by index: 0.000090s Iterate: 0.000045s Size: 2419 Create chunk: 0.000112s Read by index: 0.000105s Iterate: 0.000051s Size: 2929 Create chunk: 0.000132s Read by index: 0.000093s Iterate: 0.000044s Size: 2472 Create chunk: 0.000116s Read by index: 0.000092s Iterate: 0.000052s Size: 2407 Create chunk: 0.000102s Read by index: 0.000102s Iterate: 0.000047s Size: 2061 Create chunk: 0.000107s Read by index: 0.000099s Iterate: 0.000054s >> TPQTabletTests::Partition_Send_Predicate_With_False >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2025-04-04T17:28:33.694843Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c51/r3tmp/tmp3PxtLF//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-04T17:28:33.695332Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c51/r3tmp/tmp3PxtLF//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-04T17:28:33.701414Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# 
true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:33.708307Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, 
errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TPartitionTests::CorrectRange_Rollback >> ColumnStatistics::CountMinSketchServerlessStatistics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2025-04-04T17:26:04.993160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.993387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.993469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001991/r3tmp/tmpi3Jzqg/pdisk_1.dat 2025-04-04T17:26:05.366467Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8418, node 1 2025-04-04T17:26:05.697300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697396Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.697913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.824028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22377 2025-04-04T17:26:06.326307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.693128Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.728448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.728563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.757172Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.759029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.985830Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.986478Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.986978Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.987137Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.987214Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.987485Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.987601Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.987697Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.987775Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.160034Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.160116Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.172713Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.299440Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.331751Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.331817Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.356330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.356471Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.356627Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.356673Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.356718Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.356765Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.356800Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.356836Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.357604Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.381583Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.381724Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.387203Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.392806Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.393649Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.398490Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.417249Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.417304Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.417391Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.432282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.439632Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.439784Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.620558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.743786Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.841775Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.829456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.829642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.848995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:11.937309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:11.937562Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:11.937888Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:11.938020Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:11.938132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:11.938264Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:11.938369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:11.938471Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:11.938577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:11.938676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:11.938772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:11.938857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:11.962197Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:11.962334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... p traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:25.985385Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:25.988826Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:25.993554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6982:5165], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:25.993666Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6992:5170], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:25.994186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:26.007195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:26.069099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6996:5173], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:26.290477Z node 2 :TX_PROXY ERROR: Actor# [2:7092:5219] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:26.312258Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7121:5234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:26.312511Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:26.312611Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7123:5236] 2025-04-04T17:28:26.312697Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7123:5236] 2025-04-04T17:28:26.313061Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7124:5237] 2025-04-04T17:28:26.313211Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7124:5237], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:26.313275Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:26.313447Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7123:5236], server id = [2:7124:5237], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:26.313529Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:26.313617Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7121:5234], StatRequests.size() = 1 2025-04-04T17:28:26.443785Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzRlMTg3MDEtNzM1OGE3YWUtNjYxMGY4YTgtYzQ1ODU1ODM=, TxId: 2025-04-04T17:28:26.443845Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzRlMTg3MDEtNzM1OGE3YWUtNjYxMGY4YTgtYzQ1ODU1ODM=, TxId: 2025-04-04T17:28:26.444304Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:26.458420Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:26.458492Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:26.502020Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:26.502125Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:26.577692Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7123:5236], schemeshard count = 1 2025-04-04T17:28:27.499887Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:27.499965Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:27.502912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:27.519424Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:27.519866Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:27.519928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T17:28:27.534400Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:27.556451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) ... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-04-04T17:28:27.557894Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T17:28:27.557997Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T17:28:27.558723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2801:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T17:28:27.558787Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2801:3221] 2025-04-04T17:28:27.587713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:28:27.587816Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T17:28:28.747097Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:28.747226Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:28.747289Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:28.747883Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:28.761865Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:28.762312Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:28.762391Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:28.763292Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:28.786111Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:28.786393Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:28.786940Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7250:5314], server id = [2:7251:5315], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:28.787062Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7250:5314], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:28.795262Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:28.795390Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:28.795786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:28.796013Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:28.796299Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7250:5314], server id = [2:7251:5315], tablet id = 72075186224037899 2025-04-04T17:28:28.796355Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:28.796546Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:28.799793Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:28.837805Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7271:5334]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:28.838146Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:28.838221Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7271:5334], StatRequests.size() = 1 2025-04-04T17:28:28.993524Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NmM2OTdlYjItOGFlMzllNmUtYzc2ZTBmZDAtZGJkOGJkNjQ=, TxId: 2025-04-04T17:28:28.993604Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmM2OTdlYjItOGFlMzllNmUtYzc2ZTBmZDAtZGJkOGJkNjQ=, TxId: 2025-04-04T17:28:28.994184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:29.008291Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:29.008375Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2801:3221] 2025-04-04T17:28:29.536619Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:28:29.536730Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:31.381687Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:31.381865Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:31.403579Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:33.686492Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:33.686574Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:28:34.806901Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:28:34.828594Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:28:34.828701Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout |97.3%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients |97.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestDescribeBalancer [GOOD] >> TPQTest::TestCheckACL >> TPQTabletTests::One_Tablet_For_All_Partitions >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TSourceIdTests::ExpensiveCleanup [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestPQPartialRead >> TPartitionTests::DataTxCalcPredicateOk >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> THealthCheckTest::SpecificServerless >> BasicStatistics::SimpleGlobalIndex >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> BasicStatistics::NotFullStatisticsDatashard >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2025-04-04T17:26:04.747042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for 
error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.747460Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.747656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019a3/r3tmp/tmp7vR4vi/pdisk_1.dat 2025-04-04T17:26:05.201023Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26359, node 1 2025-04-04T17:26:05.697504Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.697553Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.697600Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.698139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.703973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.823782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31541 2025-04-04T17:26:06.323775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.439876Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.474102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.474216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.502384Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.504217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.722493Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.723132Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.723757Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.723942Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.724033Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.724274Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.724386Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.724521Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.724610Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.897048Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.897167Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.910132Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.046287Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.080147Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.080225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.106862Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.107051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.107325Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.107391Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.107454Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.107521Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.107564Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.107615Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.108671Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.135176Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.135314Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.141112Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.146125Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.147010Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.151849Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:10.171185Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.171248Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.171344Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:10.185928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.193074Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.193207Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.363314Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.482918Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.581019Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.528669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.528777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:11.541778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:11.620378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:11.620581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:11.620793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:11.620871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:11.620937Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:11.621010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:11.621088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:11.621183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:11.621304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:11.621457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:11.621603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:11.621757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2846];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:11.644004Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:11.644094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 15] 2025-04-04T17:28:29.816556Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7250:5315], schemeshard id = 72075186224037897 2025-04-04T17:28:29.816703Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7206:5288], server id = [2:7251:5316], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:29.816776Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7251:5316] 2025-04-04T17:28:29.816871Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7251:5316], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-04-04T17:28:29.895357Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7260:5317] 2025-04-04T17:28:29.896183Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2801:3222] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T17:28:29.896249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2801:3222] 2025-04-04T17:28:29.896312Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T17:28:31.057512Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:31.057588Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:31.057625Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:31.057689Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:31.057739Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:31.058484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:31.075890Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:31.076301Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:31.076383Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:31.077178Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:31.090797Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:31.090980Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:31.091438Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7301:5344], server id = [2:7302:5345], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:31.091564Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7301:5344], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:31.095256Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:31.095356Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:31.095516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:31.095678Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:31.095894Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:31.098271Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7301:5344], server id = [2:7302:5345], tablet id = 72075186224037899 2025-04-04T17:28:31.098350Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:31.099025Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:31.120373Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7322:5364]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:31.120555Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:31.120595Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7322:5364], StatRequests.size() = 1 2025-04-04T17:28:31.243469Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWQ3NGFkMzEtNmU5ZjQyYzUtMTE5MzlmNzktYWFjNDA3ZDU=, TxId: 2025-04-04T17:28:31.243567Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWQ3NGFkMzEtNmU5ZjQyYzUtMTE5MzlmNzktYWFjNDA3ZDU=, TxId: 2025-04-04T17:28:31.244222Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:31.259612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:31.259688Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:31.769788Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:28:31.769868Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:32.441066Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:32.441200Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
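For reference, the save query that [TQueryBase] RunDataQuery logs above arrives as a single line in the raw output; it is reproduced here reflowed for readability. The List element types are an assumption restored from how the columns are used (a numeric column tag and a serialized sketch payload), since the raw extraction dropped the text in angle brackets:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed from column_tag usage
    DECLARE $data AS List<String>;         -- element type assumed: serialized sketch bytes
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

Two VALUES rows match the two analyzed columns of the test table; the earlier DELETE query logged by the same TQueryBase actor clears prior rows for the same (owner_id, local_path_id) before re-upserting.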
2025-04-04T17:28:32.441917Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:32.456690Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:32.457068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:32.457131Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-04-04T17:28:32.484095Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:33.642929Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:33.642995Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:33.643025Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:33.643237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T17:28:33.643606Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T17:28:33.643683Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T17:28:33.660723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:28:34.748790Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:34.748891Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:34.748950Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:28:35.843702Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:28:35.843950Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:35.855057Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:35.855206Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:35.855250Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:35.855986Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:35.870110Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:35.870470Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:35.870537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:35.870942Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:35.895980Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:35.896161Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:28:35.896544Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7485:5454], server id = [2:7486:5455], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:35.896604Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7485:5454], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:35.897596Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:35.897655Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:35.897751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:35.897889Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:35.898091Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:35.899833Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7485:5454], server id = [2:7486:5455], tablet id = 72075186224037899 2025-04-04T17:28:35.899858Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:35.900415Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:35.919687Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmQxZGQ5M2QtODhhYWJhYTAtNDY2YmQ4MmEtYjY0ZDg5MDE=, TxId: 2025-04-04T17:28:35.919756Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmQxZGQ5M2QtODhhYWJhYTAtNDY2YmQ4MmEtYjY0ZDg5MDE=, TxId: 2025-04-04T17:28:35.920225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:35.934481Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:35.934558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2801:3222] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2025-04-04T17:28:35.559062Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:35.559146Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.575968Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:28:35.576371Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:28:35.576761Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:178:2193] 2025-04-04T17:28:35.577693Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 2025-04-04T17:28:35.577751Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:178:2193] 2025-04-04T17:28:35.577804Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:28:35.578288Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-04T17:28:35.578590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.578779Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-04-04T17:28:35.578883Z node 1 :PERSQUEUE INFO: new Cookie owner1|3896d4c6-8e23e7fd-83001ddc-4b0b11ff_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 2025-04-04T17:28:35.579301Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:35.579404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2025-04-04T17:28:35.579702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2025-04-04T17:28:35.579837Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2025-04-04T17:28:35.579983Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-04-04T17:28:35.580870Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-04-04T17:28:35.581466Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:35.613107Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:35.613236Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-04-04T17:28:35.613351Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-04-04T17:28:35.613536Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.894857Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:35.926619Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2025-04-04T17:28:35.926768Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 2 2025-04-04T17:28:35.926954Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2025-04-04T17:28:35.927437Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2025-04-04T17:28:35.927524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1329 2025-04-04T17:28:35.928410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2025-04-04T17:28:35.929048Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1329 2025-04-04T17:28:35.949966Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:35.950061Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-04-04T17:28:35.950127Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2025-04-04T17:28:36.405119Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:36.405205Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.419160Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:178:2193] 2025-04-04T17:28:36.419832Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [2:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.420354Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. 
SubDomainOutOfSpace: 1 2025-04-04T17:28:36.420420Z node 2 :PERSQUEUE INFO: new Cookie owner1|18fcc330-f02cebe-d0c2875d-61e5ebb5_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST processed_blobs=41800 quoted_time=9.980000s Iteration 0 Iteration 1 ... Iteration 998 Iteration 999
>> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> BasicStatistics::Serverless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] Test command err: Size: 128 Create chunk: 0.000017s Read by index: 0.000009s Iterate: 0.000009s Size: 252 Create chunk: 0.000024s Read by index: 0.000010s Iterate: 0.000011s Size: 1887 Create chunk: 0.000054s Read by index: 0.000099s Iterate: 0.000057s Size: 1658 Create chunk: 0.000071s Read by index: 0.000079s Iterate: 0.000069s Size: 1889 Create chunk: 0.000063s Read by index: 0.000058s Iterate: 0.000023s Size: 1660 Create chunk: 0.000054s Read by index: 0.000084s Iterate: 0.000032s >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> YdbSdkSessions::TestSessionPool >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-04-04T17:28:30.152549Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk.
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c2b/r3tmp/tmpFaYLP9//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-04-04T17:28:30.158264Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:33.109590Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c2b/r3tmp/tmpFaYLP9//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-04T17:28:33.122884Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] 
StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:34.400154Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c2b/r3tmp/tmpFaYLP9//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-04T17:28:34.401860Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 
HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:35.669003Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c2b/r3tmp/tmpFaYLP9//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-04T17:28:35.672744Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-04-04T17:28:37.020178Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c2b/r3tmp/tmpFaYLP9//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-04-04T17:28:37.026767Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 
0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> YdbSdkSessions::SessionsServerLimit >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] Test command err: 2025-04-04T17:28:36.308987Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:28:36.314448Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:28:36.314822Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:28:36.314881Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:28:36.314919Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:28:36.314963Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:28:36.315016Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:36.315074Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:36.340738Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:177:2192], now have 1 active actors on pipe 2025-04-04T17:28:36.340898Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:28:36.362128Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:36.365655Z node 1 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:28:36.365822Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:36.367618Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.367787Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.367873Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:28:36.368445Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:28:36.368884Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-04T17:28:36.369868Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:28:36.369952Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:185:2198] 2025-04-04T17:28:36.370012Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:28:36.370651Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-04-04T17:28:36.370762Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-04T17:28:36.370812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:28:36.371029Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:28:36.371206Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:28:36.371490Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:187:2200] 2025-04-04T17:28:36.372197Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-04-04T17:28:36.372247Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:187:2200] 2025-04-04T17:28:36.372313Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:28:36.372672Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-04-04T17:28:36.372751Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-04-04T17:28:36.372788Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-04-04T17:28:36.372899Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.373103Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:28:36.373205Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:36.377208Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:36.377351Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:36.377686Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2207], now have 1 active actors on pipe 2025-04-04T17:28:36.378516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:201:2209], now have 1 active actors on pipe 2025-04-04T17:28:36.379498Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 4294969487 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2025-04-04T17:28:36.379647Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-04T17:28:36.379702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:28:36.379748Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-04T17:28:36.379792Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:28:36.379835Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-04T17:28:36.380109Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 230 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 176 RawX2: 4294969487 } Partitions { } 2025-04-04T17:28:36.380233Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:36.384447Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 
2025-04-04T17:28:36.384510Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:28:36.384547Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-04T17:28:36.384586Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-04T17:28:36.388618Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 176 RawX2: 4294969487 } } Step: 100 2025-04-04T17:28:36.388739Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-04-04T17:28:36.38878 ... Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [5:251:2243], now have 1 active actors on pipe 2025-04-04T17:28:38.256993Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 21474838671 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } ReceivingShards: 33333 ReceivingShards: 33334 Immediate: false } 2025-04-04T17:28:38.257025Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-04T17:28:38.257077Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-04T17:28:38.257109Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:28:38.257136Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-04T17:28:38.257169Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:28:38.257203Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-04T17:28:38.257313Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 21474838671 } Partitions { } 2025-04-04T17:28:38.257379Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.260023Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:38.260073Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:28:38.260104Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-04T17:28:38.260133Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-04T17:28:38.260329Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 176 RawX2: 21474838671 } } Step: 100 2025-04-04T17:28:38.260370Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-04-04T17:28:38.260397Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-04-04T17:28:38.260425Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-04-04T17:28:38.260456Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-04-04T17:28:38.260605Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx 
TxId: 67890 State: PLANNED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 21474838671 } Partitions { } 2025-04-04T17:28:38.260671Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.263009Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:38.263058Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNING 2025-04-04T17:28:38.263086Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-04-04T17:28:38.263115Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-04-04T17:28:38.263148Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2025-04-04T17:28:38.263177Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-04-04T17:28:38.263205Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-04-04T17:28:38.263248Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-04-04T17:28:38.263277Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-04-04T17:28:38.263337Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-04-04T17:28:38.263510Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-04-04T17:28:38.263539Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-04-04T17:28:38.263566Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-04-04T17:28:38.263590Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-04-04T17:28:38.263622Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-04-04T17:28:38.263649Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-04-04T17:28:38.263675Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-04T17:28:38.263703Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-04-04T17:28:38.263730Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-04-04T17:28:38.263845Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 21474838671 } Partitions { } 2025-04-04T17:28:38.263899Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.271368Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:38.271415Z node 5 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] Try execute txs with state CALCULATED 2025-04-04T17:28:38.271461Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-04T17:28:38.271495Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-04T17:28:38.271524Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-04T17:28:38.271555Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-04T17:28:38.271588Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 2 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-04T17:28:38.271638Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33334 2025-04-04T17:28:38.271729Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 33333 2025-04-04T17:28:38.271791Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-04T17:28:38.271833Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-04T17:28:38.271864Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-04T17:28:38.271892Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-04-04T17:28:38.271945Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-04T17:28:38.271990Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-04-04T17:28:38.272132Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:38.273769Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-04T17:28:38.273813Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33334 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.274968Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-04T17:28:38.275005Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 33333 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.276115Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:38.276279Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-04T17:28:38.276314Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-04T17:28:38.276343Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-04T17:28:38.276373Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-04T17:28:38.276402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-04-04T17:28:38.276433Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-04T17:28:38.276466Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-04T17:28:38.276495Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 
2025-04-04T17:28:38.276525Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-04T17:28:38.276554Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-04T17:28:38.276715Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 138 MaxStep: 30138 PredicateRecipients: 33334 PredicateRecipients: 33333 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 21474838671 } Partitions { } 2025-04-04T17:28:38.276778Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.280505Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:38.280553Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-04T17:28:38.280583Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-04T17:28:38.280613Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-04-04T17:28:38.280644Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:28:38.280673Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-04T17:28:38.280703Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:28:38.280739Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 2025-04-04T17:28:38.280763Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:28:38.280788Z node 5 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/2 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] Test command err: 2025-04-04T17:26:07.590253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:07.590497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:07.590580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00194e/r3tmp/tmph5LTEG/pdisk_1.dat 2025-04-04T17:26:07.987369Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18144, node 1 2025-04-04T17:26:08.232841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:08.232906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:08.232953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:08.233769Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:08.236734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:08.328323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:08.328466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:08.345054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8272 2025-04-04T17:26:08.839987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:11.685268Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:11.717278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:11.717373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:11.745587Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:11.747708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:11.971513Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.972125Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.972598Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.972743Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.972816Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.973040Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.973136Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.973235Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:11.973307Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:12.148325Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:12.148487Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:12.161499Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:12.298722Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.330434Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:12.330497Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:12.356554Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:12.356723Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:12.356928Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:12.356998Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:12.357046Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:12.357140Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:12.357198Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:12.357252Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:12.358281Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:12.384875Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:12.384988Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:12.389197Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:12.393047Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:12.393590Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:12.397019Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:12.411840Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:12.411883Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:12.411935Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:12.423825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:12.429031Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:12.429155Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:12.578357Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:12.697286Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:12.805147Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:13.725137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.725268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.741596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:13.997785Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:13.998079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:13.998398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:13.998533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:13.998627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:13.998699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:13.998760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:13.998829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:13.998898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:13.998965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:13.999049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:13.999136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2389:2885];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:14.031835Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:14.031932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2381:2882];tablet_id=72075186224037899;process=T ... ATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8783:6672] 2025-04-04T17:28:33.784690Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8783:6672], schemeshard id = 72075186224037897 2025-04-04T17:28:33.798940Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:33.799021Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:28:33.887579Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8787:6675] 2025-04-04T17:28:33.888428Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4114:3320] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-04-04T17:28:33.888513Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:4114:3320] 2025-04-04T17:28:33.888586Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-04-04T17:28:34.323232Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:28:34.323339Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:34.334330Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-04-04T17:28:34.334401Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:28:35.027461Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:35.027564Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-04-04T17:28:35.027606Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:28:36.187903Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:36.188064Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:28:36.188132Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:36.188931Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:36.205493Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:36.206049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:36.206141Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:36.206725Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:36.224015Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:36.224210Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-04-04T17:28:36.225179Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8864:6716], server id = [2:8869:6721], tablet id = 72075186224037899, status = OK 2025-04-04T17:28:36.225341Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8864:6716], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.225581Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8865:6717], server id = [2:8870:6722], tablet id = 72075186224037900, status = OK 2025-04-04T17:28:36.225669Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8865:6717], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.228632Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8866:6718], server id = [2:8872:6724], tablet id = 72075186224037901, status = OK 2025-04-04T17:28:36.228721Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8866:6718], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.229216Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8867:6719], server id = [2:8871:6723], tablet id = 72075186224037902, status = OK 2025-04-04T17:28:36.229286Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8867:6719], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.229434Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8868:6720], server id = [2:8874:6726], tablet id = 72075186224037903, status = OK 2025-04-04T17:28:36.229483Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8868:6720], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.240058Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:28:36.241237Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:28:36.241879Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8864:6716], server id = [2:8869:6721], tablet id = 72075186224037899 2025-04-04T17:28:36.241927Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.242633Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8865:6717], server id = [2:8870:6722], tablet id = 72075186224037900 2025-04-04T17:28:36.242674Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.243124Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:28:36.243522Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:28:36.243832Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:28:36.244121Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8867:6719], server id = [2:8871:6723], tablet id = 72075186224037902 2025-04-04T17:28:36.244149Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.244430Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8878:6730], server id = [2:8881:6733], tablet id = 72075186224037904, status = OK 2025-04-04T17:28:36.244522Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8878:6730], 
path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.244662Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8880:6732], server id = [2:8882:6734], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:36.244709Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8880:6732], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.245520Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8866:6718], server id = [2:8872:6724], tablet id = 72075186224037901 2025-04-04T17:28:36.245556Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.246197Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8868:6720], server id = [2:8874:6726], tablet id = 72075186224037903 2025-04-04T17:28:36.246226Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.248187Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8883:6735], server id = [2:8886:6738], tablet id = 72075186224037906, status = OK 2025-04-04T17:28:36.248281Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8883:6735], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.248620Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8884:6736], server id = [2:8887:6739], tablet id = 72075186224037907, status = OK 2025-04-04T17:28:36.248679Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8884:6736], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.248875Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8885:6737], server id = [2:8888:6740], tablet id = 72075186224037908, status = OK 2025-04-04T17:28:36.248925Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8885:6737], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:28:36.249895Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:28:36.250632Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:36.251100Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8878:6730], server id = [2:8881:6733], tablet id = 72075186224037904 2025-04-04T17:28:36.251132Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.251459Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8880:6732], server id = [2:8882:6734], tablet id = 72075186224037905 2025-04-04T17:28:36.251487Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.251662Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:28:36.251958Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:28:36.252187Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8884:6736], server id = [2:8887:6739], tablet id = 72075186224037907 2025-04-04T17:28:36.252214Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.252277Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:28:36.252329Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:36.252594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:36.252852Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 
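The aggregation round above fans TEvStatisticsRequest out to every column shard tablet and collects TEvStatisticsResponse before persisting the result. The statistic was requested earlier in the trace as Types: TYPE_COUNT_MIN_SKETCH; for background, a toy count-min sketch is shown below. It is illustrative only — the table sizes, hash mixing, and types are assumptions, not YDB's implementation.

// Toy count-min sketch, as background for the TYPE_COUNT_MIN_SKETCH
// statistic requested in the trace. Illustrative only.
#include <algorithm>
#include <array>
#include <cstdint>
#include <cstdio>
#include <functional>

class TCountMinSketch {
    static constexpr size_t Rows = 4, Cols = 256;  // assumed sizes
    std::array<std::array<uint32_t, Cols>, Rows> Counters{};

    static size_t Cell(uint64_t value, size_t row) {
        // Mix the row index into the hash so each row behaves like an
        // independent hash function.
        return std::hash<uint64_t>{}(value ^ (0x9E3779B97F4A7C15ull * (row + 1))) % Cols;
    }

public:
    void Add(uint64_t value) {
        for (size_t r = 0; r < Rows; ++r)
            ++Counters[r][Cell(value, r)];
    }
    // Estimated frequency: the minimum over rows; can overestimate
    // (hash collisions) but never underestimates.
    uint32_t Estimate(uint64_t value) const {
        uint32_t best = UINT32_MAX;
        for (size_t r = 0; r < Rows; ++r)
            best = std::min(best, Counters[r][Cell(value, r)]);
        return best;
    }
};

int main() {
    TCountMinSketch sketch;
    for (int i = 0; i < 5; ++i) sketch.Add(42);
    sketch.Add(7);
    std::printf("estimate(42)=%u estimate(7)=%u\n",
                sketch.Estimate(42), sketch.Estimate(7));
}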
2025-04-04T17:28:36.253165Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:28:36.256098Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8883:6735], server id = [2:8886:6738], tablet id = 72075186224037906 2025-04-04T17:28:36.256136Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.256619Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8885:6737], server id = [2:8888:6740], tablet id = 72075186224037908 2025-04-04T17:28:36.256649Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:36.256936Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:36.281894Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTY3NzlkODAtM2YzYjM0OWUtNTY4NTVlMDUtNTBjMzliYmI=, TxId: 2025-04-04T17:28:36.281987Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTY3NzlkODAtM2YzYjM0OWUtNTY4NTVlMDUtNTBjMzliYmI=, TxId: 2025-04-04T17:28:36.282652Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:36.298403Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:28:36.298485Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:4114:3320] >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] Test command err: 2025-04-04T17:28:35.566568Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:28:35.571226Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:28:35.571563Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:28:35.571623Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:28:35.571664Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:28:35.571701Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:28:35.571758Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:35.571809Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:35.602905Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:205:2211], now have 1 active actors on pipe 2025-04-04T17:28:35.603049Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:28:35.622741Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:35.625786Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-04-04T17:28:35.625908Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:35.626988Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: 
CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.627187Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:28:35.627620Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:28:35.628045Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:213:2217] 2025-04-04T17:28:35.629076Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-04-04T17:28:35.629148Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:213:2217] 2025-04-04T17:28:35.629220Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:28:35.630079Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:28:35.630204Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-04-04T17:28:35.630295Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:28:35.630347Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-04-04T17:28:35.630385Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-04-04T17:28:35.630621Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:28:35.630660Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.630834Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:35.633911Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:35.634365Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:220:2222], now have 1 active actors on pipe 2025-04-04T17:28:35.635049Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:223:2224], now have 1 active actors on pipe 2025-04-04T17:28:35.635927Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 4294969487 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-04T17:28:35.636000Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-04T17:28:35.636076Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-04-04T17:28:35.636118Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:28:35.636158Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-04-04T17:28:35.636201Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:28:35.636245Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-04-04T17:28:35.636405Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 4294969487 } Partitions { } 2025-04-04T17:28:35.636544Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:35.640573Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:35.640685Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:28:35.640746Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-04-04T17:28:35.640799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-04-04T17:28:35.641134Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 176 RawX2: 4294969487 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-04-04T17:28:35.641190Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-04-04T17:28:35.641290Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-04-04T17:28:35.641339Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-04-04T17:28:35.641376Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State UNKNOWN 2025-04-04T17:28:35.641414Z node 1 :PERSQUEUE DEBUG: 
[PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-04-04T17:28:35.641475Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-04-04T17:28:35.641639Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 136 MaxStep: 30136 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 176 RawX2: 4294969487 } Partitions { } 2025-04-04T17:28:35.641821Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:35.645637Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:35.645701Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-04-04T17:28:35.645745Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREPARING 2025-04-04T17:28:35.645782Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARED 2025-04-04T17:28:35.649702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67891 AckTo { RawX1: 176 RawX2: 4294969487 } } Step: 100 2025-04-04T17:28:35.649788Z node 1 :P ... 90] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 18446744073709551615 PredicateRecipients: 22222 Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 176 RawX2: 25769805967 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-04T17:28:38.351569Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.355211Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:38.355276Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-04-04T17:28:38.355351Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-04-04T17:28:38.355400Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-04-04T17:28:38.355463Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-04-04T17:28:38.355512Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-04-04T17:28:38.355592Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send 
TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-04-04T17:28:38.355638Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-04-04T17:28:38.355778Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-04-04T17:28:38.355901Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-04-04T17:28:38.356076Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-04-04T17:28:38.356113Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 2 2025-04-04T17:28:38.356270Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-04T17:28:38.356442Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2025-04-04T17:28:38.356485Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-04-04T17:28:38.356526Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-04-04T17:28:38.356947Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-04-04T17:28:38.357269Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T17:28:38.357409Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:38.359833Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-04-04T17:28:38.359898Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.363740Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:38.364020Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-04-04T17:28:38.364080Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-04T17:28:38.364124Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-04T17:28:38.364171Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-04T17:28:38.364215Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2025-04-04T17:28:38.364256Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-04-04T17:28:38.365060Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:28:38.365158Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2025-04-04T17:28:38.365192Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-04-04T17:28:38.365222Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-04-04T17:28:38.365253Z node 6 :PERSQUEUE DEBUG: [PQ: 
72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-04-04T17:28:38.365280Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 2025-04-04T17:28:38.365324Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-04-04T17:28:38.365370Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-04-04T17:28:38.365622Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-04-04T17:28:38.365704Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:38.365786Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-04-04T17:28:38.365834Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-04-04T17:28:38.365878Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-04-04T17:28:38.366161Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 18446744073709551615 PredicateRecipients: 22222 Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 176 RawX2: 25769805967 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-04-04T17:28:38.366433Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.370023Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:28:38.370073Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-04-04T17:28:38.370109Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-04-04T17:28:38.370143Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] 
TxId 67890 State EXECUTED FrontTxId 67890 2025-04-04T17:28:38.370180Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:28:38.370224Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-04-04T17:28:38.370289Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:28:38.370334Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-04-04T17:28:38.370365Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:28:38.370401Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:38.375028Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:364:2336], now have 1 active actors on pipe 2025-04-04T17:28:38.375208Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-04-04T17:28:38.375259Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvReadSetAck to 22222 2025-04-04T17:28:38.375369Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSetAck Step: 100 TxId: 67890 TabletSource: 72057594037927937 TabletDest: 22222 TabletConsumer: 22222 Flags: 0 Seqno: 0 2025-04-04T17:28:38.375408Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSetAck 2025-04-04T17:28:38.375474Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicate acks 1/1 2025-04-04T17:28:38.375529Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2025-04-04T17:28:38.375570Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2025-04-04T17:28:38.375617Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 1/1 2025-04-04T17:28:38.375650Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:28:38.375685Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 1/1 2025-04-04T17:28:38.375723Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-04-04T17:28:38.375771Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2025-04-04T17:28:38.375817Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2025-04-04T17:28:38.375895Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:28:38.375956Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes >> THealthCheckTest::Basic >> BasicUsage::FallbackToSingleDb >> TPartitionTests::DifferentWriteTxBatchingOptions >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> BasicStatistics::TwoServerlessTwoSharedDbs |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables |97.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> THealthCheckTest::OneIssueListing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-04-04T17:28:37.161126Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/000c17/r3tmp/tmpBQuye2//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-04-04T17:28:37.203487Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> YdbSdkSessions::TestMultipleSessions >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2025-04-04T17:23:27.548184Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:23:27.708936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:23:27.735303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:23:27.735593Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:23:27.743647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:23:27.743925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:23:27.744146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:23:27.744260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:23:27.744362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:23:27.744480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:23:27.744599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:23:27.744728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:23:27.744867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:23:27.744990Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.745104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:23:27.745255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:23:27.773948Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:23:27.774101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:23:27.774171Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:23:27.774348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.774500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:23:27.774567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:23:27.774624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:23:27.774722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:23:27.774782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:23:27.774820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:23:27.774863Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:23:27.775031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:23:27.775107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:23:27.775144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:23:27.775171Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:23:27.775274Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:23:27.775327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:23:27.775370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:23:27.775396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:23:27.775456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:23:27.775493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:23:27.775524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:23:27.775564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:23:27.775598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:23:27.775627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:23:27.776043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-04-04T17:23:27.776132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-04-04T17:23:27.776205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-04T17:23:27.776272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-04-04T17:23:27.776444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:23:27.776500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:23:27.776534Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:23:27.776711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-04-04T17:23:27.776756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.776787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:23:27.776936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:23:27.776995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:23:27.777034Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:23:27.777220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:23:27.777260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:23:27.777288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:23:27.777415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:23:27.777473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:23:27.777517Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
37184:2:99:255:108:2776:0]; 2025-04-04T17:28:34.463361Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:109:2792:0]; 2025-04-04T17:28:34.463410Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:110:2776:0]; 2025-04-04T17:28:34.463493Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:111:2784:0]; 2025-04-04T17:28:34.463548Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:112:2760:0]; 2025-04-04T17:28:34.463589Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:113:2776:0]; 2025-04-04T17:28:34.463648Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:114:9568:0]; 2025-04-04T17:28:34.463698Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:115:2848:0]; 2025-04-04T17:28:34.463748Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:116:2840:0]; 2025-04-04T17:28:34.463805Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:117:2856:0]; 2025-04-04T17:28:34.463861Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:118:2856:0]; 2025-04-04T17:28:34.463901Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:119:2848:0]; 2025-04-04T17:28:34.463964Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:120:2840:0]; 2025-04-04T17:28:34.464020Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:121:2856:0]; 2025-04-04T17:28:34.464065Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:122:2848:0]; 2025-04-04T17:28:34.464105Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:123:2848:0]; 2025-04-04T17:28:34.464152Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:124:2856:0]; 2025-04-04T17:28:34.464212Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:125:2856:0]; 2025-04-04T17:28:34.464270Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:126:2848:0]; 2025-04-04T17:28:34.464327Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:127:2856:0]; 2025-04-04T17:28:34.464391Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:128:2832:0]; 2025-04-04T17:28:34.464447Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:129:2840:0]; 2025-04-04T17:28:34.464508Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:130:2848:0]; 2025-04-04T17:28:34.464564Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:131:2840:0]; 2025-04-04T17:28:34.464616Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:132:2840:0]; 2025-04-04T17:28:34.464672Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:133:2848:0]; 2025-04-04T17:28:34.464713Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:134:2848:0]; 2025-04-04T17:28:34.464756Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:135:2832:0]; 2025-04-04T17:28:34.464800Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:136:2848:0]; 2025-04-04T17:28:34.464839Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:137:2848:0]; 2025-04-04T17:28:34.464880Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:138:2840:0]; 2025-04-04T17:28:34.464921Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:139:2832:0]; 2025-04-04T17:28:34.464965Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:140:2840:0]; 2025-04-04T17:28:34.465021Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:141:2848:0]; 2025-04-04T17:28:34.465072Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:142:2848:0]; 2025-04-04T17:28:34.465148Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:143:2776:0]; 2025-04-04T17:28:34.465228Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:144:2792:0]; 2025-04-04T17:28:34.465304Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:145:2784:0]; 2025-04-04T17:28:34.465375Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:146:2784:0]; 2025-04-04T17:28:34.465432Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:147:2776:0]; 2025-04-04T17:28:34.465490Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:148:2792:0]; 2025-04-04T17:28:34.465549Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:149:2776:0]; 2025-04-04T17:28:34.465599Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:150:2768:0]; 2025-04-04T17:28:34.465644Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:151:2776:0]; 2025-04-04T17:28:34.465683Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:152:9576:0]; 2025-04-04T17:28:34.469815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=2799958;external_task_id=3c57f134-117a11f0-99582244-8b39eae2;type=CS::INDEXATION;priority=0;; 2025-04-04T17:28:34.471520Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=100;task=cpu=0;mem=3571882;external_task_id=3c57bac0-117a11f0-850dab42-614ff1dc;type=CS::INDEXATION;priority=0;; 2025-04-04T17:28:34.471563Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=3c57bac0-117a11f0-850dab42-614ff1dc;mem=3571882;cpu=0; 2025-04-04T17:28:34.471594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=3c57bac0-117a11f0-850dab42-614ff1dc;task_id=100;mem=3571882;cpu=0; 
2025-04-04T17:28:34.473119Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3c57bac0-117a11f0-850dab42-614ff1dc;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=3c57bac0-117a11f0-850dab42-614ff1dc; 2025-04-04T17:28:36.265578Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3c57bac0-117a11f0-850dab42-614ff1dc;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-04T17:28:36.267522Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-04-04T17:28:36.277833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=101;task=cpu=0;mem=2799958;external_task_id=3c57f134-117a11f0-99582244-8b39eae2;type=CS::INDEXATION;priority=0;; 2025-04-04T17:28:36.277907Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=3c57f134-117a11f0-99582244-8b39eae2;mem=2799958;cpu=0; 2025-04-04T17:28:36.277947Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:137:2169];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=3c57f134-117a11f0-99582244-8b39eae2;task_id=101;mem=2799958;cpu=0; 2025-04-04T17:28:36.280112Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3c57f134-117a11f0-99582244-8b39eae2;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=3c57f134-117a11f0-99582244-8b39eae2; 2025-04-04T17:28:37.677074Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3c57f134-117a11f0-99582244-8b39eae2;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-04-04T17:28:37.681820Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:137:2169];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-04-04T17:28:38.201523Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-04-04T17:28:38.202171Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[306] (CS::GENERAL) apply at tablet 9437184 2025-04-04T17:28:38.276949Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:125 Blob count: 422 2025-04-04T17:28:38.280734Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3445356;raw_bytes=5239242;count=3;records=53332} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5183316;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=215061504;raw_bytes=326598142;count=144;records=3322060} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 >> YdbSdkSessions::MultiThreadSync |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TopicAutoscaling::ControlPlane_CreateAlterDescribe |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TPQTest::TestCheckACL [GOOD] >> TPQTest::TestAlreadyWritten >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> HttpRequest::Probe >> YdbSdkSessions::TestActiveSessionCountAfterBadSession |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> TPartitionTests::DataTxCalcPredicateError >> THealthCheckTest::StaticGroupIssue |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-04-04T17:27:56.481507Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511040878398260:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:56.481558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:56.505592Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511040434433436:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:56.505862Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:56.644116Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001c86/r3tmp/tmpqHzPKN/pdisk_1.dat 2025-04-04T17:27:56.660326Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:56.858904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:56.859084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:56.863115Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:56.864019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:56.864814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:56.864902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:56.868034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:56.895553Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25677, node 1 2025-04-04T17:27:56.914630Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:56.914966Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:56.944065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/001c86/r3tmp/yandexMzHO86.tmp 2025-04-04T17:27:56.944091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/001c86/r3tmp/yandexMzHO86.tmp 2025-04-04T17:27:56.944246Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/001c86/r3tmp/yandexMzHO86.tmp 2025-04-04T17:27:56.944356Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:56.984268Z INFO: TTestServer started on Port 5298 GrpcPort 25677 TClient is connected to server localhost:5298 PQClient connected to localhost:25677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:57.195364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-04-04T17:27:57.235148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:27:59.682993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511053763301322:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:59.683442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511053763301300:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:59.683540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:59.686969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:27:59.704215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511053763301328:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:27:59.886985Z node 1 :TX_PROXY ERROR: Actor# [1:7489511053763301415:2812] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:59.912899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:59.921654Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511053763301432:2353], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:59.922365Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjEzZjYyMGYtZmY3YjBjOGYtZjBlNzFiMmQtOGVlM2UzZg==, ActorId: [1:7489511053763301296:2341], ActorState: ExecuteState, TraceId: 01jr0xgqx74jk9tqfthtqzbh7s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:59.924482Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:00.000292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:00.110375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:28:00.369356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0xgreg1z46z0bpc6fsvcrj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTU4OWEyZWYtZWM0Yjg0ZjMtNGYyNzk3NGYtYTdmZTgwY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489511058058269086:3091] 2025-04-04T17:28:01.481486Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511040878398260:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:01.481614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:28:01.505406Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489511040434433436:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:01.505486Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList.
Ok 2025-04-04T17:28:06.312767Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489511040878398546:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:06.313035Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489511040878398546:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-04-04T17:28:06.313131Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489511040878398546:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489511045173366300:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787677254 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T17:28:06.313219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489511040878398546:2151], cacheItem# { Subscriber: { Subscriber: [1:7489511045173366300:2451] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787677254 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true ... 
82269Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [7:7489511185886805133:2126], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 } 2025-04-04T17:28:40.682312Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [7:7489511185886805133:2126], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [7:7489511203066675513:2936] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787714375 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-04-04T17:28:40.682362Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489511185886805133:2126], cacheItem# { Subscriber: { Subscriber: [7:7489511203066675513:2936] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787714375 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Versions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 16 IsSync: true Partial: 0 } 2025-04-04T17:28:40.682616Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489511228836480743:4017], recipient# [7:7489511228836480739:2529], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:40.682758Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489511228836480744:4018], recipient# [7:7489511228836480740:2530], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:40.682928Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489511185886805133:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:40.683000Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489511185886805133:2126], cacheItem# { Subscriber: { Subscriber: [7:7489511203066675333:2807] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787714165 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:40.683071Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489511185886805133:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:40.683114Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489511185886805133:2126], cacheItem# { Subscriber: { Subscriber: [7:7489511203066675513:2936] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743787714375 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:40.683552Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489511228836480747:4019], recipient# [7:7489511185886805116:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, 
LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:40.683639Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489511228836480748:4020], recipient# [7:7489511185886805116:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:40.683685Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489511185886805133:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:40.683742Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489511185886805133:2126], cacheItem# { Subscriber: { Subscriber: [7:7489511185886805627:2445] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 27 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743787710903 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:40.683836Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489511228836480749:4021], recipient# [7:7489511185886805116:2290], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-04-04T17:28:41.010496Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7489511185886805133:2126], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T17:28:41.010631Z node 7 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7489511185886805133:2126], cacheItem# { Subscriber: { Subscriber: [7:7489511198771707916:2745] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-04-04T17:28:41.010717Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7489511233131448056:4030], recipient# [7:7489511233131448055:2534], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> YdbSdkSessions::SessionsServerLimit [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool |97.4%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> YdbSdkSessions::TestSessionPool [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> BlobDepotWithTestShard::PlainGroup [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSessionPool [GOOD] Test command err: 2025-04-04T17:28:39.071155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511228393690162:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:39.071227Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001687/r3tmp/tmpBbKsT9/pdisk_1.dat 2025-04-04T17:28:39.501008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:39.520424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:39.526368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:39.543921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63769, node 1 2025-04-04T17:28:39.796555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:39.796639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:39.796651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-04-04T17:28:39.796779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:40.382921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:42.135045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511241278593127:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:42.135097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511241278593115:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:42.135160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:42.142294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:28:42.170770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511241278593129:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:28:42.243942Z node 1 :TX_PROXY ERROR: Actor# [1:7489511241278593204:2688] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_testshard/unittest >> BlobDepotWithTestShard::PlainGroup [GOOD] |97.4%| [TM] {RESULT} ydb/core/blobstorage/ut_testshard/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2025-04-04T17:28:39.926272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511226437429254:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:39.926352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167c/r3tmp/tmpmfQiB0/pdisk_1.dat 2025-04-04T17:28:40.304902Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:40.341764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:40.342104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4383, node 1 2025-04-04T17:28:40.353438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:40.456620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:40.456642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:40.456650Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:40.457164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-04-04T17:28:40.727373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:42.888693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511239322332165:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:42.888800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.148169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:28:43.370283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511243617299646:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.370391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.370653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511243617299651:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.374348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:28:43.391982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511243617299653:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:28:43.457213Z node 1 :TX_PROXY ERROR: Actor# [1:7489511243617299733:2808] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:43.550189Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0xj2k99d613en88yrt3xwd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjViNDgxOWQtMzQyZTZmN2QtYzJlYjg3NmMtYzZiYWVmNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:28:43.626437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jr0xj2v96nhyayfewfktekg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2ExMmVhOGItNDI5NmQxMzktMTM4Mzg4NzYtMWQ1NzNlZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] Test command err: 2025-04-04T17:28:41.137230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511235818630970:2148];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:41.138791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166e/r3tmp/tmp6Jeqzz/pdisk_1.dat 2025-04-04T17:28:41.480779Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28581, node 1 2025-04-04T17:28:41.499296Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:41.506967Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:41.532548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:41.532636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:41.535710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:41.538786Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:41.538806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:41.538814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:41.538951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8602 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:41.828941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:43.937238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511244408566512:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.937326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511244408566523:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.937387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.940782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511244408566557:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.940784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511244408566555:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.940843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.941730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:28:43.944797Z node 1 :TX_PROXY ERROR: Actor# [1:7489511244408566561:2647] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:28:43.982025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511244408566560:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:28:43.982083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511244408566526:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:28:44.059056Z node 1 :TX_PROXY ERROR: Actor# [1:7489511248703533975:2724] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:44.084866Z node 1 :TX_PROXY ERROR: Actor# [1:7489511248703533992:2733] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateError [GOOD] Test command err: 2025-04-04T17:28:35.654736Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:35.654806Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.672014Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.673802Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:28:35.000000Z 2025-04-04T17:28:35.673854Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\323\343\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\270\323\343\216\3402" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:36.427904Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:36.427975Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.438987Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.440645Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:28:36.000000Z 2025-04-04T17:28:36.440709Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\333\343\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\333\343\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\333\343\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\240\333\343\216\3402" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2025-04-04T17:28:37.244025Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:37.244073Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to 
NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:37.263879Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:37.265024Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:28:37.000000Z 2025-04-04T17:28:37.265073Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\343\343\216\3402" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST 2025-04-04T17:28:38.048218Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:38.048290Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:28:38.070635Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:175:2190] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:38.072339Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:28:38.000000Z 2025-04-04T17:28:38.072397Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:175:2190] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::T ... 
TOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait second predicate result Got batch complete: 1 Send disk status response with cookie: 0 2025-04-04T17:28:41.685883Z node 4 :PERSQUEUE INFO: new Cookie owner1|6bffaae9-bdee939a-68b8a614-fdd734d8_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Got batch complete: 1 Send disk status response with cookie: 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Wait third predicate result Create distr tx with id = 4 and act no: 5 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured 
TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 2025-04-04T17:28:43.289355Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:43.289424Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:28:43.301053Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:43.302794Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:28:43.000000Z 2025-04-04T17:28:43.302857Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured 
TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:28:44.589422Z node 5 :PERSQUEUE INFO: new Cookie SourceId|e9c60836-e2d1aece-a18db503-124d4a5c_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId Got batch complete: 1 Wait write response Wait kv request Got batch complete: 1 Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Wait second predicate result Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes |97.5%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
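The TPartitionTests::DataTxCalcPredicateError trace above shows a partition computing a predicate for a distributed data transaction — "Wait second predicate result", "Create distr tx with id = 4 and act no: 5", "Got batch complete: 1" — before voting on the commit. As a rough, hypothetical illustration of that predicate-vote pattern (not YDB's actual implementation; every name below is invented):

#include <cstdint>
#include <optional>

struct TTxPredicate {
    uint64_t ExpectedBegin; // offset the tx read its snapshot at
    uint64_t ExpectedEnd;   // offset the tx wants to advance to
};

enum class EVote { Commit, Abort, Undecided };

struct TPartitionModel {
    uint64_t EndOffset = 0;            // current end of the partition
    std::optional<uint64_t> PendingTx; // a tx already holding the range

    // Vote only when the range is free; a held range maps to the
    // "Wait ... predicate result" retries in the trace.
    EVote CalcPredicate(const TTxPredicate& p) const {
        if (PendingTx) {
            return EVote::Undecided;
        }
        if (p.ExpectedBegin != EndOffset) {
            return EVote::Abort; // stale snapshot: predicate fails
        }
        return EVote::Commit;
    }
};

int main() {
    TPartitionModel part;
    part.EndOffset = 3;
    return part.CalcPredicate({3, 6}) == EVote::Commit ? 0 : 1;
}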
results_accumulator.log} >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> TPQTest::TestPartitionedBlobFails [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] >> TPQTest::TestReadSessions >> BasicUsage::SimpleHandlers [GOOD] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> TGRpcRateLimiterTest::CreateResource ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] Test command err: 2025-04-04T17:28:39.211834Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511227867764458:2236];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:39.216108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167e/r3tmp/tmpj5rERY/pdisk_1.dat 2025-04-04T17:28:39.625935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:39.638707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:39.638828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:39.646965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10811, node 1 2025-04-04T17:28:39.798170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:39.798199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:39.798206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:39.798327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:40.358406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:28:42.136299Z node 1 :KQP_PROXY WARN: TraceId: "01jr0xj1crc2jtqyy95rnq8g3p", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:42.169378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511240752667232:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:42.169509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511240752667223:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:42.169609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:42.175341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:28:42.205537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511240752667237:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:28:42.287730Z node 1 :TX_PROXY ERROR: Actor# [1:7489511240752667315:2700] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:42.737104Z node 1 :KQP_PROXY WARN: TraceId: "01jr0xj1zgc3788ntdp4t71y1p", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:44.200943Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489511249371732799:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:44.201008Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00167e/r3tmp/tmpFPARF7/pdisk_1.dat 2025-04-04T17:28:44.369320Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:44.416814Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:44.416978Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:44.420225Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20340, node 4 2025-04-04T17:28:44.483763Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:44.483796Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:44.483803Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:44.483968Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:44.752242Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
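The KQP_PROXY warnings around this point ("Active sessions limit exceeded, maximum allowed: 2") are the server-side limit this test provokes. The C++ SDK exposes a related client-side cap in the table client's session pool; a minimal sketch under stated assumptions (endpoint and database are placeholders, and the cap of 2 merely mirrors the test's server setting — it is not the same knob):

#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    TDriver driver(TDriverConfig()
        .SetEndpoint("localhost:2136")  // placeholder
        .SetDatabase("/Root"));

    // Bound the client-side pool at two active sessions.
    TTableClient client(driver, TClientSettings()
        .SessionPoolSettings(TSessionPoolSettings().MaxActiveSessions(2)));

    auto s1 = client.GetSession().GetValueSync();
    auto s2 = client.GetSession().GetValueSync();
    // With both slots held, a third request queues in the pool and, after
    // the pool's client timeout, fails with CLIENT_RESOURCE_EXHAUSTED
    // instead of reaching the server at all.
    auto s3 = client.GetSession().GetValueSync();
    bool capped = !s3.IsSuccess();

    driver.Stop(true);
    return capped ? 0 : 1;
}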
2025-04-04T17:28:47.300180Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6e305jjxkb160kvkxv6", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.307719Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6ebd55edam57954xkax", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.315026Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6ej7n3qnmfc486sktp9", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.322333Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6es7k6rddbwg98dqdsw", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.331174Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6f21kefvr2qdzgr61k6", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.339070Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6fa3r7nhvewd2kzwbb0", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.356901Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511262256635710:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:47.356955Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:47.357110Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511262256635722:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:47.360294Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:28:47.386825Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489511262256635724:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:28:47.479720Z node 4 :TX_PROXY ERROR: Actor# [4:7489511262256635809:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:47.539995Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6nk7fa9nwe4dwh1zyn3", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.545985Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6nse0sxran2qg6pnwfk", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.551803Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6nz02m6qwv4033m7k56", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.556911Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6p448wkehypywyewmc3", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.562446Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6padprq5p57eksqz47y", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.568429Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6pg8b9mfck1c7xjajw6", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.576367Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6pr851cr60z4tn74y6j", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.582201Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6pxbssm3ptpxyv1f49m", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.587939Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6q305pwh4qgcy2dn6mv", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.593290Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6q99ynrrtwn8fzqefgy", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.600051Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6qffdwntpxd9cs4x3sj", Active sessions limit exceeded, maximum allowed: 2 2025-04-04T17:28:47.606052Z node 4 :KQP_PROXY WARN: TraceId: "01jr0xj6qnbf9w39sbgetygd36", Active sessions limit exceeded, maximum allowed: 2 >> DataShardBackgroundCompaction::ShouldCompact >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> THealthCheckTest::StorageLimit87 |97.5%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-04-04T17:27:46.931902Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1743787666931876 2025-04-04T17:27:47.245399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511004862228460:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:47.245592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:47.281936Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511005119335759:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:47.281986Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:47.395309Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:27:47.405193Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000bb9/r3tmp/tmpdr9gL6/pdisk_1.dat 2025-04-04T17:27:47.601096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:47.601210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:47.604896Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:47.605952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:47.646697Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24828, node 1 2025-04-04T17:27:47.658907Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:47.658994Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:27:47.659112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:47.659181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:47.669045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:47.763348Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000bb9/r3tmp/yandexQZ3k2S.tmp 2025-04-04T17:27:47.763408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000bb9/r3tmp/yandexQZ3k2S.tmp 2025-04-04T17:27:47.763667Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000bb9/r3tmp/yandexQZ3k2S.tmp 2025-04-04T17:27:47.763813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:47.956287Z INFO: TTestServer started on Port 21952 GrpcPort 24828 TClient is connected to server localhost:21952 PQClient connected to localhost:24828 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:48.269076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:27:49.771369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511013452164042:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.771465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.776939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511013452164070:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.787240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-04-04T17:27:49.788075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511013452164101:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.788173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.788149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489511013709270672:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.788255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489511013709270645:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.788566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:49.798703Z node 2 :TX_PROXY ERROR: Actor# [2:7489511013709270681:2122] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:27:49.804338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511013452164072:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:27:49.804408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489511013709270680:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-04-04T17:27:49.869856Z node 2 :TX_PROXY ERROR: Actor# [2:7489511013709270708:2128] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:49.883849Z node 1 :TX_PROXY ERROR: Actor# [1:7489511013452164163:2687] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:50.158966Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489511013709270723:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:50.159287Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTdlMDZlNGUtMTliN2Q5ZC0zZTIxYWRmLWI1ZjU5ZmU=, ActorId: [2:7489511013709270641:2307], ActorState: ExecuteState, TraceId: 01jr0xge8t3erpnqvr9wtzw2yp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:50.160184Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511013452164182:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:50.162104Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:50.164478Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTZiM2VjYzQtZmRiZWFjMDYtOGU3YjkwMGItNzk4MGE3ODQ=, ActorId: [1:7489511013452164040:2335], ActorState: ExecuteState, TraceId: 01jr0xge863tvdh23mz3zp2zzz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:50.164848Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:50.213295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:27:50.335170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, ... 4T17:28:47.335952Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14813023239548421321_v1 grpc read failed 2025-04-04T17:28:47.335982Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14813023239548421321_v1 closed 2025-04-04T17:28:47.336383Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14813023239548421321_v1 is DEAD 2025-04-04T17:28:47.336679Z :INFO: [/Root] [/Root] [e6c322b-4744138d-dcbdf2c1-4f304667] Closing read session. 
Close timeout: 0.000000s 2025-04-04T17:28:47.336726Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:28:47.336725Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511254620907849:2529] disconnected; active server actors: 1 2025-04-04T17:28:47.336772Z :INFO: [/Root] [/Root] [e6c322b-4744138d-dcbdf2c1-4f304667] Counters: { Errors: 0 CurrentSessionLifetimeMs: 647 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:47.336757Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511254620907849:2529] client user disconnected session shared/user_3_1_14813023239548421321_v1 2025-04-04T17:28:47.336805Z :INFO: [/Root] [/Root] [5aeb68eb-946d8707-5b36528d-a9ad9911] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:47.336832Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-04T17:28:47.336807Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-04T17:28:47.336856Z :INFO: [/Root] [/Root] [5aeb68eb-946d8707-5b36528d-a9ad9911] Counters: { Errors: 0 CurrentSessionLifetimeMs: 644 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:47.336873Z :INFO: [/Root] [/Root] [a1cc55d3-1f1e502b-697079fd-c2e0035] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:47.336861Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnradableFamilies=0 [], RequireBalancing=0 [] 2025-04-04T17:28:47.336892Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:28:47.336892Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2025-04-04T17:28:47.336912Z :INFO: [/Root] [/Root] [a1cc55d3-1f1e502b-697079fd-c2e0035] Counters: { Errors: 0 CurrentSessionLifetimeMs: 643 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:47.336936Z :INFO: [/Root] [/Root] [a1cc55d3-1f1e502b-697079fd-c2e0035] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:47.336918Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000033s 2025-04-04T17:28:47.336970Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:28:47.337027Z :INFO: [/Root] [/Root] [a1cc55d3-1f1e502b-697079fd-c2e0035] Counters: { Errors: 0 CurrentSessionLifetimeMs: 643 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:47.337129Z :NOTICE: [/Root] [/Root] [a1cc55d3-1f1e502b-697079fd-c2e0035] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:47.337244Z :INFO: [/Root] [/Root] [5aeb68eb-946d8707-5b36528d-a9ad9911] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:47.337268Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-04-04T17:28:47.337292Z :INFO: [/Root] [/Root] [5aeb68eb-946d8707-5b36528d-a9ad9911] Counters: { Errors: 0 CurrentSessionLifetimeMs: 645 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:47.337331Z :NOTICE: [/Root] [/Root] [5aeb68eb-946d8707-5b36528d-a9ad9911] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:47.337384Z :INFO: [/Root] [/Root] [e6c322b-4744138d-dcbdf2c1-4f304667] Closing read session. Close timeout: 0.000000s 2025-04-04T17:28:47.337402Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:28:47.337422Z :INFO: [/Root] [/Root] [e6c322b-4744138d-dcbdf2c1-4f304667] Counters: { Errors: 0 CurrentSessionLifetimeMs: 647 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:28:47.337458Z :NOTICE: [/Root] [/Root] [e6c322b-4744138d-dcbdf2c1-4f304667] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:28:47.343967Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_9604104991052126043_v1 grpc read done: success# 0, data# { } 2025-04-04T17:28:47.343998Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9604104991052126043_v1 grpc read failed 2025-04-04T17:28:47.344022Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9604104991052126043_v1 grpc closed 2025-04-04T17:28:47.344103Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_12698847610076791252_v1 grpc read done: success# 0, data# { } 2025-04-04T17:28:47.344112Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12698847610076791252_v1 grpc read failed 2025-04-04T17:28:47.344127Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12698847610076791252_v1 grpc closed 2025-04-04T17:28:47.344140Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12698847610076791252_v1 is DEAD 2025-04-04T17:28:47.346519Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_9604104991052126043_v1 is DEAD 2025-04-04T17:28:47.347451Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|46a3c398-a7f807a2-9feb9e47-345efb1d_0 grpc read done: success: 0 data: 2025-04-04T17:28:47.347472Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|46a3c398-a7f807a2-9feb9e47-345efb1d_0 grpc read failed 2025-04-04T17:28:47.347498Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|46a3c398-a7f807a2-9feb9e47-345efb1d_0 grpc closed 2025-04-04T17:28:47.347515Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|46a3c398-a7f807a2-9feb9e47-345efb1d_0 is DEAD 2025-04-04T17:28:47.352891Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489511254620907890:2534] destroyed 2025-04-04T17:28:47.352966Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-04-04T17:28:47.353050Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_2_9604104991052126043_v1 2025-04-04T17:28:47.353071Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489511254620907861:2542] destroyed 2025-04-04T17:28:47.353134Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_2_9604104991052126043_v1 2025-04-04T17:28:47.352561Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:28:47.352955Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511254620907850:2531] disconnected; active server actors: 1 2025-04-04T17:28:47.352975Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511254620907850:2531] client user disconnected session shared/user_3_3_12698847610076791252_v1 2025-04-04T17:28:47.353007Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-04-04T17:28:47.353041Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511254620907853:2530] disconnected; active server actors: 1 2025-04-04T17:28:47.353054Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511254620907853:2530] client user disconnected session shared/user_3_2_9604104991052126043_v1 2025-04-04T17:28:47.675251Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7489511258915875274:2546] TxId: 281474976715690. Ctx: { TraceId: 01jr0xj6ff2dky92w6kjeap2gv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTNjMDg1NGEtNTMxNGI5Y2QtZDFhZTFhNWUtNGFmYzc0N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-04-04T17:28:47.676237Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489511258915875281:2554], TxId: 281474976715690, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=OTNjMDg1NGEtNTMxNGI5Y2QtZDFhZTFhNWUtNGFmYzc0N2E=. TraceId : 01jr0xj6ff2dky92w6kjeap2gv. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7489511258915875274:2546], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:28:47.676457Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7489511258915875283:2555], TxId: 281474976715690, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=OTNjMDg1NGEtNTMxNGI5Y2QtZDFhZTFhNWUtNGFmYzc0N2E=. TraceId : 01jr0xj6ff2dky92w6kjeap2gv. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7489511258915875274:2546], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-04-04T17:28:48.154811Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715691. Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:28:48.154944Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7489511258915875298:2556] TxId: 281474976715691. Ctx: { TraceId: 01jr0xj73je7fr7qy1js5c288q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTg2MTUzNmMtZWMwMDNlOTItOGQ1MjcxNTktOTY1MzU5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-04-04T17:28:48.155171Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTg2MTUzNmMtZWMwMDNlOTItOGQ1MjcxNTktOTY1MzU5OA==, ActorId: [3:7489511258915875295:2556], ActorState: ExecuteState, TraceId: 01jr0xj73je7fr7qy1js5c288q, Create QueryResponse for error on request, msg: 2025-04-04T17:28:48.156114Z node 3 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jr0xj73k9d73gh4nd2skxag8" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2025-04-04T17:26:04.887881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:04.888166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:04.888280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0019b9/r3tmp/tmpdfIjoq/pdisk_1.dat 2025-04-04T17:26:05.284110Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7084, node 1 2025-04-04T17:26:05.697980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:05.698041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:05.698087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:05.698734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:05.704341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:05.793035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:05.794005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:05.823925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23425 2025-04-04T17:26:06.326659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:09.537351Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:09.573618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:09.573734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:09.602344Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:09.606677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:09.828879Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.829535Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.830120Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.830768Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.830905Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.831157Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.831267Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.831447Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:09.831533Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.010765Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.010849Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.024098Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.164495Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:10.206124Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:10.206256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:10.241823Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:10.243086Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:10.243343Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:10.243412Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:10.243466Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:10.243526Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:10.243587Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:10.243662Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:10.244053Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:10.269750Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.269845Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:10.274659Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:26:10.279126Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:26:10.279802Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:26:10.284669Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:26:10.302791Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:10.302839Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:10.302888Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:26:10.314527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:10.319653Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:10.319784Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:10.505084Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:10.639250Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:10.727747Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:11.565140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:26:12.312662Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:12.477739Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:26:12.477800Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.477867Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2586:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:26:12.478100Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2587:2949] 2025-04-04T17:26:12.478276Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2587:2949], schemeshard id = 72075186224037899 2025-04-04T17:26:13.593878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2710:3235], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.593979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:13.606408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-04T17:26:13.698303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:13.698525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:13.698796Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:13.698927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:13.699048Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:13.699187Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:13.699334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:13.699453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:13.699576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2796:3045];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17: ... 42.004685Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:28:43.763547Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-04T17:28:43.763622Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 189.000000s, at schemeshard: 72075186224037897 2025-04-04T17:28:43.763958Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-04T17:28:43.788784Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:45.072355Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:45.072481Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-04-04T17:28:45.072528Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-04-04T17:28:45.072609Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:28:45.072664Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:45.073148Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:28:45.076917Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:28:45.081475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8036:5943], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.081594Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8046:5948], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.081714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.096453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:28:45.168667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8050:5951], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:28:45.396036Z node 2 :TX_PROXY ERROR: Actor# [2:8148:5999] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:45.440638Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:8177:6014]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:45.440877Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:45.440976Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8179:6016] 2025-04-04T17:28:45.441054Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8179:6016] 2025-04-04T17:28:45.441486Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8180:6017] 2025-04-04T17:28:45.441645Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8179:6016], server id = [2:8180:6017], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:45.441719Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8180:6017], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:45.441785Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:28:45.441935Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:28:45.442025Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:8177:6014], StatRequests.size() = 1 2025-04-04T17:28:45.580349Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjBmMmUzMmYtYTdkMzdiZDktZmU4MzM1YzktYTc1ODU5ODU=, TxId: 2025-04-04T17:28:45.580438Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjBmMmUzMmYtYTdkMzdiZDktZmU4MzM1YzktYTc1ODU5ODU=, TxId: 2025-04-04T17:28:45.580957Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:45.596945Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:28:45.597022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:28:45.651563Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:45.651639Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:45.717959Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8179:6016], schemeshard count = 1 2025-04-04T17:28:45.966084Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-04T17:28:45.966168Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 232.000000s, at schemeshard: 72075186224037899 2025-04-04T17:28:45.966507Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-04T17:28:45.981145Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:28:46.879383Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:28:46.879487Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-04T17:28:46.885325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:46.905037Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:46.905706Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:46.905775Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037899, LocalPathId: 2], AnalyzedShards 1 2025-04-04T17:28:46.920046Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:46.953652Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-04-04T17:28:46.954994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-04-04T17:28:46.955122Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-04-04T17:28:46.969249Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-04-04T17:28:48.203196Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:28:48.203324Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-04-04T17:28:48.203364Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:28:48.204133Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:28:48.218049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:28:48.218533Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:28:48.218585Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:28:48.219512Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:28:48.233436Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:28:48.233723Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:28:48.234285Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8314:6106], server id = [2:8315:6107], tablet id = 72075186224037905, status = OK 2025-04-04T17:28:48.234387Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8314:6106], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:28:48.238191Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:28:48.238318Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:28:48.238642Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8314:6106], server id = [2:8315:6107], tablet id = 72075186224037905 2025-04-04T17:28:48.238680Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:28:48.238765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:28:48.238974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:28:48.239307Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:28:48.241760Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:28:48.260823Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8335:6126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:48.261029Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:48.261068Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8335:6126], StatRequests.size() = 1 2025-04-04T17:28:48.386921Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Zjg4YTc0ZWEtOTgyOGYwN2UtNDU0MGVjZjItNGJlNGQ3Ng==, TxId: 2025-04-04T17:28:48.386965Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Zjg4YTc0ZWEtOTgyOGYwN2UtNDU0MGVjZjItNGJlNGQ3Ng==, TxId: 2025-04-04T17:28:48.387368Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:28:48.401075Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:28:48.401132Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3302:3390] >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD] >> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD] >> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD] >> KesusProxyTest::AllocatesResourceWithKesus [GOOD] >> KesusProxyTest::DisconnectsDuringActiveSession [GOOD] >> KesusProxyTest::AllocatesResourceOffline [GOOD] >> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD] >> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD] >> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD] >> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD] >> TQuoterServiceTest::StaticRateLimiter >> test_example.py::TestExample::test_example >> TTxDataShardLocalKMeansScan::BadRequest >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsLimit999 >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> TPartitionTests::EndWriteTimestamp_FromMeta >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] Test command err: 2025-04-04T17:28:42.782715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511241331663553:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:42.782783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001660/r3tmp/tmpeAPqbK/pdisk_1.dat 2025-04-04T17:28:43.149229Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6008, node 1 2025-04-04T17:28:43.164339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:43.164430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-04-04T17:28:43.178599Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:43.179963Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:43.233280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:43.233305Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:43.233319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:43.233512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:43.237171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:43.526183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:45.533395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511254216566467:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.533407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511254216566459:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.533528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.537379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:28:45.537442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511254216566502:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.537505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.537537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511254216566504:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:45.542190Z node 1 :TX_PROXY ERROR: Actor# [1:7489511254216566510:2637] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-04-04T17:28:45.557957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511254216566507:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:28:45.558031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511254216566473:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:28:45.622699Z node 1 :TX_PROXY ERROR: Actor# [1:7489511254216566625:2704] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:45.632228Z node 1 :TX_PROXY ERROR: Actor# [1:7489511254216566635:2712] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:47.081853Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489511261523968997:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:47.081923Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001660/r3tmp/tmpuDQi5I/pdisk_1.dat 2025-04-04T17:28:47.249231Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:47.283803Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:47.283884Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:47.287830Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21980, node 4 2025-04-04T17:28:47.354959Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:47.354984Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:47.354992Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:47.355141Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:47.604196Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:28:50.396284Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511274408871923:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.396414Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.413582Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:28:51.625011Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511278703839758:2604], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:51.625138Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:51.625384Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511278703839763:2607], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:51.628815Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:28:51.647708Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489511278703839765:2608], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:28:51.733588Z node 4 :TX_PROXY ERROR: Actor# [4:7489511278703839871:2809] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TestFilterSet::FilterGroup >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestOwnership >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::NoStoragePools >> TPQTest::TestLowWatermark [GOOD] >> TPQTest::TestGetTimestamps >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-04-04T17:28:35.490999Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:35.491080Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:28:35.507614Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:35.508992Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:28:35.000000Z 2025-04-04T17:28:35.509047Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 0 and act no: 1 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to 
BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Create distr tx with id = 2 and act no: 3 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Got batch complete: 1 Send disk status response with cookie: 0 Wait tx committed for tx 0 Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Wait tx committed for tx 2 2025-04-04T17:28:39.699071Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:39.699132Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:28:39.711490Z node 2 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:39.712941Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-04-04T17:28:39.000000Z 2025-04-04T17:28:39.712989Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:177:2192] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup t ... TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_HULL_HUGE_KEEPER Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Create distr tx with id = 24 and act no: 25 Create distr tx with id = 26 and act no: 27 Create immediate tx with id = 28 and act no: 29 Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured 
TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Wait batch completion Got batch complete: 2 Wait batch completion Got batch complete: 1 Wait for no tx committed Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Send disk status response with cookie: 0 Wait tx committed for tx 26 Wait immediate tx complete 28 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 28 2025-04-04T17:28:53.234663Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:53.234750Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:53.255346Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [3:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:53.257185Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:28:53.000000Z 2025-04-04T17:28:53.257246Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [3:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:54.066105Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:54.066168Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:54.082637Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [4:176:2191] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:54.084411Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-04T17:28:54.084476Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:176:2191] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:54.738561Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:54.738645Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:54.763500Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [5:178:2193] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >>>> ADD BLOB 0 writeTimestamp=2025-04-04T17:28:54.755184Z >>>> ADD BLOB 1 writeTimestamp=2025-04-04T17:28:54.755207Z >>>> ADD BLOB 2 writeTimestamp=2025-04-04T17:28:54.755223Z >>>> ADD BLOB 3 writeTimestamp=2025-04-04T17:28:54.755237Z >>>> ADD BLOB 4 writeTimestamp=2025-04-04T17:28:54.755251Z >>>> ADD BLOB 5 writeTimestamp=2025-04-04T17:28:54.755266Z >>>> ADD BLOB 6 writeTimestamp=2025-04-04T17:28:54.755280Z >>>> ADD BLOB 7 writeTimestamp=2025-04-04T17:28:54.755294Z >>>> ADD BLOB 8 writeTimestamp=2025-04-04T17:28:54.755308Z >>>> ADD BLOB 9 writeTimestamp=2025-04-04T17:28:54.755324Z 2025-04-04T17:28:54.766181Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-04-04T17:28:54.000000Z 2025-04-04T17:28:54.766269Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:178:2193] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: 2025-04-04T17:28:26.787853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:26.788526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:26.788757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:26.790630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:26.790842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:26.791082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166b/r3tmp/tmp1sllti/pdisk_1.dat 2025-04-04T17:28:27.168617Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32242, node 1 TClient is connected to server localhost:30108 2025-04-04T17:28:27.558487Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:27.558548Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:27.558580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:27.558796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:34.746559Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:34.747071Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:34.747478Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:34.748248Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:34.748556Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:34.748889Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166b/r3tmp/tmpYYEKE7/pdisk_1.dat 2025-04-04T17:28:35.058813Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2255, node 3 TClient is connected to server localhost:19646 2025-04-04T17:28:35.441011Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:35.441070Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:35.441105Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:35.441350Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD location { id: 3 host: "::1" port: 12001 } 2025-04-04T17:28:42.938686Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:42.939171Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:42.939626Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:42.940634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:42.940982Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:42.941266Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166b/r3tmp/tmp2VciIN/pdisk_1.dat 2025-04-04T17:28:43.306188Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9745, node 5 TClient is connected to server localhost:27170 2025-04-04T17:28:43.716616Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:43.716685Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:43.716724Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:43.717248Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD location { id: 5 host: "::1" port: 12001 } 2025-04-04T17:28:48.255277Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:449:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:48.255624Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:48.255760Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166b/r3tmp/tmpmzk9z4/pdisk_1.dat 2025-04-04T17:28:48.571951Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14783, node 7 TClient is connected to server localhost:13755 2025-04-04T17:28:48.942873Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:48.942946Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:48.942990Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:48.943584Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:53.899628Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:449:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:53.900015Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:53.900197Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00166b/r3tmp/tmpLrKqLO/pdisk_1.dat 2025-04-04T17:28:54.254618Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9148, node 9 TClient is connected to server localhost:3981 2025-04-04T17:28:54.700285Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:54.700364Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:54.700412Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:54.701042Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> TTxDataShardBuildIndexScan::RunScan >> TestFilterSet::FilterGroup [GOOD] >> QueryActorTest::SimpleQuery >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> TestFilterSet::DuplicationValidation >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] Test command err: 2025-04-04T17:28:44.554307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511247560065790:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:44.557208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001654/r3tmp/tmpbFsteY/pdisk_1.dat 2025-04-04T17:28:44.973773Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:44.976222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:44.976347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16309, node 1 2025-04-04T17:28:44.993957Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:44.994005Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:45.036935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:45.036960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:45.036969Z node 1 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2025-04-04T17:28:45.037082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:45.055600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:45.293299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:49.367114Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489511267875079591:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:49.368753Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001654/r3tmp/tmpzK1FA2/pdisk_1.dat 2025-04-04T17:28:49.539077Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:49.576548Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:49.576676Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:49.580857Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23175, node 4 2025-04-04T17:28:49.670886Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:49.670920Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:49.670927Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:49.671060Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19487 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:49.900760Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:54.367595Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489511267875079591:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:54.367658Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_example.py::TestExample::test_example [GOOD] >> test_example.py::TestExample::test_example2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] Test command err: 2025-04-04T17:28:41.518176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511235507997083:2211];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:41.519544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001662/r3tmp/tmpin8uvM/pdisk_1.dat 2025-04-04T17:28:41.888561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:41.921038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:41.921100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12271, node 1 2025-04-04T17:28:41.926843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:41.982875Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:41.982897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:41.982908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:41.983037Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17432 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:42.220157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:46.162092Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489511255844700166:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:46.162216Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001662/r3tmp/tmp32Vh7j/pdisk_1.dat 2025-04-04T17:28:46.327026Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:46.369810Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:46.369910Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:46.374685Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24311, node 4 2025-04-04T17:28:46.429749Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:46.429784Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:46.429792Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:46.429959Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27303 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:46.642228Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:49.263131Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603208:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.263625Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.263831Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603334:2524], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.263893Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603313:2511], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.263915Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603339:2529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.263991Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603350:2537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264002Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603337:2527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264053Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603318:2516], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264124Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603340:2530], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264149Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603312:2510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264163Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603316:2514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264246Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603315:2513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264262Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603314:2512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264289Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603343:2533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264337Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603328:2520], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264429Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603317:2515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264430Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603342:2532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264534Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603331:2522], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264560Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603344:2534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264604Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511268729603341:2531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:49.264615Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFe ... ROR: Actor# [4:7489511268729604024:3160] txid# 281474976710716, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.411735Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604045:3175] txid# 281474976710717, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.412156Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604051:3179] txid# 281474976710719, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.412426Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604050:3178] txid# 281474976710718, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.412685Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604085:3203] txid# 281474976710725, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.413686Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604074:3194] txid# 281474976710720, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.414023Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604083:3201] txid# 281474976710723, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.415138Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604076:3195] txid# 281474976710721, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.417652Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604084:3202] txid# 281474976710724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.418054Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604082:3200] txid# 281474976710722, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.418267Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604110:3221] txid# 281474976710726, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.418421Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604114:3225] txid# 281474976710727, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.418708Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604134:3238] txid# 281474976710728, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.425838Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604149:3252] txid# 281474976710730, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.426372Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604141:3245] txid# 281474976710729, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.426438Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604154:3257] txid# 281474976710731, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.429928Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604183:3283] txid# 281474976710733, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430062Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604211:3305] txid# 281474976710737, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430192Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604213:3307] txid# 281474976710739, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430335Z node 4 :TX_PROXY ERROR: Actor# 
[4:7489511268729604233:3322] txid# 281474976710741, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430480Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604206:3301] txid# 281474976710735, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430516Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604198:3293] txid# 281474976710734, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430585Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604182:3282] txid# 281474976710732, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430671Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604214:3308] txid# 281474976710740, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.430805Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604207:3302] txid# 281474976710736, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.432005Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604212:3306] txid# 281474976710738, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.432225Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604243:3329] txid# 281474976710743, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.432360Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604241:3327] txid# 281474976710742, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.432528Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604255:3341] txid# 281474976710744, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: 
EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.435386Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604283:3359] txid# 281474976710745, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.435655Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604291:3365] txid# 281474976710746, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:49.435822Z node 4 :TX_PROXY ERROR: Actor# [4:7489511268729604301:3374] txid# 281474976710747, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:51.162383Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489511255844700166:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:51.162460Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_scheduling.py::TestSchedule::test_skip_busy[kikimr0] [SKIPPED] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::NoBscResponse >> TestFilterSet::DuplicationValidation [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> TestFilterSet::CompilationValidation >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild >> CoordinatorTests::Route >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsNoLimit >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> SdkCredProvider::PingFromProviderSyncDiscovery >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> TabletService_ChangeSchema::Basics >> BlobDepot::BasicPutAndGet >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> TestFilterSet::CompilationValidation [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TestFormatHandler::ManyJsonClients >> 
TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction >> BlobDepot::BasicPutAndGet [GOOD] >> BlobDepot::TestBlockedEvGetRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] Test command err: 2025-04-04T17:29:03.198871Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-04T17:29:03.199056Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-04T17:29:03.199164Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-04T17:29:03.199184Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-04T17:29:03.199212Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-04T17:29:03.200345Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-04T17:29:03.217616Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-04T17:29:03.217697Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-04T17:29:03.217780Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-04T17:29:03.222495Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-04T17:29:03.222560Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-04T17:29:03.222611Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-04T17:29:03.222728Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.222763Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-04T17:29:03.222815Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.222842Z node 1 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-04T17:29:03.222874Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.222900Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-04T17:29:03.222926Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-04T17:29:03.223002Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:29:03.223033Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-04T17:29:03.223242Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-04-04T17:29:03.223274Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2025-04-04T17:29:03.223305Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-04-04T17:29:03.223334Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2025-04-04T17:29:03.223362Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-04T17:29:03.223417Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-04-04T17:29:03.223460Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-04-04T17:29:03.294579Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-04T17:29:03.294749Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-04T17:29:03.294845Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-04T17:29:03.294886Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-04T17:29:03.294926Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-04T17:29:03.294992Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-04T17:29:03.295166Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-04T17:29:03.295198Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-04T17:29:03.295224Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-04T17:29:03.295365Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got 
TEvCreateCheckpointResponse 2025-04-04T17:29:03.295401Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-04T17:29:03.295434Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-04T17:29:03.295540Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.295569Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-04T17:29:03.295610Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.295660Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-04T17:29:03.295709Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.295748Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-04T17:29:03.295784Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-04T17:29:03.295847Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:29:03.295875Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-04T17:29:03.295983Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-04-04T17:29:03.296028Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2025-04-04T17:29:03.296069Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-04-04T17:29:03.296102Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2025-04-04T17:29:03.296142Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-04T17:29:03.296208Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-04-04T17:29:03.296236Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-04-04T17:29:03.382191Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-04T17:29:03.382604Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-04T17:29:03.382717Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-04T17:29:03.382746Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-04T17:29:03.382774Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send 
TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-04T17:29:03.382870Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-04T17:29:03.383078Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-04T17:29:03.383114Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-04T17:29:03.383146Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-04T17:29:03.383272Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-04T17:29:03.383328Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-04T17:29:03.383363Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-04T17:29:03.383470Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.383500Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-04T17:29:03.383544Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.383576Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-04-04T17:29:03.383610Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.383639Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-04-04T17:29:03.383664Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-04T17:29:03.383733Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:29:03.383768Z node 3 :STREAMS_C ... 
:2] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.467823Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Task state saved, need 0 more acks 2025-04-04T17:29:03.467861Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-04T17:29:03.467937Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:29:03.467963Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-04T17:29:03.468045Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 1 2025-04-04T17:29:03.468075Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:6:2053], need 1 more acks 2025-04-04T17:29:03.468109Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 3 2025-04-04T17:29:03.468136Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:8:2055], need 0 more acks 2025-04-04T17:29:03.468159Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-04T17:29:03.468208Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCompleteCheckpointResponse 2025-04-04T17:29:03.468241Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint completed 2025-04-04T17:29:03.468272Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-04T17:29:03.468302Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-04T17:29:03.468371Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCreateCheckpointResponse 2025-04-04T17:29:03.468406Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-04T17:29:03.468472Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.468509Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 2 more acks 2025-04-04T17:29:03.468546Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.468602Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2025-04-04T17:29:03.468644Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.468676Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2025-04-04T17:29:03.468702Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 
2025-04-04T17:29:03.468746Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:29:03.468771Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-04T17:29:03.468894Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2025-04-04T17:29:03.468924Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2025-04-04T17:29:03.468960Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2025-04-04T17:29:03.469002Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2025-04-04T17:29:03.469027Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-04T17:29:03.469070Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2025-04-04T17:29:03.469095Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint completed 2025-04-04T17:29:03.469125Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-04T17:29:03.469157Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-04T17:29:03.469225Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2025-04-04T17:29:03.469265Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-04T17:29:03.469328Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.469360Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 2 more acks 2025-04-04T17:29:03.469393Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.469418Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2025-04-04T17:29:03.469450Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.469479Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2025-04-04T17:29:03.469503Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-04-04T17:29:03.469561Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2025-04-04T17:29:03.469590Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-04-04T17:29:03.469673Z node 4 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2025-04-04T17:29:03.469715Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2025-04-04T17:29:03.469747Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2025-04-04T17:29:03.469778Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2025-04-04T17:29:03.469817Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-04-04T17:29:03.469861Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2025-04-04T17:29:03.469888Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint completed 2025-04-04T17:29:03.551005Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-04-04T17:29:03.551166Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-04-04T17:29:03.551283Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-04-04T17:29:03.551312Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-04-04T17:29:03.551341Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-04-04T17:29:03.551396Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-04-04T17:29:03.551557Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-04-04T17:29:03.551586Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-04-04T17:29:03.551615Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-04T17:29:03.551750Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-04-04T17:29:03.551784Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-04-04T17:29:03.551820Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-04-04T17:29:03.551930Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-04-04T17:29:03.551974Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-04-04T17:29:03.552009Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-04-04T17:29:03.552033Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-04-04T17:29:03.552073Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-04-04T17:29:03.552104Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-04-04T17:29:03.552154Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2025-04-04T17:29:03.552216Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2025-04-04T17:29:03.552246Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: [my-graph-id.42] [42:1] Checkpoint aborted 2025-04-04T17:29:03.552283Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-04-04T17:29:03.552322Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-04-04T17:29:03.552396Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2025-04-04T17:29:03.552427Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) |97.5%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> BlobDepot::TestBlockedEvGetRequest [GOOD] >> BlobDepot::BasicRange >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] Test command err: 2025-04-04T17:28:40.501569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511230036543015:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:40.501829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001672/r3tmp/tmp5A6tYn/pdisk_1.dat 2025-04-04T17:28:40.902533Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:40.910722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:40.910807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3266, node 1 2025-04-04T17:28:40.917101Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:40.924946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:40.928502Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:28:40.979011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:40.979031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-04-04T17:28:40.979039Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:40.979133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:41.247331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:43.828795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921445946:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.829045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.829167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446002:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.829201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446005:2510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.829249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921445998:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.829297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446000:2505], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.829364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921445999:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.830656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446004:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.830979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446003:2508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.831246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446001:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.834752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446026:2515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.839379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446035:2520], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.839484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.840673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446069:2536], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.840740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446072:2538], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.840832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446080:2545], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.841378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446074:2539], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.841587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446081:2546], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.841664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446085:2550], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.841716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446082:2547], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.841759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446083:2548], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.841787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446084:2549], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.841817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446086:2551], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.842027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446102:2553], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.842099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446118:2558], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.842218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.842536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446071:2537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.843195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446148:2579], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.843387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511242921446117:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.844099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748951 ... : ydb://session/3?node_id=4&id=MmViMWQxZWMtNDJkYTRlMDQtMzA4MjQwYmItYmQ5ODk2NzI=, ActorId: [4:7489511295092224220:2349], ActorState: ExecuteState, TraceId: 01jr0xjfat0gf27xavtcxcc8m1, Reply query error, msg: Pending previous query completion proxyRequestId: 469 2025-04-04T17:28:56.888839Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmViMWQxZWMtNDJkYTRlMDQtMzA4MjQwYmItYmQ5ODk2NzI=, ActorId: [4:7489511295092224220:2349], ActorState: ExecuteState, TraceId: 01jr0xjfat0gf27xavtcxcc8m1, Reply query error, msg: Pending previous query completion proxyRequestId: 471 2025-04-04T17:28:56.888875Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmViMWQxZWMtNDJkYTRlMDQtMzA4MjQwYmItYmQ5ODk2NzI=, ActorId: [4:7489511295092224220:2349], ActorState: ExecuteState, TraceId: 01jr0xjfat0gf27xavtcxcc8m1, Reply query error, msg: Pending previous query completion proxyRequestId: 479 2025-04-04T17:28:56.888931Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmViMWQxZWMtNDJkYTRlMDQtMzA4MjQwYmItYmQ5ODk2NzI=, ActorId: [4:7489511295092224220:2349], ActorState: ExecuteState, TraceId: 01jr0xjfat0gf27xavtcxcc8m1, Reply query error, msg: Pending previous query completion proxyRequestId: 482 2025-04-04T17:28:56.888970Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmViMWQxZWMtNDJkYTRlMDQtMzA4MjQwYmItYmQ5ODk2NzI=, ActorId: [4:7489511295092224220:2349], ActorState: ExecuteState, TraceId: 01jr0xjfat0gf27xavtcxcc8m1, Reply query error, msg: Pending previous query completion proxyRequestId: 485 2025-04-04T17:28:56.889004Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmViMWQxZWMtNDJkYTRlMDQtMzA4MjQwYmItYmQ5ODk2NzI=, ActorId: [4:7489511295092224220:2349], ActorState: ExecuteState, TraceId: 01jr0xjfat0gf27xavtcxcc8m1, Reply query error, msg: Pending previous query completion proxyRequestId: 496 2025-04-04T17:28:56.889069Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 403 2025-04-04T17:28:56.889108Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 421 2025-04-04T17:28:56.889166Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 422 2025-04-04T17:28:56.889198Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 428 2025-04-04T17:28:56.889232Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: 
[4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 430 2025-04-04T17:28:56.889303Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 431 2025-04-04T17:28:56.889351Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 440 2025-04-04T17:28:56.889378Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 441 2025-04-04T17:28:56.889416Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 452 2025-04-04T17:28:56.889527Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 456 2025-04-04T17:28:56.889598Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 458 2025-04-04T17:28:56.889634Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 463 2025-04-04T17:28:56.889699Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 464 2025-04-04T17:28:56.889732Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 470 2025-04-04T17:28:56.889801Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 476 2025-04-04T17:28:56.889866Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, 
ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 477 2025-04-04T17:28:56.889904Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTRhZGJiYjEtMWZjMTBmZjEtOTJmMjVlYmYtZTYxNzhkY2U=, ActorId: [4:7489511295092224221:2350], ActorState: ExecuteState, TraceId: 01jr0xjfc91n7pcy7nqa42b04e, Reply query error, msg: Pending previous query completion proxyRequestId: 480 2025-04-04T17:28:56.890754Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjdlMmVjZDYtZmE1NWQ5Yy0yMGQ2Mzc3Yi0zYmIxYjlkMw==, ActorId: [4:7489511295092224199:2343], ActorState: ExecuteState, TraceId: 01jr0xjfdy8bvsj9grcanm8nn4, Reply query error, msg: Pending previous query completion proxyRequestId: 499 2025-04-04T17:28:56.890838Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjdlMmVjZDYtZmE1NWQ5Yy0yMGQ2Mzc3Yi0zYmIxYjlkMw==, ActorId: [4:7489511295092224199:2343], ActorState: ExecuteState, TraceId: 01jr0xjfdy8bvsj9grcanm8nn4, Reply query error, msg: Pending previous query completion proxyRequestId: 503 2025-04-04T17:28:56.890878Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjdlMmVjZDYtZmE1NWQ5Yy0yMGQ2Mzc3Yi0zYmIxYjlkMw==, ActorId: [4:7489511295092224199:2343], ActorState: ExecuteState, TraceId: 01jr0xjfdy8bvsj9grcanm8nn4, Reply query error, msg: Pending previous query completion proxyRequestId: 507 2025-04-04T17:28:56.890918Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjdlMmVjZDYtZmE1NWQ5Yy0yMGQ2Mzc3Yi0zYmIxYjlkMw==, ActorId: [4:7489511295092224199:2343], ActorState: ExecuteState, TraceId: 01jr0xjfdy8bvsj9grcanm8nn4, Reply query error, msg: Pending previous query completion proxyRequestId: 508 2025-04-04T17:28:56.890955Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjdlMmVjZDYtZmE1NWQ5Yy0yMGQ2Mzc3Yi0zYmIxYjlkMw==, ActorId: [4:7489511295092224199:2343], ActorState: ExecuteState, TraceId: 01jr0xjfdy8bvsj9grcanm8nn4, Reply query error, msg: Pending previous query completion proxyRequestId: 509 2025-04-04T17:28:56.890988Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjdlMmVjZDYtZmE1NWQ5Yy0yMGQ2Mzc3Yi0zYmIxYjlkMw==, ActorId: [4:7489511295092224199:2343], ActorState: ExecuteState, TraceId: 01jr0xjfdy8bvsj9grcanm8nn4, Reply query error, msg: Pending previous query completion proxyRequestId: 510 2025-04-04T17:28:56.891017Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjdlMmVjZDYtZmE1NWQ5Yy0yMGQ2Mzc3Yi0zYmIxYjlkMw==, ActorId: [4:7489511295092224199:2343], ActorState: ExecuteState, TraceId: 01jr0xjfdy8bvsj9grcanm8nn4, Reply query error, msg: Pending previous query completion proxyRequestId: 511 2025-04-04T17:28:56.891075Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTgzYWZkMjUtYjBhMzYzODMtZmVmZjMyNDUtOTljOGU3NTc=, ActorId: [4:7489511295092224204:2348], ActorState: ExecuteState, TraceId: 01jr0xjfan9p9w7g2behpeqxw5, Reply query error, msg: Pending previous query completion proxyRequestId: 500 2025-04-04T17:28:56.891116Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTgzYWZkMjUtYjBhMzYzODMtZmVmZjMyNDUtOTljOGU3NTc=, ActorId: [4:7489511295092224204:2348], ActorState: ExecuteState, TraceId: 01jr0xjfan9p9w7g2behpeqxw5, Reply query error, msg: Pending previous query completion proxyRequestId: 501 2025-04-04T17:28:56.891151Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=OWE2Y2EyY2YtMzFjYjYyOTYtMzgzYWQ2MTUtNDlkNzE2MzQ=, ActorId: [4:7489511295092224223:2352], ActorState: ExecuteState, TraceId: 01jr0xjfdf5ey7yzk82xgrzac5, Reply query error, msg: Pending previous query completion proxyRequestId: 502 2025-04-04T17:28:56.891187Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWE2Y2EyY2YtMzFjYjYyOTYtMzgzYWQ2MTUtNDlkNzE2MzQ=, ActorId: [4:7489511295092224223:2352], ActorState: ExecuteState, TraceId: 01jr0xjfdf5ey7yzk82xgrzac5, Reply query error, msg: Pending previous query completion proxyRequestId: 504 2025-04-04T17:28:56.891226Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OTBhODM5NmQtN2Y5ZWNmYzctMTBhZjMxMTktNDYzMWE4ZGY=, ActorId: [4:7489511295092224222:2351], ActorState: ExecuteState, TraceId: 01jr0xjfd6381sa2sa3qxa9w9j, Reply query error, msg: Pending previous query completion proxyRequestId: 505 2025-04-04T17:28:56.891261Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjhiYmNhMGItZDkxNTJmM2MtODQ3NWUwMzEtNzhiOWQxYTg=, ActorId: [4:7489511295092224201:2345], ActorState: ExecuteState, TraceId: 01jr0xjfcf43m719vmq4q4kj4r, Reply query error, msg: Pending previous query completion proxyRequestId: 506 2025-04-04T17:28:57.640680Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489511282207321311:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:57.640814Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit >> BlobDepot::BasicRange [GOOD] >> BlobDepot::BasicDiscover |97.5%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestFormatHandler::ManyJsonClients [GOOD] >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath >> TestFormatHandler::ManyRawClients >> BlobDepot::BasicDiscover [GOOD] >> BlobDepot::BasicBlock >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> ServerRestartTest::RestartOnGetSession >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> BlobDepot::BasicBlock [GOOD] >> BlobDepot::BasicCollectGarbage >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2025-04-04T17:28:39.671379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511224313015757:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:39.671419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001651/r3tmp/tmprfQxWn/pdisk_1.dat 2025-04-04T17:28:40.065411Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:40.067240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:40.067323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:40.072324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29897, node 1 2025-04-04T17:28:40.176191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:40.176233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:40.176246Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:40.176389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21940 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:40.526062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:42.943891Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511237301364674:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:42.943949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001651/r3tmp/tmpzYXiqN/pdisk_1.dat 2025-04-04T17:28:43.063388Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25501, node 2 2025-04-04T17:28:43.098717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:43.098842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:43.100867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:43.115462Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:43.115484Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:43.115491Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:43.115596Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15995 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:43.300112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:51.976440Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:51.977013Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:51.977390Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:51.978423Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:51.978478Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:51.978716Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001651/r3tmp/tmpaNrP7b/pdisk_1.dat 2025-04-04T17:28:52.325293Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17901, node 3 TClient is connected to server localhost:28685 2025-04-04T17:28:52.716611Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:52.716670Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:52.716706Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:52.716938Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:59.662603Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:59.663029Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:59.663399Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:59.664305Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:59.664587Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:59.664829Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001651/r3tmp/tmpJT5ttw/pdisk_1.dat 2025-04-04T17:29:00.006813Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5274, node 5 TClient is connected to server localhost:13090 2025-04-04T17:29:00.390114Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:00.390196Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:00.390262Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:00.390982Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD location { id: 5 host: "::1" port: 12001 } 2025-04-04T17:29:05.188037Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:527:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:05.188465Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:05.188614Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001651/r3tmp/tmp7rkg8J/pdisk_1.dat 2025-04-04T17:29:05.546207Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61720, node 7 TClient is connected to server localhost:9121 2025-04-04T17:29:06.020536Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:06.020610Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:06.020660Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:06.020955Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-04-04T17:28:39.531071Z :FallbackToSingleDb INFO: Random seed for debugging is 1743787719531038 2025-04-04T17:28:39.852169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511227637757187:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:39.852251Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:28:39.895983Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511224948814542:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:39.897522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:28:40.033182Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:28:40.035711Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000be0/r3tmp/tmp5TCyZ4/pdisk_1.dat 2025-04-04T17:28:40.367933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:40.368064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:40.369960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:40.370057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-04-04T17:28:40.370202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:40.377799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:40.380617Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:40.381177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19565, node 1 2025-04-04T17:28:40.512346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000be0/r3tmp/yandexyE7BO6.tmp 2025-04-04T17:28:40.512371Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000be0/r3tmp/yandexyE7BO6.tmp 2025-04-04T17:28:40.512503Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000be0/r3tmp/yandexyE7BO6.tmp 2025-04-04T17:28:40.514416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:40.566311Z INFO: TTestServer started on Port 65340 GrpcPort 19565 TClient is connected to server localhost:65340 PQClient connected to localhost:19565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:40.884448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T17:28:43.218035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489511242128684044:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.218106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489511242128684030:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.218272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:43.224077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T17:28:43.260535Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489511242128684054:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T17:28:43.352489Z node 2 :TX_PROXY ERROR: Actor# [2:7489511242128684082:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:43.656571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T17:28:43.668619Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511244817627448:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:43.668936Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQxNWYxMjgtM2JhOWUwMmMtOGMyMTkzZjUtNjY0YTY2ZA==, ActorId: [1:7489511244817627399:2336], ActorState: ExecuteState, TraceId: 01jr0xj2jw5j3yj20d51399fg7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:43.670997Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:43.675053Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489511242128684097:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:43.675364Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTNjNjVhMmUtNjFlOTg5YzUtMjQyOTkwOS1jZGQ5ZDU0Yw==, ActorId: [2:7489511242128684023:2308], ActorState: ExecuteState, TraceId: 01jr0xj2ee9eqp6kwg95k0n89c, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:43.675839Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:43.769170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:28:43.907995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19565", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T17:28:44.217428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0xj36z9s2mye9t4wf9r4sv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2MDg3NWMtYWM0NDIxNzktMWU0ZWI4YzEtZDczNzhjYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489511249112595138:2966] 2025-04-04T17:28:44.852058Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511227637757187:2075];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:44.852131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:28:44.892518Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489511224948814542:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:44.892575Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T17:28:50.088208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:19565 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T17:28:50.149330Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:19565 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 ... me, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-04-04T17:29:05.588910Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-04-04T17:29:05.588919Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-04-04T17:29:05.588953Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-04-04T17:29:05.591115Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-04-04T17:29:05.741507Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511337310537297:2525] connected; active server actors: 1 2025-04-04T17:29:05.741029Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-04-04T17:29:05.741787Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-04-04T17:29:05.741804Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-04-04T17:29:05.743630Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511337310537297:2525] disconnected; active server actors: 1 2025-04-04T17:29:05.743658Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7489511337310537297:2525] disconnected no session 2025-04-04T17:29:05.864616Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T17:29:05.864660Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T17:29:05.864677Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7489511337310537258:2525] (SourceId=src, PreferedPartition=(NULL)) Start idle 
2025-04-04T17:29:05.864708Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T17:29:05.865212Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-04-04T17:29:05.865254Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7489511337310537318:2525], now have 1 active actors on pipe 2025-04-04T17:29:05.865307Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:29:05.865332Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:29:05.865434Z node 3 :PERSQUEUE INFO: new Cookie src|e32bc579-534ba44-fb71e407-451f6ee4_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-04-04T17:29:05.865565Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-04-04T17:29:05.865641Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:29:05.865765Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T17:29:05.865780Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T17:29:05.865843Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:29:05.865983Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|e32bc579-534ba44-fb71e407-451f6ee4_0 2025-04-04T17:29:05.866820Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743787745866 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:29:05.866986Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|e32bc579-534ba44-fb71e407-451f6ee4_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:29:05.870429Z :INFO: [] MessageGroupId [src] SessionId [src|e32bc579-534ba44-fb71e407-451f6ee4_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:29:05.870482Z :INFO: [] MessageGroupId [src] SessionId [src|e32bc579-534ba44-fb71e407-451f6ee4_0] Write session will now close 2025-04-04T17:29:05.870525Z :DEBUG: [] MessageGroupId [src] SessionId [src|e32bc579-534ba44-fb71e407-451f6ee4_0] Write session: aborting 2025-04-04T17:29:05.871538Z :INFO: [] MessageGroupId [src] SessionId [src|e32bc579-534ba44-fb71e407-451f6ee4_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:29:05.872368Z :DEBUG: [] MessageGroupId [src] SessionId [src|e32bc579-534ba44-fb71e407-451f6ee4_0] Write session is aborting and will not restart 2025-04-04T17:29:05.872480Z :DEBUG: [] MessageGroupId [src] SessionId [src|e32bc579-534ba44-fb71e407-451f6ee4_0] Write session: destroy 2025-04-04T17:29:05.873528Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|e32bc579-534ba44-fb71e407-451f6ee4_0 grpc read done: success: 0 data: 2025-04-04T17:29:05.873552Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e32bc579-534ba44-fb71e407-451f6ee4_0 grpc read failed 2025-04-04T17:29:05.873605Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e32bc579-534ba44-fb71e407-451f6ee4_0 grpc closed 2025-04-04T17:29:05.873649Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|e32bc579-534ba44-fb71e407-451f6ee4_0 is DEAD 2025-04-04T17:29:05.874675Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:29:05.874813Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7489511337310537318:2525] destroyed 2025-04-04T17:29:05.874861Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. PORTS 32040 22815 Session was created >>> Ready to answer: ok 2025-04-04T17:29:06.915664Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-04-04T17:29:06.915819Z :INFO: [/Root] [] [9e218203-9c5521e5-1cccf985-72268054] Open read subsessions to databases: { name: , endpoint: localhost:22815, path: /Root } 2025-04-04T17:29:06.916008Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Starting read session 2025-04-04T17:29:06.916045Z :DEBUG: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Starting single session 2025-04-04T17:29:06.916727Z :DEBUG: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-04T17:29:06.916790Z :DEBUG: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-04T17:29:06.916968Z :DEBUG: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] Reconnecting session to cluster in 0.000000s 2025-04-04T17:29:06.917250Z :ERROR: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:22815
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22815. 2025-04-04T17:29:06.917311Z :DEBUG: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-04T17:29:06.917375Z :DEBUG: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-04-04T17:29:06.917511Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:22815" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:22815
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22815. " } 2025-04-04T17:29:06.917877Z :NOTICE: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:29:06.917927Z :DEBUG: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:22815" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:22815
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:22815. " } 2025-04-04T17:29:06.918073Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Closing read session. Close timeout: 0.010000s 2025-04-04T17:29:06.918115Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:29:06.918156Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:29:06.918203Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Closing read session. Close timeout: 0.000000s 2025-04-04T17:29:06.918233Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:29:06.918293Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:29:06.918344Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Closing read session. Close timeout: 0.000000s 2025-04-04T17:29:06.918375Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-04-04T17:29:06.918416Z :INFO: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:29:06.918488Z :NOTICE: [/Root] [/Root] [fc8b1c0-83a59cf1-f03237a-7b3ac450] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> BlobDepot::BasicCollectGarbage [GOOD] >> BlobDepot::VerifiedRandom >> TestFormatHandler::ManyRawClients [GOOD] >> THealthCheckTest::ShardsNoLimit [GOOD] >> DataCleanup::ForceDataCleanup >> TestFormatHandler::ClientValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] Test command err: 2 2 |97.5%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues |97.6%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2025-04-04T17:28:41.102846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:41.103185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:41.103324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165f/r3tmp/tmpDaEkIN/pdisk_1.dat 2025-04-04T17:28:41.476542Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2210, node 1 TClient is connected to server localhost:28529 2025-04-04T17:28:41.847807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:41.847879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:41.847908Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:41.848459Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:45.910639Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:525:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:45.910818Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:45.910895Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165f/r3tmp/tmpJP2PN8/pdisk_1.dat 2025-04-04T17:28:46.209332Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19702, node 3 TClient is connected to server localhost:7647 2025-04-04T17:28:46.555726Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:46.555773Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:46.555802Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:46.556480Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:50.975536Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:252:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:50.975914Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:50.976111Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165f/r3tmp/tmp1oesKW/pdisk_1.dat 2025-04-04T17:28:51.335036Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18185, node 6 TClient is connected to server localhost:18891 2025-04-04T17:28:51.800969Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:51.801042Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:51.801082Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:51.801763Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:59.228016Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:59.228303Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:59.228533Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:59.229064Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:59.229400Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:59.229457Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165f/r3tmp/tmpCw5ijc/pdisk_1.dat 2025-04-04T17:28:59.518966Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65392, node 8 TClient is connected to server localhost:6925 2025-04-04T17:28:59.923046Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:59.923102Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:59.923137Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:59.923841Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:07.859229Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:07.859642Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:07.859865Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:07.860736Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:07.861090Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:07.861247Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165f/r3tmp/tmpidhNfM/pdisk_1.dat 2025-04-04T17:29:08.190609Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2862, node 10 TClient is connected to server localhost:10182 2025-04-04T17:29:08.514529Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:08.514580Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:08.514606Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:08.515483Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_example2 [GOOD] |97.6%| [TM] {RESULT} ydb/tests/example/py3test >> DataShardCompaction::CompactBorrowed >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> THealthCheckTest::StorageLimit50 [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 2025-04-04 17:29:10,713 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-04 17:29:10,880 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 670477 48.6M 48.6M 23.5M test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/000f6e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args 670612 1.9G 1.9G 1.7G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/i5tq/000f6e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu Test command err: 2025-04-04T17:19:11.888318Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:11.973726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:11.997076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:11.997361Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:12.004748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:12.004965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:12.005190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:12.005310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:12.005431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:12.005525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:12.005634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:12.005743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:12.005808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:12.005883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.005990Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:12.006102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:12.032741Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:12.032941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:12.033006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:12.033170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.033319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:12.033386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:12.033426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:12.033510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:12.033563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:12.033602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:12.033628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:12.033833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:12.033895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:12.033936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:12.033964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:12.034058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:12.034126Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:12.034175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:12.034202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:12.034283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:12.034332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:12.034376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:12.034426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:12.034459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:12.034483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:12.034832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-04-04T17:19:12.034922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-04-04T17:19:12.034989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-04-04T17:19:12.035073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-04-04T17:19:12.035235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:12.035296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:12.035359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:12.035569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:12.035634Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.035668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:12.035816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:12.035857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:12.035885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:12.036054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:12.036092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:12.036118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:12.036223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:12.036261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:12.036313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:10.406294Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:10.406339Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:10.411327Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:10.411517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:10.411564Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:10.411608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:10.416985Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-04T17:29:10.429852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ASSEMBLER::SPEC;duration=0.036593s;size=0.008692976;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-04-04T17:29:10.431633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ASSEMBLER::LAST_PK;duration=1.327541s;size=0.023116103;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-04-04T17:29:10.482007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=SNAPSHOT;duration=0.140440s;size=0;details={};;scan_step_idx=4; 2025-04-04T17:29:10.489304Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ALLOCATE_MEMORY::Fetching;duration=0.000402s;size=0;details={stage=Fetching;};;scan_step_idx=5; 2025-04-04T17:29:10.489447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=OPTIONAL_ASSEMBLER::LAST;duration=0.010045s;size=0.000288156;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=6; 
2025-04-04T17:29:10.489968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=BUILD_STAGE_RESULT;duration=0.002179s;size=0;details={};;scan_step_idx=7; 2025-04-04T17:29:10.494564Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:10.494636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=fetching.cpp:12;event=apply; 2025-04-04T17:29:10.494682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=3; 2025-04-04T17:29:10.494734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=3;interval_id=4;memory=11195882;count=1; 2025-04-04T17:29:10.495277Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=3; 2025-04-04T17:29:10.600190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:60;event=update_memory_merger;before_data=462726;before_memory=462912;after_memory=462912;after_data=462726;guard=11195882; 2025-04-04T17:29:10.600309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=35; 2025-04-04T17:29:10.600467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:10.600519Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:10.600565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:10.604608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:10.604787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:10.604836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:10.604883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:10.610481Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-04T17:29:10.623208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ASSEMBLER::SPEC;duration=0.038233s;size=0.009121584;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-04-04T17:29:10.625003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ASSEMBLER::LAST_PK;duration=1.377801s;size=0.024259044;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-04-04T17:29:10.686682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=SNAPSHOT;duration=0.147590s;size=0;details={};;scan_step_idx=4; 2025-04-04T17:29:10.691806Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ALLOCATE_MEMORY::Fetching;duration=0.000419s;size=0;details={stage=Fetching;};;scan_step_idx=5; 2025-04-04T17:29:10.691925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=OPTIONAL_ASSEMBLER::LAST;duration=0.010506s;size=0.000301569;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=6; 2025-04-04T17:29:10.692344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=BUILD_STAGE_RESULT;duration=0.002277s;size=0;details={};;scan_step_idx=7; 2025-04-04T17:29:10.696097Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:10.696156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=fetching.cpp:12;event=apply; 2025-04-04T17:29:10.696205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=5; 2025-04-04T17:29:10.696257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=5;interval_id=6;memory=11157700;count=1;
2025-04-04T17:29:10.696747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12678:14657];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=5;
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000f6e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000f6e/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
>> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD]
>> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD]
>> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD]
>> TopicAutoscaling::ReadingAfterSplitTest_PQv1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134]
Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134]
2025-04-04T17:28:36.569253Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-04T17:28:36.569331Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169]
Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169]
Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST
Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061]
Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR
2025-04-04T17:28:36.589475Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-04-04T17:28:36.605339Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191]
txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-04-04T17:28:36.606412Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-04-04T17:28:36.608368Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:36.609962Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2025-04-04T17:28:36.611055Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:36.616012Z node 1 :PERSQUEUE INFO: new Cookie default|700ca86d-c415d94a-28a46d44-7b272d1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:107:2057] recipient: [2:100:2134] 2025-04-04T17:28:37.206945Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:37.207037Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:153:2057] recipient: [2:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:178:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:37.226464Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:37.227309Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 
PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-04-04T17:28:37.227804Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2025-04-04T17:28:37.229426Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:28:37.230669Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2025-04-04T17:28:37.231787Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:37.236630Z node 2 :PERSQUEUE INFO: new Cookie default|e6ae8a3-79084a20-15207c64-b1652323_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:102:2057] recipient: [3:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:102:2057] recipient: [3:100:2134] Leader for TabletID 72057594037927937 is [3:106:2138] sender: [3:107:2057] recipient: [3:100:2134] 2025-04-04T17:28:37.622410Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:37.622488Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:148:2057] recipient: [3:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:148:2057] recipient: [3:146:2169] Leader for TabletID 72057594037927938 is [3:152:2173] sender: [3:153:2057] recipient: [3:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:106:2138] sender: [3:178:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:37.642457Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:37.643293Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { 
PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-04-04T17:28:37.644001Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:184:2197] 2025-04-04T17:28:37.646462Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:184:2197] 2025-04-04T17:28:37.648034Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:185:2198] 2025-04-04T17:28:37.649904Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:37.656927Z node 3 :PERSQUEUE INFO: new Cookie default|e97bb63e-be4f3d30-7a2ada44-f3c11a29_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:102:2057] recipient: [4:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:102:2057] recipient: [4:100:2134] Leader for TabletID 72057594037927937 is [4:106:2138] sender: [4:107:2057] recipient: [4:100:2134] 2025-04-04T17:28:38.081546Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:38.081640Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:148:2057] recipient: [4:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:148:2057] recipient: [4:146:2169] Leader for TabletID 72057594037927938 is [4:152:2173] sender: [4:153:2057] recipient: [4:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [4:106:2138] sender: [4:178:2057] recipient: [4:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:38.106554Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:38.107509Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 ReadRuleGenerations: 4 MeteringMode: METERI ... 
s 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [42:176:2191] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:694:2570] sender: [42:784:2057] recipient: [42:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:694:2570] sender: [42:787:2057] recipient: [42:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:694:2570] sender: [42:790:2057] recipient: [42:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:694:2570] sender: [42:791:2057] recipient: [42:789:2642] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:792:2643] sender: [42:793:2057] recipient: [42:789:2642] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:12.502603Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:12.502675Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:29:12.503492Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [42:853:2696] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:12.506466Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [42:854:2697] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:12.517816Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.517889Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [42:854:2697] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.546207Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.546323Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [42:853:2696] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.583137Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [42:176:2191] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:792:2643] sender: [42:886:2057] recipient: [42:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:792:2643] sender: [42:889:2057] recipient: [42:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:792:2643] sender: [42:892:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:792:2643] sender: [42:893:2057] recipient: [42:891:2719] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:894:2720] sender: [42:895:2057] recipient: [42:891:2719] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:12.644467Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:12.644528Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:29:12.645527Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [42:957:2775] 2025-04-04T17:29:12.648139Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [42:958:2776] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:12.658647Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.658717Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [42:958:2776] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.677489Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.677583Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [42:957:2775] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.717949Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [42:176:2191] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:894:2720] sender: [42:990:2057] recipient: [42:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:894:2720] sender: [42:993:2057] recipient: [42:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:894:2720] sender: [42:996:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:894:2720] sender: [42:997:2057] recipient: [42:995:2798] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:998:2799] sender: [42:999:2057] recipient: [42:995:2798] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:12.786169Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:12.786224Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:29:12.786852Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [42:1063:2856] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:12.789579Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [42:1064:2857] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:12.800197Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.800271Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [42:1064:2857] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.819332Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.819420Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [42:1063:2856] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.845348Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [42:176:2191] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:998:2799] sender: [42:1094:2057] recipient: [42:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:998:2799] sender: [42:1097:2057] recipient: [42:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:998:2799] sender: [42:1100:2057] recipient: [42:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:998:2799] sender: [42:1101:2057] recipient: [42:1099:2877] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [42:1102:2878] sender: [42:1103:2057] recipient: [42:1099:2877] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:12.907489Z node 42 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:12.907552Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:29:12.908161Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [42:1169:2937] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:12.910797Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [42:1170:2938] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:12.920513Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.920585Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [42:1170:2938] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.949790Z node 42 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-04-04T17:29:12.949889Z node 42 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [42:1169:2937] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:12.979152Z node 42 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 >> TestFormatHandler::ClientValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::StorageLimit50 [GOOD] Test command err: 2025-04-04T17:28:48.644642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:48.645016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:48.645242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:48.646551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:48.646694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:48.646876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163f/r3tmp/tmppgLmoq/pdisk_1.dat 2025-04-04T17:28:48.986526Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24531, node 1 TClient is connected to server localhost:2856 2025-04-04T17:28:49.375454Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:49.375511Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:49.375559Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:49.375790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-9a33-be81" status: YELLOW message: "Database has storage issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-5321" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-5321" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-9a33-595f-8d1d" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-9a33-595f-8d1d" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-9a33-ef3e-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-4847-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-9a33-ef3e-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9a33-4847-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 12001 } 2025-04-04T17:28:55.751365Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:55.751875Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:55.752267Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:55.753255Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:55.753339Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:55.753687Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163f/r3tmp/tmpi0TLCY/pdisk_1.dat 2025-04-04T17:28:56.039627Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2038, node 3 TClient is connected to server localhost:15739 2025-04-04T17:28:56.403059Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:56.403121Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:56.403157Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:56.403404Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:02.949441Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:02.949839Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:02.950200Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:02.951127Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:02.951430Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:02.951701Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163f/r3tmp/tmpesroey/pdisk_1.dat 2025-04-04T17:29:03.253065Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2153, node 5 TClient is connected to server localhost:29655 2025-04-04T17:29:03.574548Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:03.574599Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:03.574625Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:03.575043Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:10.949177Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:478:2409], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:10.949667Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:10.949766Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:10.951815Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:10.952107Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:10.952267Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00163f/r3tmp/tmpyfiI8A/pdisk_1.dat 2025-04-04T17:29:11.224452Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6865, node 7 TClient is connected to server localhost:23010 2025-04-04T17:29:11.559828Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:11.559884Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:11.559912Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:11.560474Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> TestFormatHandler::ClientError >> TopicSessionTests::TwoSessionWithoutPredicate >> Coordinator::ReadStepSubscribe >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_local_kmeans/unittest >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] Test command err: 2025-04-04T17:28:52.316343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511280998228136:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:52.316402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0009ac/r3tmp/tmp70RVSq/pdisk_1.dat 2025-04-04T17:28:52.718556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:52.742817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:52.743535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:52.749188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:52.797312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:52.851669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:52.882303Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7489511280998228583:2295] 2025-04-04T17:28:52.882579Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:28:52.897418Z node 1 :TX_DATASHARD DEBUG: 
TxInitSchema.Complete 2025-04-04T17:28:52.897458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:28:52.900154Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:28:52.900189Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:28:52.900232Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:28:52.906509Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:28:52.906680Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:28:52.906719Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7489511280998228597:2295] in generation 1 2025-04-04T17:28:52.911208Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:28:52.958114Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:28:52.960055Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:28:52.960125Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7489511280998228603:2296] 2025-04-04T17:28:52.960260Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:28:52.960292Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:28:52.960308Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:52.961800Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511280998228580:2294], serverId# [1:7489511280998228600:2303], sessionId# [0:0:0] 2025-04-04T17:28:52.961943Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:28:52.962113Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:28:52.962142Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:28:52.962156Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:28:52.962226Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:28:52.962260Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:28:52.962284Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:28:52.962569Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:28:52.963002Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-04T17:28:52.964985Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:28:52.965409Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:28:52.966640Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:28:52.970340Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# 
[1:7489511280998228616:2312], serverId# [1:7489511280998228618:2314], sessionId# [0:0:0] 2025-04-04T17:28:52.983807Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743787733016 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787733016 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:28:52.983864Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:52.984057Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:28:52.984179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:28:52.984207Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:28:52.984263Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743787733016:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-04T17:28:52.984541Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787733016:281474976710657 keys extracted: 0 2025-04-04T17:28:52.984676Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:28:52.984772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:28:52.984815Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:28:52.988442Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:28:52.989251Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:28:52.990670Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:28:52.990729Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:52.990778Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787733016} 2025-04-04T17:28:52.990838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:28:52.990876Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:28:52.990906Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:28:52.990949Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:28:52.991022Z node 1 :TX_DATASHARD DEBUG: Complete [1743787733016 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7489511280998228429:2194], exec latency: 4 ms, propose latency: 6 ms 2025-04-04T17:28:52.991062Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-04T17:28:52.991095Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:52.995703Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 
2025-04-04T17:28:52.995791Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:28:52.996012Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743787733023 2025-04-04T17:28:53.008712Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511285293195951:2341], serverId# [1:7489511285293195952:2342], sessionId# [0:0:0] 2025-04-04T17:28:53.010147Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:28:53.010321Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-04-04T17:28:53.011536Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:28:53.012909Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1743787733058 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787733058 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:28:53.012963Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:53.013091Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:28:53.013111Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:28:53.013129Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743787733058:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-04T17:28:53.013308Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787733058:281474976710658 keys extracted: 0 2025-04-04T17:28:53.013628Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:28:53.014021Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787733058} 2025-04-04T17:28:53.014069Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:28:53.014120Z node 1 :TX_DATASHARD DEBUG: Complete [1743787733058 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7489511285293195946:2337], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:28:53.014178Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:53.017734Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511285293195962:2352], serverId# [1:7489511285293195963:2353], sessionId# [0:0:0] 2025-04-04T17:28:53.018603Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:28:53.018722Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-04-04T17:28:53.020142Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04- ... 
ARD DEBUG: GetNextActiveOp at 72075186224037903 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:14.320083Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743787754366:281474976715710] in PlanQueue unit at 72075186224037903 2025-04-04T17:29:14.320340Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037903 loaded tx from db 1743787754366:281474976715710 keys extracted: 0 2025-04-04T17:29:14.320480Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:14.320570Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037903 2025-04-04T17:29:14.320618Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037903 tableId# [OwnerId: 72057594046644480, LocalPathId: 21] schema version# 1 2025-04-04T17:29:14.321051Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037903 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:14.321443Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:14.322437Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:14.322972Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037903 time 1743787754365 2025-04-04T17:29:14.322999Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-04-04T17:29:14.323017Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-04-04T17:29:14.323425Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037903 coordinator 72057594046316545 last step 0 next step 1743787754366 2025-04-04T17:29:14.323497Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037903 step# 1743787754366} 2025-04-04T17:29:14.323537Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-04-04T17:29:14.323584Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-04-04T17:29:14.323608Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037903 2025-04-04T17:29:14.323634Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037903 2025-04-04T17:29:14.323684Z node 5 :TX_DATASHARD DEBUG: Complete [1743787754366 : 281474976715710] from 72075186224037903 at tablet 72075186224037903 send result to client [5:7489511358176126814:2136], exec latency: 0 ms, propose latency: 3 ms 2025-04-04T17:29:14.323715Z node 5 :TX_DATASHARD INFO: 72075186224037903 Sending notify to schemeshard 72057594046644480 txId 281474976715710 state Ready TxInFly 0 2025-04-04T17:29:14.323755Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-04-04T17:29:14.325045Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715710 datashard 72075186224037903 state Ready 2025-04-04T17:29:14.325087Z node 5 :TX_DATASHARD DEBUG: 72075186224037903 Got TEvSchemaChangedResult from SS at 72075186224037903 2025-04-04T17:29:14.328883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715711:0, at schemeshard: 72057594046644480 2025-04-04T17:29:14.330800Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:14.330808Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-04-04T17:29:14.334808Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037904 actor [5:7489511375355998506:2400] 2025-04-04T17:29:14.335025Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:14.343173Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:14.343223Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:14.344290Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037904 2025-04-04T17:29:14.344333Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037904 2025-04-04T17:29:14.344356Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037904 2025-04-04T17:29:14.344580Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:14.344647Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:14.344682Z node 5 :TX_DATASHARD DEBUG: DataShard 72075186224037904 persisting started state actor id [5:7489511375355998523:2400] in generation 1 2025-04-04T17:29:14.345843Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:14.345891Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037904 2025-04-04T17:29:14.345948Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:14.345985Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037904, actorId: [5:7489511375355998525:2401] 2025-04-04T17:29:14.346002Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-04-04T17:29:14.346013Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037904, state: WaitScheme 2025-04-04T17:29:14.346025Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-04T17:29:14.346103Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037904 2025-04-04T17:29:14.346164Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037904 2025-04-04T17:29:14.346199Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7489511375355998508:4006], serverId# [5:7489511375355998512:4008], sessionId# [0:0:0] 2025-04-04T17:29:14.346291Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-04-04T17:29:14.346311Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:14.346327Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037904 TxInFly 0 2025-04-04T17:29:14.346343Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-04-04T17:29:14.346365Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037904 2025-04-04T17:29:14.346544Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037904 txId 281474976715711 ssId 72057594046644480 seqNo 2:31 
2025-04-04T17:29:14.346597Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715711 at tablet 72075186224037904 2025-04-04T17:29:14.346918Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-04-04T17:29:14.347860Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037904 2025-04-04T17:29:14.347919Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:14.349488Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7489511375355998531:4021], serverId# [5:7489511375355998532:4022], sessionId# [0:0:0] 2025-04-04T17:29:14.349719Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715711 at step 1743787754394 at tablet 72075186224037904 { Transactions { TxId: 281474976715711 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787754394 MediatorID: 72057594046382081 TabletID: 72075186224037904 } 2025-04-04T17:29:14.349744Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-04T17:29:14.349832Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-04-04T17:29:14.349853Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:14.349875Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743787754394:281474976715711] in PlanQueue unit at 72075186224037904 2025-04-04T17:29:14.350097Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037904 loaded tx from db 1743787754394:281474976715711 keys extracted: 0 2025-04-04T17:29:14.350210Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:14.350310Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-04-04T17:29:14.350356Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037904 tableId# [OwnerId: 72057594046644480, LocalPathId: 22] schema version# 1 2025-04-04T17:29:14.350754Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037904 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:14.351082Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:14.351268Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:14.351372Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-04-04T17:29:14.352467Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037904 time 1743787754393 2025-04-04T17:29:14.352489Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-04T17:29:14.352508Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-04-04T17:29:14.352544Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037904 coordinator 72057594046316545 last step 0 next step 1743787754401 2025-04-04T17:29:14.352587Z node 5 
:TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037904 step# 1743787754394} 2025-04-04T17:29:14.352617Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-04-04T17:29:14.352651Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-04-04T17:29:14.352673Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-04-04T17:29:14.352696Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037904 2025-04-04T17:29:14.352737Z node 5 :TX_DATASHARD DEBUG: Complete [1743787754394 : 281474976715711] from 72075186224037904 at tablet 72075186224037904 send result to client [5:7489511358176126814:2136], exec latency: 0 ms, propose latency: 2 ms 2025-04-04T17:29:14.352772Z node 5 :TX_DATASHARD INFO: 72075186224037904 Sending notify to schemeshard 72057594046644480 txId 281474976715711 state Ready TxInFly 0 2025-04-04T17:29:14.352813Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-04-04T17:29:14.353340Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715711 datashard 72075186224037904 state Ready 2025-04-04T17:29:14.353374Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 Got TEvSchemaChangedResult from SS at 72075186224037904 |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_local_kmeans/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: 2025-04-04T17:28:53.090278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:53.090915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:53.090977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b68/r3tmp/tmpNwK840/pdisk_1.dat 2025-04-04T17:28:53.559354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:53.643401Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:53.690579Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:28:53.691427Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:28:53.693310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:53.693964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:53.706653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:53.794170Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:28:53.794257Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:28:53.798203Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:28:53.903467Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:28:53.903577Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:28:53.905077Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:28:53.905164Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:28:53.905418Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:28:53.905690Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:28:53.905792Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:28:53.905985Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:28:53.910095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:53.911139Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:28:53.911292Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:28:53.961528Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:28:53.962760Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:28:53.963271Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:28:53.963602Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:28:53.977387Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:28:54.016390Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:28:54.016550Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:28:54.019009Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:28:54.019100Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:28:54.019168Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:28:54.020869Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:28:54.021011Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:28:54.021093Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:28:54.031859Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:28:54.066653Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:28:54.068704Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:28:54.068899Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:28:54.068944Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:28:54.068980Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:28:54.069044Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:28:54.069311Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:28:54.069354Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:28:54.071718Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:28:54.071878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:28:54.072019Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:28:54.072060Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:28:54.072167Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:28:54.072234Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:28:54.072276Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:28:54.072304Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:28:54.072355Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:28:54.072469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:54.072512Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:54.072579Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:28:54.074021Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:28:54.074077Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:28:54.074206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:28:54.074617Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:28:54.074689Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:28:54.074793Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:28:54.074839Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:28:54.074895Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:28:54.074938Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:28:54.074985Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:28:54.075311Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:28:54.075358Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:28:54.075393Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:28:54.075428Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:28:54.075485Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:28:54.075537Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:28:54.075577Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:28:54.075607Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:28:54.075632Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:28:54.077088Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:28:54.077157Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:28:54.087934Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... de 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:29:14.047509Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:29:14.047591Z node 5 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2025-04-04T17:29:14.047638Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-04T17:29:14.047664Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:29:14.047685Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:29:14.047710Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:29:14.047771Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-04-04T17:29:14.047843Z node 5 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191860 2025-04-04T17:29:14.048143Z node 5 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:29:14.048225Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:29:14.048264Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:29:14.048309Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:29:14.048348Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-04T17:29:14.048428Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:29:14.048481Z node 5 :TX_DATASHARD TRACE: Advance execution plan for 
[0:281474976715660] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:29:14.048526Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:29:14.048564Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:29:14.048619Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-04-04T17:29:14.048642Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:29:14.048670Z node 5 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2025-04-04T17:29:14.059592Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:14.059689Z node 5 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-04-04T17:29:14.059757Z node 5 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:29:14.059893Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:14.062101Z node 5 :TX_PROXY DEBUG: actor# [5:59:2106] Handle TEvNavigate describe path /Root/table-1 2025-04-04T17:29:14.062273Z node 5 :TX_PROXY DEBUG: Actor# [5:855:2690] HANDLE EvNavigateScheme /Root/table-1 2025-04-04T17:29:14.062828Z node 5 :TX_PROXY DEBUG: Actor# [5:855:2690] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T17:29:14.062989Z node 5 :TX_PROXY DEBUG: Actor# [5:855:2690] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2025-04-04T17:29:14.064330Z node 5 :TX_PROXY DEBUG: Actor# [5:855:2690] Handle TEvDescribeSchemeResult Forward to# [5:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-04T17:29:14.065527Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:859:2694], Recipient [5:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:14.065606Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:14.065663Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:858:2693], serverId# [5:859:2694], sessionId# [0:0:0] 2025-04-04T17:29:14.065823Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [5:857:2692], Recipient [5:666:2570]: NKikimrTxDataShard.TEvGetInfoRequest 2025-04-04T17:29:14.066789Z 
node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:862:2697], Recipient [5:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:14.066891Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:14.066947Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:861:2696], serverId# [5:862:2697], sessionId# [0:0:0] 2025-04-04T17:29:14.067139Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:860:2695], Recipient [5:666:2570]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-04T17:29:14.067299Z node 5 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [5:860:2695], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2025-04-04T17:29:14.122597Z node 5 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.513764Z 2025-04-04T17:29:14.122698Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-04-04T17:29:14.122768Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:860:2695]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:29:14.123794Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [5:657:2564], Recipient [5:666:2570]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:29:14.124364Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:869:2703], Recipient [5:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:14.124436Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:14.124495Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:868:2702], serverId# [5:869:2703], sessionId# [0:0:0] 2025-04-04T17:29:14.124709Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:867:2701], Recipient [5:666:2570]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-04T17:29:14.124824Z node 5 :TX_DATASHARD DEBUG: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 2], requested from# [5:867:2701] is not needed |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest >> BlobDepot::VerifiedRandom [GOOD] >> BlobDepot::LoadPutAndRead >> TCreateAndDropViewTest::CheckCreatedView >> TDqSolomonWriteActorTest::TestWriteFormat >> data_correctness.py::TestDataCorrectness::test >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> DataCleanup::ForceDataCleanup [GOOD] >> DataCleanup::ForceDataCleanupWithoutCompaction >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] >> InflyTest::AddMessage [GOOD] >> InflyTest::DeleteMessage [GOOD] >> InflyTest::ChangeMesageVisibility [GOOD] >> InflyTest::ReceiveMessages [GOOD] >> InflyTest::DeleteReceivedMessage [GOOD] >> MessageDelayStatsTest::All [GOOD] >> 
MessageDelayStatsTest::BigTimeDiff [GOOD] >> MessageDelayStatsTest::MaxMessageDelay [GOOD] >> Metering::BillingRecords >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString 2025-04-04 17:29:15,154 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-04 17:29:15,326 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 671266 48.8M 48.8M 23.8M test_tool run_ut @/home/runner/.ya/build/build_root/i5tq/000f52/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args 671295 2.1G 2.0G 1.8G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/i5tq/000f52/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu Test command err: 2025-04-04T17:19:16.346521Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T17:19:16.429425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T17:19:16.451112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T17:19:16.451373Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T17:19:16.458842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:19:16.459051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:19:16.459259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:19:16.459374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:19:16.459513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:19:16.459626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:19:16.459732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:19:16.459879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:19:16.460040Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:19:16.460181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.460286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:19:16.460385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:19:16.489480Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T17:19:16.489689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T17:19:16.489751Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T17:19:16.489927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.490073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:19:16.490137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T17:19:16.490195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T17:19:16.490312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T17:19:16.490373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T17:19:16.490418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T17:19:16.490447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T17:19:16.490640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T17:19:16.490710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T17:19:16.490752Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T17:19:16.490783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T17:19:16.490871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T17:19:16.490943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T17:19:16.490999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T17:19:16.491035Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T17:19:16.491102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T17:19:16.491133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T17:19:16.491163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T17:19:16.491232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T17:19:16.491274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T17:19:16.491317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T17:19:16.491671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-04-04T17:19:16.491776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-04-04T17:19:16.491853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T17:19:16.491930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-04-04T17:19:16.492091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T17:19:16.492152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T17:19:16.492185Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T17:19:16.492366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T17:19:16.492417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.492467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T17:19:16.492634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T17:19:16.492674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T17:19:16.492701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T17:19:16.492857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T17:19:16.492891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T17:19:16.492919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T17:19:16.493046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T17:19:16.493088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T17:19:16.493152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
2025-04-04T17:29:15.102140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:15.102346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:15.102402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:15.102452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:15.110802Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:15.110990Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:15.111043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:15.111089Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:15.129319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:15.129504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:15.129556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:15.129604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:15.136495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:15.136678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:15.136730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:15.136776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:15.151546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:15.151732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:15.151786Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:15.151834Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:15.165415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:15.165604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:15.165657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce 
result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:15.165705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-04-04T17:29:15.167443Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-04-04T17:29:15.167906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ASSEMBLER::SPEC;duration=0.000000s;size=0;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-04-04T17:29:15.168611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ASSEMBLER::LAST_PK;duration=0.000000s;size=0;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-04-04T17:29:15.171799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=SNAPSHOT;duration=0.000000s;size=0;details={};;scan_step_idx=4; 2025-04-04T17:29:15.172938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=ALLOCATE_MEMORY::Fetching;duration=0.000000s;size=0;details={stage=Fetching;};;scan_step_idx=5; 2025-04-04T17:29:15.173069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=OPTIONAL_ASSEMBLER::LAST;duration=0.000000s;size=0;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=6; 2025-04-04T17:29:15.173263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:52;scan_step=name=BUILD_STAGE_RESULT;duration=0.000000s;size=0;details={};;scan_step_idx=7; 2025-04-04T17:29:15.178750Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:85;event=TEvTaskProcessedResult; 2025-04-04T17:29:15.178928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:168;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-04-04T17:29:15.178977Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-04-04T17:29:15.179024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:12680:14659];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:179;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000f52/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8067063302/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/i5tq/000f52/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BrokenJsonResponse-] >> QueryActorTest::StreamQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:27:20.817779Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:20.817857Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:178:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:20.833017Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:20.843172Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { 
Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:20.843912Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-04T17:27:20.846139Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:20.855603Z node 1 :PERSQUEUE INFO: new Cookie default|c2b96c35-d377bbe7-5aff8c90-d8c6c199_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [1:201:2211] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:20.884320Z node 1 :PERSQUEUE INFO: new Cookie default|b09673e3-6329c7d-d4baf285-28526730_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR 
Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to 
BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_SYNCER_COMMITTER Captured TEvents::TSystem::Wakeup to BS_RECOVERY_LOG_CUTTER Captured TEvents::TSystem::Wakeup to BS_SYNCER_GUID_PROPAGATOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeu ... 
] bootstrapping 0 [44:184:2197] 2025-04-04T17:29:14.449683Z node 44 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [44:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:14.460417Z node 44 :PERSQUEUE INFO: new Cookie default|8287266c-db13948-d5e2029a-11e1db0d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:15.331842Z node 44 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-04T17:29:15.388075Z node 44 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [44:106:2138] sender: [44:239:2057] recipient: [44:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [44:106:2138] sender: [44:242:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [44:106:2138] sender: [44:243:2057] recipient: [44:241:2242] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [44:244:2243] sender: [44:245:2057] recipient: [44:241:2242] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:15.429524Z node 44 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:15.429606Z node 44 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:29:15.430264Z node 44 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [44:293:2284] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:15.458937Z node 44 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-04T17:29:15.459057Z node 44 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [44:293:2284] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:15.487948Z node 44 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [44:244:2243] sender: [44:316:2057] recipient: [44:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:15.496774Z node 44 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:15.500865Z node 44 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1003 actor [44:313:2297] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:102:2057] recipient: [45:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:102:2057] recipient: [45:100:2134] Leader for TabletID 72057594037927937 is [45:106:2138] sender: [45:107:2057] recipient: [45:100:2134] 2025-04-04T17:29:16.040827Z node 45 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:16.040920Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [45:148:2057] recipient: [45:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [45:148:2057] recipient: [45:146:2169] Leader for TabletID 72057594037927938 is [45:152:2173] sender: [45:153:2057] recipient: [45:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [45:106:2138] sender: [45:178:2057] recipient: [45:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:16.064587Z node 45 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:16.065333Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1004 actor [45:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 Important: false } Consumers { Name: "user1" Generation: 1004 Important: 
true } Consumers { Name: "user2" Generation: 1004 Important: true } 2025-04-04T17:29:16.066000Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [45:184:2197] 2025-04-04T17:29:16.069223Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [45:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:16.084474Z node 45 :PERSQUEUE INFO: new Cookie default|a885bdcb-c877ffa7-27edd2cf-2a372cb2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:17.048066Z node 45 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-04-04T17:29:17.108915Z node 45 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [45:106:2138] sender: [45:239:2057] recipient: [45:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [45:106:2138] sender: [45:242:2057] recipient: [45:241:2242] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [45:106:2138] sender: [45:243:2057] recipient: [45:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [45:244:2243] sender: [45:245:2057] recipient: [45:241:2242] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:17.149273Z node 45 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:17.149337Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:29:17.149919Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [45:293:2284] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:17.175563Z node 45 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-04-04T17:29:17.175650Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [45:293:2284] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:17.199975Z node 45 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [45:244:2243] sender: [45:316:2057] recipient: [45:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:17.207621Z node 45 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:17.211395Z node 45 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1005 actor [45:313:2297] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } >> THealthCheckTest::LayoutIncorrect [FAIL] >> SequenceProxy::Basics >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> TestFormatHandler::ClientError [GOOD] >> LongTxService::BasicTransactions >> TestFormatHandler::ClientErrorWithEmptyFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 3493, MsgBus: 19649 2025-04-04T17:26:34.293361Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510687541363612:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:34.293476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000e78/r3tmp/tmp3bhpo6/pdisk_1.dat 2025-04-04T17:26:34.588434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3493, node 1 2025-04-04T17:26:34.660030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:34.660142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:34.662311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:34.757217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:34.757251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:34.757260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:34.757379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19649 
TClient is connected to server localhost:19649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:35.334155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.369572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.493024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.595180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.641560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:36.474508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510696131299837:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:36.474633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:36.840012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.862417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.884371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.905552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.931939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.965780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.048888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510700426267655:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.049449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.049539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510700426267660:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.054020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:26:37.060725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510700426267662:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:26:37.165384Z node 1 :TX_PROXY ERROR: Actor# [1:7489510700426267717:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:37.963393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:26:38.879526Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0xe90kbykx6c0101ytb4ha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiZDQxNTUtOTI3ZTQ0YTctOWQ4YzdmYWQtNmE3MDMwZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.881382Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0xe90j8n742qsmskpk8pyf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VhZGY2YzgtYmVlZGU1ZTAtMWQ5NDkyNmYtMzFkYzkxMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.886538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0xe90kfdfdj9j9mbdx6qb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBkMjE4ODctZmZlY2EzN2ItZjM1Nzc1My03YzBkN2U3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.890053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0xe90m4qgjrsbafywp230y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTA3ZDg3YjAtZDM4M2Y3OWEtZTU5OWY3MjctODczZTRmMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.895256Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0xe90m3h4s1kx5kj244afz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc1YWMxMGMtNGUyMDBiYi0zNTNkOGYwOS1jOTRkNjVjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.896830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr0xe90m7dt02t5dem222bnw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWZlMzQ1NjItZWQ1ZmM2Y2YtOTUzZWJjYzMtOWU0YmEzZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.898421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0xe90mfkss4m5qtd91n8f1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg4N2I2OWItZDNkNmJhY2QtYzEyNTFhZmEtODA1MDZjMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.905465Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr0xe90j8n742qsmskpk8pyf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2VhZGY2YzgtYmVlZGU1ZTAtMWQ5NDkyNmYtMzFkYzkxMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.906223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. 
Ctx: { TraceId: 01jr0xe90kbykx6c0101ytb4ha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBiZDQxNTUtOTI3ZTQ0YTctOWQ4YzdmYWQtNmE3MDMwZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.907806Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jr0xe90n3eg10j2ym2d9fawy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBkMTc4YjUtMTk1Y2IwY2ItZGI1YTIxOTItMmJkZWMyZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.908585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr0xe90n8jnzmd5n9qywx2jt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg3NDNiMjEtMjA5MjI0NjQtOWU4NmMxYjUtNzFkODU2YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.944202Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710684. Ctx: { TraceId: 01jr0xe90kfdfdj9j9mbdx6qb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBkMjE4ODctZmZlY2EzN2ItZjM1Nzc1My03YzBkN2U3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.949061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr0xe90p394wd5dq63rfdyzh, Database: , DatabaseId: /Root, SessionId: ydb://s ... ot, SessionId: ydb://session/3?node_id=2&id=YTU1YTQwYjktN2Q4YzBjOGMtOTA2MTkwYmYtNWUwOTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.221855Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jr0xjyrs4ewnt89mtjebyygv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlNTQ2YTktMjc4Zjg4NjAtZjRmNjZlZTctNzYyYzE1OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.224881Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. Ctx: { TraceId: 01jr0xjyrs5vywpm32fa3s03g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y4M2VhMTUtYzFmMTExYTgtNmI2MGU3M2EtZjE1NjNiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.230567Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. Ctx: { TraceId: 01jr0xjyrs4ewnt89mtjebyygv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlNTQ2YTktMjc4Zjg4NjAtZjRmNjZlZTctNzYyYzE1OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.231003Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jr0xjyrx9qtdxage0xqm302p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q5OTc0NGMtOGE0MTQwMC1jNGMyNmZhLTljOTAwNmJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.231044Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jr0xjyrs5vywpm32fa3s03g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y4M2VhMTUtYzFmMTExYTgtNmI2MGU3M2EtZjE1NjNiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.232455Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. 
Ctx: { TraceId: 01jr0xjys3288e63y18kzmm2h6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxYmFlOGItNDM5Mjg1MDItOGUzYzU0Ni1hNTBlZmVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.237706Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jr0xjys9ccr9z30hteahftq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTVlNmQ2YmMtMWQyZTlmODItYzkxYjZhZTktNjE4M2UyNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.239279Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jr0xjyrx9qtdxage0xqm302p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q5OTc0NGMtOGE0MTQwMC1jNGMyNmZhLTljOTAwNmJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.239697Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jr0xjyrs5vywpm32fa3s03g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y4M2VhMTUtYzFmMTExYTgtNmI2MGU3M2EtZjE1NjNiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.239773Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jr0xjys3288e63y18kzmm2h6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxYmFlOGItNDM5Mjg1MDItOGUzYzU0Ni1hNTBlZmVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.247220Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jr0xjyrx9qtdxage0xqm302p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q5OTc0NGMtOGE0MTQwMC1jNGMyNmZhLTljOTAwNmJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.248832Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jr0xjyrs5vywpm32fa3s03g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y4M2VhMTUtYzFmMTExYTgtNmI2MGU3M2EtZjE1NjNiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.252253Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jr0xjys9ccr9z30hteahftq4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTVlNmQ2YmMtMWQyZTlmODItYzkxYjZhZTktNjE4M2UyNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.259210Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jr0xjyrx9qtdxage0xqm302p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q5OTc0NGMtOGE0MTQwMC1jNGMyNmZhLTljOTAwNmJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.271697Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. Ctx: { TraceId: 01jr0xjyssfv0k3s2dr8m38e9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTU1YTQwYjktN2Q4YzBjOGMtOTA2MTkwYmYtNWUwOTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.277783Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jr0xjyssfv0k3s2dr8m38e9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTU1YTQwYjktN2Q4YzBjOGMtOTA2MTkwYmYtNWUwOTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:29:12.280425Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. Ctx: { TraceId: 01jr0xjytm1x6z7d5bhj8errsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTVlNmQ2YmMtMWQyZTlmODItYzkxYjZhZTktNjE4M2UyNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.283381Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jr0xjyssfv0k3s2dr8m38e9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTU1YTQwYjktN2Q4YzBjOGMtOTA2MTkwYmYtNWUwOTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.287161Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jr0xjytm1x6z7d5bhj8errsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTVlNmQ2YmMtMWQyZTlmODItYzkxYjZhZTktNjE4M2UyNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.288185Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. Ctx: { TraceId: 01jr0xjytkbbya9359kygd0ggr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxYmFlOGItNDM5Mjg1MDItOGUzYzU0Ni1hNTBlZmVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.291109Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jr0xjytt5erd9vz4nre58std, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2U3NzY0ZjktOTEwMzQ0ZTYtZGEzMjQzOWUtNjNkMzBkMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.295793Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. Ctx: { TraceId: 01jr0xjyssfv0k3s2dr8m38e9v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTU1YTQwYjktN2Q4YzBjOGMtOTA2MTkwYmYtNWUwOTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.297776Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. Ctx: { TraceId: 01jr0xjytkbbya9359kygd0ggr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxYmFlOGItNDM5Mjg1MDItOGUzYzU0Ni1hNTBlZmVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.299891Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jr0xjytt5erd9vz4nre58std, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2U3NzY0ZjktOTEwMzQ0ZTYtZGEzMjQzOWUtNjNkMzBkMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.300927Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. Ctx: { TraceId: 01jr0xjytkbbya9359kygd0ggr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxYmFlOGItNDM5Mjg1MDItOGUzYzU0Ni1hNTBlZmVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.305843Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jr0xjyvcchh7bsdpktw1dyyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlNTQ2YTktMjc4Zjg4NjAtZjRmNjZlZTctNzYyYzE1OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.307746Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721661. 
Ctx: { TraceId: 01jr0xjytkbbya9359kygd0ggr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgxYmFlOGItNDM5Mjg1MDItOGUzYzU0Ni1hNTBlZmVmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.315719Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721662. Ctx: { TraceId: 01jr0xjyvcchh7bsdpktw1dyyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlNTQ2YTktMjc4Zjg4NjAtZjRmNjZlZTctNzYyYzE1OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.318172Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721663. Ctx: { TraceId: 01jr0xjyvqcgme2czxyqr64dfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q5OTc0NGMtOGE0MTQwMC1jNGMyNmZhLTljOTAwNmJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:29:12.326567Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721665. Ctx: { TraceId: 01jr0xjyvqcgme2czxyqr64dfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q5OTc0NGMtOGE0MTQwMC1jNGMyNmZhLTljOTAwNmJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.326740Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721664. Ctx: { TraceId: 01jr0xjyvx3vssd5ra9v72b91a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTVlNmQ2YmMtMWQyZTlmODItYzkxYjZhZTktNjE4M2UyNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:29:12.331090Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721666. Ctx: { TraceId: 01jr0xjyvcchh7bsdpktw1dyyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTVlNTQ2YTktMjc4Zjg4NjAtZjRmNjZlZTctNzYyYzE1OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.333209Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721667. Ctx: { TraceId: 01jr0xjyw66ytd53rhv01r1hep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTU1YTQwYjktN2Q4YzBjOGMtOTA2MTkwYmYtNWUwOTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:12.334776Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721668. Ctx: { TraceId: 01jr0xjyvx3vssd5ra9v72b91a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTVlNmQ2YmMtMWQyZTlmODItYzkxYjZhZTktNjE4M2UyNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-04-04T17:29:12.338534Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721669. Ctx: { TraceId: 01jr0xjyw66ytd53rhv01r1hep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTU1YTQwYjktN2Q4YzBjOGMtOTA2MTkwYmYtNWUwOTY0YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2025-04-04T17:28:57.662574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511305654483692:2067];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:57.662724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000717/r3tmp/tmp0ji2Rl/pdisk_1.dat 2025-04-04T17:28:58.019465Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:58.056552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:58.057779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:58.060397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13576 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:28:58.329372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:58.373981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:58.507494Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. 
Database: dc-1 2025-04-04T17:29:00.356370Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:29:00.359459Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:00.361525Z node 1 :KQP_PROXY DEBUG: Request has 18445000285969.190135s seconds to be completed 2025-04-04T17:29:00.384189Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YjhjOTlmZDYtODZjOWYwZGQtNmM0ZjUyZi03NmY5YjBhYg==, workerId: [1:7489511318539386276:2314], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T17:29:00.384263Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:00.384446Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:29:00.384533Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:29:00.384573Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:29:00.384595Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:29:00.384660Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:00.384747Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:00.384900Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:00.384960Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:00.384994Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:00.385012Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:00.386729Z node 1 :KQP_PROXY DEBUG: [TQueryBase] RunDataQuery: SELECT 42 2025-04-04T17:29:00.392875Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YjhjOTlmZDYtODZjOWYwZGQtNmM0ZjUyZi03NmY5YjBhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7489511318539386276:2314] 2025-04-04T17:29:00.392928Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7489511318539386278:2356] 2025-04-04T17:29:00.395499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511318539386279:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:00.395504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511318539386291:2319], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:00.395600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:00.402308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T17:29:00.411866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511318539386293:2320], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T17:29:00.502004Z node 1 :TX_PROXY ERROR: Actor# [1:7489511318539386345:2392] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:01.163784Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7489511318539386277:2315], selfId: [1:7489511305654483928:2278], source: [1:7489511318539386276:2314] 2025-04-04T17:29:01.164024Z node 1 :KQP_PROXY DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YjhjOTlmZDYtODZjOWYwZGQtNmM0ZjUyZi03NmY5YjBhYg==, TxId: 2025-04-04T17:29:01.165570Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YjhjOTlmZDYtODZjOWYwZGQtNmM0ZjUyZi03NmY5YjBhYg==, TxId: 2025-04-04T17:29:01.166040Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=YjhjOTlmZDYtODZjOWYwZGQtNmM0ZjUyZi03NmY5YjBhYg==, workerId: [1:7489511318539386276:2314], local sessions count: 0 2025-04-04T17:29:01.616266Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511320055907292:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:01.616308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000717/r3tmp/tmpkrSd3v/pdisk_1.dat 2025-04-04T17:29:01.703403Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:01.755965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:01.756062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:01.757284Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5391 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T17:29:01.869481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:29:01.875367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:01.903888Z node 2 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-04T17:29:04.439233Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:29:04.440036Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:04.442902Z node 2 :KQP_PROXY DEBUG: Request has 18445000285965.108747s seconds to be completed 2025-04-04T17:29:04.445020Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MTYyMjkxOTItMjliMzA1NS1mNTZkOTc2NC04N2Q3N2Fi, workerId: [2:7489511332940809857:2312], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T17:29:04.445073Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:04.445171Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:29:04.445242Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:04.445274Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:04.445339Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:29:04.445360Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:29:04.445375Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:29:04.445429Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboa ... etadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000717/r3tmp/tmpyzjj7Q/pdisk_1.dat 2025-04-04T17:29:09.649901Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:09.675215Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:09.675324Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:09.677197Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62436 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T17:29:09.931117Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:09.940795Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:09.974021Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-04T17:29:12.554203Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:29:12.555078Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:12.557650Z node 4 :KQP_PROXY DEBUG: Request has 18445000285956.995710s seconds to be completed 2025-04-04T17:29:12.559934Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=NTQzYzNjYWQtYzVhMDIxYS1iMWYxMjBjMy0xNTMyODI3NQ==, workerId: [4:7489511366418229863:2314], database: /dc-1, longSession: 1, local sessions count: 1 2025-04-04T17:29:12.559978Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:12.560130Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:29:12.560182Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-04-04T17:29:12.560215Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-04-04T17:29:12.560258Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-04-04T17:29:12.560313Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-04-04T17:29:12.560413Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:12.560460Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:12.561409Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-04T17:29:12.562432Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:12.562468Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:12.562488Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:12.566910Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-04T17:29:12.570079Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr0xjz3pbpkv0s284jxv4a94", Created new session, sessionId: ydb://session/3?node_id=4&id=Mjk1NjQyZDAtOTdkN2MxYTYtZGY3MzUyMDItMmZmMWU1ZTY=, workerId: [4:7489511366418229866:2315], database: /dc-1, longSession: 0, local sessions count: 2 2025-04-04T17:29:12.570353Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr0xjz3pbpkv0s284jxv4a94, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=Mjk1NjQyZDAtOTdkN2MxYTYtZGY3MzUyMDItMmZmMWU1ZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
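The [TQueryBase] RunStreamQuery entries above record the YQL text the query actor executes. For readability, the same query is reformatted below; the log does not show the bound parameter values, so the comments about them are assumptions, not taken from the output:

-- Stream query as logged by [TQueryBase] RunStreamQuery, reformatted only.
DECLARE $value AS Text;        -- bound by the test harness; value not shown in the log
DECLARE $table_size AS Uint64; -- bound by the test harness; value not shown in the log
SELECT x FROM AS_TABLE(
    ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size)))
);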
Send request to target, requestId: 3, targetId: [4:7489511366418229866:2315] 2025-04-04T17:29:12.570426Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7489511366418229867:2356] 2025-04-04T17:29:12.578854Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511366418229868:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:12.578931Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489511366418229876:2320], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:12.579008Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:12.586309Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:29:12.597875Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489511366418229884:2321], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:29:12.698521Z node 4 :TX_PROXY ERROR: Actor# [4:7489511366418229935:2392] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:13.583226Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:14.572791Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489511353533327300:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:14.572968Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:29:16.377167Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-04T17:29:16.379574Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Cancel stream request 2025-04-04T17:29:16.379648Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NTQzYzNjYWQtYzVhMDIxYS1iMWYxMjBjMy0xNTMyODI3NQ==, TxId: 2025-04-04T17:29:16.380812Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-04-04T17:29:16.491722Z node 4 :RPC_REQUEST WARN: Client lost 2025-04-04T17:29:16.521099Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NTQzYzNjYWQtYzVhMDIxYS1iMWYxMjBjMy0xNTMyODI3NQ==, workerId: [4:7489511366418229863:2314], local sessions count: 1 2025-04-04T17:29:16.521156Z node 4 :KQP_PROXY DEBUG: Request has 18445000285953.030472s seconds to be completed 2025-04-04T17:29:16.522767Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=NWVjOWEwNDctMTc2ZDAxNDUtZmI3ODM5NWEtYzYwYmY4YTc=, workerId: [4:7489511383598099174:2336], database: /dc-1, longSession: 1, local sessions count: 2 2025-04-04T17:29:16.522905Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-04-04T17:29:16.522956Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-04-04T17:29:16.523173Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-04-04T17:29:16.523305Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-04T17:29:16.644582Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr0xk2zbetvmmzzay1v204s6", Created new session, sessionId: ydb://session/3?node_id=4&id=ZjZhNjA0ZjMtZGFkZGZlNGItMzE5ZjY2ZjctZmY4NmE2YjI=, workerId: [4:7489511383598099177:2337], database: /dc-1, longSession: 0, local sessions count: 3 2025-04-04T17:29:16.644819Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jr0xk2zbetvmmzzay1v204s6, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZjZhNjA0ZjMtZGFkZGZlNGItMzE5ZjY2ZjctZmY4NmE2YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7489511383598099177:2337] 2025-04-04T17:29:16.644862Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7489511383598099178:2421] 2025-04-04T17:29:16.727047Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-04T17:29:16.728353Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743787756716, txId: 281474976715663] shutting down 2025-04-04T17:29:16.730213Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-04-04T17:29:16.731002Z node 4 :KQP_PROXY DEBUG: TraceId: "01jr0xk2zbetvmmzzay1v204s6", Forwarded response to sender actor, requestId: 5, sender: [4:7489511383598099175:2419], selfId: [4:7489511353533327514:2275], source: [4:7489511383598099177:2337] 2025-04-04T17:29:16.731319Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-04-04T17:29:16.731401Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NWVjOWEwNDctMTc2ZDAxNDUtZmI3ODM5NWEtYzYwYmY4YTc=, TxId: 2025-04-04T17:29:16.731818Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZjZhNjA0ZjMtZGFkZGZlNGItMzE5ZjY2ZjctZmY4NmE2YjI=, workerId: [4:7489511383598099177:2337], local sessions count: 2 2025-04-04T17:29:16.732056Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NWVjOWEwNDctMTc2ZDAxNDUtZmI3ODM5NWEtYzYwYmY4YTc=, workerId: [4:7489511383598099174:2336], local sessions count: 1 |97.6%| [TS] {RESULT} ydb/library/query_actor/ut/unittest >> ServerRestartTest::RestartOnGetSession [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::NestedDirs >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 19249, MsgBus: 24848 2025-04-04T17:26:35.156166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510695273649080:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:35.156256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d2c/r3tmp/tmpT3WlLt/pdisk_1.dat 2025-04-04T17:26:35.381211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:35.381298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:35.382438Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19249, node 1 2025-04-04T17:26:35.384286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:35.386742Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:26:35.386819Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:26:35.415590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
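The idx_test suite whose output begins in the preceding entries exercises a multi-shard table with a unique secondary index over a data column. As a rough orientation only, a minimal YQL sketch of such a schema follows; the table, column, and index names are hypothetical, nothing below is taken from the log, and the GLOBAL UNIQUE SYNC clause assumes a YDB version that supports unique secondary indexes:

-- Hypothetical schema in the spirit of YdbIndexTable::MultiShardTableOneUniqIndexDataColumn;
-- all names and settings here are illustrative assumptions.
CREATE TABLE idx_test_table (
    Key Uint32,
    Value Utf8,
    Data Utf8,
    INDEX uniq_value_idx GLOBAL UNIQUE SYNC ON (Value) COVER (Data),
    PRIMARY KEY (Key)
)
WITH (UNIFORM_PARTITIONS = 4); -- pre-split so the table spans several shards (assumption)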
2025-04-04T17:26:35.415620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:35.415626Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:35.415784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24848 TClient is connected to server localhost:24848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:35.888986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.906322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:36.040654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:36.167278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:36.236006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:37.274261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510703863585449:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.274411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.418314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.440421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.502997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.526694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.548538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.572815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.606813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510703863585959:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.606937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510703863585964:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.606949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.631382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:26:37.638717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510703863585966:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:26:37.705377Z node 1 :TX_PROXY ERROR: Actor# [1:7489510703863586019:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:38.309858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:26:38.920981Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0xe92383h6sbzt7y59fv1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODExZmY0ZGUtY2Y1MGI3M2MtYzFiMTEwYWMtMzNlZDU1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.920993Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0xe924949np2k3gj9d9fn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgzNWM0YzAtY2ExOGVkNGYtNDYxOGFhNzQtYjRiNWUzZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.926739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0xe9255vt33fz1thxwhzf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIxZWM3NmYtY2QxNDliZDItYzExNzU1NTMtNTNlZDIyOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.930496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0xe92383h6sbzt7y59fv1t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODExZmY0ZGUtY2Y1MGI3M2MtYzFiMTEwYWMtMzNlZDU1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.931234Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0xe92712e5k8z2w8jhj31x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQyNzdkOTYtMTM1MjlkYjUtYWU5NWJlNjUtNzQ2NzFkYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.931956Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr0xe928bbezbbp2f25cvt3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTU1OTNlZWMtYTNlNTJkZTUtMzcwYTVlMWYtOGMxZDNhZmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.936550Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0xe924949np2k3gj9d9fn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgzNWM0YzAtY2ExOGVkNGYtNDYxOGFhNzQtYjRiNWUzZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.940128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr0xe927fbth8a56r6qaf26j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTE1N2Y2YTEtNjMxOWYwOWItZTUxNTIzZjAtMzYwZjUzZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.941200Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. 
Ctx: { TraceId: 01jr0xe9285mydpxcc6sz0fqq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJiYzQwMzAtNmI1YTU0NTUtZDEzZmZjMy03MDg1YTQ3Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.941680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jr0xe927d2bxw3pr0bg5vn4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM4MDZlZTMtNDA4OGM3NzYtNjRkYWU5OTEtMzM0MTQyYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.941799Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr0xe927b6jb154evdfmjzwk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5NDM2NTUtNjIxZGMxNGMtODRiY2ZjMGYtMjEwZTE2NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.943397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr0xe928c2rhxah8yh4yxtqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjAwYjEwMzgtMTkyNzYwZmMtZWJhYWRkNWEtMjVhNWM4NWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /R ... sion/3?node_id=2&id=ZmMwNGUwZDEtMmJlOGNmNTgtNTE5NDVhZDEtMjQ3YWMzYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.566527Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731634. Ctx: { TraceId: 01jr0xk02rah0eyn9emhhq1z7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.570293Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731635. Ctx: { TraceId: 01jr0xk01rekj2927r9rerh7cp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmMwNGUwZDEtMmJlOGNmNTgtNTE5NDVhZDEtMjQ3YWMzYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.572376Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731636. Ctx: { TraceId: 01jr0xk01w9kctysf0x3frnph6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzY3MGU3MGQtMjZiYzViOGEtMjc1MTJkZGMtNjQwMGI1M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.576431Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731637. Ctx: { TraceId: 01jr0xk02rah0eyn9emhhq1z7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.578452Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731638. Ctx: { TraceId: 01jr0xk01w9kctysf0x3frnph6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzY3MGU3MGQtMjZiYzViOGEtMjc1MTJkZGMtNjQwMGI1M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.582363Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731639. Ctx: { TraceId: 01jr0xk03462hntck9n31pbyea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWNmMDZjZjEtMjQ4MDQxYzYtODM5ZmJjZjYtYWU0MjAyMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.583216Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731641. 
Ctx: { TraceId: 01jr0xk01w9kctysf0x3frnph6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzY3MGU3MGQtMjZiYzViOGEtMjc1MTJkZGMtNjQwMGI1M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.584979Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731640. Ctx: { TraceId: 01jr0xk02z2g5sqhrfsnsg18h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjMwOWIyNWItMmIwNGQ5Ny0xN2YyZTZlMi0yNDI5YmQyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.592739Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731642. Ctx: { TraceId: 01jr0xk03462hntck9n31pbyea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWNmMDZjZjEtMjQ4MDQxYzYtODM5ZmJjZjYtYWU0MjAyMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.600755Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731643. Ctx: { TraceId: 01jr0xk03462hntck9n31pbyea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWNmMDZjZjEtMjQ4MDQxYzYtODM5ZmJjZjYtYWU0MjAyMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.602573Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731644. Ctx: { TraceId: 01jr0xk03k4ayw0r2wydsejx2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzZkNDdjNGItNjZjMjljZDktOTE3NzcyNi1iYTE0NGNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.609098Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731645. Ctx: { TraceId: 01jr0xk03k4ayw0r2wydsejx2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzZkNDdjNGItNjZjMjljZDktOTE3NzcyNi1iYTE0NGNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.612739Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731646. Ctx: { TraceId: 01jr0xk03r5kh94dnyepmekv0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmMwNGUwZDEtMmJlOGNmNTgtNTE5NDVhZDEtMjQ3YWMzYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.623972Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731647. Ctx: { TraceId: 01jr0xk03r5kh94dnyepmekv0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmMwNGUwZDEtMmJlOGNmNTgtNTE5NDVhZDEtMjQ3YWMzYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.627655Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731648. Ctx: { TraceId: 01jr0xk02z2g5sqhrfsnsg18h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjMwOWIyNWItMmIwNGQ5Ny0xN2YyZTZlMi0yNDI5YmQyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.628571Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731649. Ctx: { TraceId: 01jr0xk03r5kh94dnyepmekv0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmMwNGUwZDEtMmJlOGNmNTgtNTE5NDVhZDEtMjQ3YWMzYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.629102Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731650. 
Ctx: { TraceId: 01jr0xk04gc67efcbdn3zb6x5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.630731Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731651. Ctx: { TraceId: 01jr0xk04me2ztxr7rv64yg5wa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzY3MGU3MGQtMjZiYzViOGEtMjc1MTJkZGMtNjQwMGI1M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.631871Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731652. Ctx: { TraceId: 01jr0xk04r23smaq7dacsp0afp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWNmMDZjZjEtMjQ4MDQxYzYtODM5ZmJjZjYtYWU0MjAyMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.637245Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731653. Ctx: { TraceId: 01jr0xk04gc67efcbdn3zb6x5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.639593Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731654. Ctx: { TraceId: 01jr0xk04me2ztxr7rv64yg5wa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzY3MGU3MGQtMjZiYzViOGEtMjc1MTJkZGMtNjQwMGI1M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.639908Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731655. Ctx: { TraceId: 01jr0xk02z2g5sqhrfsnsg18h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjMwOWIyNWItMmIwNGQ5Ny0xN2YyZTZlMi0yNDI5YmQyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.643763Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731656. Ctx: { TraceId: 01jr0xk04me2ztxr7rv64yg5wa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzY3MGU3MGQtMjZiYzViOGEtMjc1MTJkZGMtNjQwMGI1M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.644395Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731657. Ctx: { TraceId: 01jr0xk04r23smaq7dacsp0afp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWNmMDZjZjEtMjQ4MDQxYzYtODM5ZmJjZjYtYWU0MjAyMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.646822Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731658. Ctx: { TraceId: 01jr0xk02z2g5sqhrfsnsg18h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjMwOWIyNWItMmIwNGQ5Ny0xN2YyZTZlMi0yNDI5YmQyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.648829Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731659. Ctx: { TraceId: 01jr0xk04gc67efcbdn3zb6x5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-04T17:29:13.653811Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731660. 
Ctx: { TraceId: 01jr0xk02z2g5sqhrfsnsg18h2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjMwOWIyNWItMmIwNGQ5Ny0xN2YyZTZlMi0yNDI5YmQyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.654487Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731661. Ctx: { TraceId: 01jr0xk05e3dn4t2qxmpgj6afs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzZkNDdjNGItNjZjMjljZDktOTE3NzcyNi1iYTE0NGNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.654616Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731662. Ctx: { TraceId: 01jr0xk04gc67efcbdn3zb6x5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.658227Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731663. Ctx: { TraceId: 01jr0xk04gc67efcbdn3zb6x5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.662771Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731664. Ctx: { TraceId: 01jr0xk05e3dn4t2qxmpgj6afs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzZkNDdjNGItNjZjMjljZDktOTE3NzcyNi1iYTE0NGNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.664233Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731665. Ctx: { TraceId: 01jr0xk04gc67efcbdn3zb6x5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzVhZmZkNzktZWRhMDg5MmYtNzgwNTNiNmYtNzg3OGYzMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-04-04T17:29:13.668552Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731666. Ctx: { TraceId: 01jr0xk05e3dn4t2qxmpgj6afs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzZkNDdjNGItNjZjMjljZDktOTE3NzcyNi1iYTE0NGNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.671661Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731667. Ctx: { TraceId: 01jr0xk05e3dn4t2qxmpgj6afs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzZkNDdjNGItNjZjMjljZDktOTE3NzcyNi1iYTE0NGNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:13.675350Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731668. Ctx: { TraceId: 01jr0xk05e3dn4t2qxmpgj6afs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzZkNDdjNGItNjZjMjljZDktOTE3NzcyNi1iYTE0NGNjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] Test command err: 2025-04-04T17:28:44.093053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:44.093762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:44.093985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:44.095891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:44.096140Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:44.096389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmpdD9O8R/pdisk_1.dat 2025-04-04T17:28:44.483421Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1263, node 1 TClient is connected to server localhost:27845 2025-04-04T17:28:44.866624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:44.866708Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:44.866747Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:44.866990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:51.740843Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:51.741388Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:51.741766Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:51.742726Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:51.742821Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:51.743107Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmp909Ceh/pdisk_1.dat 2025-04-04T17:28:52.060569Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22631, node 3 TClient is connected to server localhost:13004 2025-04-04T17:28:52.453845Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:52.453919Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:52.453961Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:52.454501Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-9a33-be81" status: RED message: "Database has storage issues" location { database { name: "/Root" } } reason: "RED-9a33-d6d1" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-d6d1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-9a33-258e-ab18" type: "STORAGE" level: 2 } issue_log { id: "RED-9a33-258e-ab18" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-9a33-819b-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-99d2-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmp909Ceh/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmp909Ceh/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmp909Ceh/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-9a33-819b-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 
2025-04-04T17:28:59.838175Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:59.838729Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:59.839177Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:59.840212Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:59.840572Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:59.840864Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmpuQfNet/pdisk_1.dat 2025-04-04T17:29:00.163515Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17489, node 5 TClient is connected to server localhost:61898 2025-04-04T17:29:00.563725Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:00.563787Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:00.563818Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:00.564322Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:08.400095Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:478:2409], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:08.400683Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:08.400826Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:08.402862Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:700:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:08.403160Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:08.403322Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmpEcZdjk/pdisk_1.dat 2025-04-04T17:29:08.709201Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64296, node 7 TClient is connected to server localhost:11111 2025-04-04T17:29:09.146125Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:09.146188Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:09.146223Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:09.146784Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:17.059877Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:17.060430Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:17.060578Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:17.061181Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:17.061639Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:17.061721Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00165a/r3tmp/tmpb4c61Z/pdisk_1.dat 2025-04-04T17:29:17.356020Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9138, node 9 TClient is connected to server localhost:27447 2025-04-04T17:29:17.758299Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:17.758349Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:17.758389Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:17.758640Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_build_index/unittest >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] Test command err: 2025-04-04T17:29:00.396966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:00.397627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:00.397696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000932/r3tmp/tmpNXw5rW/pdisk_1.dat 2025-04-04T17:29:00.864495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:00.931555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:00.981809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:00.982844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:00.995835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:01.103070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:01.159000Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:29:01.159302Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:01.206037Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:01.206175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:01.208925Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:01.209020Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:01.209079Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:01.211249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:01.211530Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:01.211648Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:29:01.222627Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:01.258398Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:01.259507Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:01.259635Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:29:01.259724Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:01.259754Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:01.259789Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:01.260898Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:01.261056Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:01.261195Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:01.261275Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:01.261393Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:01.261469Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:01.261568Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:29:01.262979Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:01.263933Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:01.264060Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:01.266085Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:01.276896Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:01.277082Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:01.419223Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:29:01.430153Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:29:01.430287Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:01.431191Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:01.431245Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:01.431310Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:29:01.431604Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:29:01.431787Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:01.432430Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:01.432540Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:29:01.435769Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:01.436303Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:29:01.438829Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:29:01.438899Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:01.441083Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:29:01.441143Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:01.442325Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:01.442735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:01.442773Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:01.442821Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:29:01.442887Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:29:01.442961Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:29:01.443082Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:01.447761Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:29:01.447976Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:29:01.448040Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:29:01.478811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:01.478996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:01.479098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:01.490167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:01.497083Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:01.651214Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:01.653725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:01.720331Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:02.528160Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xjm9291je88kzwg7mw244, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNhMWZjNzctZmU0MmJiNGYtYzNmOWNhMGYtODc5Nzk0ZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:02.537913Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:850:2687], serverId# [1:851:2688], sessionId# [0:0:0] 2025-04-04T17:29:02.539746Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:02.559792Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17 ... 557072Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:29:18.557096Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.557127Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-04-04T17:29:18.557448Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.557523Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-04T17:29:18.557560Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:18.557636Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-04T17:29:18.557682Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:18.557716Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T17:29:18.557739Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.557764Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-04-04T17:29:18.557791Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.557844Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:29:18.557876Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 
2025-04-04T17:29:18.558134Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037890:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-04-04T17:29:18.559020Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-04-04T17:29:18.559241Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 24 2025-04-04T17:29:18.559314Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 63000} 2025-04-04T17:29:18.559374Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:29:18.559912Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 8 for step 25 2025-04-04T17:29:18.560314Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:29:18.560417Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-04-04T17:29:18.560461Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-04-04T17:29:18.561901Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-04T17:29:18.561938Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-04-04T17:29:18.562281Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-04T17:29:18.562324Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:18.562360Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:29:18.562428Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.562457Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for ReadTableScan 2025-04-04T17:29:18.562675Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:18.562732Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-04-04T17:29:18.562770Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:18.573651Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 26 2025-04-04T17:29:18.573740Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:29:18.573794Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:29:18.573869Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1531:3309], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:29:18.573945Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:29:18.574277Z 
node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} commited cookie 1 for step 24 2025-04-04T17:29:18.574340Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 63000} 2025-04-04T17:29:18.574390Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T17:29:18.574431Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T17:29:18.574647Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-04T17:29:18.574690Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:18.574717Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T17:29:18.574741Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.574778Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-04-04T17:29:18.575048Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.575144Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-04T17:29:18.575195Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:18.575431Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037891:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-04-04T17:29:18.575747Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations 2025-04-04T17:29:18.575808Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:18.575973Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} hope 1 -> done Change{81, redo 184b alter 0b annex 0, ~{ 4, 0 } -{ }, 0 gb} 2025-04-04T17:29:18.576049Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:18.576413Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 1 2025-04-04T17:29:18.576801Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 8 for step 25 2025-04-04T17:29:18.577238Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:88} commited cookie 1 for step 87 2025-04-04T17:29:18.577458Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037891, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-04-04T17:29:18.626824Z node 2 :TX_DATASHARD DEBUG: Got stream data ack 
ShardId: 72075186224037891, TxId: 281474976715666, PendingAcks: 0 2025-04-04T17:29:18.626898Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 0 2025-04-04T17:29:18.628485Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-04-04T17:29:18.628522Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037891 2025-04-04T17:29:18.628705Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-04-04T17:29:18.628744Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:18.628773Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T17:29:18.628800Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:18.628826Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for ReadTableScan 2025-04-04T17:29:18.629019Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:18.629094Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-04-04T17:29:18.629130Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:18.639729Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 1 for step 26 2025-04-04T17:29:18.639793Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T17:29:18.639823Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-04-04T17:29:18.639877Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [2:1531:3309], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:29:18.639912Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_build_index/unittest >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> BlobDepot::LoadPutAndRead [GOOD] >> BlobDepot::DecommitPutAndRead >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> TSchemeShardTest::NestedDirs [GOOD] >> DataCleanup::ForceDataCleanupWithoutCompaction [GOOD] >> DataCleanup::MultipleDataCleanups >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> TTxDataShardSampleKScan::RunScan >> TMemoryController::Counters >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> SequenceProxy::DropRecreate [GOOD] >> 
TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD] >> LongTxService::LockSubscribe [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2025-04-04T17:29:18.767258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:29:18.767322Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:18.846723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:29:19.498996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-04T17:29:19.698553Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:29:19.699139Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/0008ab/r3tmp/tmpB4LCUw/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:29:19.700062Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/0008ab/r3tmp/tmpB4LCUw/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/0008ab/r3tmp/tmpB4LCUw/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 51653797368890530 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:29:20.575229Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:29:20.575311Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:20.624953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046578944 2025-04-04T17:29:21.045782Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-04-04T17:29:21.214424Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:29:21.214842Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/0008ab/r3tmp/tmpkKmYJO/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:29:21.215146Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/0008ab/r3tmp/tmpkKmYJO/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/0008ab/r3tmp/tmpkKmYJO/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17141901457790141606 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:29:21.334021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944 2025-04-04T17:29:21.551480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944 |97.7%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::NestedDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:26:21.709349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:26:21.709451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:21.709493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:26:21.709531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:26:21.710585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:26:21.710631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:26:21.710688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:26:21.710784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:26:21.712005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:26:21.775686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:26:21.775734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:21.784114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:26:21.784303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:26:21.784478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:26:21.793828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:26:21.795684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:26:21.797913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.800049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:26:21.806662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.814900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:26:21.814943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:21.815058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:26:21.815161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.820397Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:26:21.928169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:26:21.928351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.928489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:26:21.928641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:26:21.928694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.930827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.930949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:26:21.931086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.931126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:26:21.931155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:26:21.931179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:26:21.932570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.932610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:26:21.932636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:26:21.933824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.933856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.933890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.933930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.936876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:26:21.938182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:26:21.938331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:26:21.939081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:26:21.939174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:26:21.939215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.939422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:26:21.939473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:26:21.939595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:26:21.939659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:26:21.942066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:26:21.942255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:26:21.942523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:26:21.942561Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:26:21.942630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:21.942658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:26:21.942719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:26:21.942774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:26:21.942800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:26:21.942829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:26:21.942877Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:26:21.942917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:26:21.942942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:26:21.944279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:21.944360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:26:21.944385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... _TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 148, at schemeshard: 72057594046678944 2025-04-04T17:29:20.996168Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 141: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.996184Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 141: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.996220Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 149, at schemeshard: 72057594046678944 2025-04-04T17:29:20.996326Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 150, at schemeshard: 72057594046678944 2025-04-04T17:29:20.996350Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 142: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.996362Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 142: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.996417Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 151, at schemeshard: 72057594046678944 2025-04-04T17:29:20.996454Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 143: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.996469Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 143: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.996563Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 152, at schemeshard: 72057594046678944 2025-04-04T17:29:20.996650Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 153, at schemeshard: 72057594046678944 2025-04-04T17:29:20.996699Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 144: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.996718Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 144: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.996744Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 145: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.996758Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 145: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.996902Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 154, at schemeshard: 72057594046678944 2025-04-04T17:29:20.996933Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 146: got 
EvNotifyTxCompletionResult 2025-04-04T17:29:20.996949Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 146: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.997033Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-04-04T17:29:20.997079Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 147: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.997103Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 147: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.997189Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 148: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.997210Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 148: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.997298Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-04-04T17:29:20.997344Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 149: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.997360Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 149: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.997455Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 150: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.997477Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 150: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.997546Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-04-04T17:29:20.997617Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-04-04T17:29:20.997651Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 151: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.997672Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 151: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.997775Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 152: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.997801Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 152: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.997924Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 153: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.997958Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 153: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.998152Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 154: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.998186Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 154: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.998383Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.998414Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.998606Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.998640Z node 15 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.998796Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.998815Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [15:1064:3055] 2025-04-04T17:29:20.998882Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-04-04T17:29:20.998899Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [15:1064:3055] TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 TestWaitNotification: OK eventTxId 133 TestWaitNotification: OK eventTxId 134 TestWaitNotification: OK eventTxId 135 TestWaitNotification: OK eventTxId 136 TestWaitNotification: OK eventTxId 137 TestWaitNotification: OK eventTxId 138 TestWaitNotification: OK eventTxId 139 TestWaitNotification: OK eventTxId 140 TestWaitNotification: OK eventTxId 141 TestWaitNotification: OK eventTxId 142 TestWaitNotification: OK eventTxId 143 TestWaitNotification: OK eventTxId 144 TestWaitNotification: OK eventTxId 145 TestWaitNotification: OK eventTxId 146 TestWaitNotification: OK eventTxId 147 TestWaitNotification: OK eventTxId 148 TestWaitNotification: OK eventTxId 149 TestWaitNotification: OK eventTxId 150 TestWaitNotification: OK eventTxId 151 TestWaitNotification: OK eventTxId 152 TestWaitNotification: OK eventTxId 153 TestWaitNotification: OK eventTxId 154 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestModificationResults wait txId: 155 2025-04-04T17:29:21.003715Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31" OperationType: ESchemeOpMkDir MkDir { Name: "fail" } } TxId: 155 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:29:21.004314Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail, operationId: 155:0, at schemeshard: 72057594046678944 2025-04-04T17:29:21.004566Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 155:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail', error: paths depth limit exceeded, limit: 32, depth: 33, delta: 0, at schemeshard: 72057594046678944 2025-04-04T17:29:21.007864Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 155, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail\', error: paths depth limit exceeded, limit: 32, depth: 33, delta: 0" TxId: 155 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:29:21.008269Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 155, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail', error: paths 
depth limit exceeded, limit: 32, depth: 33, delta: 0, operation: CREATE DIRECTORY, path: /MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail TestModificationResult got TxId: 155, wait until txId: 155 2025-04-04T17:29:21.009356Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:29:21.009639Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 318us result status StatusSuccess 2025-04-04T17:29:21.010120Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 31 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test.py::test[solomon-BrokenJsonResponse-] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> TestJsonParser::Simple1 >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> TestJsonParser::Simple1 [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery >> TestJsonParser::Simple2 >> Metering::BillingRecords [GOOD] >> Metering::MockedNetClassifierOnly >> HttpRouter::Basic [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2025-04-04T17:29:19.156540Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:29:19.157076Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmpsV4yNx/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-04-04T17:29:19.157575Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmpsV4yNx/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmpsV4yNx/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9495494008196751482 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:29:19.208280Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvBeginTx from [1:431:2320] 2025-04-04T17:29:19.209283Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.219163Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:432:2098] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.219286Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:94:2048] 2025-04-04T17:29:19.219423Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:145:2087] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.219659Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:432:2098] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.219819Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvCommitTx from [2:145:2087] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.221465Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 without side-effects 2025-04-04T17:29:19.221737Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:432:2098] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.221945Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:145:2087] LongTxId# 
ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.223065Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:432:2098] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.223202Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:145:2087] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=1 2025-04-04T17:29:19.223390Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-04T17:29:19.223700Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-04-04T17:29:19.223970Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:94:2048] 2025-04-04T17:29:19.224165Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:77:2072] ServerId# [1:351:2270] TabletId# 72057594037932033 PipeClientId# [2:77:2072] 2025-04-04T17:29:19.227795Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:432:2098] LongTxId# ydb://long-tx/000000001h3601fz8yz0f9drfc?node_id=3 2025-04-04T17:29:19.228066Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:470:2100] 2025-04-04T17:29:20.029433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:29:20.029496Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:20.109281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-04-04T17:29:20.667803Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:29:20.668195Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmppzsVdQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:29:20.668366Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmppzsVdQ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmppzsVdQ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2137910757861166635 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:29:20.968871Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:510:2384] for database /dc-1 2025-04-04T17:29:20.968956Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-04T17:29:20.979267Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-04T17:29:20.979546Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:562:2423] Sending navigate request for /dc-1 2025-04-04T17:29:20.983043Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:562:2423] Received navigate response status Ok 2025-04-04T17:29:20.983097Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:562:2423] Sending acquire step to coordinator 72057594046316545 2025-04-04T17:29:20.986061Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:562:2423] Received read step 1000 2025-04-04T17:29:20.986143Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2025-04-04T17:29:20.986755Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:510:2384] 2025-04-04T17:29:20.986798Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-04T17:29:20.997129Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-04T17:29:20.997292Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:580:2435] Sending navigate request for /dc-1 2025-04-04T17:29:20.997451Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:580:2435] Received navigate response status Ok 2025-04-04T17:29:20.997486Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:580:2435] Sending acquire step to coordinator 72057594046316545 2025-04-04T17:29:20.997633Z node 3 :LONG_TX_SERVICE DEBUG: 
LongTxService.AcquireSnapshot [3:580:2435] Received read step 1500 2025-04-04T17:29:20.997735Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2025-04-04T17:29:20.997777Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2025-04-04T17:29:20.997901Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:510:2384] 2025-04-04T17:29:20.997928Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-04-04T17:29:21.008275Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-04-04T17:29:21.008454Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:582:2437] Sending navigate request for /dc-1 2025-04-04T17:29:21.008675Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:582:2437] Received navigate response status Ok 2025-04-04T17:29:21.008732Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:582:2437] Sending acquire step to coordinator 72057594046316545 2025-04-04T17:29:21.008932Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:582:2437] Received read step 1500 2025-04-04T17:29:21.008998Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2025-04-04T17:29:21.009066Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001eacjbspe9qmwdp1qj6?node_id=3&snapshot=1500%3Amax 2025-04-04T17:29:21.669974Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-04-04T17:29:21.670344Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmpFJhv03/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-04-04T17:29:21.670553Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmpFJhv03/pdisk_1.dat": unknown reason, errno# 0. 
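A note on the BlockDevice failure above: the PDisk reports EIoResult::FileOpenError but errno# 0, which suggests the rejection came from IoContext->Setup rather than from the kernel (a failed open(2) would normally carry a non-zero errno). A minimal, hypothetical check for separating the two cases outside the test follows; the path is copied from this run's log, and the O_DIRECT flag is only an assumption about how the block device opens its backing file:

    import errno, os

    # Illustrative repro helper, not part of the test suite: attempt the same
    # open the PDisk block device performs and report the kernel errno.
    # In the log above, FileOpenError with errno# 0 points at the io-engine
    # setup path, not at the open(2) syscall itself.
    path = "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmpFJhv03/pdisk_1.dat"
    flags = os.O_RDWR | getattr(os, "O_DIRECT", 0)  # O_DIRECT is Linux-only
    try:
        fd = os.open(path, flags)
        os.close(fd)
        print("open(2) succeeded; the failure would be inside io engine setup")
    except OSError as e:
        print(f"open(2) failed: errno={e.errno} "
              f"({errno.errorcode.get(e.errno, '?')}) {e.strerror}")
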
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/i5tq/0007aa/r3tmp/tmpFJhv03/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2409707337237433505 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-04-04T17:29:21.702615Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2025-04-04T17:29:21.702730Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:428:2317] for LockId# 987 LockNode# 5 2025-04-04T17:29:21.710016Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:429:2098] for LockId# 987 LockNode# 5 2025-04-04T17:29:21.711271Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:95:2048] 2025-04-04T17:29:21.711392Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:145:2087] for LockId# 987 LockNode# 5 2025-04-04T17:29:21.712687Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:144:2134] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-04T17:29:21.713408Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:428:2317] for LockId# 123 LockNode# 5 2025-04-04T17:29:21.713549Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:429:2098] for LockId# 123 LockNode# 5 2025-04-04T17:29:21.713645Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:145:2087] for LockId# 123 LockNode# 5 2025-04-04T17:29:21.713817Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:144:2134] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2025-04-04T17:29:21.713918Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2025-04-04T17:29:21.714027Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:144:2134] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-04-04T17:29:21.714178Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from 
[6:429:2098] for LockId# 234 LockNode# 5
2025-04-04T17:29:21.714350Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6
2025-04-04T17:29:21.714668Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6
2025-04-04T17:29:21.714873Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:95:2048]
2025-04-04T17:29:21.714975Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:79:2073] ServerId# [5:348:2267] TabletId# 72057594037932033 PipeClientId# [6:79:2073]
2025-04-04T17:29:21.898099Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:461:2048]
2025-04-04T17:29:21.898353Z node 5 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 6
2025-04-04T17:29:21.898429Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6
2025-04-04T17:29:21.898459Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6
2025-04-04T17:29:21.898822Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:461:2048]
2025-04-04T17:29:21.898977Z node 6 :TX_PROXY WARN: actor# [6:143:2086] HANDLE TEvClientDestroyed from tablet# 72057594046447617
2025-04-04T17:29:21.899152Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:458:2099] ServerId# [5:464:2336] TabletId# 72057594037932033 PipeClientId# [6:458:2099]
2025-04-04T17:29:22.130627Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:490:2048]
2025-04-04T17:29:22.131115Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:490:2048]
2025-04-04T17:29:22.131254Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6
2025-04-04T17:29:22.131332Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6
2025-04-04T17:29:22.131656Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:491:2100] ServerId# [5:495:2358] TabletId# 72057594037932033 PipeClientId# [6:491:2100]
2025-04-04T17:29:22.366157Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:510:2048]
2025-04-04T17:29:22.366381Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6
2025-04-04T17:29:22.366713Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6
2025-04-04T17:29:22.366765Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:510:2048]
2025-04-04T17:29:22.366913Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:511:2102] ServerId# [5:516:2372] TabletId# 72057594037932033 PipeClientId# [6:511:2102]
|97.7%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest
>> TestJsonParser::Simple2 [GOOD]
|97.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD]
|97.7%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest
>> TestJsonParser::Simple3
|97.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD]
|97.7%| [TS] {RESULT} ydb/core/public_http/ut/unittest
>> TestJsonParser::Simple3 [GOOD]
>> TestJsonParser::Simple4
>> TestJsonParser::Simple4 [GOOD]
>> TIndexProcesorTests::TestCreateIndexProcessor
|97.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log}
>> TestJsonParser::LargeStrings
>> SequenceShardTests::Basics
>> TestJsonParser::LargeStrings [GOOD]
>> TPQTest::TestMaxTimeLagRewind [GOOD]
>> TPQTest::TestManyConsumers
>> TestJsonParser::ManyValues
|97.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log}
>> TestJsonParser::ManyValues [GOOD]
>> TestJsonParser::MissingFields
>> NodeWardenDsProxyConfigRetrieval::Disconnect
>> TestJsonParser::MissingFields [GOOD]
>> test_result_limits.py::TestResultLimits::test_many_rows
>> TestJsonParser::NestedTypes
>> SequenceShardTests::Basics [GOOD]
>> SequenceShardTests::MarkedPipeRetries
>> TestJsonParser::NestedTypes [GOOD]
>> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD]
>> TabletService_ExecuteMiniKQL::DryRunEraseRow
|97.7%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
>> TestJsonParser::SimpleBooleans
>> TTxDataShardSampleKScan::RunScan [GOOD]
>> TTxDataShardSampleKScan::ScanBadParameters
>> SequenceShardTests::MarkedPipeRetries [GOOD]
>> SequenceShardTests::FreezeRestoreRedirect
>> BlobDepot::DecommitPutAndRead [GOOD]
>> BlobDepot::DecommitVerifiedRandom
>> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD]
>> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables]
>> TestJsonParser::SimpleBooleans [GOOD]
>> TDqSolomonWriteActorTest::TestWriteFormat [GOOD]
>> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring
>> TestJsonParser::ManyBatches
>> TestJsonParser::ManyBatches [GOOD]
>> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD]
>> SequenceShardTests::FreezeRestoreRedirect [GOOD]
>> SequenceShardTests::NegativeIncrement
>> DataCleanup::MultipleDataCleanups [GOOD]
>> TIndexProcesorTests::TestCreateIndexProcessor [GOOD]
>> DataCleanup::MultipleDataCleanupsWithOldGenerations
>> TestJsonParser::LittleBatches
>> TIndexProcesorTests::TestSingleCreateQueueEvent
>> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected
>> TCreateAndDropViewTest::InvalidQuery [GOOD]
>> QuoterWithKesusTest::GetsQuota [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD]
>> TMemoryController::Counters [GOOD]
>> SequenceShardTests::NegativeIncrement [GOOD]
>> TCreateAndDropViewTest::ParsingSecurityInvoker
>> test.py::test[solomon-Downsampling-default.txt] [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD]
>> TestJsonParser::LittleBatches [GOOD]
>> test.py::test[solomon-DownsamplingValidSettings-default.txt]
>> QuoterWithKesusTest::GetsBigQuota
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter
>> TestJsonParser::MissingFieldsValidation
>> TMemoryController::Counters_HardLimit
>> TestJsonParser::MissingFieldsValidation [GOOD]
>> TestJsonParser::TypeKindsValidation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD]
Test command err:
Caught NodeWarden registration actorId# [1:11:2058]
2025-04-04T17:29:25.457456Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:317} Bootstrap
2025-04-04T17:29:25.485573Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51}
ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/i5tq/000abf/r3tmp/tmpLtM9Ej/static.dat" PDiskGuid: 3330122045893170349 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 3330122045893170349 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 3330122045893170349 } } } } AvailabilityDomains: 0 } 2025-04-04T17:29:25.486492Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/i5tq/000abf/r3tmp/tmpLtM9Ej/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-04-04T17:29:25.491228Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T17:29:25.494222Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 3330122045893170349 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T17:29:25.496127Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 3330122045893170349 2025-04-04T17:29:25.496194Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-04-04T17:29:25.498011Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:27:2074] ControllerId# 72057594037932033 2025-04-04T17:29:25.498068Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T17:29:25.498174Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:292} StartInvalidGroupProxy GroupId# 4294967295 2025-04-04T17:29:25.498810Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:304} StartRequestReportingThrottler 2025-04-04T17:29:25.517225Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-04-04T17:29:25.521769Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-04-04T17:29:25.545005Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T17:29:25.545075Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T17:29:25.554961Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-04-04T17:29:25.555017Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-04-04T17:29:25.559672Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-04-04T17:29:25.563076Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-04-04T17:29:25.563740Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-04-04T17:29:25.564169Z node 1 :BS_NODE DEBUG: 
{NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/i5tq/000abf/r3tmp/tmpLtM9Ej/static.dat" PDiskGuid: 3330122045893170349 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 3330122045893170349 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 3330122045893170349 } } } } AvailabilityDomains: 0 } 2025-04-04T17:29:25.579834Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-04-04T17:29:25.607565Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-04-04T17:29:25.607626Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-04-04T17:29:25.608573Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\tx~\021\274\006G\245\030\323\032\022\360\245\374E\275\\zH" } 2025-04-04T17:29:25.608873Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-04-04T17:29:25.608941Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 2146435075 Sender# [1:47:2089] SessionId# [0:0:0] Cookie# 0 2025-04-04T17:29:25.608984Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.053810s 2025-04-04T17:29:25.609049Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-04-04T17:29:25.609094Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-04-04T17:29:25.618652Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1126} Handle(TEvStatusUpdate) 2025-04-04T17:29:25.627129Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1126} Handle(TEvStatusUpdate) 2025-04-04T17:29:25.648735Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:25.670420Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:25.674603Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-04-04T17:29:25.678542Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:25.679823Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-04-04T17:29:25.682763Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-04-04T17:29:25.682813Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-04-04T17:29:25.683143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 
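The ApplyServiceSet records above carry the static service set as protobuf text (the SectorMap: paths indicate the test-backed PDisk devices). When comparing many such dumps, pulling out just the PDisk identity fields helps; a rough sketch, using plain regexes over the dump text rather than the real NKikimrBlobStorage schema:

    import re

    # Rough field extractor for ServiceSet text dumps like the ones above.
    # Field names are copied from the log; nested messages are not parsed
    # structurally, so this is only a debugging convenience, not a parser.
    def pdisk_identity(dump: str) -> dict:
        patterns = {
            "NodeID": r"PDisks \{ NodeID: (\d+)",
            "PDiskID": r"PDiskID: (\d+)",
            "Path": r'Path: "([^"]+)"',
            "PDiskGuid": r"PDiskGuid: (\d+)",
        }
        return {k: (m.group(1) if (m := re.search(p, dump)) else None)
                for k, p in patterns.items()}

    sample = 'PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/tmp/static.dat" PDiskGuid: 42 }'
    print(pdisk_identity(sample))
    # {'NodeID': '1', 'PDiskID': '1', 'Path': 'SectorMap:/tmp/static.dat', 'PDiskGuid': '42'}
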
2025-04-04T17:29:25.699581Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-04-04T17:29:25.699806Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-04-04T17:29:25.699967Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-04-04T17:29:25.700083Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:29:25.700223Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-04-04T17:29:25.700385Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T17:29:25.729523Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-04-04T17:29:25.729671Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:29:25.742791Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-04-04T17:29:25.742937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:29:25.743007Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-04-04T17:29:25.743090Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T17:29:25.743217Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-04-04T17:29:25.743283Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:29:25.743317Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-04-04T17:29:25.743362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:29:25.755293Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-04-04T17:29:25.755447Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:29:25.767444Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-04-04T17:29:25.767609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} 
TTxLoadEverything Execute 2025-04-04T17:29:25.769783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-04-04T17:29:25.769839Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-04-04T17:29:25.784878Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-04-04T17:29:25.784961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed Pipe connected clientId# [1:27:2074] 2025-04-04T17:29:25.785552Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:27:2074] ServerId# [1:123:2145] TabletId# 72057594037932033 PipeClientId# [1:27:2074] 2025-04-04T17:29:25.785961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 3330122045893170349 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-04T17:29:25.787453Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/i5tq/000abf/r3tmp/tmpLtM9Ej/static.dat" PDiskConfig { ExpectedSlotCount: 2 } } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-04-04T17:29:25.789293Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# SectorMap:/home/runner/.ya/build/build_root/i5tq/000abf/r3tmp/tmpLtM9Ej/static.dat 2025-04-04T17:29:25.796690Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: Initial Replicated: false DiskSpace: Green } } 2025-04-04T17:29:25.796933Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-04T17:29:25.797033Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } 2025-04-04T17:29:25.797242Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 3330122045893170349 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-04-04T17:29:25.797433Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 3330122045893170349 Status: READY OnlyPhantomsRemain: false } } 2025-04-04T17:29:25.798451Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1126} Handle(TEvStatusUpdate) 2025-04-04T17:29:25.799997Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1126} Handle(TEvStatusUpdate) 2025-04-04T17:29:25.800132Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} 
Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-04T17:29:25.800306Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1126} Handle(TEvStatusUpdate) 2025-04-04T17:29:25.800431Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-04T17:29:25.813059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:27:2074] 2025-04-04T17:29:25.813934Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:27:2074] ServerId# [1:123:2145] TabletId# 72057594037932033 PipeClientId# [1:27:2074] 2025-04-04T17:29:25.814053Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:138:2158] ControllerId# 72057594037932033 2025-04-04T17:29:25.814085Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-04-04T17:29:25.814224Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2025-04-04T17:29:25.814843Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2147483648 2025-04-04T17:29:25.814883Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:265} RequestGroupConfig GroupId# 2147483648 2025-04-04T17:29:25.815433Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 Pipe connected clientId# [1:138:2158] 2025-04-04T17:29:25.815746Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:138:2158] ServerId# [1:141:2160] TabletId# 72057594037932033 PipeClientId# [1:138:2158] 2025-04-04T17:29:25.815965Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 3330122045893170349 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-04-04T17:29:25.816239Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-04-04T17:29:25.816414Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:141:2160] RecipientRewrite# [1:89:2121] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2025-04-04T17:29:25.816479Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2025-04-04T17:29:25.816598Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-04-04T17:29:25.816725Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 2025-04-04T17:29:25.830370Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:785} 
Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/i5tq/000abf/r3tmp/tmpLtM9Ej/static.dat" PDiskGuid: 3330122045893170349 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 3330122045893170349 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 3330122045893170349 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "e07326fa-b2e1eca4-76a277e3-f7576099" Comprehensive: true AvailDomain: 0 } 2025-04-04T17:29:25.830597Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/i5tq/000abf/r3tmp/tmpLtM9Ej/static.dat" PDiskGuid: 3330122045893170349 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 3330122045893170349 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 3330122045893170349 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-04-04T17:29:25.830800Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 3330122045893170349 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-04-04T17:29:25.831658Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:257} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 3330122045893170349 2025-04-04T17:29:25.831934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-04-04T17:29:25.836460Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:785} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 3330122045893170349 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-04-04T17:29:25.836582Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 3330122045893170349 } } } 
EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } }
2025-04-04T17:29:25.838397Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648
2025-04-04T17:29:25.839783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 3330122045893170349 Status: INIT_PENDING OnlyPhantomsRemain: false } }
2025-04-04T17:29:25.840836Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } }
2025-04-04T17:29:25.845241Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } }
2025-04-04T17:29:25.850187Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1126} Handle(TEvStatusUpdate)
2025-04-04T17:29:25.850793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 3330122045893170349 Status: REPLICATING OnlyPhantomsRemain: false } }
2025-04-04T17:29:25.851311Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1126} Handle(TEvStatusUpdate)
2025-04-04T17:29:25.851950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 3330122045893170349 Status: READY OnlyPhantomsRemain: false } }
|97.7%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest
>> TestJsonParser::TypeKindsValidation [GOOD]
------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD]
Test command err:
2025-04-04T17:29:25.168156Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor
2025-04-04T17:29:25.168378Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute
2025-04-04T17:29:25.184544Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute
2025-04-04T17:29:25.188131Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete
2025-04-04T17:29:25.188184Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete
2025-04-04T17:29:25.195238Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 }
2025-04-04T17:29:25.195447Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active
2025-04-04T17:29:25.218958Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete
2025-04-04T17:29:25.219253Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123,
LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-04T17:29:25.219287Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:25.219351Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-04T17:29:25.219567Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2025-04-04T17:29:25.219659Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2025-04-04T17:29:25.232156Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-04T17:29:25.232496Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:25.232580Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2025-04-04T17:29:25.244613Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.244904Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-04T17:29:25.244996Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2025-04-04T17:29:25.256907Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.257208Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-04T17:29:25.257293Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2025-04-04T17:29:25.269432Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.269940Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2025-04-04T17:29:25.270069Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2025-04-04T17:29:25.282259Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.282605Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2025-04-04T17:29:25.282651Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2025-04-04T17:29:25.282716Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 
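The AllocationStart/AllocationCount pairs in this trace follow a simple pattern: each allocation hands out up to Cache values starting at the sequence's next value, clamped at MaxValue (a request with Cache# 0 apparently falls back to the sequence's own cache setting), and a request past MaxValue yields SEQUENCE_OVERFLOW, as seen just below. A small model of that arithmetic, reconstructed from the logged values only and not taken from the sequenceshard sources:

    # Model of the allocation arithmetic visible in this trace (increment 1).
    MAX_I64 = 2**63 - 1

    def allocate(next_value, cache, increment=1, max_value=MAX_I64):
        # Returns (AllocationStart, AllocationCount, new next value).
        if next_value > max_value:
            raise OverflowError("SEQUENCE_OVERFLOW")
        room = (max_value - next_value) // increment + 1
        count = min(cache, room)
        return next_value, count, next_value + count * increment

    nv = 1
    for cache in (1, 10, 2**64 - 1):      # effective cache sizes from the log
        start, count, nv = allocate(nv, cache)
        print(start, count)
    # prints 1 1, then 2 10, then 12 9223372036854775796 -- matching the
    # trace; one more allocate() now raises, matching SEQUENCE_OVERFLOW.
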
2025-04-04T17:29:25.282966Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-04T17:29:25.283047Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2025-04-04T17:29:25.295857Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.296361Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-04T17:29:25.296418Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:25.296484Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.296805Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:25.296896Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:25.309227Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-04T17:29:25.309805Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:25.309857Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:25.309920Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-04-04T17:29:25.327219Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-04T17:29:25.327329Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-04T17:29:25.327967Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-04T17:29:25.328604Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-04T17:29:25.328817Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-04T17:29:25.333162Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:25.333220Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:25.333274Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.333562Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-04T17:29:25.333656Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2025-04-04T17:29:25.345844Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.346437Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] 
TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2025-04-04T17:29:25.346547Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-04T17:29:25.358825Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-04T17:29:25.359261Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-04T17:29:25.359370Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2025-04-04T17:29:25.392386Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.392962Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2025-04-04T17:29:25.393068Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-04T17:29:25.426345Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-04T17:29:25.426821Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-04-04T17:29:25.426917Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2025-04-04T17:29:25.439668Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:25.440048Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-04T17:29:25.440114Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxGetSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-04-04T17:29:25.440184Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Complete 2025-04-04T17:29:25.872379Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-04T17:29:25.872464Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-04T17:29:25.880267Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-04T17:29:25.883870Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-04T17:29:25.883951Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-04T17:29:25.886064Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxMarkSchemeShardPipe.Execute SchemeShardId# 123 Generation# 1 Round# 1 2025-04-04T17:29:25.886376Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-04-04T17:29:25.886505Z node 2 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 
Cycle# false State# Active 2025-04-04T17:29:25.909719Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 7205 ... ENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-04T17:29:26.392545Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:26.392624Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:26.404851Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-04T17:29:26.405252Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-04-04T17:29:26.405305Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_FROZEN PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:26.405376Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:26.405718Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-04T17:29:26.405824Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-04T17:29:26.418232Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-04T17:29:26.418659Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Cache# 0 2025-04-04T17:29:26.418772Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2025-04-04T17:29:26.431122Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:26.431727Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-04-04T17:29:26.431791Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-04T17:29:26.431862Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-04T17:29:26.432149Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-04T17:29:26.432240Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-04T17:29:26.444606Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-04T17:29:26.445016Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-04T17:29:26.445115Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] 
TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-04T17:29:26.457212Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-04T17:29:26.457531Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-04T17:29:26.457616Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-04-04T17:29:26.469780Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-04T17:29:26.470036Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:26.470089Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2025-04-04T17:29:26.470149Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:26.470376Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-04T17:29:26.470439Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-04T17:29:26.482234Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-04T17:29:26.482719Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-04T17:29:26.482852Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-04-04T17:29:26.494920Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-04-04T17:29:26.495245Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-04T17:29:26.495316Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-04-04T17:29:26.507100Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-04-04T17:29:26.507399Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-04-04T17:29:26.507438Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2025-04-04T17:29:26.507501Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-04-04T17:29:26.507711Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:26.507792Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] 
TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 2025-04-04T17:29:26.520059Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:26.956397Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-04-04T17:29:26.956484Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-04-04T17:29:26.964358Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-04-04T17:29:26.968132Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-04-04T17:29:26.968194Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-04-04T17:29:26.969644Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2025-04-04T17:29:26.969796Z node 4 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2025-04-04T17:29:26.992699Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-04-04T17:29:26.993074Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:26.993193Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-04T17:29:27.005356Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:27.005708Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:27.005817Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-04T17:29:27.018054Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:27.018480Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-04-04T17:29:27.018590Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2025-04-04T17:29:27.030769Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:27.031162Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-04-04T17:29:27.031218Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:27.031284Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:27.031635Z node 4 :SEQUENCESHARD TRACE: 
[sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2025-04-04T17:29:27.031740Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-04-04T17:29:27.043912Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-04-04T17:29:27.044289Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:27.044397Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-04-04T17:29:27.056662Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-04-04T17:29:27.057053Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-04-04T17:29:27.057156Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-04-04T17:29:27.069396Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete >> TestJsonParser::NumbersValidation >> TestJsonParser::NumbersValidation [GOOD] >> DataShardCompaction::CompactBorrowed [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus >> TBlobStorageProxyTest::TestEmptyRange >> TestJsonParser::StringsValidation |97.7%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-04T17:27:49.064255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511010901952174:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:49.064498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:27:49.189230Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ff8/r3tmp/tmpELfCGx/pdisk_1.dat 2025-04-04T17:27:49.321112Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2771, node 1 2025-04-04T17:27:49.381536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000ff8/r3tmp/yandexfZ2x7H.tmp 2025-04-04T17:27:49.381570Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000ff8/r3tmp/yandexfZ2x7H.tmp 2025-04-04T17:27:49.381731Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000ff8/r3tmp/yandexfZ2x7H.tmp 2025-04-04T17:27:49.381871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:49.397320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:49.397467Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:49.399305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:49.419261Z INFO: TTestServer started on Port 29566 GrpcPort 2771 TClient is connected to server localhost:29566 PQClient connected to localhost:2771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:27:49.642823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:27:49.665770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:27:51.294382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511019491887583:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:51.294475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511019491887602:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:51.294482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:51.299018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-04-04T17:27:51.300740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511019491887634:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:51.300850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:51.309777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511019491887604:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-04-04T17:27:51.463573Z node 1 :TX_PROXY ERROR: Actor# [1:7489511019491887660:2446] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:27:51.490083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T17:27:51.498981Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511019491887676:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:27:51.500790Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWIyYjM3OS03MjllMWU3Zi05ZmY4NzdhNC1kMDk1YWE5Ng==, ActorId: [1:7489511019491887572:2336], ActorState: ExecuteState, TraceId: 01jr0xgfqn7p3v20t42wvwv16h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:27:51.503168Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:27:51.520988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-04-04T17:27:51.578618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489511019491887954:2625] 2025-04-04T17:27:54.064116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511010901952174:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:27:54.064241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:27:57.809141Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:27:57.823983Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:27:57.825455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489511045261692035:2799], Recipient [1:7489511010901952594:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:27:57.825509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:27:57.825526Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:27:57.825563Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489511045261692031:2796], Recipient [1:7489511010901952594:2186]: {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2025-04-04T17:27:57.825579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:27:57.867692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976715673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:27:57.868127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T17:27:57.868401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:27:57.868446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:27:57.868489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new sh ... items to delete old stuff 2025-04-04T17:29:26.571997Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:26.572020Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:26.572050Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:26.572172Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511425014349210:2733], Partition 1, Sender [0:0:0], Recipient [5:7489511425014349294:2744], Cookie: 0 2025-04-04T17:29:26.572218Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511425014349294:2744]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.572233Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.572261Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:26.572293Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:26.572312Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:26.572331Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:26.572684Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511425014349216:2734], Partition 2, Sender [0:0:0], Recipient [5:7489511425014349305:2748], Cookie: 0 2025-04-04T17:29:26.572729Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511425014349305:2748]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.572744Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.572766Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:26.572794Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:26.572809Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:26.572826Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >>>>> Session-0 Wait partitions {{test-topic : [0, 1, 2]}, } After split must read all partitions because parent partition is empty >>>>> Session-0 Partitions {{test-topic : [0, 1, 2]}, } received #2 >>>>> Session-0 Release() >>>>> Session-0 Release() >>>>> Session-0 Closing reading session 2025-04-04T17:29:26.607232Z :INFO: [/Root] [/Root] [67567764-3e00de3b-516fade7-aa4f7305] Closing read session. 
Close timeout: 5.000000s 2025-04-04T17:29:26.607324Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:2:3:0:0 -:test-topic:1:2:0:0 -:test-topic:0:1:0:0 2025-04-04T17:29:26.607370Z :INFO: [/Root] [/Root] [67567764-3e00de3b-516fade7-aa4f7305] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2232 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } >>>>> Session-0 Received TSessionClosedEvent message 2025-04-04T17:29:26.608406Z :INFO: [/Root] [/Root] [67567764-3e00de3b-516fade7-aa4f7305] Closing read session. Close timeout: 0.000000s SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2025-04-04T17:29:26.608469Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:2:3:0:0 -:test-topic:1:2:0:0 -:test-topic:0:1:0:0 2025-04-04T17:29:26.608523Z :INFO: [/Root] [/Root] [67567764-3e00de3b-516fade7-aa4f7305] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2233 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:29:26.608609Z :NOTICE: [/Root] [/Root] [67567764-3e00de3b-516fade7-aa4f7305] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:29:26.608700Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_8462680567903361109_v1 grpc read done: success# 0, data# { } 2025-04-04T17:29:26.608740Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_8462680567903361109_v1 grpc read failed 2025-04-04T17:29:26.608774Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_8462680567903361109_v1 grpc closed 2025-04-04T17:29:26.608829Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_8462680567903361109_v1 is DEAD 2025-04-04T17:29:26.609167Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7489511425014349348:3406], Recipient [5:7489511425014349210:2733]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609207Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609229Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609249Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_5_1_8462680567903361109_v1 2025-04-04T17:29:26.609278Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [5:7489511425014349346:2757] destroyed 2025-04-04T17:29:26.609324Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7489511425014349355:3411], Recipient [5:7489511425014349216:2734]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609338Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609349Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609363Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_8462680567903361109_v1 2025-04-04T17:29:26.609380Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7489511425014349343:2756] destroyed 2025-04-04T17:29:26.609419Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7489511420719381821:3273], Recipient [5:7489511403539511667:2454]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609433Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609446Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:26.609459Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_8462680567903361109_v1 2025-04-04T17:29:26.609479Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7489511420719381820:2715] destroyed 2025-04-04T17:29:26.609522Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_8462680567903361109_v1 2025-04-04T17:29:26.609542Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_8462680567903361109_v1 2025-04-04T17:29:26.609557Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_8462680567903361109_v1 2025-04-04T17:29:26.609898Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7489511420719381817:2712] disconnected; active server actors: 1 2025-04-04T17:29:26.609931Z node 5 
:PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7489511420719381817:2712] client test-consumer disconnected session test-consumer_5_1_8462680567903361109_v1 2025-04-04T17:29:26.672098Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511403539511667:2454], Partition 0, Sender [0:0:0], Recipient [5:7489511403539511728:2458], Cookie: 0 2025-04-04T17:29:26.672194Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511403539511728:2458]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.672220Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.672261Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:26.672333Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:26.672361Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:26.672385Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:26.672536Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511425014349210:2733], Partition 1, Sender [0:0:0], Recipient [5:7489511425014349294:2744], Cookie: 0 2025-04-04T17:29:26.672571Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511425014349294:2744]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.672584Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.672607Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:26.672628Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:26.672638Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:26.672650Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:26.673057Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511425014349216:2734], Partition 2, Sender [0:0:0], Recipient [5:7489511425014349305:2748], Cookie: 0 2025-04-04T17:29:26.673100Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511425014349305:2748]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.673112Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:26.673138Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:26.673168Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:26.673196Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 
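
The tail of this PERSQUEUE test exercises both shutdown paths of a read session: an explicit close with a 5-second deadline yields `SessionClosed { Status: SUCCESS ... "Session was gracefully closed" }`, and tearing the session down afterwards surfaces `Status: ABORTED ... "Aborted"`. A minimal sketch of that sequence with the YDB C++ Topic SDK follows; the header path and driver wiring are assumptions (they vary across SDK versions), while the topic and consumer names are taken from the trace:

```cpp
#include <ydb-cpp-sdk/client/topic/client.h>  // assumption: header path differs between SDK versions

using namespace NYdb;
using namespace NYdb::NTopic;

void CloseReadSession(TDriver& driver) {
    TTopicClient client(driver);

    // Names taken from the trace: consumer "test-consumer", topic "test-topic".
    auto settings = TReadSessionSettings()
        .ConsumerName("test-consumer")
        .AppendTopics("test-topic");

    std::shared_ptr<IReadSession> session = client.CreateReadSession(settings);

    // Graceful path: waits up to the timeout for the server to confirm.
    // In the log this produces "Session was gracefully closed" (SUCCESS).
    session->Close(TDuration::Seconds(5));

    // Dropping the last reference aborts whatever is still in flight;
    // the log reports it as SessionClosed { Status: ABORTED Issues: "Aborted" }.
    session.reset();
}
```
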
2025-04-04T17:29:26.673220Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] >> TTxDataShardTestInit::TestGetShardStateAfterInitialization >> TestJsonParser::StringsValidation [GOOD] >> ConfigGRPCService::ReplaceConfig >> TestJsonParser::NestedJsonValidation >> BasicExample::BasicExample >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> Coordinator::ReadStepSubscribe [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TestJsonParser::BoolsValidation [GOOD] >> Coordinator::LastStepSubscribe >> TIndexProcesorTests::TestReindexSingleQueue >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> TTxDataShardTestInit::TestTableHasPath >> TestJsonParser::JsonStructureValidation |97.7%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sample_k/unittest >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] Test command err: 2025-04-04T17:29:21.898176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511407843761705:2063];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:21.898283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007cd/r3tmp/tmpveVD02/pdisk_1.dat 2025-04-04T17:29:22.276262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:22.320442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:22.321001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:22.325778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:22.376680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:22.417090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:22.454757Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7489511412138729584:2295] 2025-04-04T17:29:22.455073Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:22.472074Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:22.472135Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:22.477433Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:22.477485Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:22.477535Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:22.479329Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:22.479401Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:22.479435Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7489511412138729598:2295] in generation 1 2025-04-04T17:29:22.481084Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:22.533959Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:22.535032Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:22.535104Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7489511412138729602:2296] 2025-04-04T17:29:22.535115Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:22.535141Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:22.535153Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:22.536017Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511412138729581:2295], serverId# [1:7489511412138729601:2304], sessionId# [0:0:0] 2025-04-04T17:29:22.536128Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:22.536312Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:22.536346Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:22.536374Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:22.536439Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:22.536469Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:22.536489Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:22.537463Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:22.537578Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-04T17:29:22.539064Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:22.539759Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:22.541092Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:22.544812Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511412138729616:2312], serverId# [1:7489511412138729617:2313], sessionId# [0:0:0] 2025-04-04T17:29:22.550355Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743787762591 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787762591 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:29:22.550390Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:22.550573Z node 1 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:22.550654Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:22.550674Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:22.550717Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743787762591:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-04T17:29:22.550989Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787762591:281474976710657 keys extracted: 0 2025-04-04T17:29:22.551161Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:22.551351Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:22.551429Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:29:22.554677Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:22.555129Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:22.556286Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787762591} 2025-04-04T17:29:22.556330Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:22.556376Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743787762598 2025-04-04T17:29:22.556392Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:22.556452Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743787762598 2025-04-04T17:29:22.556507Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:22.556523Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:22.556547Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:29:22.556622Z node 1 :TX_DATASHARD DEBUG: Complete [1743787762591 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7489511412138729416:2188], exec latency: 3 ms, propose latency: 5 ms 2025-04-04T17:29:22.556659Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-04T17:29:22.556686Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:22.560789Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-04T17:29:22.560870Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:29:24.613086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511420728664302:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:24.613110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511420728664313:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:24.613192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:24.622161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:24.626744Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:24.634101Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:24.635703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511420728664316:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T17:29:24.801888Z node 1 :TX_PROXY ERROR: Actor# [1:7489511420728664367:2391] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:25.478941Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jr0xkaw2ate8qcyypdm3e1e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU0NTUyNDMtOTZhYTBmYzQtNTQ5OGEzZGEtMTNhM2FmMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:25.491302Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511425023631693:2405], serverId# [1:7489511425023631694:2406], sessionId# [0:0:0] 2025-04-04T17:29:25.492430Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:25.501017Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-0 ... osed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:26.247628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:26.247707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:26.249789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:26.251296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:26.259328Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:7489511427264539454:2295] 2025-04-04T17:29:26.259636Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:26.269480Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:26.269550Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:26.271046Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:26.271100Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:26.271133Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:26.271410Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:26.271455Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:26.271495Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:7489511427264539472:2295] in generation 1 2025-04-04T17:29:26.272786Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:26.272817Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:26.272885Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:26.272918Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:7489511427264539474:2296] 
2025-04-04T17:29:26.272946Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:26.272959Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:26.272972Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:26.273053Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:26.273131Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:26.273222Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539458:2295], serverId# [2:7489511427264539464:2299], sessionId# [0:0:0] 2025-04-04T17:29:26.273255Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:26.273275Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:26.273288Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:26.273304Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:26.273546Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:26.273756Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:26.273843Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:26.275264Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:26.275343Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:26.275398Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:26.277680Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539488:2311], serverId# [2:7489511427264539489:2312], sessionId# [0:0:0] 2025-04-04T17:29:26.277975Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1743787766322 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787766322 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:29:26.278007Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:26.278127Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:26.278153Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:26.278172Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743787766322:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:29:26.278412Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787766322:281474976715657 keys extracted: 0 2025-04-04T17:29:26.278588Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:26.278704Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 
72075186224037888 2025-04-04T17:29:26.278764Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:29:26.279259Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:26.279658Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:26.280938Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:26.281048Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743787766321 2025-04-04T17:29:26.281079Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:26.281128Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787766322} 2025-04-04T17:29:26.281168Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:26.281238Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743787766329 2025-04-04T17:29:26.281625Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:26.281653Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:26.281667Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:29:26.281709Z node 2 :TX_DATASHARD DEBUG: Complete [1743787766322 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7489511427264539237:2151], exec latency: 0 ms, propose latency: 3 ms 2025-04-04T17:29:26.281741Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:29:26.281774Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:26.282793Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:29:26.282857Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:29:26.286858Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539527:2340], serverId# [2:7489511427264539528:2341], sessionId# [0:0:0] 2025-04-04T17:29:26.286963Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:26.287231Z node 2 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715658 at tablet 72075186224037888 2025-04-04T17:29:26.288590Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:26.289823Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1743787766336 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787766336 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:29:26.289855Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
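
A side note on the numbers in this DATASHARD trace: the mediator plan steps (`Step: 1743787766336`, `next step 1743787766329`) look like unix timestamps in milliseconds, and indeed decode to the wall-clock times of the surrounding log lines. This is an observation about this particular log, not a documented invariant; a quick check:

```cpp
#include <cstdint>
#include <cstdio>
#include <ctime>

int main() {
    // Plan step from the trace above: "Planned transaction ... at step 1743787766336".
    int64_t step = 1743787766336;
    std::time_t sec = step / 1000;  // interpret as milliseconds since epoch
    char buf[32];
    std::strftime(buf, sizeof(buf), "%Y-%m-%dT%H:%M:%S", std::gmtime(&sec));
    std::printf("%s.%03dZ\n", buf, static_cast<int>(step % 1000));
    // Prints 2025-04-04T17:29:26.336Z, matching the nearby trace timestamps.
}
```
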
2025-04-04T17:29:26.290013Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:26.290045Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:26.290065Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743787766336:281474976715658] in PlanQueue unit at 72075186224037888 2025-04-04T17:29:26.290224Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787766336:281474976715658 keys extracted: 0 2025-04-04T17:29:26.290604Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:26.291222Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787766336} 2025-04-04T17:29:26.291268Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:26.291308Z node 2 :TX_DATASHARD DEBUG: Complete [1743787766336 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7489511427264539522:2336], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:29:26.291332Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:26.293381Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539538:2351], serverId# [2:7489511427264539539:2352], sessionId# [0:0:0] 2025-04-04T17:29:26.295127Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539543:2356], serverId# [2:7489511427264539544:2357], sessionId# [0:0:0] 2025-04-04T17:29:26.296821Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539548:2361], serverId# [2:7489511427264539549:2362], sessionId# [0:0:0] 2025-04-04T17:29:26.298462Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539553:2366], serverId# [2:7489511427264539554:2367], sessionId# [0:0:0] 2025-04-04T17:29:26.299920Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539558:2371], serverId# [2:7489511427264539559:2372], sessionId# [0:0:0] 2025-04-04T17:29:26.301333Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7489511427264539563:2376], serverId# [2:7489511427264539564:2377], sessionId# [0:0:0] |97.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_sample_k/unittest >> TestJsonParser::JsonStructureValidation [GOOD] >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> DataShardReassign::AutoReassignOnYellowFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect [FAIL] Test command err: 2025-04-04T17:28:45.978357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:45.978932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:45.979175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:45.980988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:45.981205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:45.981451Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164a/r3tmp/tmpLqf07k/pdisk_1.dat 2025-04-04T17:28:46.387744Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20339, node 1 TClient is connected to server localhost:7750 2025-04-04T17:28:46.749290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:46.749332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:46.749364Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:46.749518Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:53.225434Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:697:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:53.225903Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:53.226285Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:53.227191Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:694:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:53.227272Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:53.227601Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164a/r3tmp/tmpcOgUQ1/pdisk_1.dat 2025-04-04T17:28:53.535998Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25641, node 3 TClient is connected to server localhost:4173 2025-04-04T17:28:53.919032Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:53.919090Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:53.919125Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:53.919395Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:58.103145Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:444:2409], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:58.103516Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:58.103604Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164a/r3tmp/tmpJ6lc53/pdisk_1.dat 2025-04-04T17:28:58.400213Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5569, node 5 TClient is connected to server localhost:31489 2025-04-04T17:28:58.814584Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:58.814654Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:58.814688Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:58.815455Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:03.793759Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:449:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:03.794093Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:03.794225Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164a/r3tmp/tmpaJLyga/pdisk_1.dat 2025-04-04T17:29:04.145106Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5061, node 7 TClient is connected to server localhost:21477 2025-04-04T17:29:04.628620Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:04.628696Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:04.628737Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:04.629044Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:04.703082Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:04.703250Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:04.724045Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:16.409604Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:449:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:16.409968Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:16.410138Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00164a/r3tmp/tmp6VXPNs/pdisk_1.dat 2025-04-04T17:29:16.751680Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64391, node 9 TClient is connected to server localhost:24058 2025-04-04T17:29:17.164911Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:17.164965Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:17.165009Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:17.165382Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration assertion failed at ydb/core/health_check/health_check_ut.cpp:2170, virtual void NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext &): (issue_log.message() == "Database has storage issues") failed: ("Database has multiple issues" != Database has storage issues) , with diff: ("|)Database has (mul|s)t(ipl|orag)e issues("|) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1840F6E0) NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext&)+45497 (0x17A7AE29) std::__y1::__function::__func, void ()>::operator()()+280 (0x17B88478) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x184453E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x18416109) NKikimr::NTestSuiteTHealthCheckTest::TCurrentTest::Execute()+1094 (0x17B87466) NUnitTest::TTestFactory::Execute()+2438 (0x184179D6) NUnitTest::RunMain(int, char**)+5213 (0x1843FD1D) ??+0 (0x7FB9EC934D90) __libc_start_main+128 (0x7FB9EC934E40) _start+41 (0x1591C029) >> TestPurecalcFilter::Simple1 >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit |97.7%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed >> KqpTpch::Query01 >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> BlobDepot::DecommitVerifiedRandom [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig |97.8%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> BlobDepot::DecommitVerifiedRandom [GOOD] Test command err: Mersenne random seed 2163068654 RandomSeed# 10130762557381161929 Mersenne random seed 2953139404 Mersenne random seed 3911639758 Mersenne random seed 674586211 Mersenne random seed 1210199638 2025-04-04T17:29:07.553601Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.553762Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:2:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.553805Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:3:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.553862Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.553916Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:7:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.553957Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.554005Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:5:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.554042Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:4:0]: TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.554350Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [7ea19534cae17e32] Result# TEvPutResult {Id# [15:1:1:0:1:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-04-04T17:29:07.555621Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.555786Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.555829Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:2:0]: TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.555876Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:5:0]: TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.555921Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.555981Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:4:0]: TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.556021Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:7:0]: 
TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.556060Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:3:0]: TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.575977Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.576497Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:4:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.576593Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:5:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.576715Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:3:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.576791Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:2:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.576864Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:7:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.576934Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:6:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.577006Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:1:0]: TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-04-04T17:29:07.577406Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [597f7368e398af5b] Result# TEvPutResult {Id# [16:2:2:0:2:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Mersenne random seed 1308258847 Read over the barrier, blob id# [15:1:1:0:1:100:0] Read over the barrier, blob id# [15:1:2:0:1:100:0] 2025-04-04T17:29:08.964554Z 1 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-04T17:29:08.964988Z 2 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-04T17:29:08.965146Z 3 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-04T17:29:08.965258Z 4 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-04T17:29:08.965347Z 5 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-04T17:29:08.965461Z 6 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: decreasing 
barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-04T17:29:08.965549Z 7 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-04-04T17:29:08.965647Z 8 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 Put over the barrier, blob id# [15:1:1:0:99:100:0] Put over the barrier, blob id# [15:1:3:0:99:100:0] 2025-04-04T17:29:09.003483Z 1 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-04T17:29:09.003837Z 2 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-04T17:29:09.003928Z 3 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-04T17:29:09.004004Z 4 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-04T17:29:09.004072Z 5 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-04T17:29:09.004142Z 6 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-04T17:29:09.004228Z 7 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-04-04T17:29:09.004317Z 8 00h00m25.012048s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 Read over the barrier, blob id# [15:1:5:0:1:100:0] Read over the barrier, blob id# [15:1:6:0:1:100:0] Read over the barrier, blob id# [15:1:19:0:1:100:0] Read over the barrier, blob id# [15:2:1:0:1:100:0] Read over the barrier, blob id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:1:17:0:1:100:0] TEvRange returned collected blob with id# [15:1:19:0:1:100:0] TEvRange returned collected blob with id# [15:2:1:0:1:100:0] TEvRange returned collected blob with id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:2:3:0:1:100:0] TEvRange returned collected blob with id# [15:2:4:0:1:100:0] TEvRange returned collected blob with id# [15:2:5:0:1:100:0] TEvRange returned collected blob with id# [15:2:6:0:1:100:0] Read over the barrier, blob id# [100:1:3:0:1:100:0] Read over the barrier, blob id# [100:1:5:0:1:100:0] Read over the barrier, blob id# [100:1:6:0:1:100:0] Read over the barrier, blob id# [100:2:1:0:1:100:0] Read over the barrier, blob id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:3:0:1:100:0] TEvRange returned collected blob 
with id# [100:2:4:0:1:100:0] TEvRange returned collected blob with id# [100:2:5:0:1:100:0] TEvRange returned collected blob with id# [100:2:6:0:1:100:0] Mersenne random seed 3099101357 Read over the barrier, blob id# [102:1:2:1:3549508:223:0] TEvRange returned collected blob with id# [102:1:2:2:14623428:280:0] Read over the barrier, blob id# [102:1:2:1:3549508:223:0] Read over the barrier, blob id# [101:2:5:1:1106056:615:0] Read over the barrier, blob id# [102:1:2:2:14623428:280:0] Read over the barrier, blob id# [102:1:2:1:3549508:223:0] Read over the barrier, blob id# [101:2:5:1:1106056:615:0] Read over the barrier, blob id# [101:3:8:1:13017174:423:0] Read over the barrier, blob id# [101:2:5:1:1106056:615:0] Read over the barrier, blob id# [101:2:5:1:1106056:615:0] TEvRange returned collected blob with id# [101:3:8:1:13017174:423:0] Read over the barrier, blob id# [101:3:8:1:13017174:423:0] Read over the barrier, blob id# [100:2:1:2:3693976:403:0] Read over the barrier, blob id# [101:2:5:1:1106056:615:0] TEvRange returned collected blob with id# [100:2:1:1:5871104:803:0] TEvRange returned collected blob with id# [100:2:1:1:11188523:178:0] Read over the barrier, blob id# [100:2:1:1:11188523:178:0] Read over the barrier, blob id# [101:2:5:1:1106056:615:0] Read over the barrier, blob id# [101:2:5:1:1106056:615:0] Read over the barrier, blob id# [100:2:1:1:5871104:803:0] Read over the barrier, blob id# [102:1:2:2:14623428:280:0] Read over the barrier, blob id# [102:1:2:2:14623428:280:0] Read over the barrier, blob id# [10 ... s ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 2 22 3 soft] existing barrier# 1:0 new barrier# 2:0 2025-04-04T17:29:29.173260Z 6 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 2 22 3 soft] existing barrier# 1:0 new barrier# 2:0 2025-04-04T17:29:29.173404Z 7 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 2 22 3 soft] existing barrier# 1:0 new barrier# 2:0 2025-04-04T17:29:29.173554Z 8 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: incorrect collect cmd: tabletID# 15 key# [15 2 22 3 soft] existing barrier# 1:0 new barrier# 2:0 TEvRange returned collected blob with id# [15:1:3:2:6627539:904:0] Read over the barrier, blob id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [16:2:2:2:15018599:277:0] Read over the barrier, blob id# [16:2:2:2:15018599:277:0] Read over the barrier, blob id# [15:1:3:0:10363292:682:0] Read over the barrier, blob id# [17:1:1:2:5507925:106:0] Read over the barrier, blob id# [17:1:1:2:13943012:264:0] 2025-04-04T17:29:29.472912Z 6 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 2025-04-04T17:29:29.473343Z 1 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 2025-04-04T17:29:29.473501Z 2 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 2025-04-04T17:29:29.473646Z 3 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# Barriers 
ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 2025-04-04T17:29:29.473821Z 4 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 2025-04-04T17:29:29.473991Z 5 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 2025-04-04T17:29:29.474152Z 7 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 2025-04-04T17:29:29.474312Z 8 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 1 20 0 soft] barrier# 1:1 new key# [15 1 23 3 soft] barrier# 0:2 Read over the barrier, blob id# [15:1:3:2:6627539:904:0] 2025-04-04T17:29:29.757949Z 1 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 2025-04-04T17:29:29.758967Z 2 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 2025-04-04T17:29:29.759128Z 3 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 2025-04-04T17:29:29.759299Z 4 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 2025-04-04T17:29:29.759456Z 5 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 2025-04-04T17:29:29.759618Z 6 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 2025-04-04T17:29:29.759788Z 7 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 2025-04-04T17:29:29.759974Z 8 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 2 28 0 soft] barrier# 4:3 new key# [16 2 32 4 soft] barrier# 3:2 TEvRange returned collected blob with id# [16:2:2:2:15018599:277:0] TEvRange returned collected blob with id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [17:1:1:2:13943012:264:0] TEvRange returned collected blob with id# [15:1:3:2:6627539:904:0] Read over the barrier, blob id# [17:1:1:2:5507925:106:0] Read over the barrier, blob id# [15:1:2:1:6185562:148:0] Read over the barrier, blob id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [16:2:3:0:487043:801:0] Read over the barrier, blob id# [16:2:3:2:860080:350:0] 2025-04-04T17:29:30.184753Z 4 00h00m25.013072s :BS_HULLRECS 
CRIT: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 2025-04-04T17:29:30.185453Z 1 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 2025-04-04T17:29:30.185639Z 2 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 2025-04-04T17:29:30.185813Z 3 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 2025-04-04T17:29:30.185981Z 5 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 2025-04-04T17:29:30.186142Z 6 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 2025-04-04T17:29:30.186325Z 7 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 2025-04-04T17:29:30.186486Z 8 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 1 29 2 hard] barrier# 4:0 new key# [16 1 33 0 hard] barrier# 3:1 TEvRange returned collected blob with id# [17:1:1:2:5507925:106:0] TEvRange returned collected blob with id# [17:1:1:2:13943012:264:0] Read over the barrier, blob id# [15:1:3:0:10363292:682:0] Read over the barrier, blob id# [15:2:4:1:336992:64:0] Read over the barrier, blob id# [15:2:4:1:336992:64:0] Read over the barrier, blob id# [15:2:4:1:336992:64:0] Read over the barrier, blob id# [15:1:3:2:6627539:904:0] Read over the barrier, blob id# [15:2:4:1:336992:64:0] 2025-04-04T17:29:30.607859Z 4 00h00m25.013072s :BS_HULLRECS ERROR: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 2025-04-04T17:29:30.608602Z 1 00h00m25.013072s :BS_HULLRECS ERROR: VDISK[82000000:_:0:0:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 2025-04-04T17:29:30.608770Z 2 00h00m25.013072s :BS_HULLRECS ERROR: VDISK[82000000:_:0:1:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 2025-04-04T17:29:30.608929Z 3 00h00m25.013072s :BS_HULLRECS ERROR: VDISK[82000000:_:0:2:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 2025-04-04T17:29:30.609125Z 5 00h00m25.013072s :BS_HULLRECS ERROR: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 2025-04-04T17:29:30.609275Z 6 00h00m25.013072s :BS_HULLRECS ERROR: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 2025-04-04T17:29:30.609440Z 7 00h00m25.013072s :BS_HULLRECS ERROR: 
VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 2025-04-04T17:29:30.609613Z 8 00h00m25.013072s :BS_HULLRECS ERROR: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [17 1 30 3 hard] new key# [17 1 30 1 hard] new barrier# 0:2 TEvRange returned collected blob with id# [17:1:1:2:5507925:106:0] TEvRange returned collected blob with id# [17:1:1:2:13943012:264:0] Read over the barrier, blob id# [15:1:3:0:10363292:682:0] Read over the barrier, blob id# [15:1:3:0:10363292:682:0] Read over the barrier, blob id# [17:1:1:2:13943012:264:0] TEvRange returned collected blob with id# [16:2:2:2:15018599:277:0] TEvRange returned collected blob with id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [16:2:3:2:860080:350:0] Read over the barrier, blob id# [15:1:3:0:10363292:682:0] TEvRange returned collected blob with id# [17:1:1:2:5507925:106:0] TEvRange returned collected blob with id# [17:1:1:2:13943012:264:0] 2025-04-04T17:29:31.102105Z 7 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:6:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 2025-04-04T17:29:31.102514Z 1 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:0:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 2025-04-04T17:29:31.102714Z 2 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:1:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 2025-04-04T17:29:31.102913Z 3 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:2:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 2025-04-04T17:29:31.103089Z 4 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:3:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 2025-04-04T17:29:31.103261Z 5 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:4:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 2025-04-04T17:29:31.103431Z 6 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:5:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 2025-04-04T17:29:31.103627Z 8 00h00m25.013072s :BS_HULLRECS CRIT: VDISK[82000000:_:0:7:0]: Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [17 1 30 3 hard] barrier# 1:1 new key# [17 1 34 0 hard] barrier# 1:0 Read over the barrier, blob id# [16:2:2:2:15018599:277:0] |97.8%| [TS] {RESULT} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |97.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataCleanup::MultipleDataCleanupsWithOldGenerations [GOOD] >> DataCleanup::ForceDataCleanupWithRestart >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> TGenerateQueueIdTests::MakeQueueIdBasic [GOOD] >> TParseParamsTests::CreateUser [GOOD] >> TParseParamsTests::ChangeMessageVisibilityBatchRequest [GOOD] >> TParseParamsTests::DeleteMessageBatchRequest [GOOD] >> TParseParamsTests::MessageBody [GOOD] >> TParseParamsTests::SendMessageBatchRequest [GOOD] >> TParseParamsTests::DeleteQueueBatchRequest [GOOD] >> TParseParamsTests::PurgeQueueBatchRequest [GOOD] >> TParseParamsTests::GetQueueAttributesBatchRequest [GOOD] >> TParseParamsTests::UnnumberedAttribute [GOOD] >> TParseParamsTests::UnnumberedAttributeName [GOOD] >> TParseParamsTests::FailsOnInvalidDeduplicationId [GOOD] >> TParseParamsTests::FailsOnInvalidGroupId [GOOD] >> TParseParamsTests::FailsOnInvalidReceiveRequestAttemptId [GOOD] >> TParseParamsTests::FailsOnInvalidMaxNumberOfMessages [GOOD] >> TParseParamsTests::FailsOnInvalidWaitTime [GOOD] >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2025-04-04T17:28:41.813482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511236459777637:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:41.813722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fc0/r3tmp/tmpNSJNiM/pdisk_1.dat 2025-04-04T17:28:41.965470Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:28:42.111781Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:42.112998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:42.113089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:42.118697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11943, node 1 2025-04-04T17:28:42.201393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000fc0/r3tmp/yandexTTCP6K.tmp 2025-04-04T17:28:42.201427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000fc0/r3tmp/yandexTTCP6K.tmp 2025-04-04T17:28:42.201557Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000fc0/r3tmp/yandexTTCP6K.tmp 2025-04-04T17:28:42.201690Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:42.252426Z INFO: TTestServer started on Port 29545 GrpcPort 11943 TClient is connected to server localhost:29545 PQClient connected to localhost:11943 WaitRootIsUp 'Root'... 
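The assertion failure recorded above (health_check_ut.cpp:2170) is the one hard failure in this stretch of the run: the test expected exactly "Database has storage issues", while the health check reported "Database has multiple issues", and the unittest framework's inline diff marks the divergent fragments. As a sketch only, the failing check has roughly this shape in the unittest framework used here; RunHealthCheck is a hypothetical stand-in for the test's actual cluster setup and is not the real test body:

    // Approximate shape of the failing check; RunHealthCheck is illustrative only.
    #include <library/cpp/testing/unittest/registar.h>

    Y_UNIT_TEST(LayoutIncorrect) {
        const auto issue_log = RunHealthCheck();  // hypothetical: boots a test cluster,
                                                  // runs SelfCheck, returns the top issue
        UNIT_ASSERT_VALUES_EQUAL(issue_log.message(), "Database has storage issues");
    }
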
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:42.529629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:28:42.558511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:28:44.740487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511249344680121:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:44.740487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511249344680144:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:44.740597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:44.745049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:28:44.759907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511249344680150:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:28:44.939640Z node 1 :TX_PROXY ERROR: Actor# [1:7489511249344680216:2449] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:44.965479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:28:45.001985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:45.006463Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511249344680231:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:45.008436Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzIzOTY4NTAtYmQ5OWRhMzctNWE2MDkxNjAtY2YzOWYyODI=, ActorId: [1:7489511249344680118:2336], ActorState: ExecuteState, TraceId: 01jr0xj3xhaceeep8y2rena2x8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:45.012280Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:45.076851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489511253639647802:2628] 2025-04-04T17:28:46.812544Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511236459777637:2262];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:46.812638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:28:51.195747Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:28:51.218025Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:28:51.219368Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489511279409451865:2783], Recipient [1:7489511240754745159:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:51.219403Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:51.219422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:28:51.219471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489511279409451861:2780], Recipient [1:7489511240754745159:2196]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:28:51.219492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:28:51.293424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "autoscalit-topic" TotalGroupCount: 5 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 5 MaxPartitionCount: 10 ScaleThresholdSeconds: 500 ScaleUpPartitionWriteSpeedThresholdPercent: 80 ScaleDownPartitionWriteSpeedThresholdPercent: 20 PartitionStrategyType: CAN_SPLIT } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:28:51.293832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/autoscalit-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:28:51.294150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: autoscalit-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:28:51.294191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:28:51.294228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-04T17:28:51.294487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-04T17:28:51.294512Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-04-04T17:28:51.294535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-04-04T17:28:51.294560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Incremen ... 6284Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673 State EXECUTED FrontTxId 281474976715673 2025-04-04T17:29:31.106304Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T17:29:31.106323Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, NewState WAIT_RS_ACKS 2025-04-04T17:29:31.106339Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T17:29:31.106363Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-04-04T17:29:31.106372Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-04-04T17:29:31.106388Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-04-04T17:29:31.106406Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976715673 to the list for deletion 2025-04-04T17:29:31.106431Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, NewState DELETING 2025-04-04T17:29:31.106456Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976715673 2025-04-04T17:29:31.106456Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [5:7489511405010581629:2235], Recipient [5:7489511405010581459:2140]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 15] Version: 2 } 2025-04-04T17:29:31.106474Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-04-04T17:29:31.106513Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T17:29:31.106519Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-04T17:29:31.106573Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7489511447960255638:2473], Recipient [5:7489511447960255638:2473]: NKikimr::TEvKeyValue::TEvCollect 2025-04-04T17:29:31.106575Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 15 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715673 2025-04-04T17:29:31.106584Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715673 2025-04-04T17:29:31.106598Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 2 2025-04-04T17:29:31.106611Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2025-04-04T17:29:31.106659Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 
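The recurring "Scheduled retry for error: Retry LookupError for table .metadata/..." and "Resource pool default not found" warnings above are startup races rather than failures: tables under /Root/.metadata are created lazily, so the first lookups race their creation and the actors schedule retries. A minimal sketch of that retry shape, assuming hypothetical RunLookup/TLookupResult types rather than YDB's real metadata-provider API:

    // Sketch only: retry a metadata lookup while the backing table is still being created.
    #include <chrono>
    #include <functional>
    #include <thread>

    enum class EStatus { Ok, LookupError };
    struct TLookupResult { EStatus Status = EStatus::LookupError; };

    TLookupResult LookupWithRetry(const std::function<TLookupResult()>& runLookup,
                                  int attempts = 5) {
        auto delay = std::chrono::milliseconds(50);
        for (int i = 0; i < attempts; ++i) {
            TLookupResult r = runLookup();
            if (r.Status != EStatus::LookupError) {
                return r;                        // table exists now (or a different error)
            }
            std::this_thread::sleep_for(delay);  // "Scheduled retry for error", as logged
            delay *= 2;                          // back off between attempts
        }
        return TLookupResult{};                  // still LookupError after all attempts
    }
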
2025-04-04T17:29:31.106677Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [5:7489511447960255606:2470] 2025-04-04T17:29:31.106698Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:29:31.106736Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [5:7489511447960255638:2473], Recipient [5:7489511447960255638:2473]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715673" IncludeFrom: true To: "tx_00000281474976715673" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\373\211\347\216\3402\030\231\247\200\200\200\200@(\240\215\0060\373\211\347\216\34028\231\247\200\200\200\200@" StorageChannel: INLINE } 2025-04-04T17:29:31.106804Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-04T17:29:31.106822Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:29:31.106862Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7489511447960255757:2482], Recipient [5:7489511447960255638:2473]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-04T17:29:31.106908Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-04-04T17:29:31.106926Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:29:31.106985Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [5:7489511447960255606:2470] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-04-04T17:29:31.107111Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7489511447960255758:2473], Recipient [5:7489511447960255638:2473]: NKikimr::TEvKeyValue::TEvIntermediate 2025-04-04T17:29:31.107183Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7489511447960255613:2821], Recipient [5:7489511405010581459:2140]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:31.107208Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:31.107219Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-04T17:29:31.107555Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7489511447960255638:2473], Recipient [5:7489511447960255638:2473]: NKikimr::TEvKeyValue::TEvCollect 2025-04-04T17:29:31.107682Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7489511447960255638:2473], Recipient [5:7489511447960255638:2473]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-04-04T17:29:31.107705Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-04-04T17:29:31.107721Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T17:29:31.107738Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-04-04T17:29:31.107754Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, State DELETING 2025-04-04T17:29:31.107775Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 
281474976715673 2025-04-04T17:29:31.108002Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7489511447960255762:2483], Recipient [5:7489511447960255638:2473]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-04-04T17:29:31.115637Z node 5 :PQ_READ_PROXY DEBUG: new alter topic request 2025-04-04T17:29:31.117359Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [5:7489511447960255771:2911], Recipient [5:7489511405010581459:2140]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:31.117401Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:31.117428Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:29:31.117473Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [5:7489511447960255768:2909], Recipient [5:7489511405010581459:2140]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2025-04-04T17:29:31.117493Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:29:31.119938Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "Root/origin/feed" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "streamImpl" PathId: 15 TotalGroupCount: 3 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "feed" TopicPath: "/Root/origin/feed/streamImpl" YdbDatabasePath: "/Root" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 3 MaxPartitionCount: 107 ScaleThresholdSeconds: 30 PartitionStrategyType: DISABLED } } Partitions { PartitionId: 0 TabletId: 72075186224037893 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 1 } ApplyIf { PathId: 15 PathVersion: 2 } AllowAccessToPrivatePaths: true } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:29:31.120248Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: Root/origin/feed/streamImpl, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-04-04T17:29:31.120404Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046644480 2025-04-04T17:29:31.120591Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T17:29:31.121058Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." 
TxId: 281474976715674 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:29:31.121204Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: Root/origin/feed/streamImpl 2025-04-04T17:29:31.121235Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T17:29:31.121352Z node 5 :TX_PROXY ERROR: Actor# [5:7489511447960255768:2909] txid# 281474976715674, issues: { message: "Can`t disable auto partitioning." severity: 1 } 2025-04-04T17:29:31.121572Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7489511447960255771:2911], Recipient [5:7489511405010581459:2140]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:31.121605Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:31.121617Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-04-04T17:29:31.195059Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511447960255638:2473], Partition 0, Sender [0:0:0], Recipient [5:7489511447960255712:2478], Cookie: 0 2025-04-04T17:29:31.195145Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511447960255712:2478]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:31.195174Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:31.195226Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:31.195307Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:31.195333Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:31.195363Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> BasicExample::BasicExample [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] |97.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/ut/unittest >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] |97.8%| [TS] {RESULT} ydb/core/ymq/ut/unittest >> ConfigGRPCService::ReplaceConfig [GOOD] >> ConfigGRPCService::FetchConfig >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> TopicSessionTests::TwoSessionsWithOffsets >> Metering::MockedNetClassifierOnly [GOOD] >> Metering::MockedNetClassifierLabelTransformation >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart >> TPQTest::TestChangeConfig [GOOD] >> TRangeTreap::Simple [GOOD] >> TRangeTreap::Sequential >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:28:35.737127Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:35.737243Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:206:2057] recipient: [1:14:2061] 2025-04-04T17:28:35.764575Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [1:205:2211] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:216:2057] recipient: [1:144:2168] Captured TEvents::TSystem::Wakeup to 
SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:219:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:220:2057] recipient: [1:218:2219] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [1:221:2220] sender: [1:222:2057] recipient: [1:218:2219] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [1:221:2220] sender: [1:261:2057] recipient: [1:14:2061] 2025-04-04T17:28:35.789175Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [1:260:2252] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:263:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:264:2057] recipient: [1:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:267:2057] recipient: [1:266:2254] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:268:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:269:2255] sender: [1:270:2057] recipient: [1:266:2254] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:35.818060Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:35.818153Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:28:35.818656Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [1:322:2300] connected; active server actors: 1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:102:2057] recipient: [2:100:2134] Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:107:2057] recipient: [2:100:2134] 2025-04-04T17:28:36.403883Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:36.403968Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:148:2057] recipient: [2:146:2169] Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:153:2057] recipient: [2:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:206:2057] recipient: [2:14:2061] 2025-04-04T17:28:36.438848Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [2:205:2211] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:216:2057] recipient: [2:144:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:219:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [2:152:2173] sender: [2:220:2057] recipient: [2:218:2219] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [2:221:2220] sender: [2:222:2057] recipient: [2:218:2219] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NSchemeShard::TFindSubDomainPathIdActor Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [2:221:2220] sender: [2:260:2057] recipient: [2:14:2061] 2025-04-04T17:28:36.464355Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:259:2251] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:262:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:263:2057] recipient: [2:98:2133] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:265:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:106:2138] sender: [2:267:2057] recipient: [2:266:2253] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:268:2254] sender: [2:269:2057] recipient: [2:266:2253] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to CLIENT_SCHEME_CACHE_LOOKUP_BATCH Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:28:36.496024Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:36.496090Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-04-04T17:28:36.496417Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [2:321:2299] connected; active server actors: 1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:102:2057] recipient: [3:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:102:2057] recipient: [3:100:2134] Leader for TabletID 72057594037927937 is [3:106:2138] sender: [3:107:2057] recipient: 
[3:100:2134] 2025-04-04T17:28:37.023657Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:28:37.023751Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:148:2057] recipient: [3:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:148:2057] recipient: [3:146:2169] Leader for TabletID 72057594037927938 is [3:152:2173] sender: [3:153:2057] recipient: [3:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [3:152:2173] sender: [3:206:2057] recipient: [3:14:2061] 2025-04-04T17:28:37.053091Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [3:205:2211] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [3:152:2173] sender: [3:216:2057] recipient: [3:144:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [3:152:2173] sender: [3:219:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [3:152:2173] sender: [3:220:2057] recipient: [3:218:2219] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [3:221:2220] sender: [3:222:2057] recipient: [3:218:2219] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927938 is [3:221:2220] sender: [3:261:2057] recipient: [3:14:2061] 2025-04-04T17:28:37.078884Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [3:260:2252] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:106:2138] sender: [3:263:2057] recipient: [3:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [3:106:2138] sender: [3:264:2057] recipient: [3:98:2133] Captured TEvents::TSystem::Wa ... 
nId: 8 } Partitions { PartitionId: 9 } ReadRuleGenerations: 34 ReadRuleGenerations: 35 ReadRuleGenerations: 35 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 34 Important: false } Consumers { Name: "bbb" Generation: 35 Important: true } Consumers { Name: "ccc" Generation: 35 Important: true } 2025-04-04T17:29:33.531639Z node 39 :PERSQUEUE INFO: new Cookie default|85264eaf-958dc7ce-bef8449d-1416ecd7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:33.543176Z node 39 :PERSQUEUE INFO: new Cookie default|8779d19c-dd6f502e-37e42c6e-b2287a93_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:33.551263Z node 39 :PERSQUEUE INFO: new Cookie default|ad34329-71f28ffc-19c2f695-147983cc_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:102:2057] recipient: [40:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:102:2057] recipient: [40:100:2134] Leader for TabletID 72057594037927937 is [40:106:2138] sender: [40:107:2057] recipient: [40:100:2134] 2025-04-04T17:29:34.035642Z node 40 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:34.035714Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [40:148:2057] recipient: [40:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [40:148:2057] recipient: [40:146:2169] Leader for TabletID 72057594037927938 is [40:152:2173] sender: [40:153:2057] recipient: [40:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [40:106:2138] sender: [40:178:2057] recipient: [40:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:34.065939Z node 40 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:34.068006Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 36 actor [40:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 36 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 36 ReadRuleGenerations: 36 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { 
PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "aaa" Generation: 36 Important: true } 2025-04-04T17:29:34.069016Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [40:184:2197] 2025-04-04T17:29:34.072367Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [40:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:34.075779Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [40:185:2198] 2025-04-04T17:29:34.077898Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [40:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:34.080596Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [40:186:2199] 2025-04-04T17:29:34.082848Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [40:186:2199] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:34.085564Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [40:187:2200] 2025-04-04T17:29:34.087842Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [40:187:2200] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:34.090763Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [40:188:2201] 2025-04-04T17:29:34.092973Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [40:188:2201] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:34.105367Z node 40 :PERSQUEUE INFO: new Cookie default|3dcecd7a-dd55cb89-5d2025bf-6dabe5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:34.112960Z node 40 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:29:34.117246Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] bootstrapping 5 [40:232:2232] 2025-04-04T17:29:34.119443Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [40:232:2232] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:29:34.123299Z node 40 
:PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] bootstrapping 6 [40:233:2233] 2025-04-04T17:29:34.125409Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [40:233:2233] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:34.128981Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] bootstrapping 7 [40:234:2234] 2025-04-04T17:29:34.131073Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [40:234:2234] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:34.134577Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] bootstrapping 8 [40:235:2235] 2025-04-04T17:29:34.136655Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [40:235:2235] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2025-04-04T17:29:34.140116Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] bootstrapping 9 [40:236:2236] 2025-04-04T17:29:34.142160Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [40:236:2236] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:34.177634Z node 40 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 37 actor [40:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 ImportantClientId: "bbb" ImportantClientId: "ccc" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 37 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } ReadRuleGenerations: 36 ReadRuleGenerations: 37 ReadRuleGenerations: 37 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { 
PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "bbb" Generation: 37 Important: true } Consumers { Name: "ccc" Generation: 37 Important: true } 2025-04-04T17:29:34.179828Z node 40 :PERSQUEUE INFO: new Cookie default|f4e0e42e-a3e2bf86-52cdd0ec-34876c5b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:34.193113Z node 40 :PERSQUEUE INFO: new Cookie default|3fcac0aa-98bbc39d-d95ea50d-4d83addc_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:29:34.202063Z node 40 :PERSQUEUE INFO: new Cookie default|e384b30f-6eb8cad2-6f6394b3-620f9b5_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits >> TestPurecalcFilter::Simple1 [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> TestPurecalcFilter::Simple2 |97.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |97.8%| [TS] {RESULT} ydb/tests/library/ut/py3test >> KeyValueGRPCService::SimpleAcquireLock |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |97.8%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> ConfigGRPCService::FetchConfig [GOOD] >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> TPQTest::TestManyConsumers [GOOD] >> Graph::CreateGraphShard >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::FetchConfig [GOOD] Test command err: 2025-04-04T17:29:29.731028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511442327957101:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:29.731177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007f7/r3tmp/tmpByqQv7/pdisk_1.dat 2025-04-04T17:29:30.155314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:30.155934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:30.178082Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:30.216577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18090, node 1 2025-04-04T17:29:30.246529Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-04T17:29:30.246832Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-04T17:29:30.246910Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-04T17:29:30.246975Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-04T17:29:30.248122Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-04T17:29:30.248157Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-04T17:29:30.248276Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-04T17:29:30.248301Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-04T17:29:30.249275Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-04T17:29:30.249309Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-04T17:29:30.250850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:29:30.252307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:29:30.253209Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:29:30.253228Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:29:30.255093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-04T17:29:30.256881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:29:30.267796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:29:30.281728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:29:30.281832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:29:30.281886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:29:30.281918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:29:30.282046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:29:30.282139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-04T17:29:30.282920Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:29:30.282970Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:29:30.282980Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:29:30.282989Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:29:30.308918Z node 1 :GRPC_SERVER DEBUG: [0x51a000028e80] created request Name# BlobStorageConfig 2025-04-04T17:29:30.309990Z node 1 :GRPC_SERVER DEBUG: [0x51a000029480] created request Name# HiveCreateTablet 2025-04-04T17:29:30.310304Z node 1 :GRPC_SERVER DEBUG: [0x51a000029a80] created request Name# LocalEnumerateTablets 2025-04-04T17:29:30.311378Z node 1 :GRPC_SERVER DEBUG: [0x51b000019d80] created request Name# 
KeyValue 2025-04-04T17:29:30.311837Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a080] created request Name# TabletStateRequest 2025-04-04T17:29:30.312829Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# LocalMKQL 2025-04-04T17:29:30.313197Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# LocalSchemeTx 2025-04-04T17:29:30.313491Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# TabletKillRequest 2025-04-04T17:29:30.313744Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# SchemeOperationStatus 2025-04-04T17:29:30.314838Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# ChooseProxy 2025-04-04T17:29:30.315182Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# ResolveNode 2025-04-04T17:29:30.318231Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# FillNode 2025-04-04T17:29:30.318578Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# DrainNode 2025-04-04T17:29:30.318944Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# InterconnectDebug 2025-04-04T17:29:30.319296Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# TestShardControl 2025-04-04T17:29:30.319605Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# RegisterNode 2025-04-04T17:29:30.319975Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# CmsRequest 2025-04-04T17:29:30.320345Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# ConsoleRequest 2025-04-04T17:29:30.320667Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0080] created request Name# SchemeInitRoot 2025-04-04T17:29:30.320913Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0680] created request Name# PersQueueRequest 2025-04-04T17:29:30.321238Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0c80] created request Name# SchemeOperation 2025-04-04T17:29:30.321499Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c1280] created request Name# SchemeDescribe 2025-04-04T17:29:30.391073Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:30.391090Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:30.391095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:30.391176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:29:30.925878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "ssd" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:29:30.926907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:30.929107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:29:30.930110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:29:30.930164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:30.932338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:29:30.933464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:29:30.933710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:29:30.934719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:29:30.934782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:29:30.934795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-04T17:29:30.936565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:29:30.936599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:29:30.936622Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:29:30.938354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:30.938406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:29:30.938420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:29:30.940396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:30.940446Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:30.940487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:29:30.940523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:29:30.946028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 Tx ... create, do next state 2025-04-04T17:29:33.999401Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-04-04T17:29:34.001417Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:34.001455Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:29:34.001474Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-04-04T17:29:34.001787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:29:34.001823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-04-04T17:29:34.001841Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:29:34.003449Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:34.003506Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:34.003524Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:29:34.003548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:29:34.003675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:29:34.005146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-04-04T17:29:34.005265Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-04-04T17:29:34.007836Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743787774050, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T17:29:34.007941Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787774050 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T17:29:34.007978Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:29:34.008280Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-04-04T17:29:34.008316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-04-04T17:29:34.008418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-04-04T17:29:34.008451Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:29:34.010360Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:29:34.010401Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:29:34.010561Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:29:34.010584Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7489511457446401833:2371], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-04-04T17:29:34.010622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:34.010643Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-04-04T17:29:34.010709Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-04T17:29:34.010718Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:29:34.010736Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-04-04T17:29:34.010743Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:29:34.010756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-04-04T17:29:34.010771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-04-04T17:29:34.010781Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-04-04T17:29:34.010790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-04-04T17:29:34.010833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-04-04T17:29:34.010844Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-04-04T17:29:34.010853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-04-04T17:29:34.011325Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-04T17:29:34.011381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-04-04T17:29:34.011390Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-04-04T17:29:34.011402Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-04-04T17:29:34.011414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 
2025-04-04T17:29:34.011499Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-04-04T17:29:34.011510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7489511457446402122:2316] 2025-04-04T17:29:34.012320Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:29:34.012375Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:29:34.012383Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:29:34.012402Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:29:34.013745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715657 2025-04-04T17:29:34.102493Z node 3 :GRPC_SERVER DEBUG: Got grpc request# FetchConfigRequest, traceId# 01jr0xkm4k9p5pndeyxqzktfha, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33864, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:29:34.106606Z node 3 :GRPC_SERVER DEBUG: [0x51a00009a280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.106608Z node 3 :GRPC_SERVER DEBUG: [0x51a00001f280] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.106835Z node 3 :GRPC_SERVER DEBUG: [0x51a000030c80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.106961Z node 3 :GRPC_SERVER DEBUG: [0x51a000092480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107002Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d5080] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107154Z node 3 :GRPC_SERVER DEBUG: [0x51a000030080] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107170Z node 3 :GRPC_SERVER DEBUG: [0x51a00008b880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107297Z node 3 :GRPC_SERVER DEBUG: [0x51a00008be80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107313Z node 3 :GRPC_SERVER DEBUG: [0x51a00009de80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107480Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d3e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107486Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d4480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107636Z node 3 :GRPC_SERVER DEBUG: [0x51b000043d80] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107639Z node 3 :GRPC_SERVER DEBUG: [0x51a0000d4a80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107743Z node 3 :GRPC_SERVER DEBUG: [0x51a00009f080] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107812Z node 3 :GRPC_SERVER DEBUG: [0x51a00009e480] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107868Z node 3 :GRPC_SERVER DEBUG: 
[0x51a00009d880] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107951Z node 3 :GRPC_SERVER DEBUG: [0x51a00008a080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.107993Z node 3 :GRPC_SERVER DEBUG: [0x51a00009c680] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.108102Z node 3 :GRPC_SERVER DEBUG: [0x51a00009ba80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.108122Z node 3 :GRPC_SERVER DEBUG: [0x51a00009ae80] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.108206Z node 3 :GRPC_SERVER DEBUG: [0x51a00009ea80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:29:34.108270Z node 3 :GRPC_SERVER DEBUG: [0x51a00009d280] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 |97.8%| [TM] {RESULT} ydb/services/config/ut/unittest >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] 2025-04-04T17:27:03.426644Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T17:27:03.430835Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T17:27:03.431129Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-04-04T17:27:03.431170Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T17:27:03.431211Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-04-04T17:27:03.431281Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-04-04T17:27:03.431331Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:03.431400Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:148:2057] recipient: [1:146:2169] Leader for TabletID 72057594037927938 is [1:152:2173] sender: [1:153:2057] recipient: [1:146:2169] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:106:2138] sender: [1:179:2057] recipient: [1:14:2061] 2025-04-04T17:27:03.449326Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-04-04T17:27:03.449490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-04-04T17:27:03.465939Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:03.469082Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-04T17:27:03.469242Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T17:27:03.469912Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:176:2191] txId 12345 config: CacheSize: 10485760 PartitionConfig { 
MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-04-04T17:27:03.469985Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-04-04T17:27:03.470363Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-04-04T17:27:03.470619Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-04-04T17:27:03.472791Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-04-04T17:27:03.472855Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-04-04T17:27:03.472932Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-04-04T17:27:03.474590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-04-04T17:27:03.474676Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-04-04T17:27:03.474759Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-04-04T17:27:03.474824Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-04-04T17:27:03.474854Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-04-04T17:27:03.475115Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T17:27:03.475159Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2025-04-04T17:27:03.475388Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2025-04-04T17:27:03.477970Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T17:27:03.478336Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:192:2203], now have 1 active actors on pipe 2025-04-04T17:27:03.480878Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:195:2205], now have 1 active actors on pipe 2025-04-04T17:27:03.480967Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-04T17:27:03.481003Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-04T17:27:03.481596Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-04-04T17:27:03.481991Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-04-04T17:27:03.482415Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-04-04T17:27:03.482804Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-04-04T17:27:03.482908Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-04-04T17:27:03.482937Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-04-04T17:27:03.483024Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-04-04T17:27:03.483131Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-04-04T17:27:03.483166Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-04-04T17:27:03.483445Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:197:2207], now have 1 active actors on pipe 2025-04-04T17:27:03.483535Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-04T17:27:03.483578Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-04T17:27:03.483659Z node 1 :PERSQUEUE INFO: new Cookie default|13e0ae72-4c12fa66-53e99400-71625df1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-04-04T17:27:03.483744Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-04T17:27:03.483806Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T17:27:03.484025Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2209], now have 1 active actors on pipe 2025-04-04T17:27:03.484053Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-04-04T17:27:03.484073Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-04-04T17:27:03.484502Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-04-04T17:27:03.484884Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-04-04T17:27:03.485271Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-04-04T1 ... 0000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:35.935292Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:999:2995], now have 1 active actors on pipe Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:35.937146Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:35.953884Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:35.991631Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1002:2998], now have 1 active actors on pipe 2025-04-04T17:29:35.993551Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.011170Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.050441Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1005:3001], now have 1 active actors on pipe 2025-04-04T17:29:36.052233Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 
104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.069508Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.140320Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1008:3004], now have 1 active actors on pipe 2025-04-04T17:29:36.142106Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.160841Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.200381Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1011:3007], now have 1 active actors on pipe 2025-04-04T17:29:36.202202Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.220019Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.258713Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1014:3010], now have 1 active actors on pipe 2025-04-04T17:29:36.260593Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.277939Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.317850Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1017:3013], now have 1 active actors on pipe 2025-04-04T17:29:36.319695Z node 87 :PERSQUEUE DEBUG: [PQ: 
72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.336896Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.401055Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1020:3016], now have 1 active actors on pipe 2025-04-04T17:29:36.402992Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.421728Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.462156Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1023:3019], now have 1 active actors on pipe 2025-04-04T17:29:36.463958Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.481362Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.521265Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1026:3022], now have 1 active actors on pipe 2025-04-04T17:29:36.523052Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.539799Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 
2025-04-04T17:29:36.578099Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1029:3025], now have 1 active actors on pipe 2025-04-04T17:29:36.579907Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.597682Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.638869Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [87:1032:3028], now have 1 active actors on pipe 2025-04-04T17:29:36.640751Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-04-04T17:29:36.659887Z node 87 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR 2025-04-04T17:29:36.726257Z node 87 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [87:1035:3031] connected; active server actors: 1 Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_READ_BALANCER_ACTOR Captured TEvents::TSystem::Wakeup to ACTORLIB_LONG_TIMER Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR
>> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD]
>> DataCleanup::ForceDataCleanupWithRestart [GOOD]
>> DataCleanup::OutReadSetsCleanedAfterCopyTable
>> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi
>> Graph::CreateGraphShard [GOOD]
>> Graph::UseGraphShard
>> DataShardReassign::AutoReassignOnYellowFlag [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD]
Test command err:
2025-04-04T17:29:29.754321Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:101:2135], Recipient [1:107:2139]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:29.772823Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:101:2135], Recipient [1:107:2139]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:29.777323Z node 1 :TX_DATASHARD INFO:
TDataShard::OnActivateExecutor: tablet 9437184 actor [1:107:2139] 2025-04-04T17:29:29.778258Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:29.837495Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:101:2135], Recipient [1:107:2139]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:29.844684Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:29.845148Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:29.847098Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:29:29.847180Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:29:29.847245Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:29:29.849163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:29.849272Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:29.849349Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:131:2139] in generation 2 2025-04-04T17:29:29.873651Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:29.911168Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:29:29.912677Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:29.912830Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:135:2159] 2025-04-04T17:29:29.912875Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:29:29.912912Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:29:29.912948Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:29:29.913124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:107:2139], Recipient [1:107:2139]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:29.913834Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:29.916212Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:29:29.916328Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:29:29.916407Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:29:29.916468Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:29.916511Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:29:29.916551Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:29:29.916588Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:29:29.916625Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:29:29.916665Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:29:29.919821Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [1:98:2133], Recipient [1:107:2139]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 98 RawX2: 4294969429 } 2025-04-04T17:29:29.919891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 
2025-04-04T17:29:32.892125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:32.892339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:32.892586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000af0/r3tmp/tmpdQ2W57/pdisk_1.dat 2025-04-04T17:29:33.431286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:33.482431Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:33.523547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:33.524446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:33.536760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:33.632283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:33.665634Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:665:2569] 2025-04-04T17:29:33.665880Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:33.712978Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:33.713117Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:33.714833Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:33.714913Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:33.714972Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:33.715305Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:33.715441Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:33.715535Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:681:2569] in generation 1 2025-04-04T17:29:33.726387Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:33.726473Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:33.726585Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:33.726668Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:683:2579] 2025-04-04T17:29:33.726701Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:33.726731Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:33.726761Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:33.727136Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:33.727229Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:33.727337Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:33.727379Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:33.727430Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:33.727492Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:33.727567Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:661:2566], serverId# [2:670:2571], sessionId# [0:0:0] 2025-04-04T17:29:33.729465Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:33.730534Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:33.730789Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:33.732690Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:33.743358Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:33.743479Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:33.906899Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2592], serverId# [2:704:2594], sessionId# [0:0:0] 2025-04-04T17:29:33.910812Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:29:33.910891Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:33.912518Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:33.912598Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:33.912695Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:29:33.913011Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:29:33.913179Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:33.913980Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:33.914061Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:29:33.917592Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:33.918083Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:29:33.920140Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:29:33.920186Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:33.920625Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:29:33.920681Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:33.921424Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:33.921481Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:33.921569Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037 ... ve planned 0 immediate 0 planned 0 2025-04-04T17:29:37.767123Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:37.767153Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:37.767510Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:661:2566], serverId# [3:672:2573], sessionId# [0:0:0] 2025-04-04T17:29:37.767799Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:37.767923Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:37.767990Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:37.769348Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:37.780100Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:37.780189Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:37.929275Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:704:2594], sessionId# [0:0:0] 2025-04-04T17:29:37.930335Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:29:37.930384Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:37.930914Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:37.930952Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:37.931011Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-04-04T17:29:37.931243Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-04-04T17:29:37.931344Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:37.931606Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:37.931658Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 
72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:29:37.931988Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:37.932306Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:37.933761Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-04-04T17:29:37.933798Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:37.934586Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-04-04T17:29:37.934659Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:37.935732Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:37.935770Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:37.935806Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:29:37.935853Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:408:2403], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:29:37.935889Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:29:37.935945Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:37.936508Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:37.938690Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:29:37.938750Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:29:37.939139Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:29:37.943393Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-04-04T17:29:37.944559Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-04-04T17:29:37.988190Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:741:2621] 2025-04-04T17:29:37.988410Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:37.993504Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:37.994491Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:37.996573Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:37.996643Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:37.996707Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:37.997072Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:37.997412Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:37.997481Z node 3 :TX_DATASHARD DEBUG: DataShard 
72075186224037888 persisting started state actor id [3:756:2621] in generation 2 2025-04-04T17:29:38.018949Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:38.019061Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-04-04T17:29:38.019143Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-04-04T17:29:38.019253Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:38.019347Z node 3 :TX_DATASHARD DEBUG: Resolve path at 72075186224037888: reason# empty path 2025-04-04T17:29:38.019518Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:760:2631] 2025-04-04T17:29:38.019558Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:38.019634Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:29:38.019674Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:38.019952Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-04T17:29:38.020148Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-04T17:29:38.021034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 741 RawX2: 12884904509 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-04-04T17:29:38.021156Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:38.021246Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:38.021729Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2025-04-04T17:29:38.021773Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:38.022164Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:38.022389Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:38.022438Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:38.022479Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:38.022525Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:38.022775Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-04-04T17:29:38.082198Z node 3 :TX_DATASHARD DEBUG: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 
TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-04-04T17:29:38.082486Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-04-04T17:29:38.082613Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:29:38.082766Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Execute at 72075186224037888 2025-04-04T17:29:38.085394Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:764:2635], serverId# [3:766:2636], sessionId# [0:0:0] 2025-04-04T17:29:38.098563Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Complete at 72075186224037888
|97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest
>> StatisticsScan::RunScanOnShard
>> ColumnStatistics::CountMinSketchStatistics [GOOD]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology]
>> TMemoryController::Config_ConsumerLimits [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD]
>> TMemoryController::SharedCache
Test command err:
2025-04-04T17:29:33.755454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:33.756126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:33.756192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a28/r3tmp/tmpeDjMZl/pdisk_1.dat 2025-04-04T17:29:34.313714Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-04-04T17:29:34.313800Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.322265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:34.322684Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-04-04T17:29:34.322769Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.323950Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:1:24576:515:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.324088Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.324206Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-04-04T17:29:34.335235Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-04T17:29:34.335352Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.336845Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-04T17:29:34.336933Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.337338Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:1:24576:129:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.337393Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.337487Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited 
cookie 1 for step 6 2025-04-04T17:29:34.337644Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-04T17:29:34.337687Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.337870Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-04-04T17:29:34.337909Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.338199Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:1:24576:130:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.338276Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.338372Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-04-04T17:29:34.338488Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-04-04T17:29:34.338536Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.339628Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-04T17:29:34.339681Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.339961Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.340015Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.340090Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-04-04T17:29:34.343406Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-04-04T17:29:34.343503Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.343597Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:29:34.343654Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 
dyn 0} 2025-04-04T17:29:34.354685Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-04-04T17:29:34.354807Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.355085Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:29:34.355165Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.384817Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:34.385039Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} queued, type NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig 2025-04-04T17:29:34.385125Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.391569Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:29:34.391694Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.391908Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} queued, type NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig 2025-04-04T17:29:34.391995Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.393803Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:29:34.393906Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.409388Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037932033:2:8:0:0:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:34.409524Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} commited cookie 1 for step 8 2025-04-04T17:29:34.409653Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives 2025-04-04T17:29:34.409749Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.410186Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, 
NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:29:34.410269Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:34.434557Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:29:34.434733Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} queued, type NKikimr::NTxAllocator::TTxAllocator::TTxReserve 2025-04-04T17:29:34.434817Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:34.435017Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} hope 1 -> done Change{3, redo 76b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-04-04T17:29:34.435084Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTx ... er{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{20, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:29:37.841400Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:37.841719Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:37.841771Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:37.841857Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2025-04-04T17:29:37.864495Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-04-04T17:29:37.864575Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:37.864767Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{12, redo 82b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-04-04T17:29:37.864818Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:37.875451Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037968897:2:10:0:0:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:37.875577Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} commited cookie 1 for step 10 2025-04-04T17:29:37.999141Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-04T17:29:37.999231Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-04-04T17:29:37.999376Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{21, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:29:37.999423Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:37.999835Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:37.999898Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:37.999988Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2025-04-04T17:29:38.136469Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-04T17:29:38.136574Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:38.136734Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{22, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:29:38.136805Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:38.137231Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.137326Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.137435Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2025-04-04T17:29:38.273171Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-04T17:29:38.273251Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:38.273404Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{23, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:29:38.273467Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:38.273880Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.273980Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.274086Z node 1 
:TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2025-04-04T17:29:38.409929Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-04T17:29:38.410017Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:38.410165Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{24, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:29:38.410226Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:38.410697Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.410775Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.410876Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2025-04-04T17:29:38.423529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:29:38.423592Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:38.446727Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2025-04-04T17:29:38.446831Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:38.446925Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:29:38.447005Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:38.530436Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-04T17:29:38.530514Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-04T17:29:38.530619Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2025-04-04T17:29:38.530685Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:38.530853Z node 1 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 15000 last cleanup 0 2025-04-04T17:29:38.530921Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-04-04T17:29:38.530958Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-04-04T17:29:38.530998Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:29:38.531033Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:29:38.531111Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:29:38.531177Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:38.531340Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:29:38.593203Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-04-04T17:29:38.593285Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:29:38.593445Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{25, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-04-04T17:29:38.593512Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:29:38.593949Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.594049Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-04-04T17:29:38.594151Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:32} commited cookie 1 for step 31 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 2025-04-04T17:29:38.720224Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} CheckYellow current light yellow move channels: 0 1 2 --- Captured TEvReassignTablet event |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2025-04-04T17:26:05.923835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:26:05.924007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:26:05.924100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/001971/r3tmp/tmpVmkDVz/pdisk_1.dat 2025-04-04T17:26:06.229422Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14243, node 1 2025-04-04T17:26:06.457890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:06.457953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:26:06.458005Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:06.458572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:26:06.461163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:26:06.545972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:06.546139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:06.560466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21642 2025-04-04T17:26:07.095864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:26:10.393177Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:26:10.416928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.417011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.443087Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:26:10.444474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.645134Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.645815Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.646357Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.646506Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.646589Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.646800Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.646898Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.647010Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.647083Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:26:10.816778Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:10.816897Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:10.829700Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:10.966098Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:26:11.013885Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:26:11.013994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:26:11.046419Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:26:11.046632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:26:11.046846Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:26:11.046906Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:26:11.046970Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:26:11.047024Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:26:11.047071Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:26:11.047114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:26:11.048153Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:26:11.077291Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:11.077381Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:26:11.081532Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:26:11.085398Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:26:11.085654Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:26:11.093447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:26:11.116919Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:26:11.116987Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:26:11.117065Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:26:11.128823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:26:11.136835Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:26:11.136991Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:26:11.335383Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:26:11.500735Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:26:11.577434Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:26:12.596257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.596403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:12.615767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:26:12.724995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:26:12.725252Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:26:12.725571Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:26:12.725764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:26:12.725932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:26:12.726083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:26:12.726217Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:26:12.726387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:26:12.726547Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:26:12.726699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:26:12.726840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:26:12.726996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:26:12.756056Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:26:12.756165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:29:28.517884Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzgwZjc3NmQtNTQ1ZTM4ZGYtMjJiNjEyNGQtNzZhYjU4Y2Q=, TxId: 2025-04-04T17:29:28.517941Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzgwZjc3NmQtNTQ1ZTM4ZGYtMjJiNjEyNGQtNzZhYjU4Y2Q=, TxId: 2025-04-04T17:29:28.518291Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:29:28.543641Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:29:28.543713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId7, ActorId=[1:2800:3219] 2025-04-04T17:29:29.053055Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-04T17:29:29.053142Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:29:29.654759Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:29:29.654919Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:29:29.687350Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:29:29.687406Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-04T17:29:29.687438Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-04T17:29:29.687458Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:29:29.687621Z node 2 :STATISTICS DEBUG: Event round 9 is different from the current 0 2025-04-04T17:29:29.687650Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-04T17:29:30.895521Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:29:32.027522Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:29:32.027600Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-04-04T17:29:32.027633Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-04T17:29:32.027660Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:29:33.112833Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:29:33.145789Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:29:33.145929Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:29:33.145971Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:29:33.146710Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:29:33.163915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:29:33.164322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:29:33.164379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:29:33.164752Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:29:33.179338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:29:33.179563Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2025-04-04T17:29:33.180166Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9314:6416], server id = [2:9315:6417], tablet id = 72075186224037899, status = OK 2025-04-04T17:29:33.180268Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9314:6416], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:29:33.181287Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:29:33.181350Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:29:33.181510Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:29:33.181632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:29:33.181901Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:29:33.183464Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9314:6416], server id = [2:9315:6417], tablet id = 72075186224037899 2025-04-04T17:29:33.183510Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:29:33.184267Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:29:33.215161Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGYxNTczMTItMTVjMGEyZjUtOWFhNDY0MjMtZjIwZTQ4NA==, TxId: 2025-04-04T17:29:33.215227Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGYxNTczMTItMTVjMGEyZjUtOWFhNDY0MjMtZjIwZTQ4NA==, TxId: 2025-04-04T17:29:33.216233Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:29:33.243705Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:29:33.243780Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:2800:3219] 2025-04-04T17:29:33.726164Z node 2 :STATISTICS DEBUG: Event round 11 is different from the current 0 2025-04-04T17:29:33.726232Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:29:34.335968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:29:34.336039Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-04T17:29:34.336068Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:29:34.336238Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-04-04T17:29:34.336269Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-04-04T17:29:35.398611Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:29:35.398809Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:29:35.431453Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:29:36.515074Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:29:36.515140Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-04-04T17:29:36.515168Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:29:37.644724Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:29:37.644847Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:29:37.644883Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:29:37.645375Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:29:37.659286Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:29:37.659552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:29:37.659592Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:29:37.659845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:29:37.684185Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:29:37.684350Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2025-04-04T17:29:37.684690Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9478:6505], server id = [2:9479:6506], tablet id = 72075186224037899, status = OK 2025-04-04T17:29:37.684764Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9478:6505], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:29:37.685550Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:29:37.685628Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:29:37.685785Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9478:6505], server id = [2:9479:6506], tablet id = 72075186224037899 2025-04-04T17:29:37.685804Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:29:37.685850Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:29:37.685964Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:29:37.686168Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:29:37.687994Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:29:37.706485Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWExNzNmODQtYTFlNDNjYjMtYmIzM2Y2YmQtNmEwYTliYzE=, TxId: 2025-04-04T17:29:37.706545Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWExNzNmODQtYTFlNDNjYjMtYmIzM2Y2YmQtNmEwYTliYzE=, TxId: 2025-04-04T17:29:37.706957Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:29:37.732405Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:29:37.732464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:2800:3219] >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> KeyValueGRPCService::SimpleAcquireLock [GOOD] >> KeyValueGRPCService::SimpleExecuteTransaction >> Metering::MockedNetClassifierLabelTransformation [GOOD] >> SHA256Test::SHA256Test [GOOD] >> TestPurecalcFilter::Simple2 [GOOD] >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle |97.9%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicSessionTests::BadDataSessionError |97.9%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestPurecalcFilter::ManyValues >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: 2025-04-04T17:27:11.327332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:27:11.327536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:27:11.327618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007bf/r3tmp/tmpgXxtwO/pdisk_1.dat 2025-04-04T17:27:11.621037Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7034, node 1 2025-04-04T17:27:11.834532Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:27:11.834585Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:27:11.834616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:27:11.835080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:27:11.837466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:27:11.923074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:11.923203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:11.937280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17011 2025-04-04T17:27:12.399859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:27:14.933746Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:27:14.958673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:14.958761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:14.985949Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:27:14.987822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:15.191081Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.191796Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.192340Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.192521Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.192618Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.192841Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.192923Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.193038Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.193130Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:27:15.371626Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:27:15.371749Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:27:15.385694Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:27:15.513874Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:15.556708Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:27:15.556823Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:27:15.585722Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:27:15.585948Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:27:15.586163Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:27:15.586223Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:27:15.586294Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:27:15.586350Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:27:15.586409Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:27:15.586468Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:27:15.587656Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:27:15.617631Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:27:15.617763Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:27:15.623573Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:27:15.628640Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:27:15.628871Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:27:15.636130Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:27:15.658464Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:27:15.658519Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:27:15.658611Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:27:15.669840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:27:15.684157Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:27:15.684372Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:27:15.871069Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:27:16.109321Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:27:16.186524Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:27:17.213342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:17.213487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:27:17.229554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:27:17.320906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:27:17.321180Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:27:17.321498Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:27:17.321729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:27:17.321886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:27:17.322009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:27:17.322095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:27:17.322189Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:27:17.322298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:27:17.322398Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:27:17.322495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:27:17.322568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2320:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:27:17.346847Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T17:27:17.346972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-04-04T17:27:19.857155Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000015s 2025-04-04T17:27:31.600879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:27:31.600943Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:27:32.880893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:27:32.880968Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:33.182412Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:29:33.182513Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:29:33.182560Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:29:33.182605Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:29:34.691074Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:29:34.691165Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 182.000000s, at schemeshard: 72075186224037897 2025-04-04T17:29:34.691542Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-04-04T17:29:34.705191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:29:35.836483Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:29:35.836580Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:29:35.836644Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:29:35.836701Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:29:35.837167Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:29:35.840867Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:29:35.844952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6976:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:35.845062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6986:5169], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:35.845153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:35.860161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:29:35.949221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6990:5172], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:29:36.196901Z node 2 :TX_PROXY ERROR: Actor# [2:7086:5218] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:36.228071Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7115:5233]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:29:36.228364Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:29:36.228470Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7117:5235] 2025-04-04T17:29:36.228561Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7117:5235] 2025-04-04T17:29:36.228964Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7118:5236] 2025-04-04T17:29:36.229156Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7118:5236], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:29:36.229220Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:29:36.229366Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7117:5235], server id = [2:7118:5236], tablet id = 72075186224037894, status = OK 2025-04-04T17:29:36.229461Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:29:36.229546Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7115:5233], StatRequests.size() = 1 2025-04-04T17:29:36.397971Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDM3OGRkNzYtZTJkNWQwYWYtYjdjMDljYmItNTU0ZjkwMjI=, TxId: 2025-04-04T17:29:36.398058Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDM3OGRkNzYtZTJkNWQwYWYtYjdjMDljYmItNTU0ZjkwMjI=, TxId: 2025-04-04T17:29:36.399122Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:29:36.413579Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:29:36.413675Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:29:36.467800Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:29:36.467898Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:29:36.544820Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7117:5235], schemeshard count = 1 2025-04-04T17:29:38.674561Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:29:38.674615Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:29:38.674641Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-04-04T17:29:38.674669Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:29:38.677373Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:29:38.693893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:29:38.694421Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:29:38.694496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:29:38.695349Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:29:38.709196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:29:38.709494Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-04-04T17:29:38.710089Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7234:5303], server id = [2:7235:5304], tablet id = 72075186224037899, status = OK 2025-04-04T17:29:38.710472Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7234:5303], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:29:38.714516Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:29:38.714623Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:29:38.714938Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:29:38.715171Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:29:38.715429Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7234:5303], server id = [2:7235:5304], tablet id = 72075186224037899 2025-04-04T17:29:38.715468Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:29:38.715710Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:29:38.718175Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:29:38.739089Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7255:5323]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:29:38.739344Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:29:38.739386Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7255:5323], StatRequests.size() = 1 2025-04-04T17:29:38.853686Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2VkZTIwOGMtMTNjMTMxNzktY2MzOGE0ODYtNTdlOTc4MGE=, TxId: 2025-04-04T17:29:38.853752Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2VkZTIwOGMtMTNjMTMxNzktY2MzOGE0ODYtNTdlOTc4MGE=, TxId: 2025-04-04T17:29:38.854575Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:29:38.855573Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7263:5451]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:29:38.855860Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:29:38.855909Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:29:38.858012Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:29:38.858067Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:29:38.858124Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:29:38.871706Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 |97.9%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/ut/unittest >> SHA256Test::SHA256Test [GOOD] Test command err: 2025-04-04T17:29:17.627449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511389225797855:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:17.627530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000837/r3tmp/tmpowpwzT/pdisk_1.dat 2025-04-04T17:29:17.921896Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:17.970690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:17.970727Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:17.970735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:17.970882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:17.991903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:17.992871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:17.995819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:22.627702Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511389225797855:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:22.627823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:29:23.423857Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511413289997724:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000837/r3tmp/tmpehEaDp/pdisk_1.dat 2025-04-04T17:29:23.482003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:29:23.515407Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:23.535295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:23.535373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:23.539374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:23.545844Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:23.545873Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:23.545882Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:23.545989Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:28.419406Z node 2 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489511413289997724:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:28.419535Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:29:34.304554Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489511464347793415:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:34.304651Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000837/r3tmp/tmp1m5AHR/pdisk_1.dat 2025-04-04T17:29:34.397209Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:34.432178Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:34.432211Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:34.432219Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:34.432353Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:34.434128Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:34.434231Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:34.436060Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:39.305715Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7489511464347793415:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:39.305825Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |97.9%| [TS] {RESULT} ydb/core/ymq/actor/ut/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus |97.9%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericProviderLookupActor::Lookup >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupWithErrors >> GenericProviderLookupActor::LookupWithErrors [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2025-04-04 17:29:42.151 INFO ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F6526BE8B00) [generic] yql_generic_lookup_actor.cpp:125: New generic proivider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2025-04-04 17:29:42.170 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F6526BE8B00) [generic] yql_generic_lookup_actor.cpp:263: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: 
"optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } ite ... left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ 
left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2025-04-04 17:29:42.281 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F65236BC640) [generic] yql_generic_lookup_actor.cpp:294: ActorId=[2:7489511497936629506:2051] Got TListSplitsStreamIterator 2025-04-04 17:29:42.281 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F65236BC640) [generic] yql_generic_lookup_actor.cpp:170: ActorId=[2:7489511497936629506:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2025-04-04 17:29:42.282 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F65236BC640) [generic] yql_generic_lookup_actor.cpp:203: ActorId=[2:7489511497936629506:2051] Got ReadSplitsStreamIterator from Connector 2025-04-04 17:29:42.282 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F65236BC640) [generic] yql_generic_lookup_actor.cpp:316: ActorId=[2:7489511497936629506:2051] Got DataChunk 2025-04-04 17:29:42.283 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F65236BC640) [generic] yql_generic_lookup_actor.cpp:327: ActorId=[2:7489511497936629506:2051] Got EOF 2025-04-04 17:29:42.283 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=846323, tid=0x00007F65236BC640) [generic] yql_generic_lookup_actor.cpp:377: Sending lookup results for 3 keys |97.9%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> MediatorTest::BasicTimecastUpdates >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> KeyValueGRPCService::SimpleExecuteTransaction [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> TRangeTreap::Sequential [GOOD] >> TRangeTreap::Random >> XmlBuilderTest::WritesProperly [GOOD] >> XmlBuilderTest::MacroBuilder [GOOD] >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> TRangeTreap::Random [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: 2025-04-04T17:29:05.396029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:05.396545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:05.396619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpf45gHC/pdisk_1.dat 2025-04-04T17:29:05.975934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:06.048479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:06.097399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:06.098192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:06.111922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema 2025-04-04T17:29:10.324656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:10.324738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:10.324863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpJFGmGo/pdisk_1.dat 2025-04-04T17:29:10.583408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:10.610416Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:10.646572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:10.646690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:10.658231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) 2025-04-04T17:29:14.017963Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:99:2145], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:14.018302Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:14.018374Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmprnNHcf/pdisk_1.dat 2025-04-04T17:29:14.280009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:14.307020Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:14.343794Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:14.343933Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:14.355669Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:17.445950Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:17.446226Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:17.446296Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpz4WXqm/pdisk_1.dat 2025-04-04T17:29:17.661503Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:17.687913Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:17.723618Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:17.723737Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:17.735025Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:20.993268Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:325:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:20.993401Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:20.993585Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpTAOeDe/pdisk_1.dat 2025-04-04T17:29:21.277592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:21.301749Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:21.336978Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:21.337116Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:21.348352Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:24.829975Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:99:2145], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:24.830328Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:24.830513Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpXgMUQo/pdisk_1.dat 2025-04-04T17:29:25.125747Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:25.160577Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:25.197242Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:25.197427Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:25.209212Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:28.690965Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:28.691263Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:28.691320Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmptHH0tc/pdisk_1.dat 2025-04-04T17:29:28.981809Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:29.066313Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:29.102566Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:29.102726Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:29.114275Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:32.641496Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:32.641696Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:32.641845Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpQuO0IW/pdisk_1.dat 2025-04-04T17:29:32.941187Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:32.971237Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:33.007796Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:33.007963Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:33.019586Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:36.869508Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:36.869795Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:36.870207Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpjjarzE/pdisk_1.dat 2025-04-04T17:29:37.157804Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:37.184762Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:37.220804Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:37.220960Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:37.232354Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 2025-04-04T17:29:37.364474Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:42.306457Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:42.306565Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:42.306853Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b5/r3tmp/tmpO7fKMD/pdisk_1.dat 2025-04-04T17:29:42.633059Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:42.669042Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:42.707470Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:42.707666Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:42.719462Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... restarting tablet 72057594046644480 (admin token) 2025-04-04T17:29:43.081839Z node 10 :IMPORT WARN: Table profiles were not loaded |97.9%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter |97.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/http/ut/unittest >> XmlBuilderTest::MacroBuilder [GOOD] |97.9%| [TS] {RESULT} ydb/core/ymq/http/ut/unittest >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] >> TRUCalculatorTests::TestReadTable [GOOD] >> TRUCalculatorTests::TestBulkUpsert [GOOD] >> StatisticsScan::RunScanOnShard [GOOD] >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> TxKeys::ComparePointKeys >> TestPurecalcFilter::ManyValues [GOOD] >> DataCleanup::OutReadSetsCleanedAfterCopyTable [GOOD] >> DataCleanup::BorrowerDataCleanedAfterCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 7353, MsgBus: 5229 2025-04-04T17:26:34.293619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489510689625961334:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:26:34.293789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000d3d/r3tmp/tmpZBu8EM/pdisk_1.dat 2025-04-04T17:26:34.631872Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7353, node 1 2025-04-04T17:26:34.656913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:26:34.657712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:26:34.659572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:26:34.757061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:26:34.757084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-04-04T17:26:34.757098Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:26:34.757238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5229 TClient is connected to server localhost:5229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:26:35.354374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.371820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.491374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.602331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:35.651323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:26:36.474818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510698215897567:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:36.474948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:36.839982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.861030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.882607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.904060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:26:36.967587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.032295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T17:26:37.077922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510702510865395:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.077991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.078016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489510702510865400:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:26:37.080494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T17:26:37.087404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489510702510865402:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T17:26:37.180775Z node 1 :TX_PROXY ERROR: Actor# [1:7489510702510865454:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:26:37.963264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T17:26:38.456444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0xe8k7a5s6h8tqspmx89vx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ4NWNkN2ItYTMzODgxOTYtYTM0ZTdmYi1kZGFmYzNiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.481340Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0xe8k7a5s6h8tqspmx89vx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ4NWNkN2ItYTMzODgxOTYtYTM0ZTdmYi1kZGFmYzNiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.488536Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jr0xe8k7a5s6h8tqspmx89vx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ4NWNkN2ItYTMzODgxOTYtYTM0ZTdmYi1kZGFmYzNiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.516804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0xe8nhdcdwb8g6zdtgmdgh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2MWJiZDEtYTYxNGUzMjQtNDFkMjc1MmItMmQ1MTZmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.521411Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0xe8nhdcdwb8g6zdtgmdgh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2MWJiZDEtYTYxNGUzMjQtNDFkMjc1MmItMmQ1MTZmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.525104Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0xe8nhdcdwb8g6zdtgmdgh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2MWJiZDEtYTYxNGUzMjQtNDFkMjc1MmItMmQ1MTZmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.543426Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jr0xe8pced627q7nr7p5mb5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ4NWNkN2ItYTMzODgxOTYtYTM0ZTdmYi1kZGFmYzNiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.547023Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jr0xe8pced627q7nr7p5mb5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ4NWNkN2ItYTMzODgxOTYtYTM0ZTdmYi1kZGFmYzNiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.550737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. 
Ctx: { TraceId: 01jr0xe8pced627q7nr7p5mb5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ4NWNkN2ItYTMzODgxOTYtYTM0ZTdmYi1kZGFmYzNiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.568450Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jr0xe8q53adv2t2w632x2y2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2MWJiZDEtYTYxNGUzMjQtNDFkMjc1MmItMmQ1MTZmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.572281Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jr0xe8q53adv2t2w632x2y2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2MWJiZDEtYTYxNGUzMjQtNDFkMjc1MmItMmQ1MTZmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.574912Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710683. Ctx: { TraceId: 01jr0xe8q53adv2t2w632x2y2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY2MWJiZDEtYTYxNGUzMjQtNDFkMjc1MmItMmQ1MTZmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:26:38.591720Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710684. Ctx: { TraceId: 01jr0xe8qx798bc1e1wjtrwbe5, Database: , DatabaseId: /Root, SessionId: ydb://sess ... : TxId: 281474976723132. Ctx: { TraceId: 01jr0xkswd09kaddcdt0cdby86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:39.993207Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723133. Ctx: { TraceId: 01jr0xkswd09kaddcdt0cdby86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.006485Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723134. Ctx: { TraceId: 01jr0xkswd09kaddcdt0cdby86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.029106Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723135. Ctx: { TraceId: 01jr0xksxsa3rmtx2rpn5b7bst, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.037113Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723136. Ctx: { TraceId: 01jr0xksxsa3rmtx2rpn5b7bst, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.046773Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723137. Ctx: { TraceId: 01jr0xksxsa3rmtx2rpn5b7bst, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.087838Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723138. 
Ctx: { TraceId: 01jr0xkszk38a4j19c82jkpgjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.095592Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723139. Ctx: { TraceId: 01jr0xkszk38a4j19c82jkpgjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.108983Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723140. Ctx: { TraceId: 01jr0xkszk38a4j19c82jkpgjn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.143753Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723141. Ctx: { TraceId: 01jr0xkt1b7bcwgj3dcn7pbft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.150643Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723142. Ctx: { TraceId: 01jr0xkt1b7bcwgj3dcn7pbft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.161687Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723143. Ctx: { TraceId: 01jr0xkt1b7bcwgj3dcn7pbft9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.191396Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723144. Ctx: { TraceId: 01jr0xkt2va3mvbzq9rnrnvr3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.199081Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723145. Ctx: { TraceId: 01jr0xkt2va3mvbzq9rnrnvr3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.208852Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723146. Ctx: { TraceId: 01jr0xkt2va3mvbzq9rnrnvr3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.239272Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723147. Ctx: { TraceId: 01jr0xkt4bd82n8937jhjrj5rh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.248208Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723148. 
Ctx: { TraceId: 01jr0xkt4bd82n8937jhjrj5rh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.258946Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723149. Ctx: { TraceId: 01jr0xkt4bd82n8937jhjrj5rh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.295745Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723150. Ctx: { TraceId: 01jr0xkt6145j540d9pxrkv0fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.346857Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723151. Ctx: { TraceId: 01jr0xkt6145j540d9pxrkv0fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.358403Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723152. Ctx: { TraceId: 01jr0xkt6145j540d9pxrkv0fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.388964Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723153. Ctx: { TraceId: 01jr0xkt903bfrxd8dk9v5cbb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.395652Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723154. Ctx: { TraceId: 01jr0xkt903bfrxd8dk9v5cbb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.407124Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723155. Ctx: { TraceId: 01jr0xkt903bfrxd8dk9v5cbb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.431611Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723156. Ctx: { TraceId: 01jr0xktacafhkt66h481jqxrb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.439162Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723157. Ctx: { TraceId: 01jr0xktacafhkt66h481jqxrb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.449618Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723158. 
Ctx: { TraceId: 01jr0xktacafhkt66h481jqxrb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.473584Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723159. Ctx: { TraceId: 01jr0xktbp6365svk4eaphqkc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.480265Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723160. Ctx: { TraceId: 01jr0xktbp6365svk4eaphqkc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.491178Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723161. Ctx: { TraceId: 01jr0xktbp6365svk4eaphqkc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.524242Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723162. Ctx: { TraceId: 01jr0xktd73wjq3dwhjsqg6g0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.530984Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723163. Ctx: { TraceId: 01jr0xktd73wjq3dwhjsqg6g0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.541694Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723164. Ctx: { TraceId: 01jr0xktd73wjq3dwhjsqg6g0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk4NTA2YzgtNjkyMTQ2ZDQtZDQxNGI2NTYtMjQxMmZlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.593500Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723165. Ctx: { TraceId: 01jr0xktfd9hy8ves2ywpcnsez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.600370Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723166. Ctx: { TraceId: 01jr0xktfd9hy8ves2ywpcnsez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:40.609440Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723167. Ctx: { TraceId: 01jr0xktfd9hy8ves2ywpcnsez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTJhNjg3OTEtMzg0YTc1OTgtZjAzMTQ1YmMtMjQyNTU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS |97.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |97.9%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/locks/ut_range_treap/unittest >> TRangeTreap::Random [GOOD] Test command err: NOTE: building treap of size 1000000 got height 51 and needed 1000000 ops (1000000 inserts 0 updates 0 deletes) and 30789479 comparisons (30.789479 per op) NOTE: building treap of size 8342 got height 36 and needed 11422 ops (9764 inserts 236 updates 1422 deletes) and 232004 comparisons (20.31202942 per op) Checking point 9268 ... found 2123 ranges, needed 7652 comparisons (3.60433349 per range) Checking point 9247 ... found 2120 ranges, needed 7667 comparisons (3.616509434 per range) Checking point 6700 ... found 2184 ranges, needed 7879 comparisons (3.607600733 per range) Checking point 6114 ... found 2171 ranges, needed 7982 comparisons (3.676646707 per range) Checking point 9115 ... found 2120 ranges, needed 7664 comparisons (3.61509434 per range) Checking point 5467 ... found 2178 ranges, needed 7965 comparisons (3.657024793 per range) Checking point 7780 ... found 2168 ranges, needed 8003 comparisons (3.691420664 per range) Checking point 5741 ... found 2161 ranges, needed 7875 comparisons (3.644146229 per range) Checking point 1098 ... found 846 ranges, needed 2083 comparisons (2.462174941 per range) Checking point 1895 ... found 1296 ranges, needed 3487 comparisons (2.69058642 per range) |97.9%| [TM] {RESULT} ydb/core/tx/locks/ut_range_treap/unittest >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> TestPurecalcFilter::NullValues >> DataShardReplication::SimpleApplyChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: 2025-04-04T17:29:14.490228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:14.490943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:14.491052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000ad4/r3tmp/tmpqFvLt6/pdisk_1.dat 2025-04-04T17:29:14.976942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:15.037657Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:15.083930Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:29:15.084747Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:29:15.086920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:15.087638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:15.100362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:15.193798Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:29:15.193884Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:29:15.199851Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:29:15.337296Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:29:15.337405Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:29:15.338684Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:29:15.338791Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:29:15.339103Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:29:15.339279Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T17:29:15.339426Z node 1 :TX_PROXY DEBUG: Actor# 
[1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:29:15.339724Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:29:15.343523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:15.344508Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:29:15.344572Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:29:15.391799Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:15.392641Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:15.392965Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:29:15.393152Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:15.406594Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:15.432163Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:15.432282Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:15.433653Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:15.433710Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:15.433744Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:15.435615Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:15.435730Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:15.435796Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:29:15.446420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:15.488638Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:15.489818Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:15.489925Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:29:15.489953Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:15.489984Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:15.490034Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:15.490216Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-04-04T17:29:15.490295Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:15.491579Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:15.491733Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:15.491833Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:15.491864Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:15.491943Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:29:15.491970Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:29:15.491993Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:29:15.492033Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:15.492065Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:15.492146Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:15.492177Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:15.492225Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:29:15.493403Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:29:15.493440Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:29:15.493515Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:15.493793Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:29:15.493833Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:15.493910Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:15.494006Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:29:15.494033Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:29:15.494075Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:29:15.494101Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:29:15.494401Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:29:15.494428Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:29:15.494450Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:29:15.494474Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-04-04T17:29:15.494517Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:29:15.494550Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:29:15.494593Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:29:15.494620Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:29:15.494635Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:29:15.495792Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:29:15.495839Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:15.506473Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 4-04T17:29:44.601299Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-04-04T17:29:44.601356Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037892 2025-04-04T17:29:44.601609Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:931:2761], Recipient [2:931:2761]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:44.601657Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:44.601729Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-04-04T17:29:44.601768Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:44.601813Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037892 for ReadTableScan 2025-04-04T17:29:44.601852Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit ReadTableScan 2025-04-04T17:29:44.601894Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037892 error: , IsFatalError: 0 2025-04-04T17:29:44.601942Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-04T17:29:44.601980Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit ReadTableScan 2025-04-04T17:29:44.602014Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompleteOperation 2025-04-04T17:29:44.602050Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-04T17:29:44.602301Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is DelayComplete 2025-04-04T17:29:44.602341Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2025-04-04T17:29:44.602375Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2025-04-04T17:29:44.602408Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[71500:281474976715661] at 72075186224037892 on unit CompletedOperations 2025-04-04T17:29:44.602446Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-04-04T17:29:44.602474Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2025-04-04T17:29:44.602501Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037892 has finished 2025-04-04T17:29:44.602534Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:44.602565Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2025-04-04T17:29:44.602604Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-04-04T17:29:44.602637Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-04-04T17:29:44.613572Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-04T17:29:44.613661Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-04-04T17:29:44.613715Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-04-04T17:29:44.613787Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1475:3276], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:29:44.613838Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-04-04T17:29:44.614034Z node 2 :TX_PROXY DEBUG: Actor# [2:1475:3276] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-04-04T17:29:44.614085Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2025-04-04T17:29:44.614277Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1475:3276], Recipient [2:768:2642]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-04-04T17:29:44.614323Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-04-04T17:29:44.614418Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:768:2642], Recipient [2:768:2642]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:44.614453Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:44.614512Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:29:44.614560Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:44.614612Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2025-04-04T17:29:44.614647Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2025-04-04T17:29:44.614687Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [71500:281474976715661] at 72075186224037890 2025-04-04T17:29:44.614728Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 
2025-04-04T17:29:44.614765Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2025-04-04T17:29:44.614798Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2025-04-04T17:29:44.614833Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-04T17:29:44.615086Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Continue 2025-04-04T17:29:44.615120Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:44.615150Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-04-04T17:29:44.615184Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-04T17:29:44.615219Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T17:29:44.615833Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1507:3305], Recipient [2:768:2642]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-04-04T17:29:44.615875Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-04-04T17:29:44.616091Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:29:44.616201Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-04-04T17:29:44.670085Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-04-04T17:29:44.670162Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037890 2025-04-04T17:29:44.670499Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:768:2642], Recipient [2:768:2642]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:44.670549Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:44.670618Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-04-04T17:29:44.670659Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:29:44.670704Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for ReadTableScan 2025-04-04T17:29:44.670737Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-04-04T17:29:44.670777Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2025-04-04T17:29:44.670822Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-04T17:29:44.670857Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2025-04-04T17:29:44.670891Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2025-04-04T17:29:44.670921Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-04T17:29:44.671131Z node 2 :TX_DATASHARD TRACE: Execution 
status for [71500:281474976715661] at 72075186224037890 is DelayComplete 2025-04-04T17:29:44.671166Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2025-04-04T17:29:44.671196Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T17:29:44.671227Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompletedOperations 2025-04-04T17:29:44.671258Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-04-04T17:29:44.671283Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T17:29:44.671307Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037890 has finished 2025-04-04T17:29:44.671341Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:44.671372Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-04-04T17:29:44.671406Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-04-04T17:29:44.671436Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-04-04T17:29:44.682374Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:29:44.682452Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-04-04T17:29:44.682495Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-04-04T17:29:44.682562Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1475:3276], exec latency: 1 ms, propose latency: 1 ms 2025-04-04T17:29:44.682609Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:29:44.682804Z node 2 :TX_PROXY DEBUG: Actor# [2:1475:3276] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2025-04-04T17:29:44.682900Z node 2 :TX_PROXY INFO: Actor# [2:1475:3276] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: 2025-04-04T17:29:42.959999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:42.960447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:42.960504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b60/r3tmp/tmp2YZblw/pdisk_1.dat 2025-04-04T17:29:43.431353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:43.482882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:43.530058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:43.530217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:43.543237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:43.639553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:43.981688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:735:2618], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:43.981838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:43.981930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:43.996090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:44.154527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:749:2626], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:44.227238Z node 1 :TX_PROXY ERROR: Actor# [1:823:2669] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:44.993522Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xkxs94908ymajvjhtq6kb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWVkMTljYTEtZDQxNDk3NjUtNWRmZTFkOWQtMzRhMTM3YzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |97.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 |98.0%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> MediatorTest::BasicTimecastUpdates [GOOD] |98.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> MediatorTest::MultipleTablets >> TopicSessionTests::BadDataSessionError [GOOD] >> TTestYqlToMiniKQLCompile::CheckResolve >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKey >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TopicSessionTests::WrongFieldType >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange >> MediatorTimeCast::ReadStepSubscribe >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle >> TTestYqlToMiniKQLCompile::Extract [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] |98.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |98.0%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> 
test.py::test_wait_for_cluster_ready [GOOD] >> test.py::test_counter >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> MediatorTest::MultipleTablets [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] |98.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |98.0%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> TestPurecalcFilter::NullValues [GOOD] >> MediatorTest::TabletAckBeforePlanComplete >> test.py::test_counter [GOOD] >> test.py::test_viewer_nodes >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> test.py::test_viewer_nodes [GOOD] >> test.py::test_storage_groups [GOOD] >> test.py::test_viewer_sysinfo [GOOD] >> test.py::test_viewer_vdiskinfo >> test.py::test_viewer_vdiskinfo [GOOD] >> test.py::test_viewer_pdiskinfo [GOOD] >> test.py::test_viewer_bsgroupinfo [GOOD] >> test.py::test_viewer_tabletinfo >> RangeOps::Intersection [GOOD] >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> test.py::test_viewer_tabletinfo [GOOD] >> test.py::test_viewer_describe >> TestPurecalcFilter::PartialPush >> test.py::test_viewer_describe [GOOD] >> test.py::test_viewer_cluster [GOOD] >> test.py::test_viewer_tenantinfo >> test.py::test_viewer_tenantinfo [GOOD] >> test.py::test_viewer_tenantinfo_db >> test.py::test_viewer_tenantinfo_db [GOOD] >> test.py::test_viewer_healthcheck >> test.py::test_viewer_healthcheck [GOOD] >> test.py::test_viewer_acl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2025-04-04T17:29:46.201738Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:46.313116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:29:46.313192Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:46.321594Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:46.322121Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2025-04-04T17:29:46.322432Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:46.339900Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:108:2139], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:46.382480Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:46.382596Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:46.384004Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:29:46.384067Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:29:46.384125Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:29:46.385924Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInit::Complete 2025-04-04T17:29:46.386373Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:46.386482Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:193:2153] in generation 2 2025-04-04T17:29:46.478737Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:46.526871Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-04-04T17:29:46.528239Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:46.528419Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:213:2211] 2025-04-04T17:29:46.528467Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:29:46.528520Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-04-04T17:29:46.528555Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:29:46.528716Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:46.528767Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:46.529867Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:29:46.530027Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:29:46.530172Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:29:46.530290Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:46.530380Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:29:46.530421Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:29:46.530455Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:29:46.530496Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:29:46.530537Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:29:46.530639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:209:2208], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:46.530674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:46.530733Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:207:2207], serverId# [1:209:2208], sessionId# [0:0:0] 2025-04-04T17:29:46.534353Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:98:2133], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 98 RawX2: 4294969429 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-04-04T17:29:46.534438Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:29:46.534516Z node 1 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Execute at 9437184 2025-04-04T17:29:46.534775Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-04-04T17:29:46.534835Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-04-04T17:29:46.534895Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-04-04T17:29:46.534950Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:29:46.534984Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-04-04T17:29:46.535019Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-04-04T17:29:46.535050Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:29:46.536409Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-04-04T17:29:46.536452Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-04-04T17:29:46.536486Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-04-04T17:29:46.536527Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:29:46.536580Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-04-04T17:29:46.536607Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-04-04T17:29:46.536651Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-04-04T17:29:46.536690Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-04-04T17:29:46.536721Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-04-04T17:29:46.549410Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-04-04T17:29:46.549584Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-04-04T17:29:46.549620Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-04-04T17:29:46.549662Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-04-04T17:29:46.550478Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:46.554811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:219:2217], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:46.554875Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:46.554921Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:218:2216], serverId# [1:219:2217], sessionId# [0:0:0] 2025-04-04T17:29:46.555072Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:98:2133], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-04-04T17:29:46.555102Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:29:46.555244Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-04-04T17:29:46.555289Z node 1 :TX_DATASHARD TRACE: Execution 
status for [1000001:1] at 9437184 is Executed 2025-04-04T17:29:46.555334Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-04-04T17:29:46.555371Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-04-04T17:29:46.559288Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 98 RawX2: 4294969429 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-04-04T17:29:46.559375Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:29:46.559644Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:46.559683Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:46.559736Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:29:46.559772Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:29:46.559805Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:29:46.559849Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-04-04T17:29:46.559885Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-04-04T17:29:46.559930Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:29:46.559975Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-04-04T17:29:46.560021Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-04-04T17:29:46.560052Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-04-04T17:29:46.560256Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-04-04T17:29:46.560291Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:29:46.560313Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-04-04T17:29:46.560335Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-04-04T17:29:46.560358Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-04-04T17:29:46.560416Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-04-04T17:29:46.560445Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-04-04T17:29:46.560480Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-04-04T17:29:46.560524Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-04-04T17:29:46.560580Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-04-04T17:29:46.560617Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-04-04T17:29:46.560650Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 
25-04-04T17:29:50.353641Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CreateIncrementalRestoreSrc 2025-04-04T17:29:50.353668Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:29:50.353696Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CreateIncrementalRestoreSrc 2025-04-04T17:29:50.353719Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompleteOperation 2025-04-04T17:29:50.353743Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompleteOperation 2025-04-04T17:29:50.353968Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is DelayComplete 2025-04-04T17:29:50.354006Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompleteOperation 2025-04-04T17:29:50.354045Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompletedOperations 2025-04-04T17:29:50.354085Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompletedOperations 2025-04-04T17:29:50.354142Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-04-04T17:29:50.354168Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompletedOperations 2025-04-04T17:29:50.354201Z node 5 :TX_DATASHARD TRACE: Execution plan for [1000001:1] at 9437184 has finished 2025-04-04T17:29:50.354260Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:50.354299Z node 5 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-04-04T17:29:50.354344Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:29:50.354381Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:29:50.355808Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:129:2152]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-04T17:29:50.355864Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:29:50.355911Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-04T17:29:50.355953Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:29:50.358494Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2025-04-04T17:29:50.358560Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2025-04-04T17:29:50.358627Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:29:50.358802Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:29:50.358846Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2025-04-04T17:29:50.358887Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:29:50.358937Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-04T17:29:50.358996Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2025-04-04T17:29:50.359060Z node 5 :TX_DATASHARD DEBUG: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client 
[5:98:2133], exec latency: 0 ms, propose latency: 1 ms 2025-04-04T17:29:50.359117Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-04T17:29:50.359225Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:29:50.359946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 129 RawX2: 21474838632 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2025-04-04T17:29:50.360071Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:224:2222], Recipient [5:129:2152]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:226:2223] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:29:50.360117Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:29:50.360211Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:122:2148], Recipient [5:129:2152]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-04T17:29:50.360250Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:29:50.360294Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-04T17:29:50.360358Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-04T17:29:50.360761Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [5:98:2133], Recipient [5:129:2152]: NActors::TEvents::TEvPoison 2025-04-04T17:29:50.361140Z node 5 :TX_DATASHARD INFO: OnDetach: 9437184 2025-04-04T17:29:50.361255Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 9437184 2025-04-04T17:29:50.371189Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [5:229:2224], Recipient [5:232:2225]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:50.374317Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [5:229:2224], Recipient [5:232:2225]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:50.374431Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [5:229:2224], Recipient [5:232:2225]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:50.380572Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:232:2225] 2025-04-04T17:29:50.380784Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:50.385085Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-04-04T17:29:50.411855Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:50.412002Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:50.413943Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-04-04T17:29:50.414034Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-04-04T17:29:50.414094Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-04-04T17:29:50.414524Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:50.414704Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:50.414770Z node 5 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [5:275:2225] in generation 3 2025-04-04T17:29:50.457851Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:50.457981Z node 5 :TX_DATASHARD INFO: Switched to work 
state Ready tabletId 9437184 2025-04-04T17:29:50.458073Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-04-04T17:29:50.458208Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-04-04T17:29:50.458469Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [5:280:2264] 2025-04-04T17:29:50.458519Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-04-04T17:29:50.458579Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-04-04T17:29:50.458622Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:29:50.458778Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-04-04T17:29:50.458868Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-04-04T17:29:50.459050Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [5:232:2225], Recipient [5:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:50.459092Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:50.459296Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-04-04T17:29:50.459362Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-04-04T17:29:50.459457Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:232:2225]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-04-04T17:29:50.459501Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-04-04T17:29:50.459534Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-04-04T17:29:50.459563Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-04-04T17:29:50.459657Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 232 RawX2: 21474838705 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-04-04T17:29:50.459704Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-04-04T17:29:50.459739Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:50.459770Z node 5 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-04-04T17:29:50.459808Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-04-04T17:29:50.459841Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-04-04T17:29:50.459872Z node 5 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-04-04T17:29:50.459907Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-04-04T17:29:50.459982Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [5:24:2071], Recipient [5:232:2225]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-04-04T17:29:50.460011Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-04-04T17:29:50.460046Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 
72057594046316545 last step 0 next step 0 2025-04-04T17:29:50.460128Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:278:2262], Recipient [5:232:2225]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:282:2266] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:29:50.460150Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:29:50.460212Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:122:2148], Recipient [5:232:2225]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-04-04T17:29:50.460239Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:29:50.460267Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-04-04T17:29:50.460310Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-04-04T17:29:50.472689Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [5:278:2262], Recipient [5:232:2225]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:278:2262] ServerId: [5:282:2266] } 2025-04-04T17:29:50.472762Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; 
(Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> test.py::test_viewer_acl [GOOD] >> test.py::test_viewer_autocomplete >> KeyValueGRPCService::SimpleRenameUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKey >> test.py::test_viewer_autocomplete [GOOD] >> test.py::test_viewer_check_access [GOOD] >> test.py::test_viewer_query >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable >> test.py::test_viewer_query [GOOD] >> test.py::test_viewer_query_issue_13757 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> 
test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> DataCleanup::BorrowerDataCleanedAfterCopyTable [GOOD] >> test.py::test_viewer_query_issue_13757 [GOOD] >> test.py::test_viewer_query_issue_13945 >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> DataShardStats::OneChannelStatsCorrect >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> test.py::test_viewer_query_issue_13945 [GOOD] >> test.py::test_pqrb_tablet >> test.py::test_pqrb_tablet [GOOD] >> test.py::test[solomon-UnknownSetting-] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-04T17:28:21.023032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511149091062481:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:21.023155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fe8/r3tmp/tmpzw2oGu/pdisk_1.dat 2025-04-04T17:28:21.184933Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T17:28:21.332087Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11106, node 1 2025-04-04T17:28:21.383147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:21.383275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:21.384847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:21.394801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000fe8/r3tmp/yandexnZTLqz.tmp 2025-04-04T17:28:21.394832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000fe8/r3tmp/yandexnZTLqz.tmp 2025-04-04T17:28:21.395025Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000fe8/r3tmp/yandexnZTLqz.tmp 2025-04-04T17:28:21.395145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:21.447955Z INFO: TTestServer started on Port 29546 GrpcPort 11106 TClient is connected to server localhost:29546 PQClient connected to localhost:11106 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:21.739068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:28:21.767590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:28:23.322825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511157680997894:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:23.322931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:23.323083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511157680997909:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:23.326635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:28:23.328461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511157680997943:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:23.328525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:23.335815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511157680997911:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:28:23.496085Z node 1 :TX_PROXY ERROR: Actor# [1:7489511157680997967:2446] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:23.516779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:28:23.534758Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511157680997983:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:23.535010Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjcxYmQ5MS1lYjEwZTdhMS1hYzU3N2ZiLWYyMWM3Zjkw, ActorId: [1:7489511157680997879:2336], ActorState: ExecuteState, TraceId: 01jr0xhf0b28q7n8jx4rm4sc8w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:23.537140Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:23.542337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:23.594233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489511157680998260:2625] 2025-04-04T17:28:26.023274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511149091062481:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:26.023380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:28:29.808902Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:28:29.829948Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:28:29.831306Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489511183450802328:2786], Recipient [1:7489511149091062906:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:29.831336Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:29.831353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:28:29.831397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489511183450802324:2783], Recipient [1:7489511149091062906:2189]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:28:29.831412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:28:29.902499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:28:29.902935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:28:29.903214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:28:29.903255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:28:29.903290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new sha ... 
session deregistered: test-consumer_5_2_5097933292025351734_v1 2025-04-04T17:29:49.544202Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7489511518560565465:2710] disconnected; active server actors: 1 2025-04-04T17:29:49.544244Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7489511518560565465:2710] client test-consumer disconnected session test-consumer_5_2_5097933292025351734_v1 2025-04-04T17:29:49.545449Z :INFO: [/Root] TraceId [] SessionId [producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-04T17:29:49.545520Z :INFO: [/Root] TraceId [] SessionId [producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T17:29:49.545641Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T17:29:49.546798Z :INFO: [/Root] TraceId [] SessionId [producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:29:49.546864Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-04T17:29:49.546929Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-04-04T17:29:49.546990Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2025-04-04T17:29:49.547061Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:29:49.547060Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0 grpc read done: success: 0 data: 2025-04-04T17:29:49.547102Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0 grpc read failed 2025-04-04T17:29:49.547152Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0 grpc closed 2025-04-04T17:29:49.547170Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|cfc828f3-4a9855bd-29cf0d35-e9293387_0 is DEAD 2025-04-04T17:29:49.548566Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7489511518560565485:2717], Recipient [5:7489511518560565487:2717]: NActors::TEvents::TEvPoison 2025-04-04T17:29:49.548672Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T17:29:49.550859Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7489511518560565518:3280], Recipient [5:7489511501380695338:2456]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:49.550914Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:49.550948Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-04-04T17:29:49.550995Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7489511518560565516:2717] destroyed 2025-04-04T17:29:49.551108Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 
(NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7489511501380695338:2456], Partition 0, Sender [5:7489511501380695338:2456], Recipient [5:7489511501380695399:2460], Cookie: 0 2025-04-04T17:29:49.551180Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7489511501380695338:2456], Recipient [5:7489511501380695399:2460]: NKikimr::TEvPQ::TEvPipeDisconnected 2025-04-04T17:29:49.551212Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2025-04-04T17:29:49.551254Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-04-04T17:29:49.551303Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-04-04T17:29:49.551348Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:49.551433Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:49.551492Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:49.551529Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:49.614435Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511522855532935:2749], Partition 2, Sender [0:0:0], Recipient [5:7489511522855533016:2758], Cookie: 0 2025-04-04T17:29:49.614514Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511522855533016:2758]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.614538Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.614602Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:49.614672Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:49.614693Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:49.614723Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:29:49.614920Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511501380695338:2456], Partition 0, Sender [0:0:0], Recipient [5:7489511501380695399:2460], Cookie: 0 2025-04-04T17:29:49.614952Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511522855532937:2750], Partition 1, Sender [0:0:0], Recipient [5:7489511522855533026:2762], Cookie: 0 2025-04-04T17:29:49.614990Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511501380695399:2460]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.615001Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511522855533026:2762]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.615007Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.615018Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.615037Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:49.615048Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:49.615087Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:49.615095Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:49.615105Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:49.615115Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:49.615140Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:49.615140Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:49.714810Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511522855532935:2749], Partition 2, Sender [0:0:0], Recipient [5:7489511522855533016:2758], Cookie: 0 2025-04-04T17:29:49.714910Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511522855533016:2758]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.714935Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.714975Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:49.715057Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:49.715085Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:49.715126Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:29:49.715223Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511501380695338:2456], Partition 0, Sender [0:0:0], Recipient [5:7489511501380695399:2460], Cookie: 0 2025-04-04T17:29:49.715271Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511501380695399:2460]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.715292Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.715304Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511522855532937:2750], Partition 1, Sender [0:0:0], Recipient [5:7489511522855533026:2762], Cookie: 0 2025-04-04T17:29:49.715319Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:49.715355Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:49.715384Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511522855533026:2762]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.715395Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:49.715405Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:29:49.715416Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:29:49.715441Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:29:49.715519Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:29:49.715551Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:29:49.715577Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> MediatorTest::TabletAckBeforePlanComplete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_data_cleanup/unittest >> DataCleanup::BorrowerDataCleanedAfterCopyTable [GOOD] Test command err: 2025-04-04T17:29:13.438766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:310:2353], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:13.439474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:13.439562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000af7/r3tmp/tmpH9MNil/pdisk_1.dat 2025-04-04T17:29:13.933520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:13.984844Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:14.031249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:14.031911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:14.044833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:14.140011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:14.578776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:769:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:14.578898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:14.578978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:14.587580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:14.747255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:783:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:14.825436Z node 1 :TX_PROXY ERROR: Actor# [1:857:2701] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:15.890637Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xk12fecej4kezt2zeqbe6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU4OGIxMDgtNzU2NTNkYTktMjc1NTRjYTUtZjA4M2ZlMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:15.919419Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xk12fecej4kezt2zeqbe6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU4OGIxMDgtNzU2NTNkYTktMjc1NTRjYTUtZjA4M2ZlMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:19.473660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:19.473792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:19.473991Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000af7/r3tmp/tmprvDGCv/pdisk_1.dat 2025-04-04T17:29:19.704938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:19.734830Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:19.770771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:19.770895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:19.782440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:19.863481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:20.181935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:762:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:20.182038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:772:2649], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:20.182106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:20.186953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:20.314674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:776:2652], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:20.348815Z node 2 :TX_PROXY ERROR: Actor# [2:849:2694] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:20.562743Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xk6hm6ezesx404evgm6w9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjMxYjMxYzctMTM1NmMyMzctOTc5NTRmODYtZGI2YzhiYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:20.567213Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xk6hm6ezesx404evgm6w9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjMxYjMxYzctMTM1NmMyMzctOTc5NTRmODYtZGI2YzhiYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:24.626579Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:24.626950Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:24.627034Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000af7/r3tmp/tmpHVeUMb/pdisk_1.dat 2025-04-04T17:29:24.901609Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:24.931220Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:24.967670Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:24.967803Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:24.979602Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:25.061480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:25.407088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:769:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:25.407198Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:780:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:25.407292Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:25.412962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:25.573312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:783:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:25.608831Z node 3 :TX_PROXY ERROR: Actor# [3:857:2701] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:25.867475Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xkbmx8hdknth1za05yb2n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTYwNTQ1MWQtNWIyYzNlOGItOTJkYWZkZWMtYTBlNmY2NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:25.872686Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: ... 644480 2025-04-04T17:29:37.015487Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:769:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:37.015589Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:37.015655Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:37.020183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:37.179497Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:783:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:37.214529Z node 5 :TX_PROXY ERROR: Actor# [5:857:2701] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:37.536803Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xkpzn8jw34kyxr7d3m65p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NWE1MDA0YS1lZDU5MTJlOC01NGY5YWVkYS1kODc2YmQyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.543113Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xkpzn8jw34kyxr7d3m65p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NWE1MDA0YS1lZDU5MTJlOC01NGY5YWVkYS1kODc2YmQyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:42.332520Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:101:2147], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:42.332858Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:42.333008Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000af7/r3tmp/tmpPfvZ1Y/pdisk_1.dat 2025-04-04T17:29:42.632842Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:42.662360Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:42.699790Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:42.699950Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:42.711493Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:42.794550Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:43.454495Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:878:2728], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:43.454579Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:889:2733], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:43.454658Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:43.459603Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:29:43.605630Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:892:2736], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:29:43.642893Z node 6 :TX_PROXY ERROR: Actor# [6:953:2778] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:43.975119Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xkx8x2w92k8pktnw5exbd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NzBlZTA0NmEtZmM5N2VjMGMtZTU0NTRkZjMtOTNhYzdkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:43.982046Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0xkx8x2w92k8pktnw5exbd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NzBlZTA0NmEtZmM5N2VjMGMtZTU0NTRkZjMtOTNhYzdkNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:44.319979Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0xkxt1011yp8wjrjdx2gx0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZTY2ZTFhZGUtMjQ5NDk3NTMtNzZjYmI5ZmQtMWM3MzIwOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:44.327756Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0xkxt1011yp8wjrjdx2gx0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZTY2ZTFhZGUtMjQ5NDk3NTMtNzZjYmI5ZmQtMWM3MzIwOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:49.338843Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:49.339190Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:49.339266Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000af7/r3tmp/tmpMXUeJo/pdisk_1.dat 2025-04-04T17:29:49.650552Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:49.677624Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:49.714680Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:49.714819Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:49.726370Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:49.808890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:50.468259Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:878:2728], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:50.468381Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:887:2733], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:50.468458Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:50.474063Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:29:50.624651Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:892:2736], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:29:50.662376Z node 7 :TX_PROXY ERROR: Actor# [7:953:2778] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:51.035473Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xm442e8phy6k702ad72s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Nzg1Njg2YzUtNmE3NWI5NWYtYTZiZGU4NDYtNGVkNWMxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:51.041599Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0xm442e8phy6k702ad72s6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Nzg1Njg2YzUtNmE3NWI5NWYtYTZiZGU4NDYtNGVkNWMxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:51.468011Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0xm4pn5n8v3f003s5yetn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGQwOTEwNjgtOTgxNDNjNmYtYTU0NzEwZGEtN2JiM2YwNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:51.476242Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0xm4pn5n8v3f003s5yetn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGQwOTEwNjgtOTgxNDNjNmYtYTU0NzEwZGEtN2JiM2YwNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:52.005050Z node 7 :TX_DATASHARD WARN: DataCleanup of tablet# 72075186224037888: has borrowed parts, requested from [7:593:2518] |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_data_cleanup/unittest >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast >> test_query_cache.py::TestQueryCache::test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: 2025-04-04T17:29:33.643813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:33.644410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:33.644490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006f5/r3tmp/tmpVluKIC/pdisk_1.dat 2025-04-04T17:29:34.212877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:34.258742Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:34.306471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:34.307259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:34.320123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:34.430520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:34.807568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:769:2641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:34.807747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:34.807882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:34.814442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:34.974002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:783:2649], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:35.038707Z node 1 :TX_PROXY ERROR: Actor# [1:857:2692] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:35.780463Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xkmtm478f0810smdx30tv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM4NWNiYjMtMjI2YmZmYzQtZGI1N2JlNTUtZGY4YTE1ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:35.846769Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xkmtm478f0810smdx30tv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM4NWNiYjMtMjI2YmZmYzQtZGI1N2JlNTUtZGY4YTE1ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:36.209605Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0xknwa7pkmmjpreqy9b4n3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NkMWU2NS1hMWY0ZjNjYS0zOTA0YWQwNy04NjM1ODI5YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:36.258812Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0xknwa7pkmmjpreqy9b4n3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NkMWU2NS1hMWY0ZjNjYS0zOTA0YWQwNy04NjM1ODI5YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:36.274380Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0xknwa7pkmmjpreqy9b4n3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NkMWU2NS1hMWY0ZjNjYS0zOTA0YWQwNy04NjM1ODI5YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:36.280138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0xknwa7pkmmjpreqy9b4n3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NkMWU2NS1hMWY0ZjNjYS0zOTA0YWQwNy04NjM1ODI5YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:36.407941Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0xkp9a5dvb5mqy7sdjbj4s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVlZGNkZDgtYzM0ZGNiMTktMTgzMzNjOTAtMWQ4MDJiM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:36.436599Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0xkp9a5dvb5mqy7sdjbj4s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVlZGNkZDgtYzM0ZGNiMTktMTgzMzNjOTAtMWQ4MDJiM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:36.600951Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jr0xkpe4a78q5g2v4qxfe468, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU1NzkxZC1mNDkzZTQzZi04MTgwYjVkNS05OTczYzkxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-04T17:29:39.590563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:39.590666Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:39.590842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006f5/r3tmp/tmpDIjGTo/pdisk_1.dat 2025-04-04T17:29:39.843375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:39.868389Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:39.904531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:39.904675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:39.916288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:39.999666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:40.331436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:820:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:40.331582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:40.331663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:40.336321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:40.499721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2688], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:40.534880Z node 2 :TX_PROXY ERROR: Actor# [2:912:2735] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:41.073210Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xkt790h8xnka5yw2hrz69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWM5ZmJkOTAtOGFjMzA5MTktY2NhYjJiZTMtOWQ0MmZkNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:41.103207Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xkt790h8xnka5yw2hrz69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWM5ZmJkOTAtOGFjMzA5MTktY2NhYjJiZTMtOWQ0MmZkNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:41.117937Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0xkt790h8xnka5yw2hrz69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWM5ZmJkOTAtOGFjMzA5MTktY2NhYjJiZTMtOWQ0MmZkNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:41.127617Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0xkt790h8xnka5yw2hrz69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWM5ZmJkOTAtOGFjMzA5MTktY2NhYjJiZTMtOWQ0MmZkNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:41.502428Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0xkv1262mneme1kp061kt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTMyNzIyNjMtMjVkZjI2ZTItMWEyNWM5NTMtY2U0N2IxYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Ro ... dId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-04-04T17:29:45.596036Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:99:2145], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:45.596426Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:45.596510Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006f5/r3tmp/tmpGpXT9p/pdisk_1.dat 2025-04-04T17:29:45.880437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:45.909354Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:45.946653Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:45.946762Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:45.958263Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:46.041159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:46.329875Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:769:2641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:46.329983Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:779:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:46.330057Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:46.336107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:46.496441Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:783:2649], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:46.532466Z node 3 :TX_PROXY ERROR: Actor# [3:857:2692] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:46.650154Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jr0xm02q36r4qy93ea4s21v1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdiZWQ3ZWYtZDY2NGFhYzgtMTNkYjllZWYtM2IzMGVjMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:46.671833Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xm02q36r4qy93ea4s21v1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdiZWQ3ZWYtZDY2NGFhYzgtMTNkYjllZWYtM2IzMGVjMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:46.800750Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jr0xm0e14g19eedewva1g6fm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjNiOGZhNzctOTAzMjk5OTUtYmQ5ZGJjNi05OWRhZDJjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } } 2025-04-04T17:29:46.968313Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jr0xm0hs4ctz45s9aa7ceff4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODQwOGQ5M2ItYmVkMzZkMDUtYmI2ZTBjZWQtNWRkNmQ0OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:46.986286Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jr0xm0hs4ctz45s9aa7ceff4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODQwOGQ5M2ItYmVkMzZkMDUtYmI2ZTBjZWQtNWRkNmQ0OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:47.133900Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0xm0qw8cy7hk0fx85va8fg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDYwYzk5MTYtN2Y5YTVlNzMtMjAxZmQ2YzktNzY3Y2I4Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } 2025-04-04T17:29:47.302669Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jr0xm0w7643wt3t0qe57j1rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzJiMzQ1ODItOWRiMDI3ODktMTA1YzU4ZmQtYThlNzk5MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:47.326731Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jr0xm0w7643wt3t0qe57j1rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzJiMzQ1ODItOWRiMDI3ODktMTA1YzU4ZmQtYThlNzk5MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:47.458984Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715668. 
Ctx: { TraceId: 01jr0xm12mf16g2k35txzyn6wy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzBlOTNlOTAtNTUzODQwNGItNTJmYjA4NTgtMjc3MWRlNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } 2025-04-04T17:29:51.528690Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:304:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:51.529136Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:51.529207Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006f5/r3tmp/tmpRxDUpv/pdisk_1.dat 2025-04-04T17:29:51.851931Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:51.885096Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:51.924240Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:51.924389Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:51.938913Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:52.031558Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:52.403840Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:769:2641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:52.403955Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:779:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:52.404034Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:52.410210Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T17:29:52.570010Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:783:2649], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T17:29:52.606005Z node 4 :TX_PROXY ERROR: Actor# [4:857:2692] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:52.658833Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:867:2701], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2025-04-04T17:29:52.660820Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NmIyNjgxZWYtNDhhZjEyZDQtZjFiZmM3ZDAtNWY3NGEzNQ==, ActorId: [4:766:2638], ActorState: ExecuteState, TraceId: 01jr0xm60h5j2qcphqb52z2979, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-04-04T17:29:52.699181Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:889:2717], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2025-04-04T17:29:52.701306Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWYwNGY1ZDctMWNlM2NkMjctM2JmN2Q2ZDAtOWQxNzc0ZDQ=, ActorId: [4:881:2709], ActorState: ExecuteState, TraceId: 01jr0xm68resdcc84rgfeywrys, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::test_pqrb_tablet [GOOD] |98.0%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> MediatorTest::TabletAckWhenDead >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |98.0%| [TM] {RESULT} ydb/tests/fq/solomon/py3test >> TopicSessionTests::WrongFieldType [GOOD] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> TDqPqRdReadActorTests::SessionError [GOOD] >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> TestPurecalcFilter::PartialPush [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKey >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> TestPurecalcFilter::CompilationValidation >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> TDqPqRdReadActorTests::CoordinatorChanged >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit >> GraphShard::NormalizeAndDownsample1 [GOOD] >> GraphShard::NormalizeAndDownsample2 [GOOD] >> GraphShard::NormalizeAndDownsample3 [GOOD] >> GraphShard::NormalizeAndDownsample4 [GOOD] >> GraphShard::NormalizeAndDownsample5 [GOOD] >> GraphShard::NormalizeAndDownsample6 [GOOD] >> GraphShard::CheckHistogramToPercentileConversions [GOOD] >> GraphShard::CreateGraphShard >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots >> Splitter::Simple >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> Splitter::Simple [GOOD] >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> test_http_api.py::TestHttpApi::test_simple_analytics_query >> Graph::LocalBordersOnGet 
[GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2025-04-04T17:28:50.204520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511273740314746:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:50.205225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000aca/r3tmp/tmpfhTJGt/pdisk_1.dat 2025-04-04T17:28:50.629868Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:50.667171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:50.668131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:50.682383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65286, node 1 2025-04-04T17:28:50.871118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:50.871139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:50.871145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:50.871268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:51.335454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:28:51.446927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:28:54.134722Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489511292662793008:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:54.134787Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000aca/r3tmp/tmpt0mhx1/pdisk_1.dat 2025-04-04T17:28:54.247076Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:54.274443Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:54.274517Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:54.277868Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21175, node 4 2025-04-04T17:28:54.342987Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:54.343008Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:54.343015Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:54.343143Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:54.616163Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:28:54.694013Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:28:58.301139Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7489511306691748965:2077];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:58.302297Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000aca/r3tmp/tmpKH814Y/pdisk_1.dat 2025-04-04T17:28:58.425045Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:58.453862Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:58.454100Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:58.460406Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19189, node 7 2025-04-04T17:28:58.547993Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:58.548014Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:58.548025Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:58.548178Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:28:58.770527Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:28:58.825983Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:29:02.276900Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7489511324379238459:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:02.276950Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000aca/r3tmp/tmpYuOBQW/pdisk_1.dat 2025-04-04T17:29:02.440830Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:02.487801Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:02.487880Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:02.492670Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8781, node 10 2025-04-04T17:29:02.590903Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:02.590933Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:02.590942Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:02.591086Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29 ... 
/r3tmp/tmpVF7m8z/pdisk_1.dat 2025-04-04T17:29:33.495109Z node 28 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12434, node 28 2025-04-04T17:29:33.608674Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:33.608793Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:33.612472Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:33.614525Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:33.614552Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:33.614558Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:33.614713Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:34.045309Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:29:34.120975Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:29:39.202714Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7489511484028208925:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:39.202835Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000aca/r3tmp/tmpHS9rrj/pdisk_1.dat 2025-04-04T17:29:39.369660Z node 31 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:39.410990Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:39.411118Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:39.416372Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28411, node 31 2025-04-04T17:29:39.522590Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:39.522624Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:39.522635Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:39.522807Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:39.853070Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:29:39.921177Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-04-04T17:29:45.195334Z node 34 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7489511508438828583:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:45.200429Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000aca/r3tmp/tmpgY6dqk/pdisk_1.dat 2025-04-04T17:29:45.357556Z node 34 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:45.419226Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:45.419336Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:45.422380Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13450, node 34 2025-04-04T17:29:45.476181Z node 34 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:45.476209Z node 34 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:45.476224Z node 34 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:45.476385Z node 34 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:45.675748Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:29:45.750639Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T17:29:51.466949Z node 37 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7489511535046954817:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:51.467784Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000aca/r3tmp/tmpXmj9ho/pdisk_1.dat 2025-04-04T17:29:51.659366Z node 37 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:51.697245Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:51.697347Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:51.700044Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25225, node 37 2025-04-04T17:29:51.759895Z node 37 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:51.759924Z node 37 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:51.759935Z node 37 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:51.760118Z node 37 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:51.962086Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
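------- [NOTE] The near-identical blocks in this "Test command err" section (nodes 1, 4, 7, 10, ..., 37) are per-node cluster bring-up for the rate-limiter suite: each node starts a schemeshard, answers the TClient::Ls probe for /Root, alters the subdomain, and creates the Kesus tablet (ESchemeOpCreateKesus) that hosts rate-limiter resources; the "propose itself is undo unsafe" WARNs accompany these suboperations throughout the run, including in tests that finish [GOOD]. A minimal client-side sketch of the same readiness probe, assuming the ydb Python SDK (endpoint and output are illustrative, not taken from this run):

import ydb

# Hypothetical endpoint; the tests above run against an in-process server.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5, fail_fast=True)

# Client-side analogue of the repeated "TClient::Ls request: Root" probes:
# describe the database root and confirm the scheme is reachable.
entry = driver.scheme_client.describe_path("/Root")
print(entry.name, entry.type)

driver.stop()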
2025-04-04T17:29:52.043299Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 |98.1%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> GraphShard::CreateGraphShard [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSentinelBaseTests::GuardianDataCenterRatio >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianRackRatio [GOOD] >> TSentinelTests::Smoke ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:29:38.224250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:29:38.224421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:29:38.224488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:29:38.224533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:29:38.225301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:29:38.225376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:29:38.225484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:29:38.225614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:29:38.226925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:38.316022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T17:29:38.316185Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:38.327966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:38.328255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:29:38.328492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:29:38.340579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:29:38.340907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:29:38.343632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:29:38.345568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:29:38.354544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:29:38.361961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:29:38.362039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:29:38.362210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:29:38.362284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:29:38.362406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:29:38.362605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.369400Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:29:38.484023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:29:38.485330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.486793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:29:38.488726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:29:38.488850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.491999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:29:38.492159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T17:29:38.492370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.492424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:29:38.492508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:29:38.492540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:29:38.494821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.494895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:29:38.494943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:29:38.497013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.497078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.497125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:29:38.497187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:29:38.502417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:29:38.505017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:29:38.505230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:29:38.506446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:29:38.506599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:29:38.506653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:29:38.508125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:29:38.508202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-04-04T17:29:38.508433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:29:38.508515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:29:38.511331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:29:38.511385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:29:38.511548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:29:38.511581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:29:38.511871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:38.511909Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:29:38.512008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:29:38.512070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:29:38.512146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:29:38.512183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:29:38.512220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:29:38.512261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:29:38.512295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:29:38.512325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:29:38.512404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:29:38.512457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:29:38.512485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:29:38.514313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:29:38.514445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:29:38.514494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cs 2025-04-04T17:29:58.415826Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 101 } Time: 101 2025-04-04T17:29:58.415842Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.415860Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.415881Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.415934Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 102 } Time: 102 2025-04-04T17:29:58.415952Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.415972Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.415996Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416055Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 103 } Time: 103 2025-04-04T17:29:58.416078Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.416104Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.416136Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416198Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 104 } Time: 104 2025-04-04T17:29:58.416216Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.416238Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.416261Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416322Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 105 } Time: 105 2025-04-04T17:29:58.416347Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.416375Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.416399Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416459Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 106 } Time: 106 2025-04-04T17:29:58.416476Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.416495Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.416516Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416579Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 107 } Time: 107 2025-04-04T17:29:58.416631Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.416652Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.416675Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416763Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 108 } Time: 108 2025-04-04T17:29:58.416782Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.416808Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.416839Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416893Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2025-04-04T17:29:58.416911Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.416930Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.416952Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.416999Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2025-04-04T17:29:58.417019Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417045Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.417091Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.417182Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 
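------- [NOTE] The GRAPH trace above is the shard's metrics pipeline in miniature: one TTxStoreMetrics transaction per sample persists test.metric0 with Value equal to Time (... 101..119), a TEvGetMetrics request then runs TTxGetMetrics with "Querying from 0 to 119", and the queued stores complete afterwards. A toy in-memory model of that store-then-range-query cycle (plain Python; a sketch of the logged behaviour, not the TGraphShard API):

from bisect import bisect_left, bisect_right

class MetricStore:
    """Toy stand-in for the shard's persisted (time, value) series."""
    def __init__(self):
        self.times, self.values = [], []

    def store(self, t, v):
        # Analogue of TTxStoreMetrics::Execute; samples arrive in time order here.
        self.times.append(t)
        self.values.append(v)

    def query(self, t_from, t_to):
        # Analogue of TTxGetMetrics ("Querying from 0 to 119"): inclusive range scan.
        lo = bisect_left(self.times, t_from)
        hi = bisect_right(self.times, t_to)
        return list(zip(self.times[lo:hi], self.values[lo:hi]))

store = MetricStore()
for t in range(101, 120):
    store.store(t, t)            # matches the trace, where Value == Time
print(store.query(0, 119))       # returns every stored point, as in the test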
2025-04-04T17:29:58.417202Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417224Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.417248Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.417288Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2025-04-04T17:29:58.417305Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417325Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.417354Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.417415Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2025-04-04T17:29:58.417433Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417453Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.417473Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.417525Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2025-04-04T17:29:58.417542Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417560Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.417580Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.417653Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2025-04-04T17:29:58.417672Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417692Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.417722Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.417809Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2025-04-04T17:29:58.417828Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417851Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.417884Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.417949Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2025-04-04T17:29:58.417967Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.417985Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.418008Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.418053Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2025-04-04T17:29:58.418070Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.418087Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.418111Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.418182Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2025-04-04T17:29:58.418206Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-04-04T17:29:58.418227Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-04-04T17:29:58.418274Z node 6 :GRAPH TRACE: DB Stored metrics 2025-04-04T17:29:58.418336Z node 6 :GRAPH TRACE: SHARD Handle TEvGraph::TEvGetMetrics from [6:567:2496] 2025-04-04T17:29:58.418398Z node 6 :GRAPH DEBUG: SHARD TTxGetMetrics::Execute 2025-04-04T17:29:58.418450Z node 6 :GRAPH DEBUG: DB Querying from 0 to 119 2025-04-04T17:29:58.431367Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431471Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431511Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431538Z node 6 :GRAPH DEBUG: SHARD 
TTxStoreMetrics::Complete 2025-04-04T17:29:58.431563Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431589Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431613Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431638Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431663Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431687Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431711Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431737Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431773Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431798Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431823Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431846Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431889Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431913Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431938Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431964Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.431988Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432011Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432034Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432057Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432082Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432106Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432130Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432154Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432177Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432201Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432226Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432249Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432269Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432291Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432313Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432348Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432374Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432401Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432420Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432444Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432468Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432492Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432514Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432539Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432562Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432584Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432607Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432630Z node 6 :GRAPH 
DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432655Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432678Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432704Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432727Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432751Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432775Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432796Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432818Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432839Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432861Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432884Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432911Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-04-04T17:29:58.432950Z node 6 :GRAPH DEBUG: SHARD TTxGetMetric::Complete 2025-04-04T17:29:58.433001Z node 6 :GRAPH TRACE: SHARD TxGetMetrics returned 60 points for request 3 2025-04-04T17:29:58.433172Z node 6 :GRAPH TRACE: SVC TEvMetricsResult 3 2025-04-04T17:29:58.433234Z node 6 :GRAPH TRACE: SVC TEvMetricsResult found request 3 resending to [6:568:2497] |98.1%| [TS] {RESULT} ydb/core/graph/ut/unittest >> TDqPqRdReadActorTests::Backpressure ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:124:2058] recipient: [1:107:2139] 2025-04-04T17:29:58.416820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T17:29:58.417023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:29:58.417087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-04-04T17:29:58.417134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T17:29:58.417934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T17:29:58.417996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T17:29:58.418103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T17:29:58.418224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T17:29:58.419520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:58.505805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2025-04-04T17:29:58.505877Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:58.519535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:58.519820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T17:29:58.520044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T17:29:58.535573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:29:58.535850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:29:58.537966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.539542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T17:29:58.552414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:29:58.561208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:29:58.561297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:29:58.561466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:29:58.561531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:29:58.561658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:29:58.561817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.569033Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:237:2058] recipient: [1:15:2062] 2025-04-04T17:29:58.677633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T17:29:58.679449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.681200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T17:29:58.686362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T17:29:58.686463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.689625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.689810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-04-04T17:29:58.690096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.690160Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T17:29:58.690283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T17:29:58.690320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T17:29:58.692464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.692534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T17:29:58.692570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T17:29:58.694577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.694643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.694687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:29:58.694740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T17:29:58.698774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:29:58.700897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T17:29:58.701052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T17:29:58.701907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.702060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:29:58.702097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:29:58.703820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T17:29:58.703933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T17:29:58.704199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T17:29:58.704305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T17:29:58.706615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:29:58.706698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T17:29:58.706886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:29:58.706930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-04-04T17:29:58.707323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T17:29:58.707388Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T17:29:58.707479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:29:58.707527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:29:58.707570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T17:29:58.707624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:29:58.707667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T17:29:58.707716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T17:29:58.707749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-04-04T17:29:58.707790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-04-04T17:29:58.707857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-04-04T17:29:58.707917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-04-04T17:29:58.707965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-04-04T17:29:58.709951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:29:58.710089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-04-04T17:29:58.710118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
546 shardIdx: 72057594046678944:2 at schemeshard: 72057594046678944 2025-04-04T17:29:58.939612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186234409548, partId: 1 2025-04-04T17:29:58.939782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:1, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2025-04-04T17:29:58.939829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:1 HandleReply TEvConfigureStatus operationId:102:1 at schemeshard:72057594046678944 2025-04-04T17:29:58.939865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-04-04T17:29:58.939903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 3 -> 128 2025-04-04T17:29:58.940503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.940574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-04-04T17:29:58.941067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.943378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.943512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.943552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.943598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:1, at tablet# 72057594046678944 2025-04-04T17:29:58.943631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 2/2 2025-04-04T17:29:58.943731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T17:29:58.945155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-04-04T17:29:58.945283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-04-04T17:29:58.945557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.945645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T17:29:58.945672Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2025-04-04T17:29:58.945701Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-04-04T17:29:58.945879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 128 -> 240 2025-04-04T17:29:58.945938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-04-04T17:29:58.946015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-04T17:29:58.946077Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:409:2374], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-04-04T17:29:58.947758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T17:29:58.947784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-04-04T17:29:58.947928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T17:29:58.947963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-04-04T17:29:58.948160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.948200Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2025-04-04T17:29:58.948222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 240 -> 240 2025-04-04T17:29:58.948656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:29:58.948748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-04-04T17:29:58.948795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-04-04T17:29:58.948830Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-04-04T17:29:58.948861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-04-04T17:29:58.948925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2025-04-04T17:29:58.953141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-04-04T17:29:58.953208Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:1 ProgressState 2025-04-04T17:29:58.953287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-04T17:29:58.953314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-04T17:29:58.953338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-04-04T17:29:58.953360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-04T17:29:58.953386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-04-04T17:29:58.953417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-04-04T17:29:58.953462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-04-04T17:29:58.953525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-04-04T17:29:58.953739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-04-04T17:29:58.953784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-04-04T17:29:58.953805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-04-04T17:29:58.953878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-04-04T17:29:58.954339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-04-04T17:29:58.956051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-04-04T17:29:58.956091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-04-04T17:29:58.956502Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-04-04T17:29:58.956580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-04-04T17:29:58.956610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:565:2494] TestWaitNotification: OK eventTxId 102 2025-04-04T17:29:58.957974Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-04-04T17:29:58.958134Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/db1" took 212us result status StatusSuccess 2025-04-04T17:29:58.959537Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.1%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: 2025-04-04T17:29:51.356402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:51.357074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:51.357130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a84/r3tmp/tmpH7WmRs/pdisk_1.dat 2025-04-04T17:29:51.892466Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-04-04T17:29:51.893286Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-04T17:29:51.901418Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 0 2025-04-04T17:29:51.950649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:52.001102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:52.001739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:52.019236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:52.107370Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 500 2025-04-04T17:29:52.218333Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1000 2025-04-04T17:29:52.415384Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2025-04-04T17:29:52.547499Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-04-04T17:29:52.677340Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2025-04-04T17:29:52.818082Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2025-04-04T17:29:52.864098Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 7000 } 2025-04-04T17:29:52.999933Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 2025-04-04T17:29:53.148390Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2025-04-04T17:29:53.151917Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientDestroyed 2025-04-04T17:29:53.175056Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-04-04T17:29:53.175834Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE 
TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 7000 2025-04-04T17:29:53.188336Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 12000 } 2025-04-04T17:29:53.296333Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 7500 2025-04-04T17:29:53.392252Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8000 2025-04-04T17:29:53.540124Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2025-04-04T17:29:53.667792Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2025-04-04T17:29:53.815894Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2025-04-04T17:29:53.964288Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2025-04-04T17:29:57.314856Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:57.315001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:57.315219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a84/r3tmp/tmpgDAUU2/pdisk_1.dat 2025-04-04T17:29:57.634894Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-04T17:29:57.635718Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2025-04-04T17:29:57.635777Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-04T17:29:57.635833Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:649:2547] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2025-04-04T17:29:57.636148Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-04T17:29:57.636356Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 0 2025-04-04T17:29:57.636488Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} 2025-04-04T17:29:57.636722Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-04T17:29:57.636818Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2025-04-04T17:29:57.636875Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:652:2549] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 0} 2025-04-04T17:29:57.637111Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 0 2025-04-04T17:29:57.637321Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-04-04T17:29:57.637407Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2025-04-04T17:29:57.637457Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:653:2550] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 0} 2025-04-04T17:29:57.637632Z node 2 
:TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 0 2025-04-04T17:29:57.660485Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:57.697104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:57.697225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:57.709456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:57.784165Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 500 2025-04-04T17:29:57.784252Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 500} 2025-04-04T17:29:57.891197Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 1000 2025-04-04T17:29:57.891281Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2025-04-04T17:29:58.071239Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2000 2025-04-04T17:29:58.071323Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... have step 0 and 2000 after sleep 2025-04-04T17:29:58.159317Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx1 planned at step 2500 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 2500 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet 2025-04-04T17:29:58.254687Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 3000 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx2 planned at step 3000 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 3000 ... unblocking tx1 at tablet2 ... unblocking NKikimr::TEvTxProcessing: ... R_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.438808Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.449523Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.460081Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.470775Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.492157Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.513510Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.534892Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... restarting mediator 2025-04-04T17:29:58.546421Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientDestroyed 2025-04-04T17:29:58.546607Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 8 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-04T17:29:58.546659Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-04T17:29:58.547431Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-04T17:29:58.547587Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 9 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-04T17:29:58.547637Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-04T17:29:58.548265Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-04T17:29:58.548350Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 10 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-04T17:29:58.548379Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-04T17:29:58.549120Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-04-04T17:29:58.549225Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to 
Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 11 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-04-04T17:29:58.549256Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-04-04T17:29:58.554034Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.554544Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-04T17:29:58.577247Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2500} ... tablet1 at 2500 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-04-04T17:29:58.588171Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... tablet1 at 3000 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-04-04T17:29:58.599124Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 3500 ... tablet3 at 3500 >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/async_io/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2025-04-04T17:29:16.367402Z node 1 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-04T17:29:16.369334Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-04T17:29:16.370532Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-04T17:29:16.372629Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-04T17:29:16.372663Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:16.377232Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Fri, 04 Apr 2025 17:29:16 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-04T17:29:16.381198Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:26.576887Z node 2 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-04T17:29:26.583415Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-04T17:29:26.598539Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-04-04T17:29:26.609016Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-04T17:29:26.620871Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-04T17:29:26.632575Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-04T17:29:26.648155Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Push 109013 bytes of data to buffer 2025-04-04T17:29:26.662862Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-04T17:29:26.677587Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-04T17:29:26.685769Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-04-04T17:29:26.686160Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-04-04T17:29:26.686604Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-04T17:29:26.687145Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-04T17:29:26.687196Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-04T17:29:26.790754Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:26 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:26.791235Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-04T17:29:26.791318Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-04T17:29:26.838160Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:26 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:26.838530Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-04T17:29:26.838555Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-04T17:29:26.876934Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:26 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:26.877310Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-04T17:29:26.877342Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-04T17:29:26.970541Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:26 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:26.970879Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-04T17:29:26.970903Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-04T17:29:27.018084Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:27.018353Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2025-04-04T17:29:27.018393Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-04-04T17:29:27.068233Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:27.068367Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:27.096558Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Fri, 04 Apr 2025 17:29:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 500} 2025-04-04T17:29:27.096694Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:27.169113Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:27 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:27.169253Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:37.606780Z node 3 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-04T17:29:37.607032Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-04T17:29:37.607390Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-04-04T17:29:37.607523Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2025-04-04T17:29:37.607548Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:37.612053Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Fri, 04 Apr 2025 17:29:37 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 10} 2025-04-04T17:29:37.612194Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:47.838721Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-04T17:29:47.844167Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-04T17:29:47.862311Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-04-04T17:29:47.877365Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-04-04T17:29:47.883451Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-04-04T17:29:47.883809Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 107903 bytes to solomon 2025-04-04T17:29:47.884050Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-04-04T17:29:47.884121Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2025-04-04T17:29:47.884136Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-04-04T17:29:47.914900Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Fri, 04 Apr 2025 17:29:47 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 400} 2025-04-04T17:29:47.915024Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-04-04T17:29:47.988539Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:47 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:47.988668Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-04-04T17:29:48.070079Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 04 Apr 2025 17:29:48 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-04-04T17:29:48.070216Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:48.604290Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-04-04T17:29:48.604650Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-04T17:29:48.604777Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-04T17:29:48.604992Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-04T17:29:48.605020Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2025-04-04T17:29:48.609086Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Fri, 04 Apr 2025 17:29:48 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-04T17:29:48.609241Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:48.609777Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-04-04T17:29:48.609920Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-04-04T17:29:48.610060Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-04-04T17:29:48.610078Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-04-04T17:29:48.613586Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Fri, 04 Apr 2025 17:29:48 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-04-04T17:29:48.613703Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer |98.1%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest |98.1%| [TS] {RESULT} ydb/library/yql/providers/solomon/async_io/ut/unittest >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2025-04-04T17:29:23.743054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511413569012188:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:23.743096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00088b/r3tmp/tmpqS1NHQ/pdisk_1.dat 2025-04-04T17:29:24.121971Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3175, node 1 2025-04-04T17:29:24.151535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:24.151708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:24.153569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:24.197176Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:24.197205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:24.197214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:24.197387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
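[Editorial sketch, not part of the test output.] The Solomon sink trace that just closed above (ydb/library/yql/providers/solomon/async_io/ut) shows the write path end to end: rows are packed into roughly 100 KB buffers, buffers are sent while the in-flight window allows it (otherwise "Skip sending to solomon. Reason: MaxRequestsInflight"), checkpoints wait for in-flight requests to drain, and the test server acknowledges each batch with {"writtenMetricsCount": N}. The responses identify that server as Python/3.12 aiohttp/3.9.5. Below is a minimal mock of such an endpoint; only the aiohttp server and the response body come from the log — the URL path, port, and request payload shape are assumptions for illustration.

# Minimal mock of the Solomon-style endpoint seen in the trace above.
# Grounded in the log: aiohttp server, JSON response {"writtenMetricsCount": N}.
# Assumptions (not in the log): the /api/v2/push path, the port, and that the
# request body is JSON carrying a "metrics" list to count.
from aiohttp import web

async def push_metrics(request: web.Request) -> web.Response:
    payload = await request.json()
    written = len(payload.get("metrics", []))  # hypothetical payload shape
    return web.json_response({"writtenMetricsCount": written})

def make_app() -> web.Application:
    app = web.Application()
    app.router.add_post("/api/v2/push", push_metrics)  # hypothetical path
    return app

if __name__ == "__main__":
    web.run_app(make_app(), port=8080)  # hypothetical port

With a mock like this, the sink's "Sent N metrics ... to solomon" / "Solomon response[i]" pairs in the trace correspond to one POST and one JSON acknowledgement each.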
2025-04-04T17:29:24.503918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:29:24.560901Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 TClient is connected to server localhost:25336 waiting... 2025-04-04T17:29:26.661900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-04-04T17:29:26.663653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:25336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787764586 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SQS" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743787764600 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 184467... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:26.994347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:27.000286Z node 1 :TX_PROXY ERROR: Actor# [1:7489511426453914986:2469] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:25336 waiting... 
2025-04-04T17:29:27.223565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480 2025-04-04T17:29:27.225086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1743787767015, "myFolder", "{\"k1\": \"v1\"}"); 2025-04-04T17:29:27.317255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511430748882451:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:27.317287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511430748882462:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:27.317335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:27.325599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480 2025-04-04T17:29:27.334639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511430748882465:2365], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2025-04-04T17:29:27.417259Z node 1 :TX_PROXY ERROR: Actor# [1:7489511430748882518:2624] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:28.207992Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0xkdgh8v9grsawvg6ca6vg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI4YTJkNzYtNDgxNDI4Y2EtMWRmMmM0NzMtNTE2N2RmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:28.536705Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jr0xkedt8fjshadyaka3mh5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2M3MTM3ODItYTExYTdmN2MtNDk3YTI2MGUtYjVkYWJhZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:28.665145Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jr0xkeqg6r69a9xnst0z32cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2MwYzgyZWMtYjE2Yjc3ZGMtMmUwM2ZkMjItNDRiMzNiZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:28.743246Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511413569012188:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:28.743316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:29:28.792681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jr0xkety7y0ccdej1q5zmcsa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEwMWY2ZTEtYmYwY2U5OGItODUzNTk5YTctZmZkZGU0NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:27.015000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:27.015000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-04T17:29:28.795706Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-04-04T17:29:28.898371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jr0xkf212v36yd2b0hbxng0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ1MmQ0YzMtNGRjYTUzNjUtNzdjYjFiNzgtYjRjMmViYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:28.903364Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. 
Ctx: { TraceId: 01jr0xkf269zy91ktf0fpwthxr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzExMTg0NzEtZmExNDk0OTctYzI4YTI3MzYtZmQwNDIzNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:29.009318Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jr0xkf5gee9bywkxpsewajg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTJhYTkyMDgtZGVhNGEzZDctMTVjMWMwZjktYWI1YzhjYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:29.015092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jr0xkf5p2txhcxpge0sgf4p5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM1NWI0YmItOTg5ZjJkYmItMjE2NWIzYTctYjg1NTFlZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:29.121121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jr0xkf8zfrtfsv33ecx31m93, Database: , DatabaseId: /Root, Session ... Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:34.986000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2025-04-04T17:29:34.986000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:34.986000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2025-04-04T17:29:34.986000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:34.986000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:34.986000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:33.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-04T17:29:36.982640Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:33.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-04T17:29:36.982711Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2025-04-04T17:29:34.986000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-04T17:29:36.982762Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:33.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-04T17:29:36.982789Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-04-04T17:29:34.986000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-04-04T17:29:36.982809Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-04-04T17:29:37.087797Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710747. Ctx: { TraceId: 01jr0xkq1y93dfsgmmejfqmd6g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE0ZjFhNDEtOThjMzkzNzYtODZkZDc0MzUtZmRmM2IxY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.093334Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710748. Ctx: { TraceId: 01jr0xkq248r8dc4ddjm2b4bpd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE0ZjFhNDEtOThjMzkzNzYtODZkZDc0MzUtZmRmM2IxY2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.099401Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710749. Ctx: { TraceId: 01jr0xkq2a5ecftespvwxjt4d9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZmNzU5MTQtNmY4Yzc1OGItYmJlMDBmYjYtZjY4YTRjYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.205703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710750. Ctx: { TraceId: 01jr0xkq5mf1mfsm87q683szc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ExNDYzZjctYjk1YWVhMGMtMzcxOTNhMzUtNDRjZTRlM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.210652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710751. Ctx: { TraceId: 01jr0xkq5sethfp02003dgmhxm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ExNDYzZjctYjk1YWVhMGMtMzcxOTNhMzUtNDRjZTRlM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.216284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710752. Ctx: { TraceId: 01jr0xkq5z738k89bx50h3z47c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNhNGEzODQtNmVmOWZkN2QtZWQ4OWU3ZjQtYjg1YjllNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.323369Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710753. 
Ctx: { TraceId: 01jr0xkq9a9r6f2rrgmh9w77jv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU5Mzc1NmYtYWQ0MmM2NzgtNTk0MjBkYzgtZTVmOTk1NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.329009Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710754. Ctx: { TraceId: 01jr0xkq9f1tdkk29sptbcvyx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU5Mzc1NmYtYWQ0MmM2NzgtNTk0MjBkYzgtZTVmOTk1NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.335570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710755. Ctx: { TraceId: 01jr0xkq9p21h3xnwghmsbdz28, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBkNDgzOGYtNTc5ZTAyYWEtMzgxODhlOWItZmI1MTJjYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:37.361743Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710756. Ctx: { TraceId: 01jr0xkpz6f59qb1bcmszgnckt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI4YTJkNzYtNDgxNDI4Y2EtMWRmMmM0NzMtNTE2N2RmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient is connected to server localhost:25336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743787764586 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 5 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1743787767379 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SQS" Pat... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:37.572036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710757:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:37.579122Z node 1 :TX_PROXY ERROR: Actor# [1:7489511473698557318:3595] txid# 281474976710758, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:25336 waiting... 
2025-04-04T17:29:37.847353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710761:1, at schemeshard: 72057594046644480 2025-04-04T17:29:37.849174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 ===Started add queue batch 2025-04-04T17:29:39.088190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:29:39.088236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:55.965497Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710762. Ctx: { TraceId: 01jr0xm8vne6t2g283x65wt7kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI4YTJkNzYtNDgxNDI4Y2EtMWRmMmM0NzMtNTE2N2RmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:57.064289Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710763. Ctx: { TraceId: 01jr0xma3r4cgswxvf0g7wz13t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI4YTJkNzYtNDgxNDI4Y2EtMWRmMmM0NzMtNTE2N2RmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:58.270584Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710764. Ctx: { TraceId: 01jr0xmbj18p12g0yzreamx087, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAyNWQ3NC00ZDQ5ODdiYy02Mzg2MmMwOC05OWYwZWU4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:58.319867Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710765. Ctx: { TraceId: 01jr0xmbse3eb2tmhky0hrdeq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAyNWQ3NC00ZDQ5ODdiYy02Mzg2MmMwOC05OWYwZWU4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:58.370654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710766. Ctx: { TraceId: 01jr0xmbv142p2q8n2qq4vaxn8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAyNWQ3NC00ZDQ5ODdiYy02Mzg2MmMwOC05OWYwZWU4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:58.378356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710767. Ctx: { TraceId: 01jr0xmbv92b5td9t6naxkkzcc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAyNWQ3NC00ZDQ5ODdiYy02Mzg2MmMwOC05OWYwZWU4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:29:58.525503Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710768. Ctx: { TraceId: 01jr0xmbvfd4g5s9hw79z0wmdt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM5ZjBkM2UtNTJkMWFmZDQtYTkwODZjMDYtNzdhYzIzMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |98.1%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> MediatorTest::TabletAckWhenDead [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> BulkUpsert::BulkUpsert >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView >> TTxDataShardReshuffleKMeansScan::BadRequest >> MediatorTest::PlanStepAckToReconnectedMediator >> KeyValueGRPCService::SimpleCopyUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleWriteRead >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> TestPurecalcFilter::CompilationValidation [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState >> test_query_cache.py::TestQueryCache::test [GOOD] >> TestRawParser::Simple >> TestRawParser::Simple [GOOD] >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> TestRawParser::ManyValues >> TestRawParser::ManyValues [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> TestRawParser::TypeKindsValidation >> TestRawParser::TypeKindsValidation [GOOD] >> TSentinelTests::PDiskUnknownState [GOOD] >> TSentinelTests::PDiskErrorState >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|98.1%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> MediatorTest::WatcherReconnect >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test.py::test_local [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2025-04-04T17:28:54.353789Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-04T17:28:54.353833Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_0 == "str1"' (filter id: [0:0:0]) 2025-04-04T17:28:54.353857Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_0 == "str1"; 2025-04-04T17:28:54.353891Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-04T17:28:54.354075Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7489511291777338233:2051] 2025-04-04T17:28:56.814569Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7489511291777338233:2051] [id 1]: Started compile request 2025-04-04T17:28:57.283169Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7489511291777338233:2051] [id 1]: Compilation completed for request 2025-04-04T17:28:57.283323Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [1:7489511291777338233:2051] 2025-04-04T17:28:57.283466Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-04T17:28:57.283490Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-04T17:28:57.283561Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-04-04T17:28:57.283579Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_1 == "str2"' (filter id: [1:0:0]) 2025-04-04T17:28:57.283595Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_1 == "str2"; 2025-04-04T17:28:57.283610Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-04-04T17:28:57.283650Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7489511291777338233:2051] 2025-04-04T17:28:57.283700Z node 1 :FQ_ROW_DISPATCHER TRACE: 
TPurecalcCompileActor [1:7489511291777338233:2051] [id 2]: Started compile request 2025-04-04T17:28:57.303291Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7489511291777338233:2051] [id 2]: Compilation completed for request 2025-04-04T17:28:57.303374Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 2 from [1:7489511291777338233:2051] 2025-04-04T17:28:57.303479Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-04-04T17:28:57.303520Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-04-04T17:28:57.303555Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [2:0:0] 2025-04-04T17:28:57.303618Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 3 clients, number rows: 3 2025-04-04T17:28:57.303647Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [1:0:0]) 2025-04-04T17:28:57.303670Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-04T17:28:57.306532Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Add 3 rows to client [2:0:0] without filtering 2025-04-04T17:28:57.306563Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [0:0:0]) 2025-04-04T17:28:57.306574Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-04-04T17:28:57.306624Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Remove filter with id [2:0:0] 2025-04-04T17:28:57.306669Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 1 2025-04-04T17:28:57.306679Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [1:0:0]) 2025-04-04T17:28:57.306688Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:28:57.306728Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [0:0:0]) 2025-04-04T17:28:57.306732Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:28:57.592529Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-04T17:28:57.592573Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a1 = "str1"' (filter id: [0:0:0]) 2025-04-04T17:28:57.592617Z node 2 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 = "str1"; 2025-04-04T17:28:57.592652Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-04T17:28:57.592805Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7489511303209438502:2051] 2025-04-04T17:28:59.941054Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7489511303209438502:2051] [id 1]: Started compile request 2025-04-04T17:28:59.960380Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7489511303209438502:2051] [id 1]: Compilation completed for request 2025-04-04T17:28:59.960451Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [2:7489511303209438502:2051] 2025-04-04T17:28:59.960608Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-04T17:28:59.960640Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation 
finished 2025-04-04T17:28:59.960658Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-04T17:29:00.265799Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-04T17:29:00.265820Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a2 ... 50' (filter id: [0:0:0]) 2025-04-04T17:29:00.265841Z node 3 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; 2025-04-04T17:29:00.265875Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-04T17:29:00.265969Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7489511318281402726:2051] 2025-04-04T17:29:02.710226Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [3:7489511318281402726:2051] [id 1]: Started compile request 2025-04-04T17:29:02.746424Z node 3 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [3:7489511318281402726:2051] [id 1]: Compilation failed for request 2025-04-04T17:29:02.746606Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [3:7489511318281402726:2051] 2025-04-04T17:29:02.746672Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-04T17:29:02.746758Z node 3 :FQ_ROW_DISPATCHER ERROR: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:2:36: Error: Unexpected token '.' : cannot match to any predicted input... } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; } } 2025-04-04T17:29:05.961780Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [0:0:0] 2025-04-04T17:29:05.962996Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 2 2025-04-04T17:29:06.042690Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:06.042889Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 2 columns 2025-04-04T17:29:06.043808Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-04-04T17:29:06.043830Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_first = "str_first__large__"' (filter id: [0:0:0]) 2025-04-04T17:29:06.043862Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_first = "str_first__large__"; 2025-04-04T17:29:06.043888Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-04-04T17:29:06.078336Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-04-04T17:29:06.078376Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-04-04T17:29:06.078841Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [1:0:0] 2025-04-04T17:29:06.156876Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 3 2025-04-04T17:29:06.252337Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:06.252575Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 3 columns 2025-04-04T17:29:06.252608Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-04-04T17:29:06.252622Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_second = "str_second"' (filter id: [1:0:0]) 2025-04-04T17:29:06.252646Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_second = "str_second"; 2025-04-04T17:29:06.252671Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-04-04T17:29:06.277347Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-04-04T17:29:06.277378Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-04-04T17:29:06.278634Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFormatHandler [json_each_row]: Send 2 messages to parser 2025-04-04T17:29:06.278659Z node 4 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 2 messages to parse 2025-04-04T17:29:06.278779Z node 4 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"com_col": "event1", "col_first": "some_str", "col_second": "str_second"}{"com_col": "event2", "col_second": "some_str", "col_first": "str_first__large__"} 2025-04-04T17:29:06.280631Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFormatHandler [json_each_row]: TParserHandler: Got parsed data, number rows: 2 2025-04-04T17:29:06.280664Z node 4 :FQ_ROW_DISPATCHER TRACE: 
TTopicFormatHandler [json_each_row]: Send 2 messages to filters, first offset: 42, last offset: 43 2025-04-04T17:29:06.280679Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 2 2025-04-04T17:29:06.280694Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 2 rows to purecalc filter (filter id: [1:0:0]) 2025-04-04T17:29:06.280701Z node 4 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-04T17:29:06.283355Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFormatHandler [json_each_row]: TClientHandler [1:0:0]: OnFilteredData, row id: 0, offset: 42, new packer size: 32 2025-04-04T17:29:06.283434Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 2 rows to purecalc filter (filter id: [0:0:0]) 2025-04-04T17:29:06.283442Z node 4 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-04T17: ... 72Z node 20 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "hello1", "a2": null, "event": "event1"} 2025-04-04T17:29:27.896669Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:28.054219Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:28.515995Z node 22 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:28.516337Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:28.516384Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": 456, "a2": 42} 2025-04-04T17:29:28.516891Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:28.516932Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "456", "a2": -42} 2025-04-04T17:29:28.517237Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:28.517282Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "str", "a2": 99999} 2025-04-04T17:29:28.517580Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:28.517615Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: {"a1": "456", "a2": 42, "a3": 1.11.1} 2025-04-04T17:29:28.922984Z node 23 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:28.923222Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:28.923256Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": "-456"} 2025-04-04T17:29:29.402096Z node 24 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:29.402498Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:29.402565Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": {"key": "value"}, "a2": {"key2": "value2"}} 2025-04-04T17:29:29.403072Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:29.403126Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": {"key": "value", "nested": {"a": "b", "c":}}, "a2": "str"} 2025-04-04T17:29:29.403461Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:29.403525Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": {"key" "value"}, "a2": "str"} 
2025-04-04T17:29:29.880391Z node 25 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:29.880713Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:29.880756Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": true, "a2": false} 2025-04-04T17:29:29.881163Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:29.881201Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "true", "a2": falce} 2025-04-04T17:29:30.374819Z node 26 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-04-04T17:29:30.375070Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:30.375118Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": Yelse} 2025-04-04T17:29:30.375526Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:30.375568Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "st""r"} 2025-04-04T17:29:30.375733Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:30.375767Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "x"} {"a1": "y"} 2025-04-04T17:29:30.375948Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-04-04T17:29:30.375985Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: { 2025-04-04T17:29:30.780842Z node 27 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-04T17:29:30.781048Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [27:7489511443334228899:2051] 2025-04-04T17:29:35.359527Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7489511443334228899:2051] [id 0]: Started compile request 2025-04-04T17:29:35.410439Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7489511443334228899:2051] [id 0]: Compilation completed for request 2025-04-04T17:29:35.410579Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [27:7489511443334228899:2051] 2025-04-04T17:29:35.410808Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:29:35.411797Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:29:35.865209Z node 28 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-04T17:29:35.865576Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [28:7489511465925238928:2051] 2025-04-04T17:29:40.459226Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7489511465925238928:2051] [id 0]: Started compile request 2025-04-04T17:29:40.506087Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7489511465925238928:2051] [id 0]: Compilation completed for request 2025-04-04T17:29:40.506253Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [28:7489511465925238928:2051] 2025-04-04T17:29:40.507288Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:29:40.507439Z node 28 
:FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:29:41.116639Z node 29 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-04-04T17:29:41.116799Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [29:7489511492686052220:2051] 2025-04-04T17:29:45.566607Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7489511492686052220:2051] [id 0]: Started compile request 2025-04-04T17:29:45.618859Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7489511492686052220:2051] [id 0]: Compilation completed for request 2025-04-04T17:29:45.619005Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [29:7489511492686052220:2051] 2025-04-04T17:29:45.619228Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-04T17:29:45.619368Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-04T17:29:45.619447Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-04T17:29:45.619508Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-04T17:29:45.619548Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-04-04T17:29:46.205765Z node 30 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 is null; 2025-04-04T17:29:46.206009Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [30:7489511512808431213:2051] 2025-04-04T17:29:50.694288Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7489511512808431213:2051] [id 0]: Started compile request 2025-04-04T17:29:50.750034Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7489511512808431213:2051] [id 0]: Compilation completed for request 2025-04-04T17:29:50.750207Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [30:7489511512808431213:2051] 2025-04-04T17:29:50.750421Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:29:51.340232Z node 31 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 50; 2025-04-04T17:29:51.340390Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [31:7489511536021758981:2051] 2025-04-04T17:29:56.188848Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7489511536021758981:2051] [id 0]: Started compile request 2025-04-04T17:29:56.235967Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7489511536021758981:2051] [id 0]: Compilation completed for request 2025-04-04T17:29:56.236109Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [31:7489511536021758981:2051] 2025-04-04T17:29:56.236378Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-04-04T17:29:56.910764Z node 32 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 
50; 2025-04-04T17:29:56.910962Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [32:7489511556157025596:2051] 2025-04-04T17:30:01.710372Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [32:7489511556157025596:2051] [id 0]: Started compile request 2025-04-04T17:30:01.714592Z node 32 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [32:7489511556157025596:2051] [id 0]: Compilation failed for request 2025-04-04T17:30:01.714728Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [32:7489511556157025596:2051] 2025-04-04T17:30:02.803707Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 1 messages to parse 2025-04-04T17:30:02.803775Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2025-04-04T17:30:03.393911Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 3 messages to parse 2025-04-04T17:30:03.393965Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1", "a2": "101", "event": "event1"} 2025-04-04T17:30:03.394055Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 43, value: {"a1": "hello1", "a2": "101", "event": "event2"} 2025-04-04T17:30:03.394077Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 44, value: {"a2": "101", "a1": "hello1", "event": "event3"} |98.1%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> TopicSessionTests::ReadNonExistentTopic >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> Splitter::Crit [GOOD] >> Splitter::CritSimple >> KeyValueGRPCService::SimpleWriteRead [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> test_timeout.py::TestTimeout::test_timeout >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> TDataShardRSTest::TestCleanupInRS+UseSink >> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery [GOOD] >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> MediatorTest::WatcherReconnect [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus >> MediatorTest::MultipleSteps >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] >> 
Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> test_commit.py::TestCommit::test_commit >> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> DataShardReplication::ReplicatedTable-UseSink >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs >> Splitter::CritSimple [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 
82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
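The Splitter::CritSimple stderr above is dominated by serialize/parse round-trips over single-column batches whose serialized sizes cluster around a fixed blob budget (runs of ~7.9 MB chunks with one smaller remainder, ~8.9 MB batches, and so on). The real packaging logic lives in ydb/core/tx/columnshard/splitter and is not reproduced here; purely as an illustration of the size-threshold chunking pattern these traces suggest, with a hypothetical 8 MiB limit:

    // Illustrative only: split a serialized payload into chunks no larger
    // than a blob limit, so each chunk can be stored and re-parsed
    // independently. kMaxBlobBytes and the packaging scheme are assumptions
    // for this sketch, not constants from ydb/core/tx/columnshard/splitter.
    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <vector>

    constexpr std::size_t kMaxBlobBytes = 8 * 1024 * 1024;  // hypothetical

    std::vector<std::string> SplitIntoBlobs(const std::string& serialized) {
        std::vector<std::string> blobs;
        for (std::size_t pos = 0; pos < serialized.size(); pos += kMaxBlobBytes) {
            blobs.push_back(serialized.substr(pos, kMaxBlobBytes));
        }
        return blobs;
    }

    int main() {
        // A payload slightly larger than one blob, like the ~8.9 MB
        // serialize;size=8905200 batches in the traces above.
        std::string payload(8905200, 'x');
        for (const auto& blob : SplitIntoBlobs(payload)) {
            std::cout << "blob size: " << blob.size() << "\n";  // 8388608, 516592
        }
    }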
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1; |98.1%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] |98.1%| [TM] {RESULT} ydb/tests/functional/serializable/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.1%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> test_commit.py::TestCommit::test_commit [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections >> MediatorTest::MultipleSteps [GOOD] >> TopicSessionTests::ReadNonExistentTopic [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutToken >> TopicSessionTests::SlowSession >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> MediatorTest::WatchesBeforeFirstStep >> 
TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> test_result_limits.py::TestResultLimits::test_many_rows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] Test command err: 2025-04-04T17:28:38.816568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511222660798030:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:38.816809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T17:28:38.963657Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000fd3/r3tmp/tmpJLb2EO/pdisk_1.dat 2025-04-04T17:28:39.138822Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:39.155908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:39.156016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:39.158596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2043, node 1 2025-04-04T17:28:39.237873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/000fd3/r3tmp/yandexdNdGGq.tmp 2025-04-04T17:28:39.237897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/000fd3/r3tmp/yandexdNdGGq.tmp 2025-04-04T17:28:39.238048Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/000fd3/r3tmp/yandexdNdGGq.tmp 2025-04-04T17:28:39.238162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:39.290964Z INFO: TTestServer started on Port 27619 GrpcPort 2043 TClient is connected to server localhost:27619 PQClient connected to localhost:2043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:28:39.606920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:39.630188Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:28:39.649139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:28:41.732666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511235545700741:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:41.732770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:41.733739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511235545700753:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:41.738598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:28:41.765234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511235545700755:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:28:41.928097Z node 1 :TX_PROXY ERROR: Actor# [1:7489511235545700819:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:28:41.955006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:28:41.963168Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511235545700836:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:28:41.964930Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzAyZjQ3YzUtNmM4MjRlODktNjE5Y2QyYTYtNzAwZGY4NGI=, ActorId: [1:7489511235545700715:2336], ActorState: ExecuteState, TraceId: 01jr0xj0zmdsy97tjafjpybt0f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:28:41.967213Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:28:41.987245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:28:42.055549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489511239840668407:2628] 2025-04-04T17:28:43.816769Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511222660798030:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:28:43.816852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-04-04T17:28:48.221499Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-04-04T17:28:48.235182Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T17:28:48.236678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7489511265610472471:2786], Recipient [1:7489511226955765750:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:48.236715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:28:48.236743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T17:28:48.236784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7489511265610472467:2783], Recipient [1:7489511226955765750:2191]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-04-04T17:28:48.236806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T17:28:48.279199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:28:48.279716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-04-04T17:28:48.280046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-04-04T17:28:48.280095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-04-04T17:28:48.280152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-04-04T17:28:48.280229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-04-04T17:28:48.280305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was ... PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.398758Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:30:14.496135Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511619586473885:2830], Partition 3, Sender [0:0:0], Recipient [5:7489511619586474009:2848], Cookie: 0 2025-04-04T17:30:14.496207Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511619586474009:2848]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.496231Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.496272Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.496343Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.496365Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.496392Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:30:14.496440Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511619586473883:2829], Partition 4, Sender [0:0:0], Recipient [5:7489511619586474005:2846], Cookie: 0 2025-04-04T17:30:14.496475Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511619586474005:2846]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.496490Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.496513Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.496541Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.496556Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.496572Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:30:14.496611Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511580931766643:2455], Partition 0, Sender [0:0:0], Recipient [5:7489511580931766704:2459], Cookie: 0 2025-04-04T17:30:14.496643Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511580931766704:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.496658Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.496677Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.496706Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.496719Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.496736Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:30:14.497717Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511606701571614:2756], Partition 1, Sender [0:0:0], Recipient [5:7489511606701571698:2766], Cookie: 0 2025-04-04T17:30:14.497774Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511606701571698:2766]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.497790Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.497814Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.497848Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.497864Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.497884Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:30:14.518498Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511606701571612:2755], Partition 2, Sender [0:0:0], Recipient [5:7489511606701571695:2764], Cookie: 0 2025-04-04T17:30:14.518589Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511606701571695:2764]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.518618Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.518670Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.518764Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.518788Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.518817Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:30:14.602453Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511580931766643:2455], Partition 0, Sender [0:0:0], Recipient [5:7489511580931766704:2459], Cookie: 0 2025-04-04T17:30:14.602546Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511580931766704:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.602573Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.602620Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.602696Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.602719Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.602746Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:30:14.602807Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511606701571614:2756], Partition 1, Sender [0:0:0], Recipient [5:7489511606701571698:2766], Cookie: 0 2025-04-04T17:30:14.602839Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511606701571698:2766]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.602849Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.602872Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.602898Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.602911Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.602925Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:30:14.602965Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511619586473885:2830], Partition 3, Sender [0:0:0], Recipient [5:7489511619586474009:2848], Cookie: 0 2025-04-04T17:30:14.602990Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511619586474009:2848]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.603001Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.603022Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.603046Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.603060Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.603073Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-04-04T17:30:14.603108Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511619586473883:2829], Partition 4, Sender [0:0:0], Recipient [5:7489511619586474005:2846], Cookie: 0 2025-04-04T17:30:14.603134Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511619586474005:2846]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.603144Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.603166Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.603193Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.603205Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.603219Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-04-04T17:30:14.626502Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7489511606701571612:2755], Partition 2, Sender [0:0:0], Recipient [5:7489511606701571695:2764], Cookie: 0 2025-04-04T17:30:14.626585Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7489511606701571695:2764]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.626618Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-04-04T17:30:14.626665Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-04-04T17:30:14.626742Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-04-04T17:30:14.626765Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-04-04T17:30:14.626792Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection >> TCreateAndDropViewTest::CallDropViewOnTable [FAIL] >> TCreateAndDropViewTest::DropSameViewTwice |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |98.2%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding |98.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_commit.py::TestCommit::test_commit [GOOD] |98.2%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test >> MediatorTest::RebootTargetTablets >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding |98.2%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries >> DataShardFollowers::FollowerAfterDataCompaction >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery |98.2%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> KeyValueGRPCService::SimpleWriteReadWithoutToken [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::PDiskFaultyState >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery >> MediatorTest::RebootTargetTablets [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists >> MediatorTest::ResendSubset >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs >> TopicSessionTests::SlowSession [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TopicSessionTests::TwoSessionsWithDifferentSchemes |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch >> MediatorTest::ResendSubset [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] Test command err: 2025-04-04T17:30:01.667364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511580223787127:2252];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:30:01.667465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a44/r3tmp/tmpsQiNpZ/pdisk_1.dat 2025-04-04T17:30:02.108630Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:30:02.143541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:30:02.143654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:30:02.147092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:30:02.186555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:30:02.245215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:30:02.281667Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7489511584518754807:2295] 2025-04-04T17:30:02.281987Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:30:02.306902Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:30:02.306970Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:30:02.311407Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:30:02.311472Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:30:02.311566Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:30:02.313439Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:30:02.313506Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:30:02.313540Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7489511584518754823:2295] in generation 1 2025-04-04T17:30:02.314589Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:30:02.365933Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:30:02.367082Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:30:02.367168Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7489511584518754825:2296] 2025-04-04T17:30:02.367184Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:30:02.367214Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:30:02.367225Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:02.370709Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511584518754802:2293], serverId# [1:7489511584518754822:2303], sessionId# [0:0:0] 2025-04-04T17:30:02.370740Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:30:02.370863Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:30:02.370960Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:30:02.370991Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:02.371060Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:30:02.371076Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:30:02.371109Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:30:02.372126Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:30:02.372231Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-04-04T17:30:02.373486Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:30:02.373876Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:30:02.373972Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:30:02.378890Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 
72075186224037888, clientId# [1:7489511584518754839:2312], serverId# [1:7489511584518754840:2313], sessionId# [0:0:0] 2025-04-04T17:30:02.383946Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743787802421 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787802421 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:30:02.383996Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:02.384152Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:30:02.384212Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:30:02.384226Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:30:02.384262Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743787802421:281474976710657] in PlanQueue unit at 72075186224037888 2025-04-04T17:30:02.384494Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787802421:281474976710657 keys extracted: 0 2025-04-04T17:30:02.384631Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:30:02.384705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:30:02.384752Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-04-04T17:30:02.392190Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:30:02.392732Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:02.394029Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743787802420 2025-04-04T17:30:02.394059Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:02.394085Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743787802428 2025-04-04T17:30:02.394141Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787802421} 2025-04-04T17:30:02.394175Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:30:02.394208Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:30:02.394222Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:30:02.394257Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-04-04T17:30:02.394295Z node 1 :TX_DATASHARD DEBUG: Complete [1743787802421 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7489511580223787347:2189], exec latency: 7 ms, propose latency: 9 ms 2025-04-04T17:30:02.394327Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-04-04T17:30:02.394359Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:02.399070Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-04-04T17:30:02.399124Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-04-04T17:30:02.408842Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511584518754878:2341], serverId# [1:7489511584518754879:2342], sessionId# [0:0:0] 2025-04-04T17:30:02.414421Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:30:02.414572Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-04-04T17:30:02.415942Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:30:02.416969Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1743787802463 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787802463 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:30:02.416993Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:02.417115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:30:02.417143Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:30:02.417164Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743787802463:281474976710658] in PlanQueue unit at 72075186224037888 2025-04-04T17:30:02.417311Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743787802463:281474976710658 keys extracted: 0 2025-04-04T17:30:02.417583Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:02.417892Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743787802463} 2025-04-04T17:30:02.417938Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:30:02.417966Z node 1 :TX_DATASHARD DEBUG: Complete [1743787802463 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7489511584518754873:2337], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:30:02.417982Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:02.420909Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7489511584518754889:2352], serverId# [1:7489511584518754890:2353], sessionId# [0:0:0] 2025-04-04T17:30:02.421704Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:30:02.421772Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-04-04T17:30:02.423951Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037 ... 
-04T17:30:29.957936Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:30:29.957955Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743787830001:281474976715688] in PlanQueue unit at 72075186224037895 2025-04-04T17:30:29.958109Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037895 loaded tx from db 1743787830001:281474976715688 keys extracted: 0 2025-04-04T17:30:29.958197Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:30:29.958313Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037895 2025-04-04T17:30:29.958368Z node 5 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037895 2025-04-04T17:30:29.958806Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:29.960285Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:30:29.960285Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-04-04T17:30:29.961247Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037895 step# 1743787830001} 2025-04-04T17:30:29.961287Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-04-04T17:30:29.961343Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-04-04T17:30:29.961409Z node 5 :TX_DATASHARD DEBUG: Complete [1743787830001 : 281474976715688] from 72075186224037895 at tablet 72075186224037895 send result to client [5:7489511679701368623:2144], exec latency: 0 ms, propose latency: 3 ms 2025-04-04T17:30:29.961443Z node 5 :TX_DATASHARD INFO: 72075186224037895 Sending notify to schemeshard 72057594046644480 txId 281474976715688 state PreOffline TxInFly 0 2025-04-04T17:30:29.961485Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-04-04T17:30:29.962013Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715688 datashard 72075186224037895 state PreOffline 2025-04-04T17:30:29.962043Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-04-04T17:30:29.962927Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-04-04T17:30:29.962976Z node 5 :TX_DATASHARD INFO: 72075186224037895 Initiating switch from PreOffline to Offline state 2025-04-04T17:30:29.964961Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-04-04T17:30:29.969921Z node 5 :TX_DATASHARD INFO: 72075186224037895 Reporting state Offline to schemeshard 72057594046644480 2025-04-04T17:30:29.970039Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-04-04T17:30:29.970088Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 
2025-04-04T17:30:29.971770Z node 5 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037895 state Offline 2025-04-04T17:30:29.974472Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037896 actor [5:7489511696881239328:2362] 2025-04-04T17:30:29.974687Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:30:29.975970Z node 5 :TX_DATASHARD INFO: OnTabletStop: 72075186224037895 reason = ReasonStop 2025-04-04T17:30:29.976417Z node 5 :TX_DATASHARD INFO: OnTabletDead: 72075186224037895 2025-04-04T17:30:29.976504Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037895 2025-04-04T17:30:29.976909Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2025-04-04T17:30:29.998676Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:30:29.998778Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:30:30.000737Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037896 2025-04-04T17:30:30.000802Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037896 2025-04-04T17:30:30.000842Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037896 2025-04-04T17:30:30.001199Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:30:30.001262Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:30:30.001291Z node 5 :TX_DATASHARD DEBUG: DataShard 72075186224037896 persisting started state actor id [5:7489511696881239357:2362] in generation 1 2025-04-04T17:30:30.003571Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:30:30.003629Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037896 2025-04-04T17:30:30.003706Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:30:30.003743Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037896, actorId: [5:7489511701176206655:2363] 2025-04-04T17:30:30.003756Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-04-04T17:30:30.003766Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037896, state: WaitScheme 2025-04-04T17:30:30.003778Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-04T17:30:30.003893Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037896 2025-04-04T17:30:30.003996Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037896 2025-04-04T17:30:30.004028Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-04T17:30:30.004044Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:30.004060Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037896 TxInFly 0 2025-04-04T17:30:30.004077Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-04T17:30:30.004966Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7489511696881239336:3246], serverId# [5:7489511696881239342:3250], sessionId# [0:0:0] 2025-04-04T17:30:30.005048Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 
72075186224037896 2025-04-04T17:30:30.005264Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037896 txId 281474976715689 ssId 72057594046644480 seqNo 2:16 2025-04-04T17:30:30.005336Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715689 at tablet 72075186224037896 2025-04-04T17:30:30.006617Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-04-04T17:30:30.006900Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037896 2025-04-04T17:30:30.006962Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme 2025-04-04T17:30:30.011726Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7489511701176206661:3267], serverId# [5:7489511701176206662:3268], sessionId# [0:0:0] 2025-04-04T17:30:30.011855Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:30:30.012039Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715689 at step 1743787830057 at tablet 72075186224037896 { Transactions { TxId: 281474976715689 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743787830057 MediatorID: 72057594046382081 TabletID: 72075186224037896 } 2025-04-04T17:30:30.012055Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-04T17:30:30.012154Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-04-04T17:30:30.012200Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-04T17:30:30.012215Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 1 2025-04-04T17:30:30.012236Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743787830057:281474976715689] in PlanQueue unit at 72075186224037896 2025-04-04T17:30:30.012501Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037896 loaded tx from db 1743787830057:281474976715689 keys extracted: 0 2025-04-04T17:30:30.012628Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-04-04T17:30:30.012811Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-04-04T17:30:30.012854Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037896 tableId# [OwnerId: 72057594046644480, LocalPathId: 14] schema version# 1 2025-04-04T17:30:30.013283Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037896 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-04-04T17:30:30.013618Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:30.016283Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037896 time 1743787830056 2025-04-04T17:30:30.016310Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-04T17:30:30.016354Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037896 step# 1743787830057} 2025-04-04T17:30:30.016400Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
72075186224037896 2025-04-04T17:30:30.016448Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-04-04T17:30:30.016466Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-04-04T17:30:30.016487Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037896 2025-04-04T17:30:30.016530Z node 5 :TX_DATASHARD DEBUG: Complete [1743787830057 : 281474976715689] from 72075186224037896 at tablet 72075186224037896 send result to client [5:7489511679701368623:2144], exec latency: 0 ms, propose latency: 3 ms 2025-04-04T17:30:30.016554Z node 5 :TX_DATASHARD INFO: 72075186224037896 Sending notify to schemeshard 72057594046644480 txId 281474976715689 state Ready TxInFly 0 2025-04-04T17:30:30.016587Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-04-04T17:30:30.016645Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037896 coordinator 72057594046316545 last step 0 next step 1743787830064 2025-04-04T17:30:30.020268Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715689 datashard 72075186224037896 state Ready 2025-04-04T17:30:30.020314Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 Got TEvSchemaChangedResult from SS at 72075186224037896 |98.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |98.2%| [TM] {RESULT} ydb/tests/functional/wardens/py3test >> MediatorTest::ResendNotSubset >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery [GOOD] >> Functions::CreateRequest [GOOD] >> Functions::CreateResponse [GOOD] >> KafkaProtocol::ProduceScenario |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 >> TCreateAndDropViewTest::DropViewIfExists [FAIL] >> TCreateAndDropViewTest::DropViewInFolder >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding |98.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding >> MediatorTest::ResendNotSubset [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt >> MediatorTest::OneCoordinatorResendTxNotLost >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery >> BasicStatistics::TwoTables [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus >> KafkaProtocol::ProduceScenario [GOOD] >> KafkaProtocol::FetchScenario >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2025-04-04T17:28:43.649016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:43.649355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:43.649499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00096f/r3tmp/tmpge6fh1/pdisk_1.dat 2025-04-04T17:28:44.023459Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1348, node 1 2025-04-04T17:28:44.271132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:44.271191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:44.271227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:44.271774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:44.279496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:44.364775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:44.364918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:44.379107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22467 2025-04-04T17:28:44.935408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:48.217432Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:48.255343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:48.255476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:48.283210Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:48.285266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:48.512493Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.513028Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.513447Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.513562Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.513631Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.513836Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.513895Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.513990Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.514043Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:48.688989Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:48.689109Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:48.702409Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:48.851775Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:48.900846Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:48.900942Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:48.934393Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:48.934621Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:48.934850Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:48.934911Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:48.934987Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:48.935048Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:48.935105Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:48.935160Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:48.936378Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:48.964220Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:48.964348Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:48.970617Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:28:48.976252Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:28:48.977104Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:28:48.982818Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:28:49.001156Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:49.001223Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:49.001315Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:28:49.016307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:49.032465Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:49.076499Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:49.245758Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:49.397950Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:49.508841Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:50.486058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.486174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.502317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:28:50.701565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2373:3104], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.701729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.703089Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2378:3108]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:50.703301Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:50.703398Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2380:3110] 2025-04-04T17:28:50.703508Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2380:3110] 2025-04-04T17:28:50.704070Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2381:2874] 2025-04-04T17:28:50.704300Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2380:3110], server id = [2:2381:2874], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:50.704446Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2381:2874], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:50.704490Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:28:50.704681Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:50.704743Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2378:3108], StatRequests.size() = 1 2025-04-04T17:28:50.722871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2385:3114], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.722962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.723326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2390:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.729562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:28:50.928765Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:50.928850Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:51.037265Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2380:3110], schemeshard count = 1 2025-04-04T17:28:51.507806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorA ... estsCount[ 1 ] 2025-04-04T17:30:32.456242Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:30:32.456293Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6683:4769], StatRequests.size() = 1 2025-04-04T17:30:32.950232Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:30:32.950622Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:30:32.950930Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:30:33.023027Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-04-04T17:30:33.023119Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 224.000000s, at schemeshard: 72075186224037897 2025-04-04T17:30:33.023537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-04-04T17:30:33.044347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:30:33.324065Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6714:4783]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:33.324329Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:30:33.324388Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6714:4783], StatRequests.size() = 1 2025-04-04T17:30:33.780152Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:30:33.780232Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:30:33.780283Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-04-04T17:30:33.780328Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T17:30:33.780702Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:30:33.845315Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:30:33.850691Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6741:4806], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:30:33.850842Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6750:4811], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:30:33.851004Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:30:33.901755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:30:34.030655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6755:4814], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:30:34.198953Z node 2 :TX_PROXY ERROR: Actor# [2:6853:4861] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:30:34.230962Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6882:4876]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:34.231267Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:30:34.231320Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6882:4876], StatRequests.size() = 1 2025-04-04T17:30:34.418179Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2JiYjFkZmEtMWM5ODUwYjEtYmUxNDBlZjAtNzAxMDg4YzE=, TxId: 2025-04-04T17:30:34.419183Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2JiYjFkZmEtMWM5ODUwYjEtYmUxNDBlZjAtNzAxMDg4YzE=, TxId: 2025-04-04T17:30:34.419966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:30:34.436726Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-04-04T17:30:34.436803Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:30:34.742652Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6914:4896]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:34.743045Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:30:34.743103Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6914:4896], StatRequests.size() = 1 2025-04-04T17:30:35.557438Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6955:4918]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:35.557771Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:30:35.557814Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6955:4918], StatRequests.size() = 1 2025-04-04T17:30:35.922893Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:30:35.939069Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:30:35.939132Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:30:35.939176Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:30:35.939211Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:30:35.939558Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:30:35.942168Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:30:35.968200Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTQ1ZmZkMDQtY2IxYjFjZC0yODM5ZTJmLTZkYWUwZTE4, TxId: 2025-04-04T17:30:35.968269Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTQ1ZmZkMDQtY2IxYjFjZC0yODM5ZTJmLTZkYWUwZTE4, TxId: 2025-04-04T17:30:35.968833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:30:35.989771Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:30:35.989833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:30:36.284619Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7021:4958]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:36.284907Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:30:36.284951Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7021:4958], StatRequests.size() = 1 2025-04-04T17:30:37.025555Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7064:4982]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:37.025857Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-04T17:30:37.025902Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7064:4982], StatRequests.size() = 1 2025-04-04T17:30:37.374422Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:30:37.374592Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:30:37.374983Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:30:37.385930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:30:37.385990Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:30:37.386044Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:30:37.386080Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:30:37.386407Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:30:37.389002Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:30:37.403056Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjAxOTI0NmYtNjFiNWRmYTctZTUwM2UyNjEtYjI4MzAxYjk=, TxId: 2025-04-04T17:30:37.403121Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjAxOTI0NmYtNjFiNWRmYTctZTUwM2UyNjEtYjI4MzAxYjk=, TxId: 2025-04-04T17:30:37.403668Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:30:37.417909Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:30:37.417969Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:30:37.710840Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7125:5017]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:37.711188Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-04T17:30:37.711237Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7125:5017], StatRequests.size() = 1 2025-04-04T17:30:37.712184Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7127:5019]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:37.716970Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2025-04-04T17:30:37.717040Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7127:5019], StatRequests.size() = 1 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD] >> Coordinator::LastEmptyStepResent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: 2025-04-04T17:29:49.365897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:49.366546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:49.366605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a3e/r3tmp/tmpsGv9gV/pdisk_1.dat 2025-04-04T17:29:49.836575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:49.894303Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:49.938122Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:29:49.938962Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:29:49.940855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:49.941768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:49.954535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:50.042313Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:29:50.042388Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:29:50.044428Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:29:50.180790Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:29:50.180912Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:29:50.182077Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:29:50.182176Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:29:50.182453Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:29:50.182599Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-04-04T17:29:50.182816Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:29:50.183069Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:29:50.189589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:50.190622Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:29:50.190699Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:29:50.246400Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:50.247451Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:50.247874Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:29:50.248712Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:50.261432Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:50.291009Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:50.291155Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:50.292846Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:50.292909Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:50.292951Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:50.295522Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:50.295675Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:50.295764Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:29:50.306559Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:50.345960Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:50.347256Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:50.347417Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:29:50.347459Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:50.347510Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:50.347557Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:50.347777Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient 
[1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:50.347821Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:50.348885Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:50.349081Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:50.349225Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:50.349280Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:50.349422Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:29:50.349475Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:29:50.349508Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:29:50.349539Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:50.349583Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:50.349692Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:50.349724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:50.349797Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:29:50.351058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:29:50.351114Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:29:50.351252Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:50.351569Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:29:50.351639Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:50.351733Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:50.351880Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:29:50.351929Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:29:50.351976Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:29:50.352054Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:29:50.352351Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:29:50.352392Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:29:50.352441Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:29:50.352484Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:29:50.352532Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:29:50.352575Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:29:50.352622Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:29:50.352653Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:29:50.352678Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:29:50.354168Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:29:50.354230Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 720751862 ... 2025-04-04T17:30:39.581705Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:30:39.581767Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:872:2704], serverId# [8:873:2705], sessionId# [0:0:0] 2025-04-04T17:30:39.582070Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549570, Sender [8:871:2703], Recipient [8:664:2569]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2025-04-04T17:30:39.582961Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v1500/18446744073709551615 ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-04-04T17:30:39.583159Z node 8 :TX_DATASHARD TRACE: Lock 281474976715660 marked broken at v{min} 2025-04-04T17:30:39.599167Z node 8 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-04-04T17:30:39.600087Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [8:24:2071], Recipient [8:664:2569]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 1501} 2025-04-04T17:30:39.600155Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-04-04T17:30:39.600213Z node 8 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-04-04T17:30:39.600283Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:39.747833Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jr0xnm3gexvmzthqca8q5hke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=YTc1NTRmYTQtNDMyN2U5MDMtNGVjMjMwYzEtMzExYTVlMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:30:39.749366Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [8:889:2624], Recipient [8:664:2569]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 889 RawX2: 34359740992 } TxBody: " \0018\001j7\010\001\032\'\n#\t\214\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\n\010\340\247\022\020\0020\000@\n\220\001\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 1500 TxId: 18446744073709551615 } 2025-04-04T17:30:39.749449Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:30:39.749965Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:664:2569], Recipient [8:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:30:39.750012Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:30:39.750102Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:30:39.750376Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715660, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-04T17:30:39.750517Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:30:39.750605Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:30:39.750661Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:30:39.750716Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:30:39.750755Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:30:39.750817Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-04-04T17:30:39.750862Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:30:39.750886Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:30:39.750908Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:30:39.750932Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:30:39.751008Z node 8 :TX_DATASHARD TRACE: Operation [0:281474976715661] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193440 2025-04-04T17:30:39.751123Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715660 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-04T17:30:39.751224Z node 8 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:30:39.751292Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:30:39.751318Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:30:39.751343Z node 8 :TX_DATASHARD TRACE: Add 
[0:281474976715661] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:30:39.751368Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-04-04T17:30:39.751413Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715661 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:30:39.751492Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is DelayComplete 2025-04-04T17:30:39.751548Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:30:39.751605Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:30:39.751667Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:30:39.751723Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-04-04T17:30:39.751747Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:30:39.751782Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715661] at 72075186224037888 has finished 2025-04-04T17:30:39.751859Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:30:39.751915Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-04-04T17:30:39.751966Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:39.752683Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:897:2624], Recipient [8:664:2569]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T17:30:39.752778Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-04-04T17:30:39.752866Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-04-04T17:30:39.752977Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:30:39.753037Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-04-04T17:30:39.753086Z node 8 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:30:39.753129Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:30:39.753191Z node 8 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-04-04T17:30:39.753252Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:30:39.753288Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:30:39.753319Z node 8 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-04-04T17:30:39.753342Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 
72075186224037888 on unit ExecuteRead 2025-04-04T17:30:39.753483Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:30:39.753756Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615 2025-04-04T17:30:39.753830Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:897:2624], 0} after executionsCount# 1 2025-04-04T17:30:39.753899Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:897:2624], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:30:39.754003Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:897:2624], 0} finished in read 2025-04-04T17:30:39.754089Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:30:39.754115Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-04-04T17:30:39.754141Z node 8 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:30:39.754167Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:30:39.754208Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-04-04T17:30:39.754233Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:30:39.754285Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-04-04T17:30:39.754335Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-04-04T17:30:39.754447Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-04-04T17:30:39.755240Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:897:2624], Recipient [8:664:2569]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:30:39.755298Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-04-04T17:30:39.757944Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:664:2569]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_NOT_FOUND { items { uint32_value: 1 } items { uint32_value: 11 } } |98.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: 2025-04-04T17:29:46.153720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:46.154282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:46.154344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a74/r3tmp/tmpwGQYnp/pdisk_1.dat 2025-04-04T17:29:46.673194Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-04T17:29:46.673869Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-04T17:29:46.674679Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:616:2531] connected 2025-04-04T17:29:46.674812Z node 1 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [1:599:2521] HANDLE TEvMediatorConfiguration Version# 1 2025-04-04T17:29:46.675338Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-04T17:29:46.675517Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-04T17:29:46.675965Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:622:2536] connected 2025-04-04T17:29:46.676032Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:620:2535] to# [1:618:2533] ExecQueue 2025-04-04T17:29:46.676112Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:620:2535] bucket# 0 ... 
waiting for watcher to connect (done) 2025-04-04T17:29:46.676861Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:620:2535] to# [1:618:2533] ExecQueue 2025-04-04T17:29:46.676916Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-04T17:29:46.676963Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:619:2534] bucket.ActiveActor 2025-04-04T17:29:46.677025Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:620:2535]} 2025-04-04T17:29:46.677098Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 SEND to# [1:620:2535] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2025-04-04T17:29:46.688602Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:626:2540] connected 2025-04-04T17:29:46.688751Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-04T17:29:46.688837Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:624:2538] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 4 Coordinator# 72057594046316545 2025-04-04T17:29:46.689192Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2025-04-04T17:29:46.689268Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:619:2534] bucket.ActiveActor step# 1000 2025-04-04T17:29:46.689342Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2025-04-04T17:29:46.689608Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 SEND to# [1:620:2535] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... waiting for blocked plan step 2025-04-04T17:29:46.712574Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-04T17:29:46.712656Z node 1 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-04-04T17:29:46.712810Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [1:618:2533] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:624:2538]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2025-04-04T17:29:46.712921Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:624:2538]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2025-04-04T17:29:46.712968Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 marker# M2 2025-04-04T17:29:46.713017Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 SEND Ev to# [1:619:2534] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2025-04-04T17:29:46.713081Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:619:2534] bucket.ActiveActor step# 1010 2025-04-04T17:29:46.713205Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:624:2538]}}} marker# M4 2025-04-04T17:29:46.713420Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-04T17:29:46.714700Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:648:2552] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:29:46.714796Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-04T17:29:46.714849Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for blocked plan step (done) ... waiting for no pending commands 2025-04-04T17:29:46.715464Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:620:2535] to# [1:618:2533] ExecQueue 2025-04-04T17:29:46.715554Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:618:2533] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:620:2535] bucket# 0 ... waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2025-04-04T17:29:46.715913Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-04-04T17:29:46.715977Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 SEND to# [1:624:2538] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2025-04-04T17:29:46.716077Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:619:2534] Mediator# 72057594047365120 SEND to# [1:620:2535] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) 2025-04-04T17:29:50.028075Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:322:2364], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:50.028192Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:50.028397Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a74/r3tmp/tmp4Fri7M/pdisk_1.dat 2025-04-04T17:29:50.300531Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-04-04T17:29:50.301088Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-04-04T17:29:50.301594Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:616:2531] connected 2025-04-04T17:29:50.301689Z node 2 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [2:599:2521] HANDLE TEvMediatorConfiguration Version# 1 2025-04-04T17:29:50.302094Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-04-04T17:29:50.302360Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-04-04T17:29:50.302873Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:622:2536] connected 2025-04-04T17:29:50.302947Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:620:2535] to# [2:618:2533] ExecQueue 2025-04-04T17:29:50.302986Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:618:2533] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [2:620:2535] bucket# 0 ... waiting for watcher to connect (done) 2025-04-04T17:29:50.303227Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:620:2535] to# [2:618:2533] ExecQueue 2025-04-04T17:29:50.303276Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:618:2533] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-04-04T17:29:50.303313Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:618:2533] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [2:619:2534] bucket.ActiveActor 2025-04-04T17:29:50.303374Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:619:2534] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [2:620:2535]} 2025-04-04T17:29:50.303456Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:619:2534] Mediator# 72057594047365120 SEND to# [2:620:2535] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} ... 
waiting for no pending commands 2025-04-04T17:29:50.317602Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:620:2535] to# [2:618:2533] ExecQueue 2025-04-04T17:29:50.317691Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:618:2533] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:620:2535] bucket# 0 2025-04-04T17:29:50.317799Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:620:2535] to# [2:618:2533] ExecQueue 2025-04-04T17:29:50.317831Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:618:2533] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:620:2535] bucket# 0 2025-04-04T17:29:50.317892Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:620:2535] to# [2:618:2533] ExecQueue 2025-04-04T17:29:50.317920Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:618:2533] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:620:2535] bucket# 0 ... waiti ... ket# 0 ... waiting for no pending commands (done) 2025-04-04T17:30:41.806493Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:664:2560] connected 2025-04-04T17:30:41.806593Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-04T17:30:41.806643Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:662:2558] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316545 2025-04-04T17:30:41.807060Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:667:2563] connected 2025-04-04T17:30:41.807112Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-04T17:30:41.807159Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:665:2561] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316546 2025-04-04T17:30:41.807455Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-04-04T17:30:41.807505Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-04-04T17:30:41.807597Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-04-04T17:30:41.807627Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-04-04T17:30:41.807746Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [12:619:2534] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2025-04-04T17:30:41.807864Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 
0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2025-04-04T17:30:41.807924Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 txid# 2 marker# M2 2025-04-04T17:30:41.807982Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2025-04-04T17:30:41.808033Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2025-04-04T17:30:41.808061Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2025-04-04T17:30:41.808102Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:620:2535] bucket.ActiveActor step# 1010 2025-04-04T17:30:41.808210Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-04-04T17:30:41.808486Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-04-04T17:30:41.808593Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-04-04T17:30:41.809212Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:673:2567] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:30:41.809278Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-04T17:30:41.809316Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-04-04T17:30:41.809358Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-04T17:30:41.809708Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:674:2568] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:30:41.809754Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-04-04T17:30:41.809781Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-04-04T17:30:41.809828Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-04T17:30:41.820907Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:677:2571] connected 2025-04-04T17:30:41.821040Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-04-04T17:30:41.821112Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:675:2569] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 4 Coordinator# 72057594046316546 2025-04-04T17:30:41.821381Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-04-04T17:30:41.821429Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-04-04T17:30:41.821525Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:619:2534] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2025-04-04T17:30:41.821643Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:675:2569] 2025-04-04T17:30:41.821698Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2025-04-04T17:30:41.821754Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2025-04-04T17:30:41.821817Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2025-04-04T17:30:41.821865Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2025-04-04T17:30:41.821964Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-04-04T17:30:41.822048Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-04-04T17:30:41.833732Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:669:2565] ServerId: [12:673:2567] } 2025-04-04T17:30:41.876742Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:30:41.903334Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:702:2584] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-04T17:30:41.903421Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-04-04T17:30:41.903452Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-04-04T17:30:41.903512Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-04T17:30:41.915982Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:671:2566] ServerId: [12:674:2568] } 2025-04-04T17:30:41.945303Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:738:2597] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-04-04T17:30:41.945412Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-04-04T17:30:41.945479Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-04-04T17:30:41.945543Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-04-04T17:30:41.959764Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:30:41.959939Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:30:41.975867Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected |98.3%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KafkaProtocol::FetchScenario [GOOD] >> KafkaProtocol::BalanceScenario >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount >> BasicStatistics::Simple [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrun >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0 QueryExecutionLimitBytes: 0 2025-04-04T17:29:24.538893Z node 1 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2025-04-04T17:29:24.540553Z node 1 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2025-04-04T17:29:24.541315Z node 1 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2025-04-04T17:29:24.542848Z node 1 :MEMORY_CONTROLLER INFO: Bootstrapped with config HardLimitBytes: 209715200 2025-04-04T17:29:24.552283Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-04-04T17:29:24.554387Z node 1 :TABLET_SAUSAGECACHE NOTICE: Bootstrap with config MemoryLimit: 33554432 2025-04-04T17:29:24.570371Z node 1 :MEMORY_CONTROLLER INFO: Consumer SharedCache [1:20:2067] registered 2025-04-04T17:29:24.570627Z node 1 
:TABLET_SAUSAGECACHE NOTICE: Register memory consumer 2025-04-04T17:29:24.585191Z node 1 :RESOURCE_BROKER INFO: New config diff: Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2025-04-04T17:29:24.585890Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 
} Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } ResourceLimit { Cpu: 20 Memory: 62914560 } 2025-04-04T17:29:24.587072Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true 2025-04-04T17:29:24.587862Z node 1 :MEMORY_CONTROLLER INFO: ResourceBroker configure result Success: true 2025-04-04T17:29:24.612772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:303:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:24.613471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:24.613599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:24.757203Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:438:2398] 1 registered 2025-04-04T17:29:24.772171Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2400] 0 registered 2025-04-04T17:29:24.778108Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2400] 2 registered 2025-04-04T17:29:24.778468Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2400] 4 registered 2025-04-04T17:29:24.780713Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:450:2400] 5 registered 2025-04-04T17:29:24.780952Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:455:2402] 1 registered 2025-04-04T17:29:24.784952Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:455:2402] 2 registered 2025-04-04T17:29:24.834994Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 1 registered 2025-04-04T17:29:24.864328Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 2 registered 2025-04-04T17:29:24.864892Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 3 registered 2025-04-04T17:29:24.865051Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 4 registered 2025-04-04T17:29:24.865264Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 5 registered 2025-04-04T17:29:24.865353Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 6 registered 2025-04-04T17:29:24.865531Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 7 registered 2025-04-04T17:29:24.866503Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 8 registered 2025-04-04T17:29:24.866872Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 9 registered 2025-04-04T17:29:24.867563Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 10 registered 2025-04-04T17:29:24.867899Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 11 registered 2025-04-04T17:29:24.873796Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 12 registered 2025-04-04T17:29:24.893058Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 13 registered 2025-04-04T17:29:24.893353Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 14 registered 2025-04-04T17:29:24.893727Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 15 registered 2025-04-04T17:29:24.894138Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 16 registered 2025-04-04T17:29:24.894527Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 17 registered 2025-04-04T17:29:24.894649Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 18 registered 2025-04-04T17:29:24.894792Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 19 registered 2025-04-04T17:29:24.894926Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 20 registered 2025-04-04T17:29:24.895942Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 22 registered 2025-04-04T17:29:24.896107Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 23 registered 2025-04-04T17:29:24.896875Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 24 registered 2025-04-04T17:29:24.914983Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 25 registered 
2025-04-04T17:29:24.915445Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 26 registered 2025-04-04T17:29:24.915802Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 27 registered 2025-04-04T17:29:24.919389Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 28 registered 2025-04-04T17:29:24.923557Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 29 registered 2025-04-04T17:29:24.923887Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 30 registered 2025-04-04T17:29:24.924028Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 31 registered 2025-04-04T17:29:24.924293Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 32 registered 2025-04-04T17:29:24.925807Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 33 registered 2025-04-04T17:29:24.926081Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 34 registered 2025-04-04T17:29:24.926198Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 35 registered 2025-04-04T17:29:24.926385Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 36 registered 2025-04-04T17:29:24.926479Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 37 registered 2025-04-04T17:29:24.931281Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 38 registered 2025-04-04T17:29:24.934986Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 39 registered 2025-04-04T17:29:24.935481Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 40 registered 2025-04-04T17:29:24.935959Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 41 registered 2025-04-04T17:29:24.936158Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 42 registered 2025-04-04T17:29:24.936355Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 43 registered 2025-04-04T17:29:24.939078Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 44 registered 2025-04-04T17:29:24.939425Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 45 registered 2025-04-04T17:29:24.945275Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 46 registered 2025-04-04T17:29:24.945811Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 47 registered 2025-04-04T17:29:24.946190Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 48 registered 2025-04-04T17:29:24.947656Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 49 registered 2025-04-04T17:29:24.947812Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:468:2404] 50 registered 2025-04-04T17:29:24.948063Z node 1 :MEMORY_CONTROLLER TRACE: ... 
573049Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 105 registered 2025-04-04T17:30:42.573095Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 106 registered 2025-04-04T17:30:42.573129Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 107 registered 2025-04-04T17:30:42.573169Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 108 registered 2025-04-04T17:30:42.573207Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 109 registered 2025-04-04T17:30:42.573243Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 110 registered 2025-04-04T17:30:42.573282Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 111 registered 2025-04-04T17:30:42.573329Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 113 registered 2025-04-04T17:30:42.573362Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 114 registered 2025-04-04T17:30:42.573397Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 115 registered 2025-04-04T17:30:42.573434Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 116 registered 2025-04-04T17:30:42.573477Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 117 registered 2025-04-04T17:30:42.573892Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:468:2404] 118 registered 2025-04-04T17:30:42.573999Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 17 registered 2025-04-04T17:30:42.574765Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 0 registered 2025-04-04T17:30:42.574885Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 18 registered 2025-04-04T17:30:42.574990Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 1 registered 2025-04-04T17:30:42.575154Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 19 registered 2025-04-04T17:30:42.575391Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 2 registered 2025-04-04T17:30:42.575554Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 20 registered 2025-04-04T17:30:42.575655Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 3 registered 2025-04-04T17:30:42.575754Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 21 registered 2025-04-04T17:30:42.575880Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 4 registered 2025-04-04T17:30:42.576066Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 6 registered 2025-04-04T17:30:42.576254Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 9 registered 2025-04-04T17:30:42.576323Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 10 registered 2025-04-04T17:30:42.576377Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 13 registered 2025-04-04T17:30:42.576428Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 14 registered 2025-04-04T17:30:42.576463Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2406] 16 registered 2025-04-04T17:30:42.576546Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2410] 7 registered 2025-04-04T17:30:42.576593Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2410] 1 registered 2025-04-04T17:30:42.576688Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2410] 2 registered 2025-04-04T17:30:42.576753Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2410] 3 registered 2025-04-04T17:30:42.576791Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2410] 4 registered 2025-04-04T17:30:42.576829Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2410] 5 registered 2025-04-04T17:30:42.576869Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:511:2410] 6 registered 2025-04-04T17:30:42.576907Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 
102 registered 2025-04-04T17:30:42.576957Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 103 registered 2025-04-04T17:30:42.576997Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 1 registered 2025-04-04T17:30:42.577028Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 104 registered 2025-04-04T17:30:42.577066Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 2 registered 2025-04-04T17:30:42.577106Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 3 registered 2025-04-04T17:30:42.577138Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 4 registered 2025-04-04T17:30:42.577181Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 5 registered 2025-04-04T17:30:42.577216Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 6 registered 2025-04-04T17:30:42.577252Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 7 registered 2025-04-04T17:30:42.577293Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 100 registered 2025-04-04T17:30:42.577332Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:524:2412] 101 registered 2025-04-04T17:30:42.577367Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:535:2414] 1 registered 2025-04-04T17:30:42.577411Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:535:2414] 2 registered 2025-04-04T17:30:42.577447Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:535:2414] 3 registered test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000954/r3tmp/tmpmun7mj/pdisk_1.dat 2025-04-04T17:30:42.604707Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 1 registered 2025-04-04T17:30:42.604970Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 2 registered 2025-04-04T17:30:42.605032Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 3 registered 2025-04-04T17:30:42.605186Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 4 registered 2025-04-04T17:30:42.605317Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 120 registered 2025-04-04T17:30:42.605458Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 5 registered 2025-04-04T17:30:42.605562Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 121 registered 2025-04-04T17:30:42.605737Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 6 registered 2025-04-04T17:30:42.605780Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 122 registered 2025-04-04T17:30:42.605821Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 7 registered 2025-04-04T17:30:42.605857Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 123 registered 2025-04-04T17:30:42.606003Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 8 registered 2025-04-04T17:30:42.606049Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 125 registered 2025-04-04T17:30:42.606082Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 126 registered 2025-04-04T17:30:42.606116Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 127 registered 2025-04-04T17:30:42.606147Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 128 registered 2025-04-04T17:30:42.606357Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 129 registered 2025-04-04T17:30:42.606401Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 100 registered 2025-04-04T17:30:42.606435Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 101 registered 2025-04-04T17:30:42.606470Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 131 registered 2025-04-04T17:30:42.606497Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 102 registered 2025-04-04T17:30:42.606528Z node 9 
:MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 103 registered 2025-04-04T17:30:42.606598Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:507:2408] 105 registered 2025-04-04T17:30:42.670585Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:30:42.708547Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:30:42.709022Z node 9 :MEMORY_CONTROLLER INFO: Config updated QueryExecutionLimitPercent: 15 2025-04-04T17:30:42.709577Z node 9 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-04T17:30:42.746775Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:30:42.746973Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:30:42.758458Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:30:42.969516Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.6KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-04T17:30:42.970305Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-04T17:30:42.970484Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.6KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-04T17:30:42.970526Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-04T17:30:42.970650Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-04-04T17:30:43.153994Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.9KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-04-04T17:30:43.154604Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-04-04T17:30:43.154660Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.9KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-04-04T17:30:43.154696Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-04T17:30:43.154795Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-04-04T17:30:43.314624Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-04T17:30:43.315364Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB 
Min: 40MiB Max: 100MiB 2025-04-04T17:30:43.315435Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-04T17:30:43.315497Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-04T17:30:43.315653Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB 2025-04-04T17:30:43.452143Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33.1KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-04-04T17:30:43.452716Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB Min: 40MiB Max: 100MiB 2025-04-04T17:30:43.452765Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33.1KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-04-04T17:30:43.452800Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-04-04T17:30:43.452888Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB |98.3%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 2025-04-04T17:28:09.939829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:09.940206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:09.940375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a76/r3tmp/tmpQFrJxr/pdisk_1.dat 2025-04-04T17:28:10.295912Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6694, node 1 2025-04-04T17:28:10.509967Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:10.510016Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:10.510048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:10.510496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:10.512274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:10.593407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:10.593539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:10.607322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64350 2025-04-04T17:28:11.085016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:13.810913Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:13.837608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:13.837692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:13.865676Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:13.867304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:14.075853Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.076323Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.076687Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.076815Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.076893Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.077103Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.077213Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.077369Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.077478Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.251131Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:14.251234Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:14.264800Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:14.403569Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:14.446941Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:14.447027Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:14.478408Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:14.478632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:14.478866Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:14.478925Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:14.478982Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:14.479061Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:14.479143Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:14.479212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:14.480313Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:14.507736Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:14.507826Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:14.514056Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:28:14.518545Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:28:14.519486Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:28:14.524632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:28:14.545768Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:14.545832Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:14.545940Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:28:14.561721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:14.569462Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:14.569621Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:14.735862Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:14.920476Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:15.009767Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:16.053035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.053176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.071177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:28:16.281499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2373:3104], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.281638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.282868Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2378:3108]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:16.282990Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:16.283060Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2380:3110] 2025-04-04T17:28:16.283131Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2380:3110] 2025-04-04T17:28:16.283659Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2381:2874] 2025-04-04T17:28:16.283915Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2380:3110], server id = [2:2381:2874], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:16.284040Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2381:2874], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:16.284080Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:28:16.284258Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:16.284317Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2378:3108], StatRequests.size() = 1 2025-04-04T17:28:16.303465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2385:3114], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.303548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.303861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2390:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.309272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:28:16.513524Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:16.513617Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:16.622630Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2380:3110], schemeshard count = 1 2025-04-04T17:28:17.076312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorA ... eplyToActorId[ [2:6395:4624]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:32.700399Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2025-04-04T17:30:32.700447Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6395:4624], StatRequests.size() = 1 2025-04-04T17:30:33.940619Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6430:4640]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:33.940899Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-04T17:30:33.940939Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6430:4640], StatRequests.size() = 1 2025-04-04T17:30:34.641942Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:30:35.210632Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6465:4656]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:35.210947Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-04T17:30:35.210992Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6465:4656], StatRequests.size() = 1 2025-04-04T17:30:36.011537Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:30:36.011630Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:30:36.011680Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:30:36.011735Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:30:36.754118Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6505:4673]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:36.754423Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:30:36.754467Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6505:4673], StatRequests.size() = 1 2025-04-04T17:30:37.517144Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:30:37.517349Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:30:37.517799Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:30:37.554913Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:30:37.554985Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 
211.000000s, at schemeshard: 72075186224037897 2025-04-04T17:30:37.555302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-04T17:30:37.568690Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:30:38.130137Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6538:4689]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:38.130438Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:30:38.130488Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6538:4689], StatRequests.size() = 1 2025-04-04T17:30:38.779442Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:30:38.779565Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:30:38.779643Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:30:38.779693Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:30:38.780196Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:30:38.795796Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:30:38.800433Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6563:4709], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:30:38.800586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6574:4714], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:30:38.800849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:30:38.815600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:30:38.895326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6577:4717], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:30:39.031872Z node 2 :TX_PROXY ERROR: Actor# [2:6672:4765] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:30:39.061970Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6701:4780]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:39.062511Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:30:39.062575Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6701:4780], StatRequests.size() = 1 2025-04-04T17:30:39.209792Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTQ4NWE0MWYtYThhZDlkNzItN2Y3OTY2ZWYtNDc4NDAxNWU=, TxId: 2025-04-04T17:30:39.209867Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTQ4NWE0MWYtYThhZDlkNzItN2Y3OTY2ZWYtNDc4NDAxNWU=, TxId: 2025-04-04T17:30:39.210447Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:30:39.224545Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:30:39.224617Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:30:39.709618Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6736:4800]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:39.709918Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:30:39.709967Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6736:4800], StatRequests.size() = 1 2025-04-04T17:30:40.938554Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6775:4822]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:40.938876Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:30:40.938927Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6775:4822], StatRequests.size() = 1 2025-04-04T17:30:41.567263Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:30:41.567591Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:30:41.567630Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:30:41.567672Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:30:41.567707Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:30:41.567967Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:30:41.570401Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:30:41.585482Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWVjMzlhMWEtMTUzY2VmMGEtNWMzOWYwNmUtMzYyNTBiMTQ=, TxId: 2025-04-04T17:30:41.585564Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWVjMzlhMWEtMTUzY2VmMGEtNWMzOWYwNmUtMzYyNTBiMTQ=, TxId: 2025-04-04T17:30:41.586451Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:30:41.601062Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:30:41.601126Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:30:42.114097Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6843:4862]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:42.114484Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:30:42.114539Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6843:4862], StatRequests.size() = 1 2025-04-04T17:30:43.392457Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6886:4886]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:43.392859Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-04T17:30:43.392914Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6886:4886], StatRequests.size() = 1 2025-04-04T17:30:44.044549Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:30:44.044836Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:30:44.044877Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-04T17:30:44.045002Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:30:44.045387Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:30:44.548394Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6919:4902]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:30:44.548659Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-04T17:30:44.548705Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6919:4902], StatRequests.size() = 1 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> TopicSessionTests::RestartSessionIfQueryStopped >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> KeyValueGRPCService::SimpleWriteReadOverrun [GOOD] >> KeyValueGRPCService::SimpleWriteReadRange >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: 2025-04-04T17:29:55.377222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:55.377857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:55.377921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0008e5/r3tmp/tmpFlzQBS/pdisk_1.dat 2025-04-04T17:29:55.955522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:56.018528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:56.067548Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-04-04T17:29:56.068500Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-04-04T17:29:56.070702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:56.071325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:56.083953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:56.170418Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-04-04T17:29:56.170494Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-04-04T17:29:56.172427Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:640:2548] 2025-04-04T17:29:56.291207Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-04-04T17:29:56.291348Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T17:29:56.292719Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T17:29:56.292824Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T17:29:56.293177Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T17:29:56.293403Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-04-04T17:29:56.293646Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-04-04T17:29:56.293967Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 HANDLE EvClientConnected 2025-04-04T17:29:56.296959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:56.298005Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-04-04T17:29:56.298079Z node 1 :TX_PROXY DEBUG: Actor# [1:640:2548] txid# 281474976715657 SEND to# [1:592:2517] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-04-04T17:29:56.341978Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:56.342818Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:56.343234Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:29:56.343456Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:56.356568Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:56.389983Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:56.390116Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:56.391846Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:56.391931Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:56.391997Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:56.393429Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:56.393560Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:56.393628Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:29:56.404415Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:56.463466Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:56.464936Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:29:56.465082Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:29:56.465119Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:56.465157Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:56.465186Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:56.465439Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:56.466551Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:56.468002Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:56.468113Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:56.468249Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:56.468297Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:56.468345Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:29:56.468381Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:29:56.468420Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:29:56.468490Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:56.468531Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:56.468672Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:56.468729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:56.468791Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:29:56.470354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:29:56.470407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:29:56.470508Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:56.470856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:29:56.470912Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:56.471019Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:56.471158Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:29:56.471212Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:29:56.471255Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:29:56.471291Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:29:56.471595Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:29:56.471636Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:29:56.471680Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:29:56.471724Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:29:56.471778Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:29:56.471823Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:29:56.471857Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:29:56.471885Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:29:56.471908Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:29:56.473637Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:29:56.473683Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025- ... cessing event TEvDataShard::TEvReadAck 2025-04-04T17:30:54.071341Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1091:2864] on missing iterator: { ReadId: 0 SeqNo: 1 MaxRows: 999 MaxBytes: 5242880 } 2025-04-04T17:30:54.071482Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553218, Sender [8:1091:2864], Recipient [8:1081:2860]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 2025-04-04T17:30:54.071508Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadAck 2025-04-04T17:30:54.071562Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1091:2864] on missing iterator: { ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 } 2025-04-04T17:30:54.071858Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1091:2864], Recipient [8:1081:2860]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:30:54.071896Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-04T17:30:54.071969Z node 8 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2025-04-04T17:30:54.262916Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T17:30:54.263007Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-04T17:30:54.264581Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877761, Sender [8:1110:2879], Recipient [8:1083:2861]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:30:54.264662Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:30:54.264800Z node 8 :TX_DATASHARD DEBUG: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1108:2878], serverId# [8:1110:2879], sessionId# [0:0:0] 2025-04-04T17:30:54.265026Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0xp280d7b1yssvmsjm70ax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=OTJiMjAzYWItOGY1YjlhNDEtN2VjMjZiNGUtZmNhYjYwMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T17:30:54.267221Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553215, Sender [8:1114:2880], Recipient [8:1083:2861]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-04-04T17:30:54.267277Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-04-04T17:30:54.267419Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-04-04T17:30:54.267499Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:30:54.267591Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-04T17:30:54.267677Z node 8 :TX_DATASHARD DEBUG: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2025-04-04T17:30:54.268425Z node 8 :TX_DATASHARD DEBUG: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2025-04-04T17:30:54.268850Z node 8 :TX_DATASHARD DEBUG: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2025-04-04T17:30:54.268949Z node 8 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to repeatable v1500/18446744073709551615 2025-04-04T17:30:54.269042Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-04-04T17:30:54.269150Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:30:54.269206Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-04-04T17:30:54.269248Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-04-04T17:30:54.269289Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-04-04T17:30:54.269328Z node 8 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-04-04T17:30:54.269377Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:30:54.269402Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-04-04T17:30:54.269424Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-04-04T17:30:54.269445Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-04T17:30:54.269609Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:30:54.269862Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Restart 2025-04-04T17:30:54.269895Z node 8 
:TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2025-04-04T17:30:54.269988Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:30:54.270091Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} touch new 0b, 65b lo load (65b in total), 0b requested for data (4194304b in total) 2025-04-04T17:30:54.270170Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2025-04-04T17:30:54.270279Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} requests PageCollection [72075186224037888:1:28:1:12288:172:0] 65 bytes, 1 pages: [0 4] 2025-04-04T17:30:54.270385Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, 65b, pages {1 wait, 1 load}, freshly touched 2 pages 2025-04-04T17:30:54.270631Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:28:1:12288:172:0] ok OK}, category 1 2025-04-04T17:30:54.270766Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-04-04T17:30:54.270823Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-04-04T17:30:54.270967Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-04-04T17:30:54.271212Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[8:1114:2880], 0} after executionsCount# 2 2025-04-04T17:30:54.271294Z node 8 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[8:1114:2880], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-04-04T17:30:54.271416Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:30:54.271466Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-04-04T17:30:54.271512Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-04-04T17:30:54.271575Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-04-04T17:30:54.271622Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-04-04T17:30:54.271646Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-04-04T17:30:54.271672Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-04-04T17:30:54.271717Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-04-04T17:30:54.271866Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:30:54.271978Z node 8 
:TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2025-04-04T17:30:54.272060Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-04-04T17:30:54.272274Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553217, Sender [8:1083:2861], Recipient [8:1083:2861]: NKikimr::TEvDataShard::TEvReadContinue 2025-04-04T17:30:54.272319Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2025-04-04T17:30:54.272423Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2025-04-04T17:30:54.272497Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-04-04T17:30:54.272585Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[8:1114:2880], 0}, firstUnprocessedQuery# 0 2025-04-04T17:30:54.272671Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[8:1114:2880], 0}, FirstUnprocessedQuery# 0 2025-04-04T17:30:54.272810Z node 8 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[8:1114:2880], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-04-04T17:30:54.272880Z node 8 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[8:1114:2880], 0} finished in ReadContinue 2025-04-04T17:30:54.272997Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-04-04T17:30:54.273083Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-04-04T17:30:54.274049Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1114:2880], Recipient [8:1083:2861]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-04-04T17:30:54.274094Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-04-04T17:30:54.274162Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } |98.4%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 
2025-04-04T17:28:51.457696Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-04T17:28:51.457854Z node 1 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.461650Z node 1 :QUOTER_PROXY WARN: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-04-04T17:28:51.461709Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-04T17:28:51.486566Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-04T17:28:51.486712Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.487022Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.487195Z node 2 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-04T17:28:51.487233Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-04T17:28:51.487533Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.505440Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-04T17:28:51.505547Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.505888Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "/resource" 2025-04-04T17:28:51.505922Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2025-04-04T17:28:51.505962Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2025-04-04T17:28:51.506103Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.506194Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "resource//resource" 2025-04-04T17:28:51.506258Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2025-04-04T17:28:51.506284Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2025-04-04T17:28:51.513179Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-04T17:28:51.513263Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.513468Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-04T17:28:51.514619Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.537434Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-04T17:28:51.537522Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-04T17:28:51.537570Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-04T17:28:51.537667Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-04T17:28:51.547493Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-04T17:28:51.547619Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.547768Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res0" 2025-04-04T17:28:51.547862Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.548107Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-04T17:28:51.548136Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-04T17:28:51.548177Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res0", 42) 2025-04-04T17:28:51.548239Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2025-04-04T17:28:51.548342Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res1" 2025-04-04T17:28:51.548445Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res1" 2025-04-04T17:28:51.548637Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-04T17:28:51.548665Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-04T17:28:51.548709Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res1", 43) 2025-04-04T17:28:51.548777Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-04T17:28:51.548862Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res2" 2025-04-04T17:28:51.548949Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res2" 2025-04-04T17:28:51.549129Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-04T17:28:51.549157Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-04T17:28:51.549185Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res2", 44) 2025-04-04T17:28:51.549221Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2025-04-04T17:28:51.549423Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2025-04-04T17:28:51.549452Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2025-04-04T17:28:51.549498Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res1". Connected: 1 2025-04-04T17:28:51.550203Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 21474838532 } }) 2025-04-04T17:28:51.550286Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-04-04T17:28:51.550554Z node 5 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-04-04T17:28:51.550603Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-04-04T17:28:51.550719Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Mark "res1" for offline allocation. 
Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2025-04-04T17:28:51.550765Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2025-04-04T17:28:51.550886Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.551156Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-04-04T17:28:51.551191Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-04-04T17:28:51.551242Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-04-04T17:28:51.551267Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-04-04T17:28:51.551310Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }, { "res1", Normal, {0: Front(1, 2)} }, { "res2", Normal, {0: Front(1, 2)} }]) 2025-04-04T17:28:51.561180Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-04T17:28:51.561289Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.561636Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-04T17:28:51.561851Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.562120Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-04T17:28:51.562149Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-04T17:28:51.562206Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-04T17:28:51.562288Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-04T17:28:51.570849Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-04-04T17:28:51.570986Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.571100Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-04T17:28:51.571196Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.571417Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-04-04T17:28:51.571467Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-04-04T17:28:51.571505Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-04-04T17:28:51.571562Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-04T17:28:51.572602Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res", Consumed: 0, Queue: 25}]) 2025-04-04T17:28:51.572672Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res": { Available: 20, QueueWeight: 25 } 2025-04-04T17:28:51.572709Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res". Connected: 1 2025-04-04T17:28:51.572802Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-04-04T17:28:51.572835Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-04-04T17:28:51.572975Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyCloseSession("res", 42) 2025-04-04T17:28:51.572998Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Deactivate session to "res". Connected: 1 2025-04-04T17:28:51.573059Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-04-04T17:28:51.595098Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-04-04T17:28:51.595227Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-04-04T17:28:51.595548Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-04-04T17:28:51.595693Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-04-04T17:28:51.595985Z node 8 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 H ... [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:48.801389Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:48.801438Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:48.881443Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-04T17:30:48.881495Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-04-04T17:30:48.980000Z 2025-04-04T17:30:48.881520Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-04T17:30:48.881775Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-04T17:30:48.881838Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000100577, QueueWeight: 5 } 2025-04-04T17:30:48.881880Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:48.881938Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:48.900724Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7489511778438064718:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-04T17:30:48.901136Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-04T17:30:48.901205Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:48.901257Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:48.901327Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:48.981316Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-04T17:30:48.981407Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-04T17:30:49.080000Z 2025-04-04T17:30:48.981432Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-04T17:30:48.981661Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-04T17:30:48.981732Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -3.000100577, QueueWeight: 5 } 2025-04-04T17:30:48.981786Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:48.981937Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.000483Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7489511778438064718:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-04T17:30:49.000949Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-04T17:30:49.001013Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:49.001077Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.001149Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.080647Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-04T17:30:49.080706Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-04-04T17:30:49.180000Z 2025-04-04T17:30:49.080743Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-04T17:30:49.080896Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-04T17:30:49.080940Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -2.000100577, QueueWeight: 5 } 2025-04-04T17:30:49.080989Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.081021Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.100858Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7489511778438064718:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-04T17:30:49.101336Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-04T17:30:49.101395Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:49.101472Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.101525Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.181502Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-04T17:30:49.181548Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-04T17:30:49.280000Z 2025-04-04T17:30:49.181572Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-04T17:30:49.181910Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-04T17:30:49.181950Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000100577, QueueWeight: 5 } 2025-04-04T17:30:49.181998Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.182050Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.200742Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7489511778438064718:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-04T17:30:49.201354Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-04T17:30:49.201421Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:49.201468Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.201507Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.283261Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-04T17:30:49.283313Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-04-04T17:30:49.380000Z 2025-04-04T17:30:49.283335Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-04T17:30:49.283507Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-04-04T17:30:49.283577Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001005772413, QueueWeight: 5 } 2025-04-04T17:30:49.283634Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.284264Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.300547Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7489511778438064718:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-04T17:30:49.300938Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-04T17:30:49.300994Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:49.301052Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998994228, 2)} }]) 2025-04-04T17:30:49.301108Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.380658Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0.9998994228. FreeBalance: 0.9998994228 2025-04-04T17:30:49.380718Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-04-04T17:30:49.480000Z 2025-04-04T17:30:49.380737Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-04-04T17:30:49.380808Z node 49 :QUOTER_SERVICE TRACE: Charge "Resource" for 5. Balance: 0.9998994228. FreeBalance: 0.9998994228. TicksToFullfill: 5.000502937. DurationToFullfillInUs: 500050.2937. TimeToFullfill: 2025-04-04T17:30:48.877774Z. Now: 2025-04-04T17:30:49.380588Z. LastAllocated: 2025-04-04T17:30:48.377724Z 2025-04-04T17:30:49.380994Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2025-04-04T17:30:49.381061Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000100577, QueueWeight: 0 } 2025-04-04T17:30:49.381117Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.381193Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.401077Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7489511778438064718:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-04T17:30:49.401813Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-04T17:30:49.401954Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:49.402029Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.402117Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.481530Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. 
FreeBalance: 0 2025-04-04T17:30:49.502305Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7489511778438064718:2302]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-04-04T17:30:49.502865Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-04-04T17:30:49.502916Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-04-04T17:30:49.502970Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-04-04T17:30:49.503273Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-04-04T17:30:49.503305Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-04-04T17:30:52.409843Z node 49 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[49:7489511774143096570:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:30:52.409958Z node 49 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {RESULT} ydb/core/quoter/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs >> KeyValueGRPCService::SimpleWriteReadRange [GOOD] >> KeyValueGRPCService::SimpleWriteListRange >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test_drain.py::TestHive::test_drain_tablets >> RowDispatcherTests::OneClientOneSession >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection >> RowDispatcherTests::OneClientOneSession [GOOD] >> RowDispatcherTests::TwoClientOneSession |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> RowDispatcherTests::TwoClientOneSession [GOOD] >> 
RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> RowDispatcherTests::TwoClientTwoConnection >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession >> RowDispatcherTests::ProcessNoSession [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> RowDispatcherTests::IgnoreWrongPartitionId |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> TSentinelTests::PDiskRackGuardFullRack [GOOD] >> TSentinelTests::BSControllerUnresponsive |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> 
TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding >> KeyValueGRPCService::SimpleWriteListRange [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatus |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-04-04T17:29:00.612154Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:30:2057] 2025-04-04T17:29:00.612940Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [1:25:2054] 2025-04-04T17:29:00.613009Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:25:2054] 2025-04-04T17:29:00.613165Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:26:2054] 2025-04-04T17:29:00.613205Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:26:2054] 2025-04-04T17:29:00.613244Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:27:2054] 2025-04-04T17:29:00.613262Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:27:2054] 2025-04-04T17:29:00.613347Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-04-04T17:29:00.613460Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-04-04T17:29:00.613601Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-04-04T17:29:00.613652Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-04T17:29:00.623565Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-04-04T17:29:00.623703Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-04T17:29:00.623831Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:31:2055] 2025-04-04T17:29:00.623868Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:31:2055] 2025-04-04T17:29:00.623902Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:31:2055] 2025-04-04T17:29:00.623954Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:32:2056] 2025-04-04T17:29:00.623986Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:31:2055] to new [2:32:2056] 2025-04-04T17:29:00.624018Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:32:2056] 2025-04-04T17:29:00.624085Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-04-04T17:29:00.624141Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-04-04T17:29:00.624266Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-04-04T17:29:00.624313Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-04-04T17:29:00.696707Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [5:30:2057] 2025-04-04T17:29:00.697006Z 
node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:25:2054] 2025-04-04T17:29:00.697051Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:25:2054] 2025-04-04T17:29:00.697127Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:26:2054] 2025-04-04T17:29:00.697143Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:26:2054] 2025-04-04T17:29:00.697173Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:27:2054] 2025-04-04T17:29:00.697184Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:27:2054] 2025-04-04T17:29:00.697238Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-04-04T17:29:00.697337Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2025-04-04T17:29:00.697533Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2025-04-04T17:29:00.697603Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-04-04T17:29:00.830528Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-04-04T17:29:00.830801Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-04-04T17:29:00.830882Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Successfully bootstrapped, local coordinator id [9:7:2054] 2025-04-04T17:29:00.830957Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-04T17:29:00.831006Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.831052Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.836917Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-04T17:29:00.836991Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.837042Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.837129Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-04-04T17:29:00.837154Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.837185Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.903096Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.904074Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.904173Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.905417Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.913110Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.913234Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.913258Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.923470Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.923633Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.923668Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.934700Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.934777Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.935906Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.945435Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.945637Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.945678Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.956775Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.956857Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.958495Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.958688Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.958747Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.965367Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.965508Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.965550Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.967206Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-04-04T17:29:00.967343Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.967378Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.973222Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.973306Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": 2025-04-04T17:29:00.973410Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Coordination node successfully created 2025-04-04T17:29:00.973447Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Start session 2025-04-04T17:29:00.974484Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.974559Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateNoChanges) } 2025-04-04T17:29:00.974603Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-04-04T17:29:00.974650Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": { ... ation node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-04T17:31:03.783939Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:18:2059] 2025-04-04T17:31:03.784226Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:14:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-04T17:31:03.784465Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-04T17:31:03.784710Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-04-04T17:31:03.785123Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:15:2057], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-04-04T17:31:03.785554Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:14:2056] query id QueryId 2025-04-04T17:31:03.785648Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 0 query id QueryId 2025-04-04T17:31:03.785758Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:14:2056] query id QueryId 2025-04-04T17:31:03.785858Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:14:2056] query id QueryId 2025-04-04T17:31:03.785937Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 1 query id QueryId 2025-04-04T17:31:03.786032Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:14:2056] query id QueryId 2025-04-04T17:31:03.786129Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:15:2057] query id QueryId 2025-04-04T17:31:03.786236Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 0 query id QueryId 2025-04-04T17:31:03.786359Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:15:2057] query id QueryId 2025-04-04T17:31:03.786451Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:15:2057] query id QueryId 2025-04-04T17:31:03.786538Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 1 query id QueryId 2025-04-04T17:31:03.786624Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:15:2057] query id QueryId 2025-04-04T17:31:03.786723Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:14:2056], reason ActorUnknown 2025-04-04T17:31:03.786785Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:14:2056] query id QueryId 2025-04-04T17:31:03.787026Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:15:2057], reason ActorUnknown 
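The OVERLOADED storm earlier in this block shows the leader-election bootstrap re-issuing "Call create coordination node" every time the Kesus path is still in EPathStateCreate, then treating the eventual "path exist, request accepts it" reply as success. A minimal self-contained sketch of that retry policy follows; the EStatus enum and FakeCreateOnce are illustrative stand-ins, not the actual YDB SDK API:

```cpp
// Retry sketch for the storm above: while the Kesus path is still in
// EPathStateCreate the create call comes back OVERLOADED and the actor
// simply re-issues it; "path exist, request accepts it" counts as success.
// EStatus and FakeCreateOnce are illustrative stand-ins for the SDK.
#include <chrono>
#include <string>
#include <thread>

enum class EStatus { Success, AlreadyExists, Overloaded };

// Simulates a path that stays in EPathStateCreate for the first few calls.
EStatus FakeCreateOnce(const std::string& /*path*/) {
    static int calls = 0;
    return ++calls < 10 ? EStatus::Overloaded : EStatus::Success;
}

bool CreateCoordinationNodeWithRetry(const std::string& path) {
    for (;;) {
        switch (FakeCreateOnce(path)) {
        case EStatus::Success:
        case EStatus::AlreadyExists:   // "path exist, request accepts it"
            return true;
        case EStatus::Overloaded:      // path is being created, try again
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            break;
        }
    }
}

int main() {
    return CreateCoordinationNodeWithRetry("local/row_dispatcher//tenant") ? 0 : 1;
}
```

The real TLeaderElection actor drives retries from its event loop rather than sleeping; the fixed delay here only keeps the standalone sketch from spinning.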
2025-04-04T17:31:03.787097Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:15:2057] query id QueryId 2025-04-04T17:31:03.787186Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:22:2063] 2025-04-04T17:31:03.787260Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:23:2064] 2025-04-04T17:31:03.953897Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [40:17:2058], tenant Tenant 2025-04-04T17:31:03.963356Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:18:2059] 2025-04-04T17:31:03.963443Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:19:2060] Successfully bootstrapped, local coordinator id [40:18:2059] 2025-04-04T17:31:03.963547Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-04T17:31:03.963582Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-04T17:31:03.963615Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-04T17:31:03.963851Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:18:2059] 2025-04-04T17:31:03.964037Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-04T17:31:03.964269Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-04T17:31:03.964738Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:15:2057], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-04-04T17:31:03.964917Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-04-04T17:31:03.965254Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:22:2063] to [40:14:2056] query id QueryId 2025-04-04T17:31:03.965349Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:14:2056] part id 0 query id QueryId 2025-04-04T17:31:03.965446Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:22:2063] to [40:14:2056] query id QueryId 2025-04-04T17:31:03.965527Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:23:2064] to [40:15:2057] query id QueryId 2025-04-04T17:31:03.965620Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:15:2057] part id 0 query id QueryId 2025-04-04T17:31:03.965706Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:23:2064] to [40:15:2057] query id QueryId 2025-04-04T17:31:03.965837Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:14:2056] topic topic query id QueryId 2025-04-04T17:31:03.965894Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:14:2056] query id QueryId 2025-04-04T17:31:03.965979Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:22:2063] 2025-04-04T17:31:03.966172Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:15:2057] topic topic query id QueryId 2025-04-04T17:31:03.975972Z node 40 
:FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:15:2057] query id QueryId 2025-04-04T17:31:03.982566Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:23:2064] 2025-04-04T17:31:04.287461Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [42:17:2058], tenant Tenant 2025-04-04T17:31:04.348404Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [42:18:2059] 2025-04-04T17:31:04.348498Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:19:2060] Successfully bootstrapped, local coordinator id [42:18:2059] 2025-04-04T17:31:04.348575Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-04T17:31:04.348609Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-04T17:31:04.348640Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-04T17:31:04.348904Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:18:2059] 2025-04-04T17:31:04.349105Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:16:2053], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-04-04T17:31:04.351019Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-04T17:31:04.361089Z node 42 :FQ_ROW_DISPATCHER ERROR: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
<main>: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: UNKNOWN: Temporary failure in name resolution } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-04-04T17:31:04.361380Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-04-04T17:31:04.362499Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-04-04T17:31:04.364935Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-04T17:31:04.365923Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-04-04T17:31:04.366068Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-04T17:31:04.366401Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 41 2025-04-04T17:31:04.366607Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-04T17:31:04.375317Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-04-04T17:31:04.375467Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-04-04T17:31:04.375784Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 42 2025-04-04T17:31:04.375857Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:16:2053] query id QueryId 2025-04-04T17:31:04.375961Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:22:2063] 2025-04-04T17:31:04.551236Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:17:2058], tenant Tenant 2025-04-04T17:31:04.632214Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:18:2059] 2025-04-04T17:31:04.632305Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:19:2060] Successfully bootstrapped, local coordinator id [44:18:2059] 2025-04-04T17:31:04.632382Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-04-04T17:31:04.632414Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-04T17:31:04.632442Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-04-04T17:31:04.633770Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:18:2059] 2025-04-04T17:31:04.634005Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-04-04T17:31:04.634228Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-04-04T17:31:04.634682Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:22:2063] to [44:14:2056] query id QueryId 2025-04-04T17:31:04.634793Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:14:2056] topic topic query id QueryId 2025-04-04T17:31:04.634847Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:14:2056] query id QueryId 2025-04-04T17:31:04.634952Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to 
[44:22:2063] |98.4%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries |98.5%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> KafkaProtocol::BalanceScenario [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: 2025-04-04T17:29:20.323177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:696:2412], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:20.323931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:20.324135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:20.325512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:693:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:20.325657Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:20.325829Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000841/r3tmp/tmpCnknqn/pdisk_1.dat 2025-04-04T17:29:20.803052Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for the first mediator step 2025-04-04T17:29:21.069091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:21.069668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:21.074297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:21.074401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:21.089046Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:29:21.089609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:21.090026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... found first step to be 500 2025-04-04T17:29:21.338799Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 500 ... waiting for the next mediator step ... found second step to be 1000 ... read step subscribe result: [500, 1000] ... read step subscribe update: 2000 2025-04-04T17:29:22.446425Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 2000 ... read step subscribe result: [2000, 2000] ... read step subscribe update: 2500 ... read step subscribe update: 2500 ... read step subscribe update: 3000 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... 
read step subscribe result: [2000, 6000] 2025-04-04T17:29:25.180358Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2025-04-04T17:29:25.180467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2025-04-04T17:29:25.180613Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-04-04T17:29:25.180710Z node 1 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 2 2025-04-04T17:29:25.180762Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-04-04T17:29:25.180795Z node 1 :PIPE_SERVER ERROR: [72057594037968897] NodeDisconnected NodeId# 2 2025-04-04T17:29:25.180853Z node 1 :PIPE_SERVER ERROR: [72057594037936129] NodeDisconnected NodeId# 2 2025-04-04T17:29:25.181039Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:107:2089] ServerId# [1:1071:2629] TabletId# 72057594037932033 PipeClientId# [2:107:2089] 2025-04-04T17:29:25.181299Z node 2 :TX_PROXY WARN: actor# [2:236:2128] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-04-04T17:29:25.181877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2025-04-04T17:29:25.189101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:25.213708Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:29:25.215093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 7000 ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 2025-04-04T17:29:35.219861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:693:2412], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:35.220261Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:35.220633Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:35.221491Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:690:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:35.221989Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:35.222041Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000841/r3tmp/tmpo2ZqsT/pdisk_1.dat 2025-04-04T17:29:35.539029Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:35.691859Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2025-04-04T17:29:35.774834Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:35.774980Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:35.779606Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:35.779725Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:35.794488Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T17:29:35.795194Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:35.795695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:36.395155Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1143:2376] at coordinator 72057594046316545 with seqNo 234 and cookie 345 2025-04-04T17:29:37.126675Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 124 and cookie 245 2025-04-04T17:29:37.138813Z node 3 :TX_COORDINATOR DEBUG: Ignored TEvSubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 123 existing seqNo 124 2025-04-04T17:29:37.767167Z node 3 :TX_COORDINATOR DEBUG: Processing TEvUnsubscribeLastStep from [4:1142:2375] at coordinator 72057594046316545 with seqNo 124 2025-04-04T17:29:43.806437Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:325:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:43.806560Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:29:43.806754Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000841/r3tmp/tmpqJnu0f/pdisk_1.dat 2025-04-04T17:29:44.091538Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:44.133591Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:44.133709Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:44.145226Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected Rebooting coordinator to restore config 2025-04-04T17:29:44.367714Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-04T17:29:44.368509Z node 5 :TX_COORDINATOR INFO: Coordinator# 72057594046316545 restoring static processing params 2025-04-04T17:29:44.368892Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-04T17:29:44.369023Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 version# 0 TTxConfigure Complete Rebooting coordinator a second time 2025-04-04T17:29:44.658808Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-04T17:29:44.660096Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-04T17:29:48.680177Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:48.680559Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:48.680691Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000841/r3tmp/tmp9A8Hw6/pdisk_1.dat 2025-04-04T17:29:48.995681Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:49.213963Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:49.214095Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:49.227850Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected Sending CreateDatabase request 2025-04-04T17:29:50.020734Z node 6 :CMS_TENANT ... d ... coordinator 72057594046316545 gen 2 is planning step 1000 2025-04-04T17:31:13.184686Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 50 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 50}}} marker# M1 2025-04-04T17:31:13.184812Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:570:2496] bucket.ActiveActor step# 1000 2025-04-04T17:31:13.184899Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1000 2025-04-04T17:31:13.184980Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} 2025-04-04T17:31:13.185082Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} ... waiting for blocked put responses 2025-04-04T17:31:13.197157Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2025-04-04T17:31:13.197279Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 step# 1050 Status# 16 SEND to# [20:594:2518] Proxy marker# C1 ... waiting for blocked put responses ... coordinator 72057594046316545 gen 2 is planning step 1050 2025-04-04T17:31:13.209639Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000000 has been planned 2025-04-04T17:31:13.209759Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:12:1:24576:168:0] response ... waiting for planning for the required step ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 1100 ... starting a new coordinator instance ... 
waiting for migrated state 2025-04-04T17:31:13.266694Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-04T17:31:13.267219Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-04-04T17:31:13.275528Z node 20 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-04-04T17:31:13.276043Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted ... blocking state response from [20:538:2399] to [20:689:2562] LastSentStep: 1000 LastAcquiredStep: 0 LastConfirmedStep: 0 ... unblocking put responses and requests 2025-04-04T17:31:13.276812Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 stepId# 1050 Status# 17 SEND EvProposeTransactionStatus to# [20:594:2518] Proxy 2025-04-04T17:31:13.278657Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:550:2484] disconnnected 2025-04-04T17:31:13.278771Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:550:2484] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2025-04-04T17:31:13.302490Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:696:2572] connected 2025-04-04T17:31:13.302727Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2025-04-04T17:31:13.302805Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:693:2570] Cookie# 1 CompleteStep# 1000 LatestKnownStep# 1000 SubjectiveTime# 952 Coordinator# 72057594046316545 2025-04-04T17:31:13.303004Z node 20 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-04-04T17:31:13.303093Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1050, txid# 10000000 marker# C2 2025-04-04T17:31:13.303253Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2025-04-04T17:31:13.303326Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 step# 1100 Status# 16 SEND to# [20:594:2518] Proxy marker# C1 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 1050 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-04T17:31:13.310595Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1050 2025-04-04T17:31:13.310698Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1050] transactions [1] 2025-04-04T17:31:13.310887Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:569:2495] ExecQueue {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2570]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2025-04-04T17:31:13.311101Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2570]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2025-04-04T17:31:13.311197Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND Ev to# [20:570:2496] step# 1050 forTablet# 72057594047365120 txid# 10000000 marker# M3 2025-04-04T17:31:13.311293Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:570:2496] bucket.ActiveActor step# 1050 2025-04-04T17:31:13.311376Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1050 2025-04-04T17:31:13.311552Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1050 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2570]}}} marker# M4 2025-04-04T17:31:13.311752Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-04T17:31:13.311922Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-04-04T17:31:13.312593Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:700:2575] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:31:13.312714Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1050 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 1050 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... waiting for planned another persistent tx ... 
coordinator 72057594046316545 gen 3 is planning step 1100 2025-04-04T17:31:13.325248Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000011 has been planned 2025-04-04T17:31:13.325424Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2025-04-04T17:31:13.326500Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1100, txid# 10000011 marker# C2 2025-04-04T17:31:13.326616Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 stepId# 1100 Status# 17 SEND EvProposeTransactionStatus to# [20:594:2518] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 1100 PrevStep: 1050 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-04T17:31:13.327038Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1100 2025-04-04T17:31:13.327115Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1100] transactions [1] 2025-04-04T17:31:13.327323Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:569:2495] ExecQueue {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2570]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2025-04-04T17:31:13.327531Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2570]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2025-04-04T17:31:13.327643Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND Ev to# [20:570:2496] step# 1100 forTablet# 72057594047365120 txid# 10000011 marker# M3 2025-04-04T17:31:13.327742Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:570:2496] bucket.ActiveActor step# 1100 2025-04-04T17:31:13.327815Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1100 2025-04-04T17:31:13.327964Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1100 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2570]}}} marker# M4 2025-04-04T17:31:13.328072Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1100 MediatorId# 72057594046382081 TabletID 72057594047365120} 2025-04-04T17:31:13.328168Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} 2025-04-04T17:31:13.328364Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 1100 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 1150 ... 
observed step: Step: 1150 PrevStep: 1100 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-04-04T17:31:13.339961Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1100 To# 1150Steps: {{TCoordinatorStep step# 1150 PrevStep# 1100}}} marker# M1 2025-04-04T17:31:13.340037Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:570:2496] bucket.ActiveActor step# 1150 2025-04-04T17:31:13.340103Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:569:2495] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:571:2497] bucket.ActiveActor step# 1150 2025-04-04T17:31:13.340175Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:570:2496] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} 2025-04-04T17:31:13.340227Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:571:2497] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} |98.5%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> KeyValueGRPCService::SimpleGetStorageChannelStatus [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |98.5%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] |98.5%| [TM] {RESULT} ydb/tests/fq/common/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> KafkaProtocol::OffsetCommitAndFetchScenario [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> BasicStatistics::SimpleGlobalIndex [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: 2025-04-04T17:28:40.979577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:40.979873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:40.979988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0006b0/r3tmp/tmpGV0Lbc/pdisk_1.dat 2025-04-04T17:28:41.329566Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4688, node 1 2025-04-04T17:28:41.561710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:41.561767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:41.561795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:41.562228Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:41.564545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:41.651493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:41.651622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:41.667821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22024 2025-04-04T17:28:42.220717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:45.452695Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:45.485803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:45.485914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:45.513642Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:45.515308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:45.739036Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.739616Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.740097Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.740224Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.740292Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.740491Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.740560Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.740648Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.740702Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.922796Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:45.922928Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:45.935970Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:46.070066Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:46.116713Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:46.116805Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:46.150034Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:46.150298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:46.150549Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:46.150649Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:46.150721Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:46.150790Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:46.150848Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:46.150912Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:46.152223Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:46.183031Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:46.183164Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:46.188658Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:28:46.193488Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:28:46.193702Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:28:46.208958Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:28:46.232409Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:46.232501Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:46.232597Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:28:46.244281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:46.251572Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:46.251743Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:46.474132Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:46.638285Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:46.720558Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:47.773679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:47.773810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.120194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3114], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.120348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.121848Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2450:3118]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:48.122099Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:48.122180Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2452:3120] 2025-04-04T17:28:48.122272Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2452:3120] 2025-04-04T17:28:48.122875Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2453:2926] 2025-04-04T17:28:48.123233Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2452:3120], server id = [2:2453:2926], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:48.123490Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2453:2926], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:48.123554Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:28:48.123753Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:48.123819Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2450:3118], StatRequests.size() = 1 2025-04-04T17:28:48.131584Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2486:3129]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:48.131762Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:28:48.131801Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2486:3129], StatRequests.size() = 1 2025-04-04T17:28:48.132037Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2488:3131]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:48.132198Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:28:48.132229Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2488:3131], StatRequests.size() = 1 2025-04-04T17:28:48.153074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2493:3136], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.153186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.153837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2498:3141], DatabaseId ... 0.000000s, at schemeshard: 72057594046644480 2025-04-04T17:31:12.773046Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6754:4771]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:12.773327Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:31:12.773373Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6754:4771], StatRequests.size() = 1 2025-04-04T17:31:13.494457Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:31:13.494625Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:31:13.494985Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:31:13.538722Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-04-04T17:31:13.538819Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 221.000000s, at schemeshard: 72075186224037897 2025-04-04T17:31:13.539214Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-04-04T17:31:13.561398Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:31:14.105392Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6787:4787]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:14.105731Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:31:14.105780Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6787:4787], StatRequests.size() = 1 2025-04-04T17:31:14.769749Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:14.769846Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:14.769911Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:31:14.769968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:31:14.770370Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:31:14.791685Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:31:14.797149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6810:4806], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:14.797296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6821:4811], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:14.797647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:14.818320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:31:14.915147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6824:4814], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:31:15.100575Z node 2 :TX_PROXY ERROR: Actor# [2:6922:4862] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:31:15.133862Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6951:4877]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:15.134175Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:31:15.134233Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6951:4877], StatRequests.size() = 1 2025-04-04T17:31:15.292303Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWJhZDAxZmUtYzRmZTYzOWMtOGYwMjk1YmQtZDg2NzhmNjE=, TxId: 2025-04-04T17:31:15.292393Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWJhZDAxZmUtYzRmZTYzOWMtOGYwMjk1YmQtZDg2NzhmNjE=, TxId: 2025-04-04T17:31:15.293008Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:31:15.309033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:31:15.309120Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:31:15.758006Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6983:4897]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:15.758318Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:31:15.758364Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6983:4897], StatRequests.size() = 1 2025-04-04T17:31:16.979764Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7022:4919]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:16.980060Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:31:16.980107Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7022:4919], StatRequests.size() = 1 2025-04-04T17:31:17.638503Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:31:17.649604Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:17.649696Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:17.649749Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 6] is data table. 2025-04-04T17:31:17.649786Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 6] 2025-04-04T17:31:17.650131Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:31:17.653225Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:31:17.675089Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTdkMDNiMjctNGYwOTg4NGMtMmFlM2ZlNTItOTliMTcwYmY=, TxId: 2025-04-04T17:31:17.675167Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTdkMDNiMjctNGYwOTg4NGMtMmFlM2ZlNTItOTliMTcwYmY=, TxId: 2025-04-04T17:31:17.676009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:31:17.696435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 6] 2025-04-04T17:31:17.696508Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:31:18.243046Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7090:4959]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:18.243392Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:31:18.243437Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7090:4959], StatRequests.size() = 1 2025-04-04T17:31:19.612254Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7133:4983]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:19.612526Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-04T17:31:19.612566Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7133:4983], StatRequests.size() = 1 2025-04-04T17:31:20.379195Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:31:20.379724Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:31:20.380256Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:31:20.391784Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:20.391857Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:20.391902Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:31:20.391968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:31:20.392514Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:31:20.397160Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:31:20.417439Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-04-04T17:31:18.000000Z 2025-04-04T17:31:20.417584Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWI2YjVhZmEtMTYzNjI4YWUtMzI0ODQ4NzYtZWI1YTMyZDc=, TxId: 2025-04-04T17:31:20.417631Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWI2YjVhZmEtMTYzNjI4YWUtMzI0ODQ4NzYtZWI1YTMyZDc=, TxId: 2025-04-04T17:31:20.421071Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:31:20.443868Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:31:20.443934Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:31:21.033144Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7198:5020]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:21.033412Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-04T17:31:21.033456Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7198:5020], StatRequests.size() = 1 >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> BasicStatistics::TwoNodes [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> test_drain.py::TestHive::test_drain_on_stop >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> KeyValueGRPCService::SimpleListPartitions [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2025-04-04T17:28:42.686007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:325:2217], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:42.686382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T17:28:42.686510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0009c2/r3tmp/tmpG92D4A/pdisk_1.dat 2025-04-04T17:28:43.099628Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10137, node 1 2025-04-04T17:28:43.341508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:43.341566Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:43.341599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:43.342137Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:43.349575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:43.438552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:43.438679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:43.453651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3258 2025-04-04T17:28:43.986964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:49.789931Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:49.798153Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-04T17:28:49.858320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:49.858460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:49.858997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:49.859053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:49.898726Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:49.898988Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:28:49.902333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:49.902690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:50.146090Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.146792Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute 
CreateTablet Postponed 2025-04-04T17:28:50.147354Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.147537Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.147782Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.147900Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.148039Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.148143Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.148240Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.316502Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:50.316598Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:50.329806Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:50.342982Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:50.343082Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:50.356622Z node 2 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:28:50.359028Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:50.477451Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:50.523321Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:50.523422Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:50.555382Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:50.555642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:50.555812Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:50.555862Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:50.555911Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:50.555956Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:50.556003Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:50.556064Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:50.556436Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:50.594637Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:50.594740Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2285:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:50.601226Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2295:2609] 2025-04-04T17:28:50.608767Z 
node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2335:2625] 2025-04-04T17:28:50.610217Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2335:2625], schemeshard id = 72075186224037897 2025-04-04T17:28:50.617272Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:28:50.666674Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:50.666742Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:50.666821Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:28:50.678645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:50.691946Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:50.692097Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:50.780634Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:51.074996Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:51.166888Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:52.017737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2683:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:52.017911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:52.035482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:28:52.323506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2833:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:52.323881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:52.325279Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2838:3123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:52.325497Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:52.325620Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2840:3125] 2025-04-04T17:28:52.325713Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2840:3125] 2025-04-04T17:28:52.326484Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2841:2812] 2025-04-04T17:28:52.326738Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2840:3125], server id = [2:2841:2812], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:52.326974Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2841:2812], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:52.327038Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:28:52.327305Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:52.327385Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2838:3123], StatRequests.size() = 1 2025-04-04T17:28:52.349762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2845:3129], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:52.349968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool ... ), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:31:15.006495Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:31:15.006535Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:31:15.006579Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:31:15.831137Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7480:4582]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:15.831438Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:31:15.831482Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7480:4582], StatRequests.size() = 1 2025-04-04T17:31:16.642869Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:31:16.643018Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:31:16.643300Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:31:16.691036Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:31:16.691138Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 200.000000s, at schemeshard: 72075186224037897 2025-04-04T17:31:16.691464Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-04-04T17:31:16.723746Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:31:17.390806Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7519:4599]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:17.391066Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:31:17.391106Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7519:4599], StatRequests.size() = 1 2025-04-04T17:31:18.211113Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:18.211205Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:18.211261Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:31:18.211309Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:31:18.211856Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:31:18.249497Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:31:18.263742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7546:4618], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:18.263901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7556:4623], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:18.264095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:18.297393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:31:18.415229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7560:4626], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:31:18.648637Z node 2 :TX_PROXY ERROR: Actor# [2:7660:4675] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:31:18.678845Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7690:4691]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:18.679225Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:31:18.679291Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7690:4691], StatRequests.size() = 1 2025-04-04T17:31:18.827050Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjljYjhlMWMtMTEwY2ZjZmMtNjBkY2QyMDYtZTkxYjcxODE=, TxId: 2025-04-04T17:31:18.827144Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjljYjhlMWMtMTEwY2ZjZmMtNjBkY2QyMDYtZTkxYjcxODE=, TxId: 2025-04-04T17:31:18.828174Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:31:18.848608Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:31:18.848675Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:31:19.362225Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7726:4702]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:19.362619Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:31:19.362668Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7726:4702], StatRequests.size() = 1 2025-04-04T17:31:20.742371Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7769:4716]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:20.742663Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:31:20.742697Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7769:4716], StatRequests.size() = 1 2025-04-04T17:31:21.485630Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:31:21.496927Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:21.496987Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:21.497026Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:31:21.497060Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:31:21.497447Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-04-04T17:31:21.501635Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:31:21.523496Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzFlZWJlZjMtNGFjYzY0MWMtMmFjNDEzZGUtNWRiMDY5ODY=, TxId: 2025-04-04T17:31:21.523579Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzFlZWJlZjMtNGFjYzY0MWMtMmFjNDEzZGUtNWRiMDY5ODY=, TxId: 2025-04-04T17:31:21.524270Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:31:21.539916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:31:21.539975Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:31:22.222568Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7841:4748]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:22.222853Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:31:22.222911Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7841:4748], StatRequests.size() = 1 2025-04-04T17:31:23.730889Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7890:4764]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:23.731232Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-04-04T17:31:23.731281Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7890:4764], StatRequests.size() = 1 2025-04-04T17:31:24.497977Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:31:24.498431Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:31:24.498754Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:31:24.510092Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:24.510158Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-04T17:31:25.112507Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7927:4780]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:25.112786Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-04-04T17:31:25.112829Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7927:4780], StatRequests.size() = 1 2025-04-04T17:31:25.113524Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7929:3039]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:25.117311Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:31:25.117424Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7939:3043] 2025-04-04T17:31:25.117492Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7939:3043] 2025-04-04T17:31:25.123578Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7947:4782] 2025-04-04T17:31:25.124076Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7939:3043], server id = [2:7947:4782], tablet id = 72075186224037894, status = OK 2025-04-04T17:31:25.125052Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7947:4782], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:31:25.125122Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-04-04T17:31:25.125482Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-04T17:31:25.125601Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7929:3039], StatRequests.size() = 1 >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection >> BasicStatistics::Serverless [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth >> TSentinelTests::BSControllerUnresponsive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2025-04-04T17:29:36.961870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511472070215275:2205];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:36.962106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000970/r3tmp/tmpZXlUPr/pdisk_1.dat 2025-04-04T17:29:37.358178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:37.358959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:37.362970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:37.401562Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 26898, node 1 2025-04-04T17:29:37.415158Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-04T17:29:37.415638Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-04T17:29:37.416557Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-04T17:29:37.416842Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-04T17:29:37.416875Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-04T17:29:37.416950Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-04-04T17:29:37.417105Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-04-04T17:29:37.418335Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-04-04T17:29:37.418375Z node 1 :GRPC_SERVER INFO: Updated app config 2025-04-04T17:29:37.419023Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-04-04T17:29:37.427333Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:29:37.427368Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T17:29:37.437294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T17:29:37.438603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T17:29:37.440913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-04-04T17:29:37.442920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-04-04T17:29:37.451987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:29:37.464050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-04-04T17:29:37.464099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-04-04T17:29:37.464156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T17:29:37.464183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-04-04T17:29:37.464388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T17:29:37.464589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-04-04T17:29:37.472625Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:29:37.472703Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:29:37.472717Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-04-04T17:29:37.472738Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-04-04T17:29:37.480078Z node 1 :GRPC_SERVER DEBUG: [0x51a000028280] created request Name# BlobStorageConfig 2025-04-04T17:29:37.481258Z node 1 :GRPC_SERVER DEBUG: [0x51a000028880] created request Name# HiveCreateTablet 2025-04-04T17:29:37.481757Z node 1 :GRPC_SERVER DEBUG: [0x51a000028e80] created request Name# LocalEnumerateTablets 2025-04-04T17:29:37.482904Z node 1 :GRPC_SERVER DEBUG: [0x51b00001b280] created request Name# KeyValue 2025-04-04T17:29:37.483409Z node 1 :GRPC_SERVER DEBUG: [0x51a000029480] created request Name# TabletStateRequest 2025-04-04T17:29:37.484145Z node 1 :GRPC_SERVER DEBUG: [0x51a000029a80] 
created request Name# LocalMKQL 2025-04-04T17:29:37.484542Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a080] created request Name# LocalSchemeTx 2025-04-04T17:29:37.484852Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# TabletKillRequest 2025-04-04T17:29:37.485198Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# SchemeOperationStatus 2025-04-04T17:29:37.485489Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# ChooseProxy 2025-04-04T17:29:37.485761Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# ResolveNode 2025-04-04T17:29:37.486089Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# FillNode 2025-04-04T17:29:37.486382Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# DrainNode 2025-04-04T17:29:37.487398Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# InterconnectDebug 2025-04-04T17:29:37.487739Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# TestShardControl 2025-04-04T17:29:37.488793Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# RegisterNode 2025-04-04T17:29:37.489164Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# CmsRequest 2025-04-04T17:29:37.490361Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# ConsoleRequest 2025-04-04T17:29:37.490713Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# SchemeInitRoot 2025-04-04T17:29:37.491684Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# PersQueueRequest 2025-04-04T17:29:37.492137Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0080] created request Name# SchemeOperation 2025-04-04T17:29:37.492679Z node 1 :GRPC_SERVER DEBUG: [0x51a0000c0680] created request Name# SchemeDescribe 2025-04-04T17:29:37.554950Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:37.554981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:37.554989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:37.555160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:29:38.100772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2-pool" Kind: "hdd2" } StoragePools { Name: "hdd-pool" Kind: "hdd" } StoragePools { Name: "hdd1-pool" Kind: "hdd1" } StoragePools { Name: "ssd-pool" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-04-04T17:29:38.100999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:38.103044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T17:29:38.104092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T17:29:38.104152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:38.106358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T17:29:38.107544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T17:29:38.107761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T17:29:38.108538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T17:29:38.108565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-04-04T17:29:38.108577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-04-04T17:29:38.110099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:29:38.110151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-04-04T17:29:38.110170Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T17:29:38.111928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:38.111981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T17:29:38.111997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-04-04T17:29:38.113652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:38.113686Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:38.113704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-04-04T17:29:38.113748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-04-04T17:29:38.118651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: ... 
operation is done id#281474976715662:0 progress is 1/1 2025-04-04T17:31:20.821868Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-04T17:31:20.821903Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: false 2025-04-04T17:31:20.821935Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-04-04T17:31:20.821963Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-04-04T17:31:20.821981Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-04-04T17:31:20.822147Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-04-04T17:31:20.822186Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715662, publications: 2, subscribers: 1 2025-04-04T17:31:20.822207Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 2], 7 2025-04-04T17:31:20.822222Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 3], 18446744073709551615 2025-04-04T17:31:20.822750Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-04T17:31:20.822855Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-04T17:31:20.822875Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-04T17:31:20.822905Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-04-04T17:31:20.822936Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-04-04T17:31:20.823293Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-04T17:31:20.823374Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-04-04T17:31:20.823388Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-04-04T17:31:20.823407Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-04-04T17:31:20.823434Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-04-04T17:31:20.823502Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 1 2025-04-04T17:31:20.823533Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [33:7489511916721337653:2345] 2025-04-04T17:31:20.831023Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:31:20.831073Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:31:20.831091Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-04-04T17:31:20.831271Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-04-04T17:31:20.831333Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-04-04T17:31:20.836101Z node 33 :GRPC_SERVER DEBUG: Got grpc request# ListDirectoryRequest, traceId# 01jr0xpwc3e9b4vj94w97hq6nc, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:39044, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-04-04T17:31:20.838261Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-04T17:31:20.838431Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037889 not found 2025-04-04T17:31:20.838488Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037890 not found 2025-04-04T17:31:20.838492Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-04-04T17:31:20.838513Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037888 not found 2025-04-04T17:31:20.838938Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-04-04T17:31:20.839266Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037890 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-04T17:31:20.839730Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-04-04T17:31:20.841298Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-04-04T17:31:20.841600Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-04-04T17:31:20.841777Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-04-04T17:31:20.841938Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-04-04T17:31:20.842099Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046644480 2025-04-04T17:31:20.842189Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-04-04T17:31:20.842294Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-04-04T17:31:20.849859Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-04-04T17:31:20.849928Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-04-04T17:31:20.850047Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-04-04T17:31:20.850065Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-04-04T17:31:20.850095Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-04-04T17:31:20.850132Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-04-04T17:31:20.850201Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-04-04T17:31:20.856080Z node 33 :GRPC_SERVER DEBUG: [0x51a0000b7080] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.856099Z node 33 :GRPC_SERVER DEBUG: [0x51a000012080] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.856477Z node 33 :GRPC_SERVER DEBUG: [0x51a000026a80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.856503Z node 33 :GRPC_SERVER DEBUG: [0x51a0000d9e80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.856793Z node 33 :GRPC_SERVER DEBUG: [0x51a0000d4480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.856828Z node 33 :GRPC_SERVER DEBUG: [0x51a0000f4e80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.857099Z node 33 :GRPC_SERVER DEBUG: [0x51a000003c80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.857155Z node 33 :GRPC_SERVER DEBUG: [0x51a00000f680] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.857399Z node 33 :GRPC_SERVER DEBUG: [0x51a00010f880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.857441Z node 33 :GRPC_SERVER DEBUG: [0x51a000115e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.857707Z node 33 :GRPC_SERVER DEBUG: [0x51a00009b480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.857714Z node 33 :GRPC_SERVER DEBUG: [0x51a0000efa80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858009Z node 33 :GRPC_SERVER DEBUG: [0x51b0000b6780] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858042Z node 33 :GRPC_SERVER DEBUG: [0x51a000088280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858256Z node 33 :GRPC_SERVER DEBUG: [0x51a000117680] received request 
Name# RegisterNode ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858337Z node 33 :GRPC_SERVER DEBUG: [0x51a0000f5480] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858479Z node 33 :GRPC_SERVER DEBUG: [0x51a0000a6e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858595Z node 33 :GRPC_SERVER DEBUG: [0x51a000013280] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858719Z node 33 :GRPC_SERVER DEBUG: [0x51a000101a80] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858879Z node 33 :GRPC_SERVER DEBUG: [0x51a0000a5080] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.858929Z node 33 :GRPC_SERVER DEBUG: [0x51a000030680] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-04-04T17:31:20.859157Z node 33 :GRPC_SERVER DEBUG: [0x51a000006680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 |98.6%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2025-04-04T17:28:41.858319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:41.858602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:41.858721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0009e6/r3tmp/tmpOCXBUs/pdisk_1.dat 2025-04-04T17:28:42.247483Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14688, node 1 2025-04-04T17:28:42.511291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:42.511338Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:42.511362Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:42.511710Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:42.513689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:42.598097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:42.598268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:42.613058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26962 2025-04-04T17:28:43.163274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:46.152910Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:46.182757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:46.182888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:46.212018Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:46.214146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:46.446500Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.447064Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.447584Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.447751Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.447836Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.448063Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.448139Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.448235Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.448315Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.629183Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:46.629326Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:46.642810Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:46.798436Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:46.854116Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:46.854352Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:46.892106Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:46.892893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:46.893109Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:46.893161Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:46.893218Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:46.893300Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:46.893432Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:46.893514Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:46.894434Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:46.925813Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:46.925935Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:46.932636Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:28:46.938429Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:28:46.939346Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:28:46.944225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:28:46.965221Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:46.965284Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:46.965368Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:28:46.980592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:46.987978Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:46.988135Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:47.151528Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:47.290704Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:47.382273Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:48.236137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:28:49.005248Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:49.137117Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:28:49.137224Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:28:49.137332Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2583:2944], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:28:49.137917Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2584:2945] 2025-04-04T17:28:49.138586Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2584:2945], schemeshard id = 72075186224037899 2025-04-04T17:28:50.428566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2712:3236], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.428730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.446638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-04-04T17:28:50.605173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2863:3271], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.605338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:50.617780Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2868:3275]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:50.617949Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:50.618147Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-04-04T17:28:50.618231Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2871:3278] 2025-04-04T17:28:50.618333Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2871:3278] 2025-04-04T17:28:50.618900Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2872:3076] 2025-04-04T17:28:50.619205Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2871:3278], server id = [2:2872:3076], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:50.619449Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2872:3076], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:50.619507Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:28:50.619728Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:50.619815Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2868:3275], StatRequests.size() = 1 2025-04-04T17:28:50.642640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2876:3282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... plyToActorId = [2:7528:5418], StatRequests.size() = 1 2025-04-04T17:31:17.948546Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:31:17.948660Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:31:17.948704Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:31:17.948749Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:31:18.276944Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7572:5438]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:18.277317Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-04-04T17:31:18.277379Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7572:5438], StatRequests.size() = 1 2025-04-04T17:31:19.742320Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:31:19.742519Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:31:19.742877Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:31:19.798518Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-04-04T17:31:19.798641Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 221.000000s, at schemeshard: 72075186224037897 2025-04-04T17:31:19.799087Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-04-04T17:31:19.814630Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:31:19.864668Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7615:5462]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:19.865020Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-04-04T17:31:19.865077Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7615:5462], StatRequests.size() = 1 2025-04-04T17:31:21.258604Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:21.258699Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:21.258757Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-04-04T17:31:21.258811Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:31:21.259297Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:31:21.297122Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:31:21.303959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7653:5489], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:21.304122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7662:5494], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:21.304415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:31:21.337842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-04-04T17:31:21.446693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7667:5497], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-04-04T17:31:21.550181Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7762:5545]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:21.550525Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:31:21.550572Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7762:5545], StatRequests.size() = 1 2025-04-04T17:31:21.647749Z node 2 :TX_PROXY ERROR: Actor# [2:7767:5547] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:31:21.685506Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7796:5562]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:21.685843Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:31:21.686143Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-04T17:31:21.686214Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:31:21.686431Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:31:21.686503Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7796:5562], StatRequests.size() = 1 2025-04-04T17:31:21.850775Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGJiZmY1NjMtOWY3ZjA0M2EtYzYyYTgxNGYtMTk4MzQ5ZjQ=, TxId: 2025-04-04T17:31:21.850853Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGJiZmY1NjMtOWY3ZjA0M2EtYzYyYTgxNGYtMTk4MzQ5ZjQ=, TxId: 2025-04-04T17:31:21.851628Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:31:21.869644Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:31:21.869733Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:31:21.935132Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:31:21.935197Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:31:22.002955Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3046:3111], schemeshard count = 1 2025-04-04T17:31:22.310920Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-04T17:31:22.310989Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 188.000000s, at schemeshard: 72075186224037899 2025-04-04T17:31:22.311238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-04T17:31:22.331947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:31:23.376948Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7864:5606]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:23.377207Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:31:23.377248Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7864:5606], StatRequests.size() = 1 2025-04-04T17:31:24.684886Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:31:24.698946Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:24.699009Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:24.699047Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-04T17:31:24.699080Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:31:24.699347Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:31:24.701861Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:31:24.716995Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWU1MjlmNzItZDU3OGY0OWUtODQ5YWQxOTEtZGQ0ZjE0MmE=, TxId: 2025-04-04T17:31:24.717041Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWU1MjlmNzItZDU3OGY0OWUtODQ5YWQxOTEtZGQ0ZjE0MmE=, TxId: 2025-04-04T17:31:24.717500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:31:24.732904Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:31:24.732959Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:31:24.783375Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7935:5650]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:24.783650Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:31:24.783692Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7935:5650], StatRequests.size() = 1 2025-04-04T17:31:26.279420Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7989:5682]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:26.279754Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:31:26.279795Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7989:5682], StatRequests.size() = 1 2025-04-04T17:31:27.591480Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-04T17:31:27.591696Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:31:27.592101Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:31:27.603411Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:31:27.603475Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:31:27.689984Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8027:5703]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:31:27.690272Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:31:27.690316Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8027:5703], StatRequests.size() = 1 >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2025-04-04T17:30:00.599895Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-04T17:30:00.599995Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-04T17:30:00.600094Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-04T17:30:00.600128Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-04T17:30:00.600187Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-04-04T17:30:00.600303Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-04T17:30:00.612304Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-04T17:30:00.618326Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 
1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1000 } GroupId: 8 
GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: ... h: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-04-04T17:31:28.209575Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-04-04T17:31:28.209706Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 272 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-04-04T17:31:28.209831Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 69, response# PDiskStateInfo { PDiskId: 276 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 277 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 278 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 279 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-04-04T17:31:28.209888Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-04T17:31:28.210293Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 68:272, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-04T17:31:28.210405Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 72:289, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-04T17:31:28.210456Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 67:270, status# INACTIVE, required status# ACTIVE, 
reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-04T17:31:28.210493Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-04T17:31:28.221802Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-04-04T17:31:28.221878Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-04T17:31:28.232336Z node 65 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-04T17:31:28.232407Z node 65 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T17:31:28.232517Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2 2025-04-04T17:31:28.232544Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-04T17:31:28.232686Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 65, wbId# [65:8388350642965737326:1634689637] 2025-04-04T17:31:28.232729Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 66, wbId# [66:8388350642965737326:1634689637] 2025-04-04T17:31:28.232762Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 67, wbId# [67:8388350642965737326:1634689637] 2025-04-04T17:31:28.232795Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 68, wbId# [68:8388350642965737326:1634689637] 2025-04-04T17:31:28.232843Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 69, wbId# [69:8388350642965737326:1634689637] 2025-04-04T17:31:28.232882Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 70, wbId# [70:8388350642965737326:1634689637] 2025-04-04T17:31:28.232912Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 71, wbId# [71:8388350642965737326:1634689637] 2025-04-04T17:31:28.232941Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 72, wbId# [72:8388350642965737326:1634689637] 2025-04-04T17:31:28.233401Z node 65 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { }, cookie# 123 2025-04-04T17:31:28.233442Z node 65 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-04T17:31:28.233813Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 65, response# PDiskStateInfo { PDiskId: 260 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 261 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 262 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 263 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.234320Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 70, response# PDiskStateInfo { PDiskId: 280 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 281 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 282 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 
State: Normal } PDiskStateInfo { PDiskId: 283 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.234651Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.234800Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.234927Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.235064Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.235194Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 272 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.235323Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 69, response# PDiskStateInfo { PDiskId: 276 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 277 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 278 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 279 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-04-04T17:31:28.235510Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s >> data_correctness.py::TestDataCorrectness::test [FAIL] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding |98.6%| [TM] {RESULT} ydb/core/cms/ut_sentinel/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth [GOOD] >> KafkaProtocol::CreatePartitionsScenario |98.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> KafkaProtocol::CreatePartitionsScenario [GOOD] >> KafkaProtocol::AlterConfigsScenario >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> ttl_unavailable_s3.py::TestUnavailableS3::test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> 
test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |98.7%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> KafkaProtocol::AlterConfigsScenario [GOOD] >> KafkaProtocol::LoginWithApiKey |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> KqpTpch::Query22 [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> KafkaProtocol::LoginWithApiKey [GOOD] >> KafkaProtocol::LoginWithApiKeyWithoutAt ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- 
result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |98.8%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> TDqPqRdReadActorTests::Backpressure [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> KafkaProtocol::LoginWithApiKeyWithoutAt [GOOD] >> KafkaProtocol::MetadataScenario >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2025-04-04T17:28:40.267381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:603:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:40.267834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:40.267985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a21/r3tmp/tmp5GQadw/pdisk_1.dat 2025-04-04T17:28:40.669706Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20923, node 1 2025-04-04T17:28:40.941490Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:40.941549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:40.941591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:40.942070Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:40.944264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:41.029032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:41.029194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:41.045106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28128 2025-04-04T17:28:41.579519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:44.849562Z node 4 :STATISTICS INFO: Subscribed for config changes on node 4 2025-04-04T17:28:44.883162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:44.883257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:44.912295Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-04-04T17:28:44.914814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:45.145260Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.145813Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.146589Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.146749Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.146969Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.147044Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.147134Z node 4 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.147221Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.147287Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.307030Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:45.307152Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:45.321641Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:45.493798Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:45.552339Z node 4 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:45.552454Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:45.581404Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:45.582607Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:45.582801Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:45.582870Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:45.582928Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:45.582980Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:45.583036Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:45.583097Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:45.583599Z node 4 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:45.609448Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:45.609560Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2029:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:45.615993Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2042:2607] 2025-04-04T17:28:45.620000Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2061:2618] 2025-04-04T17:28:45.620489Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2061:2618], schemeshard id = 72075186224037897 2025-04-04T17:28:45.626757Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-04-04T17:28:45.695407Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:45.695491Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:45.695568Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-04-04T17:28:45.716203Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:45.722713Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:45.722839Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:45.906237Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:46.072268Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:46.170604Z node 4 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:46.987197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:28:50.655092Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-04T17:28:50.736495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:50.736618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:50.737008Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:50.737078Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:50.783370Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:28:50.786977Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:50.882919Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:50.949108Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:28:50.949735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:51.082727Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-04-04T17:28:51.082798Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:28:51.082918Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3089:2949], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-04-04T17:28:51.084220Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:3090:2950] 2025-04-04T17:28:51.084802Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:3090:2950], schemeshard id = 72075186224037899 2025-04-04T17:28:52.201579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-04-04T17:28:55.437472Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:55.488384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:55.488480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:55.488956Z node 4 :HIVE WARN: HIVE#72075186224037888 
Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:55.489010Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:55.525778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:55.527956Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:55.528688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:55.530840Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:55.625126Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:55.906142Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-04-04T17:28:55.906210Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-04-04T17:28:55.906330Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3924:3154], at schemeshard: 72075186224037905, StatisticsAggregatorId: 720751862 ... DE5NWQyNzItMmYzNGM1My02MDIzYWZmLWE3MTBjNGRi, TxId: 2025-04-04T17:32:03.219666Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NDE5NWQyNzItMmYzNGM1My02MDIzYWZmLWE3MTBjNGRi, TxId: 2025-04-04T17:32:03.220442Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:32:03.239041Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-04-04T17:32:03.239129Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:32:03.286980Z node 4 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:32:03.287094Z node 4 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:32:03.356542Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11606:7151], schemeshard count = 1 2025-04-04T17:32:05.082672Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-04T17:32:05.082747Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 200.000000s, at schemeshard: 72075186224037899 2025-04-04T17:32:05.083303Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-04-04T17:32:05.104243Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:32:06.241942Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:32:06.242000Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:06.242038Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2025-04-04T17:32:06.242081Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:32:06.249402Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:32:06.272558Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:32:06.273222Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:32:06.273298Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:32:06.280602Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:32:06.300004Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:32:06.300280Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2025-04-04T17:32:06.301285Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11773:7249], server id = [4:11774:7250], tablet id = 72075186224037911, status = OK 2025-04-04T17:32:06.301889Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11773:7249], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-04-04T17:32:06.320486Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-04-04T17:32:06.320626Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-04T17:32:06.321006Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:32:06.321261Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:32:06.321628Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-04-04T17:32:06.324276Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11773:7249], server id = [4:11774:7250], tablet id = 72075186224037911 2025-04-04T17:32:06.324326Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:32:06.325026Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:32:06.350634Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:11794:7269]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:06.350910Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:32:06.350965Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:11794:7269], StatRequests.size() = 1 2025-04-04T17:32:06.571813Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OWE5MDU2MjItNWQ1ZTkwMjUtOWFjOTBlNTItMTYxNWY2ODg=, TxId: 2025-04-04T17:32:06.571901Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OWE5MDU2MjItNWQ1ZTkwMjUtOWFjOTBlNTItMTYxNWY2ODg=, TxId: 2025-04-04T17:32:06.572660Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:32:06.591340Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path 
[OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:32:06.591407Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:32:07.204008Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-04-04T17:32:07.204126Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-04-04T17:32:07.694405Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-04-04T17:32:07.694481Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 237.000000s, at schemeshard: 72075186224037905 2025-04-04T17:32:07.694716Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2025-04-04T17:32:07.712471Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:32:09.659093Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:32:09.659169Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:09.659209Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2025-04-04T17:32:09.659244Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-04-04T17:32:09.664928Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:32:09.684441Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:32:09.685097Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:32:09.685167Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:32:09.685977Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-04-04T17:32:09.720109Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:32:09.720429Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2025-04-04T17:32:09.721386Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11935:7341], server id = [4:11936:7342], tablet id = 72075186224037912, status = OK 2025-04-04T17:32:09.721484Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11935:7341], path = { OwnerId: 72075186224037905 LocalId: 2 } 2025-04-04T17:32:09.725943Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-04-04T17:32:09.726059Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-04-04T17:32:09.732292Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-04-04T17:32:09.732574Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:32:09.733304Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-04-04T17:32:09.735480Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11935:7341], server id = [4:11936:7342], tablet id = 72075186224037912 2025-04-04T17:32:09.735520Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:32:09.736459Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:32:09.778147Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=Yjk1ZGFlYjktYWIzNzU2MTQtOTVjNzkwYmUtOWE1ZmUwZDE=, TxId: 2025-04-04T17:32:09.778213Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=Yjk1ZGFlYjktYWIzNzU2MTQtOTVjNzkwYmUtOWE1ZmUwZDE=, TxId: 2025-04-04T17:32:09.779467Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:32:09.780569Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:11954:6035]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:09.781066Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:32:09.781137Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:32:09.784935Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:32:09.785012Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:32:09.785072Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:32:09.802325Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-04-04T17:32:09.803434Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:11954:6035]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:09.803811Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:32:09.803882Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:32:09.804158Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:32:09.804221Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-04-04T17:32:09.804279Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-04-04T17:32:09.808479Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> 
test_tpch.py::TestTpchS1::test_tpch[1] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> BasicStatistics::TwoDatabases [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> TDqPqReadActorTest::TestReadFromTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2025-04-04T17:28:27.336755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:524:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:27.337071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:27.337291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a34/r3tmp/tmp7cRONQ/pdisk_1.dat 2025-04-04T17:28:27.713974Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26066, node 1 2025-04-04T17:28:27.945356Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:27.945401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:27.945432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:27.945837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:27.947784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:28.040939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:28.041078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:28.055338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11101 2025-04-04T17:28:28.582724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:31.708948Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-04T17:28:31.737102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:31.737219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:31.778287Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:28:31.780246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:32.023084Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.023595Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.024128Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.024250Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.024430Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.024498Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.024591Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.024680Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.024727Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:32.195546Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:32.195662Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:32.208770Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:32.359816Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:32.405769Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:32.405886Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:32.443116Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:32.444635Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:32.444873Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:32.444944Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:32.445057Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:32.445128Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:32.445179Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:32.445234Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:32.446686Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:32.492660Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:32.492772Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1945:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:32.499932Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1958:2607] 2025-04-04T17:28:32.504319Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1975:2618] 2025-04-04T17:28:32.504560Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1975:2618], schemeshard id = 72075186224037897 2025-04-04T17:28:32.512282Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2025-04-04T17:28:32.535235Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:32.535299Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:32.535388Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2025-04-04T17:28:32.551795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:32.560559Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:32.560781Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:32.820861Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:32.988946Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:33.102343Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:33.964397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:28:37.660424Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:37.701377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:37.701503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:37.743399Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:37.745516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:37.948647Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.949277Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.949912Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.950091Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.950261Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.950512Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.950603Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.950711Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:37.950828Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:38.120161Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:38.120278Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:38.137543Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:38.280071Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:38.315738Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-04-04T17:28:38.315834Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-04-04T17:28:38.354877Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-04-04T17:28:38.356357Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-04-04T17:28:38.356553Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-04-04T17:28:38.356630Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:38.356688Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:38.356761Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:38.356811Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:38.356860Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-04-04T17:28:38.358105Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-04-04T17:28:38.392225Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-04-04T17:28:38.392353Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3218:2598], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-04-04T17:28:38.400295Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3231:2609] 2025-04-04T17:28:38.405032Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3252:261 ... eStatsToSA in 202.000000s, at schemeshard: 72075186224038898 2025-04-04T17:32:07.380826Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038898, stats size# 49 2025-04-04T17:32:07.397945Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-04-04T17:32:08.731676Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10325:4786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:08.732075Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:32:08.732126Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10325:4786], StatRequests.size() = 1 2025-04-04T17:32:09.465080Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-04T17:32:09.465151Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:09.465194Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 3] is data table. 2025-04-04T17:32:09.465230Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-04T17:32:09.465587Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2025-04-04T17:32:09.468937Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:32:09.473587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10347:4462], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:09.473737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10357:4467], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:09.473904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:09.494152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-04-04T17:32:09.596313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10361:4470], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-04T17:32:09.773174Z node 2 :TX_PROXY ERROR: Actor# [2:10449:4518] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Database2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:32:09.798166Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10478:4533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:09.799015Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:32:09.799138Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10480:4535] 2025-04-04T17:32:09.799223Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10480:4535] 2025-04-04T17:32:09.799979Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10480:4535], server id = [2:10481:4536], tablet id = 72075186224038895, status = OK 2025-04-04T17:32:09.800086Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:10481:4536] 2025-04-04T17:32:09.800256Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:10481:4536], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:32:09.800333Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:32:09.800480Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:32:09.800573Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10478:4533], StatRequests.size() = 1 2025-04-04T17:32:09.957253Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzhkZjE4M2YtZmNlNGU1ZTgtZjU4ZGM1OGQtZmIwMmViNDY=, TxId: 2025-04-04T17:32:09.957327Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzhkZjE4M2YtZmNlNGU1ZTgtZjU4ZGM1OGQtZmIwMmViNDY=, TxId: 2025-04-04T17:32:09.957939Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-04T17:32:09.976056Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-04T17:32:09.976120Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:32:10.044438Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-04-04T17:32:10.044512Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:32:10.142534Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10480:4535], schemeshard count = 1 2025-04-04T17:32:11.091977Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:32:11.092258Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:32:11.092301Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:11.092352Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-04-04T17:32:11.092390Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:32:11.092806Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database1 2025-04-04T17:32:11.110293Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:32:11.152735Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YjAzODc3OTUtMjkyOWM1ZDItMWVhYmIyZC02NzVlZTY5Zg==, TxId: 2025-04-04T17:32:11.152814Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YjAzODc3OTUtMjkyOWM1ZDItMWVhYmIyZC02NzVlZTY5Zg==, TxId: 2025-04-04T17:32:11.153640Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:32:11.176183Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:32:11.176243Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:32:11.402456Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10579:4824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:11.402837Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-04-04T17:32:11.402888Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10579:4824], StatRequests.size() = 1 2025-04-04T17:32:13.556094Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10650:4850]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:13.556357Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-04-04T17:32:13.556402Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10650:4850], StatRequests.size() = 1 2025-04-04T17:32:14.204098Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-04T17:32:14.204159Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:14.204209Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 4] is data table. 2025-04-04T17:32:14.204245Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 4] 2025-04-04T17:32:14.204642Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2025-04-04T17:32:14.208028Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:32:14.236770Z node 2 :SYSTEM_VIEWS WARN: [72075186224038891] TEvIntervalQuerySummary, wrong stage: node id# 2 2025-04-04T17:32:14.237495Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDY1ZDAxYTctMjVmNGExNTUtZDI2YWQxNGItMWI4NmU5Mzg=, TxId: 2025-04-04T17:32:14.237546Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDY1ZDAxYTctMjVmNGExNTUtZDI2YWQxNGItMWI4NmU5Mzg=, TxId: 2025-04-04T17:32:14.238157Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-04T17:32:14.253748Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 4] 2025-04-04T17:32:14.253807Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-04-04T17:32:15.282920Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-04-04T17:32:15.283213Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-04T17:32:15.283391Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:32:15.283422Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:15.283884Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:32:15.496861Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10734:4864]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:15.497262Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-04-04T17:32:15.497315Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10734:4864], StatRequests.size() = 1 2025-04-04T17:32:15.498660Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10736:4619]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:15.515749Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:32:15.515842Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10736:4619], StatRequests.size() = 1 >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[0] >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> KafkaProtocol::MetadataScenario [GOOD] >> KafkaProtocol::MetadataInServerlessScenario >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] |98.8%| [TM] {RESULT} ydb/tests/sql/py3test >> BulkUpsert::BulkUpsert [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> TDqPqReadActorTest::TestReadFromTopic [GOOD] >> 
TDqPqReadActorTest::TestReadFromTopicFromNow >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> KafkaProtocol::MetadataInServerlessScenario [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |98.8%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> TDqPqReadActorTest::ReadWithFreeSpace |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> test_drain.py::TestHive::test_drain_tablets [GOOD] |98.8%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] |98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> TDqPqReadActorTest::ReadNonExistentTopic >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |98.9%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> ttl_unavailable_s3.py::TestUnavailableS3::test [FAIL] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad [GOOD] >> PublishKafkaEndpoints::HaveEndpointInLookup >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 22927, MsgBus: 11288 2025-04-04T17:29:16.346757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511384721103104:2194];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:16.346826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b84/r3tmp/tmp2JW0jh/pdisk_1.dat 2025-04-04T17:29:16.727511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:16.761492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:16.762107Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:16.773657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22927, node 1 2025-04-04T17:29:16.941911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:16.941935Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:16.941961Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:16.942092Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11288 TClient is connected to server localhost:11288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:17.549895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:18.958382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511393311038229:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:18.958494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 14855, MsgBus: 14041 2025-04-04T17:29:19.933052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511397930126080:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:19.933156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b84/r3tmp/tmpEuThrJ/pdisk_1.dat 2025-04-04T17:29:20.022340Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14855, node 2 2025-04-04T17:29:20.064364Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:20.064474Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:20.066115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:20.078380Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:20.078405Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:20.078414Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:20.078580Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14041 TClient is connected to server localhost:14041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:20.465114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:22.545759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489511410815028625:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:22.545870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 21714, MsgBus: 23687 2025-04-04T17:29:23.277479Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489511413989942031:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:23.277528Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b84/r3tmp/tmp4mAbtB/pdisk_1.dat 2025-04-04T17:29:23.392754Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:23.422460Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 21714, node 3 2025-04-04T17:29:23.422571Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:23.428738Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:23.478769Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:29:23.478791Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:29:23.478801Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:29:23.478916Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23687 TClient is connected to server localhost:23687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:29:23.905169Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:23.918777Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:29:26.545129Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489511426874844579:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:26.545210Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 17422, MsgBus: 8129 2025-04-04T17:29:27.165722Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489511432651935823:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:27.165770Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b84/r3tmp/tmpOuXQPE/pdisk_1.dat 2025-04-04T17:29:27.307217Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:27.315025Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:27.315105Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:27.319221Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17422, node 4 2025-04-04T17:29:27.353026Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: ... rn (Commit! (Commit! $10 $9) $1 $7)) ) 2025-04-04 17:32:37.692 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpLogical-ApplyExtractMembersToReadTableRanges 2025-04-04 17:32:37.698 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpLogical-RewriteAggregate 2025-04-04 17:32:37.709 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-04 17:32:37.722 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-PushAggregateCombineToStage 2025-04-04 17:32:37.732 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-04 17:32:37.744 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-04 17:32:37.757 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-04 17:32:37.777 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-04-04 17:32:37.811 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildShuffleStage 2025-04-04 17:32:37.828 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-04 17:32:37.856 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-04 17:32:38.043 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPeepholeFinal-SetCombinerMemoryLimit 2025-04-04 17:32:38.192 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B0198640) [KQP] kqp_host.cpp:1379: Compiled query: ( (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-04 17:32:38.200 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B0198640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) 2025-04-04 17:32:38.249 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B0198640) [KQP] kqp_host.cpp:1379: Compiled query: ( (let $1 (Right! (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/view_series"))) (Void) '()))) (let $2 '('('"query_ast" (RemoveSystemMembers (PersistableRepr (SqlProject $1 '((SqlProjectStarItem (TypeOf $1) '"" (lambda '($3) $3) '())))))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'createObject) '('features $2)))) ) 2025-04-04 17:32:38.317 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('('"query_ast" (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '()))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (let $3 (KiCreateObject! world $1 '"/Root/read_from_one_view" '"VIEW" $2 '0 '0)) (return (Commit! $3 $1 '('('"mode" '"flush")))) ) 2025-04-04 17:32:38.387 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] kqp_host.cpp:1379: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/read_from_one_view"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) (Bool 'true) (lambda '($6) (PersistableRepr (Member $6 '"series_id"))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) 2025-04-04 17:32:38.569 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (Sort (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '())) (Bool 'true) (lambda '($7) (Member $7 '"series_id")))) (let $3 '('('"mode" '"flush"))) (let $4 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($2 '() '0)) (KiEffects) '('('"db" '"/Root/series" '"Select")) '())) $3 (Void))) (let $5 (DataSink 'result)) (let $6 (ResPull! (Left! $4) $5 (Key) (Nth (Right! $4) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! 
$6 $5) $1 $3)) ) 2025-04-04 17:32:38.579 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-04-04 17:32:38.585 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-04-04 17:32:38.594 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-RemoveRedundantSortByPk 2025-04-04 17:32:38.602 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B14A2640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-04-04 17:32:38.723 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B0198640) [KQP] kqp_host.cpp:1379: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-04-04 17:32:38.733 INFO ydb-core-kqp-ut-view(pid=835260, tid=0x00007F04B0198640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/read_from_one_view" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 25821, MsgBus: 7003 2025-04-04T17:32:40.446808Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7489512263590063223:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:32:40.473379Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000b84/r3tmp/tmpHMTbwB/pdisk_1.dat 2025-04-04T17:32:40.783679Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:32:40.810105Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:32:40.814501Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:32:40.816668Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25821, node 23 2025-04-04T17:32:40.918386Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:32:40.918424Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:32:40.918443Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:32:40.918662Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7003 TClient is connected to server localhost:7003 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T17:32:42.401525Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:32:42.416003Z node 23 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T17:32:45.428930Z node 23 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7489512263590063223:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:32:45.429050Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T17:32:48.893386Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7489512297949802128:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.893568Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.960647Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7489512297949802156:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.960822Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.960948Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7489512297949802161:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.969362Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-04-04T17:32:48.989367Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7489512297949802163:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-04-04T17:32:49.059025Z node 23 :TX_PROXY ERROR: Actor# [23:7489512302244769510:2369] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |98.9%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2025-04-04T17:28:42.923902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:523:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:42.924328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:42.924379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00099d/r3tmp/tmprrnCVu/pdisk_1.dat 2025-04-04T17:28:43.293540Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8745, node 1 2025-04-04T17:28:43.520828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:43.520881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:43.520912Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:43.521455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:43.524290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:43.609432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:43.609587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:43.623190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27531 2025-04-04T17:28:44.170210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:47.066358Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-04-04T17:28:47.105769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:47.105893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:47.135623Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-04-04T17:28:47.137912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:47.408122Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.408657Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.409171Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.409289Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.409518Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.409590Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.409648Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.409719Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.409842Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:47.585169Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:47.585290Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:47.598770Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:47.754156Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:47.797126Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:47.797232Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:47.837751Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:47.838945Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:47.839111Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:47.839164Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:47.839212Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:47.839257Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:47.839354Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:47.839411Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:47.840545Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:47.866569Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:47.866697Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1948:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:47.874115Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1961:2606] 2025-04-04T17:28:47.875805Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1971:2613] 2025-04-04T17:28:47.876072Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1971:2613], schemeshard id = 72075186224037897 2025-04-04T17:28:47.886044Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2025-04-04T17:28:47.927787Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:47.927854Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:47.927920Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2025-04-04T17:28:47.942501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:47.950282Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:47.950437Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:48.174642Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:48.344768Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:48.421675Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:49.390874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-04-04T17:28:53.182283Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:53.221190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:53.221310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:53.270697Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:53.273005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:53.539162Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.539791Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.540271Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.540462Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.540733Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.540833Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.540926Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.541006Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.541098Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:53.668689Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:53.668829Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:53.682591Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:53.809774Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:53.863679Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-04-04T17:28:53.863777Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-04-04T17:28:53.893157Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-04-04T17:28:53.893373Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-04-04T17:28:53.893536Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-04-04T17:28:53.893588Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:53.893641Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:53.893707Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:53.893767Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:53.893818Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-04-04T17:28:53.894337Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-04-04T17:28:53.928884Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-04-04T17:28:53.929014Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3227:2599], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-04-04T17:28:53.936601Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3239:2608] 2025-04-04T17:28:53.945880Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3277:2624] 20 ... erId: 72075186224038898, LocalPathId: 3] 2025-04-04T17:32:48.214796Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-04-04T17:32:48.217624Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:32:48.224919Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12582:5261], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.225036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12592:5266], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.225120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:32:48.249364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-04-04T17:32:48.349058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12596:5269], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-04-04T17:32:48.552339Z node 2 :TX_PROXY ERROR: Actor# [2:12687:5317] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Shared2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:32:48.572377Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12716:5332]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:48.572899Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:32:48.573006Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12718:5334] 2025-04-04T17:32:48.573071Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12718:5334] 2025-04-04T17:32:48.573774Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:12719:5335] 2025-04-04T17:32:48.574004Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:12719:5335], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:32:48.574112Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:32:48.574814Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12718:5334], server id = [2:12719:5335], tablet id = 72075186224038895, status = OK 2025-04-04T17:32:48.575064Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:32:48.575132Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12716:5332], StatRequests.size() = 1 2025-04-04T17:32:48.682183Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTZkM2JjZjktZGMzMWM0ZDEtN2I5YTYyMDgtZjM0OWY2ZjE=, TxId: 2025-04-04T17:32:48.682335Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTZkM2JjZjktZGMzMWM0ZDEtN2I5YTYyMDgtZjM0OWY2ZjE=, TxId: 2025-04-04T17:32:48.683134Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-04T17:32:48.712183Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-04-04T17:32:48.712236Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:32:48.757777Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-04-04T17:32:48.757840Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:32:48.848753Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12718:5334], schemeshard count = 1 2025-04-04T17:32:49.390899Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-04-04T17:32:49.390971Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 185.000000s, at schemeshard: 72075186224037899 2025-04-04T17:32:49.391283Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-04-04T17:32:49.418704Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:32:49.712831Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:32:49.724450Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:32:49.724513Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:49.724553Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-04-04T17:32:49.724586Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:32:49.724857Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared1 2025-04-04T17:32:49.727625Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:32:49.747844Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=NTNhMDMyOWMtZWNiNTlmMTAtOTQ4MDI5NTktMzYzNWMzZjA=, TxId: 2025-04-04T17:32:49.747917Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=NTNhMDMyOWMtZWNiNTlmMTAtOTQ4MDI5NTktMzYzNWMzZjA=, TxId: 2025-04-04T17:32:49.748816Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:32:49.766041Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-04-04T17:32:49.766100Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:32:49.852258Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12821:5662]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:49.852676Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-04-04T17:32:49.852726Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12821:5662], StatRequests.size() = 1 2025-04-04T17:32:51.635775Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12904:5696]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:51.636151Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-04-04T17:32:51.636197Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12904:5696], StatRequests.size() = 1 2025-04-04T17:32:52.182838Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038900 2025-04-04T17:32:52.182908Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 202.000000s, at schemeshard: 72075186224038900 2025-04-04T17:32:52.183232Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038900, stats size# 26 2025-04-04T17:32:52.205907Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-04-04T17:32:52.476941Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-04-04T17:32:52.477008Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:52.477044Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038900, LocalPathId: 2] is data table. 2025-04-04T17:32:52.477077Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038900, LocalPathId: 2] 2025-04-04T17:32:52.477709Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-04-04T17:32:52.480463Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-04-04T17:32:52.505288Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTM5Yjc0MDYtMjQxMDU3OTUtZDYzZDMyYTktYzdjZjM1N2I=, TxId: 2025-04-04T17:32:52.505356Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTM5Yjc0MDYtMjQxMDU3OTUtZDYzZDMyYTktYzdjZjM1N2I=, TxId: 2025-04-04T17:32:52.506128Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-04-04T17:32:52.525936Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038900, LocalPathId: 2] 2025-04-04T17:32:52.526008Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-04-04T17:32:53.415226Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-04-04T17:32:53.416178Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:32:53.416758Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-04-04T17:32:53.429188Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:32:53.429251Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:32:53.505738Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:13006:5716]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:53.506117Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-04-04T17:32:53.506168Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:13006:5716], StatRequests.size() = 1 2025-04-04T17:32:53.508239Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:13008:5435]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:32:53.515189Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-04-04T17:32:53.515500Z node 2 :STATISTICS DEBUG: [72075186224038895] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-04-04T17:32:53.515561Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-04-04T17:32:53.516241Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:32:53.516348Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:13008:5435], StatRequests.size() = 1 >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> 
TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> PublishKafkaEndpoints::HaveEndpointInLookup [GOOD] >> PublishKafkaEndpoints::MetadataActorGetsEndpoint >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can |98.9%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus >> TPqWriterTest::TestWriteToTopic >> TPqWriterTest::TestWriteToTopic [GOOD] >> TPqWriterTest::TestWriteToTopicMultiBatch >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TPqWriterTest::TestDeferredWriteToTopic >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery >> 
TPqWriterTest::WriteNonExistentTopic >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] >> TPqWriterTest::TestCheckpoints >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> DataShardStats::HasSchemaChanges_Families [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData >> TPqWriterTest::TestCheckpoints [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] >> TPqWriterTest::TestCheckpointWithEmptyBatch >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::HasSchemaChanges_Families [GOOD] Test command err: 2025-04-04T17:29:56.233655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:29:56.234318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:29:56.234404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/00075e/r3tmp/tmpxpMMUe/pdisk_1.dat 2025-04-04T17:29:56.762417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:29:56.817069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:56.829413Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-04-04T17:29:56.871864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:56.873111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:56.886597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:29:56.985251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:29:57.043763Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvBoot 2025-04-04T17:29:57.044832Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvRestored 2025-04-04T17:29:57.045326Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:664:2569] 2025-04-04T17:29:57.045607Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:29:57.063434Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2563], Recipient [1:664:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T17:29:57.094914Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:29:57.095032Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:29:57.096603Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:29:57.096717Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:29:57.096768Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:29:57.098043Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:29:57.098209Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:29:57.098313Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:680:2569] in generation 1 2025-04-04T17:29:57.109207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:29:57.134614Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:29:57.137116Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2025-04-04T17:29:57.137286Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2579] 2025-04-04T17:29:57.137326Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:29:57.137359Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:29:57.137389Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:57.137574Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:57.138482Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:57.139841Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:29:57.139987Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:29:57.140187Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:29:57.140230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:29:57.140282Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:29:57.140319Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:29:57.140355Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:29:57.140389Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:29:57.140435Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:29:57.140536Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:671:2573], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:57.140578Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:57.140619Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:671:2573], sessionId# [0:0:0] 2025-04-04T17:29:57.142678Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:408:2403], Recipient [1:671:2573] 2025-04-04T17:29:57.142739Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:29:57.142928Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:29:57.143240Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-04-04T17:29:57.143302Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:29:57.143409Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:29:57.143577Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-04-04T17:29:57.143621Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-04-04T17:29:57.143656Z node 1 :TX_DATASHARD TRACE: Add 
[0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-04-04T17:29:57.143700Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:29:57.144019Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-04-04T17:29:57.144056Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-04-04T17:29:57.144091Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:29:57.144123Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:29:57.144216Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-04-04T17:29:57.144244Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:29:57.144276Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-04-04T17:29:57.144312Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-04-04T17:29:57.144365Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-04-04T17:29:57.146024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:683:2580], Recipient [1:664:2569]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-04-04T17:29:57.146077Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-04-04T17:29:57.156832Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:29:57.156909Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-04-04T17:29:57.156957Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-04-04T17:29:57.157010Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-04-04T17:29:57.157106Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-04-04T17:29:57.297578Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:703:2594], Recipient [1:664:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:57.297690Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:29:57.297778Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2592], serverId# [1:703:2594], sessionId# [0:0:0] 2025-04-04T17:29:57.298162Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:664:2569]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-04-04T17:29:57.298267Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T17:29:57.298451Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 
2025-04-04T17:29:57.298502Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-04-04T17:29:57.298539Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-04-04T17:29:57.298575Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-04-04T17:29:57.302991Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-04-04T17:29:57.303067Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:29:57.304908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:664:2569], Recipient [1:664:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:57.304961Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-04-04T17:29:57.305007Z ... lanQueue at 72075186224037888 2025-04-04T17:33:14.126121Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:33:14.126174Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:33:14.128385Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 25500} 2025-04-04T17:33:14.128490Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:33:14.130155Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:33:14.130216Z node 13 :TX_DATASHARD TRACE: Complete execution for [25500:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:33:14.130405Z node 13 :TX_DATASHARD DEBUG: Complete [25500 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [13:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:33:14.130483Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715664 state Ready TxInFly 0 2025-04-04T17:33:14.130629Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:33:14.133014Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1160:2975], Recipient [13:931:2763]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:1164:2979] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T17:33:14.133075Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T17:33:14.135712Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:409:2404], Recipient [13:931:2763]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715664 2025-04-04T17:33:14.135767Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-04-04T17:33:14.135825Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-04-04T17:33:14.135913Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 waiting for schema changes 2025-04-04T17:33:14.148465Z node 13 
:TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [13:1160:2975], Recipient [13:931:2763]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [13:1160:2975] ServerId: [13:1164:2979] } 2025-04-04T17:33:14.148569Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-04-04T17:33:14.943944Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:931:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:33:14.944033Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-04T17:33:14.944296Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1, with schema changes 2025-04-04T17:33:14.944504Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 9 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 65 IndexSize: 82 } Channels { Channel: 2 DataSize: 65 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: true LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2234 Memory: 124336 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-04T17:33:14.945625Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1198:3013], Recipient [13:931:2763]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:33:14.945716Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:33:14.945788Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1197:3012], serverId# [13:1198:3013], sessionId# [0:0:0] 2025-04-04T17:33:14.946011Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [13:1196:3011], Recipient [13:931:2763]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-04-04T17:33:14.946198Z node 13 :TX_DATASHARD INFO: Started background compaction# 3 of 72075186224037888 tableId# 2 localTid# 1001, requested from [13:1196:3011], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-04-04T17:33:14.947973Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 2, ts 1970-01-01T00:00:20.452024Z 2025-04-04T17:33:14.948040Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-04T17:33:14.948112Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 2, front# 3 2025-04-04T17:33:14.950182Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-04-04T17:33:14.951503Z node 13 :TX_DATASHARD TRACE: StateWork, 
received event# 268828683, Sender [13:928:2761], Recipient [13:931:2763]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:33:14.953422Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-04T17:33:14.953537Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, with schema changes, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-04-04T17:33:14.953878Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1203:3017], Recipient [13:931:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-04T17:33:14.953950Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-04T17:33:14.954064Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-04-04T17:33:14.959019Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 3, ts 1970-01-01T00:00:30.452024Z 2025-04-04T17:33:14.959099Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-04T17:33:14.959145Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 3, front# 3 2025-04-04T17:33:14.959210Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [13:1196:3011]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-04-04T17:33:14.961396Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-04-04T17:33:14.961809Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [13:928:2761], Recipient [13:931:2763]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-04-04T17:33:14.963911Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-04T17:33:14.964035Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-04-04T17:33:14.964270Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1210:3023], Recipient [13:931:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-04-04T17:33:14.964322Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-04-04T17:33:14.964392Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 waiting for no schema changes 2025-04-04T17:33:14.975766Z node 13 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186224037888, tableId# 2, last 
full compaction# 1970-01-01T00:00:30.452024Z 2025-04-04T17:33:15.756419Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [13:931:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-04-04T17:33:15.756561Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-04-04T17:33:15.756690Z node 13 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 35000 last cleanup 0 2025-04-04T17:33:15.756790Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:33:15.756852Z node 13 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-04-04T17:33:15.756935Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-04-04T17:33:15.756996Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-04-04T17:33:15.757183Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:931:2763]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-04-04T17:33:15.757225Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-04-04T17:33:15.757404Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1 2025-04-04T17:33:15.757577Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 30 HasLoanedParts: false Channels { Channel: 1 DataSize: 80 IndexSize: 82 } Channels { Channel: 2 DataSize: 50 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1390 Memory: 124336 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest >> PublishKafkaEndpoints::MetadataActorGetsEndpoint [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: 2025-04-04T17:30:14.854928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:30:14.855638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:30:14.855713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a94/r3tmp/tmpfpFhlQ/pdisk_1.dat 2025-04-04T17:30:15.984185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:30:16.072283Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:30:16.142729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:30:16.144101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:30:16.156807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:30:16.285178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:30:16.399967Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:683:2578] 2025-04-04T17:30:16.400274Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:30:16.515194Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:685:2580] 2025-04-04T17:30:16.515424Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:30:16.524579Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:30:16.524756Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:30:16.526717Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-04-04T17:30:16.526804Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-04-04T17:30:16.526868Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-04-04T17:30:16.530773Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:30:16.531052Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:30:16.531146Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:718:2578] in generation 1 2025-04-04T17:30:16.531682Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:30:16.531825Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:30:16.533147Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-04-04T17:30:16.533203Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-04-04T17:30:16.533248Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-04-04T17:30:16.533522Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:30:16.533634Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:30:16.533696Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:722:2580] in generation 1 2025-04-04T17:30:16.540903Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:689:2582] 2025-04-04T17:30:16.541097Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:30:16.562861Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:691:2584] 2025-04-04T17:30:16.563096Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-04-04T17:30:16.580516Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:30:16.580668Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:30:16.581843Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-04-04T17:30:16.581907Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-04-04T17:30:16.581962Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-04-04T17:30:16.586346Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:30:16.586601Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:30:16.586679Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:750:2582] in generation 1 2025-04-04T17:30:16.587011Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-04-04T17:30:16.587096Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-04-04T17:30:16.588321Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-04-04T17:30:16.588377Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-04-04T17:30:16.588432Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-04-04T17:30:16.588728Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-04-04T17:30:16.588849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-04-04T17:30:16.588921Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:751:2584] in generation 1 2025-04-04T17:30:16.602841Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:30:16.665357Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-04-04T17:30:16.667044Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:30:16.667251Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:756:2619] 2025-04-04T17:30:16.667291Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-04-04T17:30:16.667324Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-04-04T17:30:16.667356Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:30:16.674496Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:30:16.674572Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-04-04T17:30:16.674661Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:30:16.674741Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:757:2620] 
2025-04-04T17:30:16.674783Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-04-04T17:30:16.674811Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-04-04T17:30:16.674838Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:30:16.675290Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:30:16.675325Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-04-04T17:30:16.675375Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:30:16.675452Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:758:2621] 2025-04-04T17:30:16.675477Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-04-04T17:30:16.675525Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-04-04T17:30:16.675550Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-04-04T17:30:16.675750Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-04-04T17:30:16.675849Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-04-04T17:30:16.675947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-04-04T17:30:16.675976Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-04-04T17:30:16.676025Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-04-04T17:30:16.676088Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:759:2622] 2025-04-04T17:30:16.676111Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-04-04T17:30:16.676131Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-04-04T17:30:16.676151Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-04-04T17:30:16.676341Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-04-04T17:30:16.676378Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:16.676420Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-04-04T17:30:16.676468Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:30:16.676528Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-04-04T17:30:16.676595Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-04-04T17:30:16.677096Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:669:2572], serverId# [1:714:2597], sessionId# [0:0:0] 2025-04-04T17:30:16.677148Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-04-04T17:30:16.677177Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:30:16.677200Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-04-04T17:30:16.677228Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:30:16.677261Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-04-04T17:30:16.677309Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-04-04T17:30:16.677443Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:30:16.677815Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-04-04T17:30:16.677926Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-04-04T17:30:16.682636Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:670:2573], serverId# [1:717:2600], sessionId# [0:0:0] 2025-04-04T17:30:16.682724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-04-04T17:30:16.682764Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active ... e 6 :TX_DATASHARD TRACE: Execution status for [2043:281474976715664] at 72075186224037889 is Executed 2025-04-04T17:33:16.968318Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [2043:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-04-04T17:33:16.968349Z node 6 :TX_DATASHARD TRACE: Execution plan for [2043:281474976715664] at 72075186224037889 has finished 2025-04-04T17:33:16.968381Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T17:33:16.968412Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-04-04T17:33:16.968439Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-04-04T17:33:16.968469Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-04-04T17:33:16.969307Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2043} 2025-04-04T17:33:16.969696Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1002:2807], Recipient [6:665:2569]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T17:33:16.969723Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T17:33:16.969749Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:999:2804], serverId# [6:1002:2807], sessionId# [0:0:0] 2025-04-04T17:33:16.969798Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2043} 2025-04-04T17:33:16.969950Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:755:2634], Recipient [6:665:2569]: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:33:16.969975Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:33:16.970016Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715664 2025-04-04T17:33:16.970083Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got 
read set: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-04-04T17:33:16.970227Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-04-04T17:33:16.970283Z node 6 :TX_DATASHARD TRACE: Complete execution for [2043:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-04-04T17:33:16.970348Z node 6 :TX_DATASHARD DEBUG: Complete [2043 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [6:992:2754], exec latency: 0 ms, propose latency: 0 ms 2025-04-04T17:33:16.970399Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:33:16.970434Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-04-04T17:33:16.970491Z node 6 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037888 {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} ... nodata readset 2025-04-04T17:33:16.970670Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:665:2569], Recipient [6:755:2634]: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-04T17:33:16.970692Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-04-04T17:33:16.970722Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2025-04-04T17:33:16.970769Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-04-04T17:33:16.970815Z node 6 :TX_DATASHARD TRACE: Processed readset without data from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2025-04-04T17:33:16.971508Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MzVlMDY4NTYtOWI2NjQ0NzItMzAyMGQyYjktZmEyNzQyNzQ=, ActorId: [6:937:2754], ActorState: ExecuteState, TraceId: 01jr0xtdmw385c6rvmbtwf41ef, Create QueryResponse for error on request, msg: 2025-04-04T17:33:16.972129Z node 6 :TX_PROXY DEBUG: actor# [6:59:2106] Handle TEvExecuteKqpTransaction 2025-04-04T17:33:16.972161Z node 6 :TX_PROXY DEBUG: actor# [6:59:2106] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-04-04T17:33:16.972263Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-04-04T17:33:16.972285Z node 6 :TX_DATASHARD TRACE: Complete execution for [2043:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-04-04T17:33:16.972319Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-04-04T17:33:16.972349Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-04-04T17:33:16.972723Z node 6 :TX_DATASHARD ERROR: Complete [2043 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-04-04T17:33:16.972800Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037889 2025-04-04T17:33:16.972953Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jr0xtdmw385c6rvmbtwf41ef, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=MzVlMDY4NTYtOWI2NjQ0NzItMzAyMGQyYjktZmEyNzQyNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T17:33:16.973198Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1003:2754], Recipient [6:665:2569]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1003 RawX2: 25769806530 } TxBody: " \0018\001j5\010\001\032\'\n#\t\216\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n8\001\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-04-04T17:33:16.973234Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-04-04T17:33:16.973330Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [6:665:2569], Recipient [6:665:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:33:16.973354Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-04-04T17:33:16.973407Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-04-04T17:33:16.973610Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-04-04T17:33:16.973732Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-04-04T17:33:16.973790Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:33:16.973829Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-04-04T17:33:16.973865Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-04-04T17:33:16.973900Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-04-04T17:33:16.973933Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2043/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2001/0 ImmediateWriteEdgeReplied# v2001/0 2025-04-04T17:33:16.973991Z node 6 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-04-04T17:33:16.974026Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:33:16.974045Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-04-04T17:33:16.974067Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-04-04T17:33:16.974089Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-04-04T17:33:16.974151Z node 6 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193444 2025-04-04T17:33:16.974264Z node 6 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715662 DataShard: 
72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-04-04T17:33:16.974315Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-04-04T17:33:16.974373Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:33:16.974402Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-04-04T17:33:16.974429Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-04-04T17:33:16.974455Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-04T17:33:16.974496Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-04-04T17:33:16.974575Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-04-04T17:33:16.974609Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-04-04T17:33:16.974649Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-04-04T17:33:16.974690Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-04-04T17:33:16.974727Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-04-04T17:33:16.974743Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-04-04T17:33:16.974763Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-04-04T17:33:16.974814Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-04-04T17:33:16.974860Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-04-04T17:33:16.974913Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-04-04T17:33:16.976011Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [6:61:2108], Recipient [6:665:2569]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [FAIL] >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> 
TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection >> test_result_limits.py::TestResultLimits::test_large_row ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-04-04T17:29:54.662783Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7489511547033961468:2053], metadatafields: , partitions: 666 2025-04-04T17:29:54.664041Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-04T17:29:54.664160Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7489511547033961468:2053]) 2025-04-04T17:29:54.664347Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [1:7489511547033961469:2054] 2025-04-04T17:29:54.664397Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [1:7489511547033961469:2054], partIds: 666 cookie 1 2025-04-04T17:29:54.665473Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [1:7489511547033961469:2054], cookie 1 2025-04-04T17:29:54.665501Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-04-04T17:29:54.665513Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-04-04T17:29:54.665558Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Create session to [1:7489511547033961471:2056], generation 1 2025-04-04T17:29:54.665617Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Send TEvStartSession to [1:7489511547033961471:2056], connection id 1 partitions offsets (666 / ), 2025-04-04T17:29:54.665879Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Received TEvStartSessionAck from [1:7489511547033961471:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-04T17:29:54.665985Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [1:7489511547033961471:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-04T17:29:54.670475Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [1:7489511547033961471:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-04T17:29:54.670506Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-04-04T17:29:54.670511Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 2 2025-04-04T17:29:54.670542Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-04-04T17:29:54.670767Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. 
NextOffset 2 2025-04-04T17:29:54.670790Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-04T17:29:54.671230Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-04-04T17:29:54.671349Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 1 [1:7489511547033961471:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 2 is waiting batch 0 has pending data 0 2025-04-04T17:29:54.671376Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7489511547033961474:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [1:7489511547033961471:2056] generation 1 2025-04-04T17:29:55.004720Z node 2 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7489511551822744936:2053], metadatafields: , partitions: 666 2025-04-04T17:29:55.004950Z node 2 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-04T17:29:55.004991Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([2:7489511551822744936:2053]) 2025-04-04T17:29:55.005194Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [2:7489511551822744937:2054] 2025-04-04T17:29:55.005241Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [2:7489511551822744937:2054], partIds: 666 cookie 1 2025-04-04T17:29:55.005464Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [2:7489511551822744937:2054], cookie 1 2025-04-04T17:29:55.005518Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-04-04T17:29:55.005526Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-04-04T17:29:55.005552Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Create session to [2:7489511551822744939:2056], generation 1 2025-04-04T17:29:55.005583Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Send TEvStartSession to [2:7489511551822744939:2056], connection id 1 partitions offsets (666 / ), 2025-04-04T17:29:55.005911Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Received TEvStartSessionAck from [2:7489511551822744939:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-04-04T17:29:55.006010Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. 
Received TEvNewDataArrived from [2:7489511551822744939:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-04T17:29:55.007160Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7489511551822744939:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-04T17:29:55.007201Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-04-04T17:29:55.007226Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 2 2025-04-04T17:29:55.007252Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-04-04T17:29:55.007364Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. NextOffset 2 2025-04-04T17:29:55.007381Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-04-04T17:29:55.007645Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7489511551822744939:2056], reason Disconnected, cookie 999 2025-04-04T17:29:55.007760Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [2:7489511551822744939:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-04T17:29:55.008386Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Received TEvRetry, EventQueueId 1 2025-04-04T17:29:55.008457Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7489511551822744939:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-04-04T17:29:55.008473Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 3 2025-04-04T17:29:55.008491Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-04-04T17:29:55.008548Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. NextOffset 3 2025-04-04T17:29:55.008580Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-04-04T17:29:55.009115Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-04-04T17:29:55.009325Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. 
State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 1 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 2 [2:7489511551822744939:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 3 is waiting batch 0 has pending data 0 2025-04-04T17:29:55.009360Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7489511551822744942:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [2:7489511551822744939:2056] generation 1 2025-04-04T17:29:55.380277Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7489511554431988531:2053], metadatafields: , partitions: 666 2025-04-04T17:29:55.380451Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-04-04T17:29:55.380479Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7489511554431988537:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7489511554431988531:2053]) 2025-04-04T17:29:55.380630Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7489511554431988537:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [3:7489511554431988532:2054] 2025-04-04T17:29:55.380656Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7489511554431988537:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [3:7489511554431988532:2054], partIds: 666 cookie 1 2025-04-04T17:29:55.380847Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7489511554431988537:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [3:7489511554431988532:2054], cookie 1 2025-04-04T17:29:55.380895Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7489511554431988537:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-04-04T17:29:55.380904Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7489511554431988537:2048], TxId: query_1, task: 0. PQ source. Distribution is chan ... TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [6c102b81-cd6d38cc-52c636d5-f7c9921e] Write session: OnWriteDone gRpcStatusCode: 0 2025-04-04T17:33:17.008233Z :INFO: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Starting read session 2025-04-04T17:33:17.008311Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Starting single session 2025-04-04T17:33:17.009044Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:33:17.009121Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:33:17.009284Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Reconnecting session to cluster in 0.000000s 2025-04-04T17:33:17.013077Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Successfully connected. 
Initializing session 2025-04-04T17:33:17.015904Z :INFO: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Server session id: test_client_1_22_15809349045755671001_v1 2025-04-04T17:33:17.015979Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-04-04T17:33:17.016151Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-04T17:33:17.019305Z :INFO: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-04-04T17:33:17.020582Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [6c102b81-cd6d38cc-52c636d5-f7c9921e] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T17:33:17.020669Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [6c102b81-cd6d38cc-52c636d5-f7c9921e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743787997020 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:33:17.020809Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [6c102b81-cd6d38cc-52c636d5-f7c9921e] Write session established. Init response: last_seq_no: 5 session_id: "6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0" 2025-04-04T17:33:17.020878Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0 2025-04-04T17:33:17.020942Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] MessageGroupId [6c102b81-cd6d38cc-52c636d5-f7c9921e] Write session: set DirectWriteToPartitionId 0 2025-04-04T17:33:17.021078Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [0] Get partition location async, partition 0, delay 0.000000s 2025-04-04T17:33:17.021148Z :TRACE: [local] TRACE_EVENT DescribePartitionRequest path=local/Checkpoints partition_id=0 2025-04-04T17:33:17.021280Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [0] Getting partition location, partition 0 2025-04-04T17:33:17.026448Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Got ReadResponse, serverBytesSize = 940, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427860 2025-04-04T17:33:17.026623Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427860 2025-04-04T17:33:17.026946Z :DEBUG: [local] Decompression task done. 
Partition/PartitionSessionId: 1 (0-4) 2025-04-04T17:33:17.027051Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Returning serverBytesSize = 940 to budget 2025-04-04T17:33:17.027136Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] In ContinueReadingDataImpl, ReadSizeBudget = 940, ReadSizeServerDelta = 52427860 2025-04-04T17:33:17.027455Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-04-04T17:33:17.027528Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-04-04T17:33:17.027579Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-04-04T17:33:17.027601Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-04-04T17:33:17.027632Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (3-3) 2025-04-04T17:33:17.027701Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (4-4) 2025-04-04T17:33:17.027847Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [0] Got PartitionLocation response. Status SUCCESS, proto: partition { active: true partition_location { node_id: 1 generation: 1 } } 2025-04-04T17:33:17.027911Z :TRACE: [local] TRACE_EVENT DescribePartitionResponse partition_id=0 active=1 pl_node_id=1 pl_generation=1 2025-04-04T17:33:17.027975Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [0] GetPreferredEndpoint: partitionId 0, partitionNodeId 1 exists in the endpoint pool. 2025-04-04T17:33:17.028009Z :TRACE: [local] TRACE_EVENT PreferredPartitionLocation Endpoint= NodeId=1 Generation=1 2025-04-04T17:33:17.028018Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2025-04-04T17:33:17.028048Z :INFO: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Closing read session. Close timeout: 0.000000s 2025-04-04T17:33:17.028061Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [1] Start write session. Will connect to nodeId: 1 2025-04-04T17:33:17.028105Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Returning serverBytesSize = 0 to budget 2025-04-04T17:33:17.028170Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-04T17:33:17.028269Z :INFO: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 20 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:33:17.028452Z :NOTICE: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-04-04T17:33:17.028525Z :DEBUG: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] [] Abort session to cluster 2025-04-04T17:33:17.028717Z :INFO: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Closing read session. Close timeout: 0.000000s 2025-04-04T17:33:17.028786Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-04-04T17:33:17.028848Z :INFO: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 20 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:33:17.028988Z :NOTICE: [local] [local] [95cb9993-26f9ad65-2046ca4c-6d9f5ee2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-04-04T17:33:17.029199Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-04T17:33:17.031935Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-04-04T17:33:17.031989Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T17:33:17.032061Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T17:33:17.032175Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T17:33:17.032262Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [6c102b81-cd6d38cc-52c636d5-f7c9921e|c02cd0c-420e26e2-2cbe23f5-8ddf16dc_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T17:33:17.684157Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-04-04T17:33:17.699090Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [82457266-c1198a82-923ff0f8-39d48fbd] Write session: try to update token 2025-04-04T17:33:17.699952Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [82457266-c1198a82-923ff0f8-39d48fbd] Start write session. Will connect to nodeId: 0 2025-04-04T17:33:17.716231Z node 40 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Send checkpoint state immediately 2025-04-04T17:33:17.716444Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "82457266-c1198a82-923ff0f8-39d48fbd" } 2025-04-04T17:33:17.717066Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [82457266-c1198a82-923ff0f8-39d48fbd] Write session: close. Timeout 0.000000s 2025-04-04T17:33:17.717122Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [82457266-c1198a82-923ff0f8-39d48fbd] Write session will now close 2025-04-04T17:33:17.717189Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [82457266-c1198a82-923ff0f8-39d48fbd] Write session: aborting 2025-04-04T17:33:17.717330Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [82457266-c1198a82-923ff0f8-39d48fbd] Write session: gracefully shut down, all writes complete 2025-04-04T17:33:17.717421Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [] MessageGroupId [82457266-c1198a82-923ff0f8-39d48fbd] Write session: destroy |99.0%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes >> test_public_api.py::TestExplain::test_explain_data_query >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2025-04-04T17:30:08.874524Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-04T17:30:09.735110Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:10.249578Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:11.079738Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:11.795890Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:12.501435Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:13.180582Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:13.928056Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:14.796889Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-04T17:30:15.447989Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:16.081503Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:16.644179Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:17.297552Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:18.100750Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:18.693102Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:19.240115Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:19.715868Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:20.242879Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:20.797327Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:21.388860Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:21.975055Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:22.495809Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:23.064130Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:23.593377Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:55.136003Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-04T17:30:55.963464Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:56.820293Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:57.725788Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:58.408797Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:59.175796Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:30:59.912758Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:00.626288Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:01.330349Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-04T17:31:02.002875Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:02.893210Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:03.625615Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:04.415390Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:05.084485Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:05.884100Z node 86 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:06.682219Z node 87 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:07.639862Z node 88 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:08.531690Z node 89 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:09.249791Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:10.013974Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:10.722798Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:11.467915Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:12.245344Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:31:13.073461Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:23.437513Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-04-04T17:32:26.481323Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:27.476001Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:28.359284Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:29.361756Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:30.404001Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:33.675903Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:37.104654Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:38.360515Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:39.531284Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:40.701062Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:44.220094Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:45.260618Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:47.334905Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:48.384000Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:50.455218Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:51.438440Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:52.450388Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:54.865031Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-04-04T17:32:55.917127Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:56.898074Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:57.816899Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:58.809564Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:32:59.749636Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:33:00.758059Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:33:01.805682Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:33:02.937778Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:33:04.004423Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:33:05.143580Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:33:06.160745Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-04-04T17:33:07.220439Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |99.0%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithError >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] >> PublishKafkaEndpoints::DiscoveryResponsesWithError [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> 
test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort [GOOD] >> PublishKafkaEndpoints::MetadataActorDoubleTopic >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 >> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[15] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] >> 
test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: 2025-04-04T17:28:41.125833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:41.126131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:41.126271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0009fb/r3tmp/tmpTceFBb/pdisk_1.dat 2025-04-04T17:28:41.489589Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29315, node 1 2025-04-04T17:28:41.740206Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:41.740267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:41.740296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:41.740788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:41.743329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:41.832593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:41.832730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:41.856684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61840 2025-04-04T17:28:42.397798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:45.602276Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:45.634754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:45.634853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:45.662128Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:45.664067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:45.876283Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.876894Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.877386Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.877546Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.877639Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.877882Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.877974Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.878080Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:45.878159Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:46.055864Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:46.056034Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:46.069551Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:46.233551Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:46.284082Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:46.284198Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:46.318531Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:46.318751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:46.318968Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:46.319046Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:46.319109Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:46.319172Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:46.319225Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:46.319298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:46.320506Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:46.346808Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:46.346928Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:46.353514Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:28:46.359463Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:28:46.360326Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:28:46.365444Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:28:46.386855Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:46.386921Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:46.387008Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:28:46.403044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:46.410793Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:46.410973Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:46.637730Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:46.800130Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:46.903798Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:47.900344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:47.900478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:47.917039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:28:48.321230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.321413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.322985Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2547:3126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:28:48.323203Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:28:48.323301Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2549:3128] 2025-04-04T17:28:48.323376Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2549:3128] 2025-04-04T17:28:48.324012Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2550:2991] 2025-04-04T17:28:48.324340Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2549:3128], server id = [2:2550:2991], tablet id = 72075186224037894, status = OK 2025-04-04T17:28:48.324586Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2550:2991], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-04-04T17:28:48.324647Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-04-04T17:28:48.324886Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:28:48.324964Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2547:3126], StatRequests.size() = 1 2025-04-04T17:28:48.344156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2554:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.344273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.344694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2559:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:48.350115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-04-04T17:28:48.493660Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-04-04T17:28:48.493732Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-04-04T17:28:48.613179Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2549:3128], schemeshard count = 1 2025-04-04T17:28:49.077350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 4T17:33:27.482024Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:28.389032Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:33:28.389400Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:33:28.389732Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:33:29.316742Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:29.316813Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:31.031969Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:33:31.042750Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:31.042817Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:33.109877Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:33:33.110315Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:33:33.110594Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:33:33.121641Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:33.121721Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:34.977220Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:34.977290Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:35.891433Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:33:36.945638Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:36.945716Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:37.988880Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:33:37.989231Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:33:37.989520Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:33:39.091014Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:39.091083Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
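[Editor's note] By this point the dump has settled into a steady loop: `PropagateStatistics()` on tablet 72075186224037894 every few seconds, interleaved with `ScheduleNextTraversal` records that find no force traversals. A minimal standalone sketch for checking that cadence from a captured log; this is illustrative only, not part of the test suite, and the record format is inferred from the lines above:

```python
import re
from datetime import datetime

# Matches records like:
#   "2025-04-04T17:33:37.988880Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), ..."
PROPAGATE = re.compile(
    r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)Z node (\d+) "
    r":STATISTICS DEBUG: \[\d+\] PropagateStatistics\(\)"
)

def propagate_intervals(log_text: str):
    """Yield (node, seconds since that node's previous PropagateStatistics record)."""
    last = {}
    for ts_str, node in PROPAGATE.findall(log_text):
        ts = datetime.fromisoformat(ts_str)  # timestamps carry 6-digit fractional seconds
        if node in last:
            yield node, (ts - last[node]).total_seconds()
        last[node] = ts
```

Run over this excerpt it reports node 2 propagating roughly every 4-5 seconds, matching the cadence visible in the surrounding records.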
2025-04-04T17:33:40.751683Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:33:40.773188Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:40.773258Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:42.378632Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:33:42.378797Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:33:42.379223Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:33:42.390130Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:42.390204Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:44.195246Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:44.195321Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:45.104537Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:33:46.118889Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:33:46.118978Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:33:46.119016Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:33:46.119047Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:33:46.316907Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:46.316979Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:47.281838Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:33:47.282189Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:33:47.282469Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:33:48.269814Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:48.269877Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:50.031720Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:33:50.042492Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:50.042561Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:51.776122Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:33:51.776337Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:33:51.776597Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:33:51.787413Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:51.787486Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:53.491700Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:53.491761Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-04T17:33:54.359753Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:33:55.419300Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:55.419364Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:56.300146Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:33:56.300336Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:33:56.300672Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:33:57.224842Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:57.224909Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:33:58.900343Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:33:58.910949Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:33:58.911010Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:00.907299Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:34:00.907467Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:00.907700Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:34:00.918344Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:00.918396Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:02.730164Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:02.730217Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:03.555137Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:04.559573Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:04.559654Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:05.468007Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:34:05.468172Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:05.468465Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:34:06.442148Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:06.442205Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:08.134516Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:08.145249Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:08.145319Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:09.868798Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:34:09.869173Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-04-04T17:34:09.869412Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:09.880280Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:09.880351Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-04T17:34:11.649540Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:11.649594Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:11.660326Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:34:11.660404Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 190.000000s, at schemeshard: 72075186224037897 2025-04-04T17:34:11.660601Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-04-04T17:34:11.673865Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:34:12.515773Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:13.468100Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:34:13.468187Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:34:13.468229Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:34:13.468269Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:34:13.629102Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:13.629183Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:14.582762Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-04-04T17:34:14.583149Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 ... 
waiting for TEvPropagateStatistics (done) 2025-04-04T17:34:14.583509Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:13466:7491]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:34:14.583903Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:14.586540Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-04-04T17:34:14.586596Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:13466:7491], StatRequests.size() = 1 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [FAIL] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_result_limits.py::TestResultLimits::test_large_row [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> PublishKafkaEndpoints::MetadataActorDoubleTopic [GOOD] >> Serialization::RequestHeader [GOOD] >> Serialization::ResponseHeader [GOOD] >> Serialization::ApiVersionsRequest [GOOD] >> Serialization::ApiVersionsResponse [GOOD] >> Serialization::ApiVersion_WithoutSupportedFeatures [GOOD] >> Serialization::ProduceRequest [GOOD] >> Serialization::UnsignedVarint32 [GOOD] >> Serialization::UnsignedVarint64 [GOOD] >> Serialization::Varint32 [GOOD] >> Serialization::Varint64 [GOOD] >> Serialization::UnsignedVarint32_Wrong [GOOD] >> Serialization::UnsignedVarint64_Wrong [GOOD] >> Serialization::UnsignedVarint32_Deserialize [GOOD] >> Serialization::TKafkaInt8_NotPresentVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::Struct_IsDefault [GOOD] >> Serialization::TKafkaString_IsDefault [GOOD] >> Serialization::TKafkaString_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaArray_IsDefault [GOOD] >> Serialization::TKafkaArray_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaBytes_IsDefault [GOOD] >> Serialization::TKafkaBytes_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TRequestHeaderData_reference [GOOD] >> 
Serialization::TKafkaFloat64_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::RequestHeader_reference [GOOD] >> Serialization::ProduceRequestData [GOOD] >> Serialization::ProduceRequestData_Record_v0 [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kafka_proxy/ut/unittest >> Serialization::ProduceRequestData_Record_v0 [GOOD] Test command err: 2025-04-04T17:29:48.313679Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489511524142401787:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:48.313737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/tmpTevbRA/pdisk_1.dat 2025-04-04T17:29:48.726533Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:48.751502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:48.752198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:48.756097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9583, node 1 2025-04-04T17:29:48.927808Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/yandexhOrc0S.tmp 2025-04-04T17:29:48.927841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/yandexhOrc0S.tmp 2025-04-04T17:29:48.928077Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/yandexhOrc0S.tmp 2025-04-04T17:29:48.928267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:49.141513Z INFO: TTestServer started on Port 12926 GrpcPort 9583 TClient is connected to server localhost:12926 PQClient connected to localhost:9583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T17:29:49.396221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T17:29:49.434610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:29:51.110925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511537027304444:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:51.111097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:51.115470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489511537027304457:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:29:51.132127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T17:29:51.149570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489511537027304459:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T17:29:51.231550Z node 1 :TX_PROXY ERROR: Actor# [1:7489511537027304523:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T17:29:51.583844Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489511537027304538:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T17:29:51.590775Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2QxZjM2MS1kNjhjZmQ2YS1mNTA3ZDZmOS04NGZjYzc0NQ==, ActorId: [1:7489511537027304442:2340], ActorState: ExecuteState, TraceId: 01jr0xm4qa7z30znx2c1t8k8r3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:29:51.611182Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:29:51.664781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T17:29:51.696956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:29:51.810685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:29:52.281481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0xm5gmadc432bsmpeckqb0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTU4YzAzNDUtNTk5ZDY5YTMtNjkxYTIwMjMtYzE3OTJkOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7489511541322272126:2642] 2025-04-04T17:29:53.314108Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489511524142401787:2128];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:53.314233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Run with port = 9583, kafka port = 10250 2025-04-04T17:29:58.767416Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489511566638392460:2213];send_to=[0:7307199536658146131:7762515]; 2025-04-04T17:29:58.767867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/tmpvG8wKs/pdisk_1.dat 2025-04-04T17:29:58.872473Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:29:58.900009Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:29:58.900091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:29:58.901983Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22843, node 2 2025-04-04T17:29:58.951755Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/yandexekrvLT.tmp 2025-04-04T17:29:58.951785Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/yandexekrvLT.tmp 2025-04-04T17:29:58.951946Z node 2 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/i5tq/0007b8/r3tmp/yandexekrvLT.tmp 2025-04-04T17:29:58.952139Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:29:58.989011Z INFO: TTestServer started on Port 22383 GrpcPort 22843 TClient is connected to server localhost:22383 PQClient connected to localhost:22843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-04-04T17:29:59.106471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T17:29:59.127043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T17:30:01.723893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489511579523294996:2342], DatabaseId: /Root, PoolId: default, Failed ... 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T17:34:16.126582Z node 23 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=23&id=ZGVmNjY4YmEtNDM0ZjY3NmItOTE4ZDk0MjktNWFlMGFmNjc=, ActorId: [23:7489512669774307414:2348], ActorState: ExecuteState, TraceId: 01jr0xw7cy661d4y8ptprc6ty2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T17:34:16.127328Z node 23 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T17:34:16.297110Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T17:34:16.893517Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0xw7vh7wq8vf3x9ve8w1c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=YmI0NGY3MzYtMmU5YjljYmEtYjBmY2ZhYzgtYTU1OWVkZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [23:7489512674069275116:2652] === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 1 partitions CallPersQueueGRPC request to localhost:29291 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:29291 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710678 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic1, dc = dc1 2025-04-04T17:34:23.826198Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976710681. 
Ctx: { TraceId: 01jr0xwetkdexcgy79jkv3zf3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=MmFjMmI2NmEtNGY0MjU4NWYtNzBmMDg4NWYtNmNhNTcyNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710678 CreateStep: 1743788063598 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20... (TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710678 CreateStep: 1743788063598 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic1" name rt3.dc1--topic1 version1 CallPersQueueGRPC request to localhost:29291 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:29291 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are 
not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC 2025-04-04T17:34:24.599352Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T17:34:24.599411Z node 23 :IMPORT WARN: Table profiles were not loaded CallPersQueueGRPC request to localhost:29291 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic1" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-04-04T17:34:24.870060Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-04-04T17:34:24.870700Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-04-04T17:34:24.870829Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:29291 2025-04-04T17:34:24.879574Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "topic1" message_group_id: "src" } 2025-04-04T17:34:25.425620Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743788065425 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T17:34:25.425803Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|1b8f1aff-ead156ff-5f6b94f7-6079396d_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-04-04T17:34:25.426127Z :INFO: [] MessageGroupId [src] SessionId [src|1b8f1aff-ead156ff-5f6b94f7-6079396d_0] Write session: close. 
Timeout = 0 ms 2025-04-04T17:34:25.426225Z :INFO: [] MessageGroupId [src] SessionId [src|1b8f1aff-ead156ff-5f6b94f7-6079396d_0] Write session will now close 2025-04-04T17:34:25.426358Z :DEBUG: [] MessageGroupId [src] SessionId [src|1b8f1aff-ead156ff-5f6b94f7-6079396d_0] Write session: aborting 2025-04-04T17:34:25.427254Z :INFO: [] MessageGroupId [src] SessionId [src|1b8f1aff-ead156ff-5f6b94f7-6079396d_0] Write session: gracefully shut down, all writes complete 2025-04-04T17:34:25.427353Z :DEBUG: [] MessageGroupId [src] SessionId [src|1b8f1aff-ead156ff-5f6b94f7-6079396d_0] Write session: destroy Broker 23 - [::]:61032 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=4294967295 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Check value=18446744073709551615 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=-9223372036854775808 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Buffer size: 251 >>>>> Buffer size: 104 |99.1%| [TM] {RESULT} ydb/core/kafka_proxy/ut/unittest >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[21] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD] 
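[Note on what the Serialization::*Varint* cases and the ">>>>> Check value=" probes above exercise: Kafka's flexible-version wire format encodes unsigned integers as varints — 7 payload bits per byte, high bit set on every byte except the last — and encodes signed integers via zigzag mapping so that small negative values stay short. That is why the probe values cluster at the encoding boundaries 127/128, 32191, and 2^31/2^63. A self-contained C++ sketch of the encoding follows, assuming nothing from the test's own helpers:]

#include <cassert>
#include <cstdint>
#include <vector>

// Unsigned varint: 7 payload bits per byte, MSB marks "more bytes follow".
// 0..127 -> 1 byte, 128..16383 -> 2 bytes, 16384..2097151 -> 3 bytes, etc.
static std::vector<uint8_t> EncodeUnsignedVarint(uint64_t v) {
    std::vector<uint8_t> out;
    while (v >= 0x80) {
        out.push_back(static_cast<uint8_t>(v) | 0x80);
        v >>= 7;
    }
    out.push_back(static_cast<uint8_t>(v));
    return out;
}

// Zigzag maps signed to unsigned so small-magnitude negatives encode short:
// 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
static uint64_t ZigZag(int64_t v) {
    return (static_cast<uint64_t>(v) << 1) ^ static_cast<uint64_t>(v >> 63);
}

int main() {
    assert(EncodeUnsignedVarint(127).size() == 1);    // last 1-byte value
    assert(EncodeUnsignedVarint(128).size() == 2);    // first 2-byte value
    assert(EncodeUnsignedVarint(32191).size() == 3);  // one of the log's probes
    assert(ZigZag(-1) == 1 && ZigZag(1) == 2);        // negatives stay compact
    return 0;
}

[The specific byte sizes the unittest asserts are internal to it; the boundaries checked here follow directly from the 7-bits-per-byte rule.]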
>> test_clickbench.py::TestClickbench::test_clickbench[23] >> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] |99.1%| [TM] {RESULT} ydb/tests/fq/restarts/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found >> test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[27] >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [FAIL] >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] >> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> unstable_connection.py::TestUnstableConnection::test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: 2025-04-04T17:28:10.388106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:10.388349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:10.388431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000a69/r3tmp/tmpCgCiZN/pdisk_1.dat 2025-04-04T17:28:10.691954Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2721, node 1 2025-04-04T17:28:10.904146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:10.904194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:10.904217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:10.904564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:10.906972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:10.990111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:10.990283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:11.004364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14657 2025-04-04T17:28:11.518612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:14.306575Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:14.364892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:14.365012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:14.394102Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:14.396232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:14.627618Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.628295Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.628838Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.629006Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.629092Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.629309Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.629391Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.629506Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.629589Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:14.803434Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:14.803534Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:14.816508Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:14.957256Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:15.006272Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:15.006399Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:15.035417Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:15.035667Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:15.035896Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:15.035961Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:15.036031Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:15.036101Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:15.036164Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:15.036245Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:15.037336Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:15.064337Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:15.064475Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1871:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:15.070508Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1881:2605] 2025-04-04T17:28:15.075777Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1902:2617] 2025-04-04T17:28:15.075989Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1902:2617], schemeshard id = 72075186224037897 2025-04-04T17:28:15.085789Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:28:15.110541Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:15.110613Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:15.110710Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:28:15.122200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:15.130202Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:15.130380Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:15.296666Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:15.477984Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:15.555265Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:16.584489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.584602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:16.600882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:28:16.783865Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:28:16.784143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:28:16.784496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:28:16.784652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:28:16.784826Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:28:16.784972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:28:16.785101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:28:16.785263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:28:16.785426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:28:16.785563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:28:16.785745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:28:16.785898Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2339:2859];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:28:16.840195Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2342:2860];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:28:16.840304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2342:2860];tablet_id=72075186224037900;process=T ... NextTraversal. No force traversals. 2025-04-04T17:34:12.659672Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:13.935009Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:13.935085Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:14.858320Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:14.858480Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:16.075013Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:16.075085Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:18.196362Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:18.351972Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:18.352055Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:20.832306Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:20.832499Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:21.036229Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:21.036302Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:23.268736Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:23.268809Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:24.233853Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:25.682362Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:25.682420Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:26.728304Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:26.728507Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:28.080177Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:28.080253Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:30.111321Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:30.286827Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:30.286902Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:32.509247Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:32.509472Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:32.717503Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:32.717560Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-04T17:34:34.906095Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:34.906166Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:35.781603Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:37.029908Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:37.029963Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:37.976344Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:37.976513Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:39.197283Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:39.197359Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:41.226111Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:41.401764Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:41.401837Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:42.401363Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:34:42.401431Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:34:42.401465Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:34:42.401494Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:34:43.834761Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:43.834978Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:44.058556Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:44.058628Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:46.293024Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:46.293098Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:47.237357Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:48.541762Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:48.541839Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:49.485316Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:49.485557Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:50.740701Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:50.740766Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:52.764257Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:34:52.928254Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:52.928310Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-04-04T17:34:55.255350Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:34:55.255580Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:34:55.453560Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:55.453629Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:57.641919Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:34:57.641991Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:34:58.587786Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:35:00.034787Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:00.034857Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:01.083685Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:35:01.083892Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:35:02.429576Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:02.429658Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:04.460656Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:35:04.636142Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:04.636227Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:06.872244Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:35:06.872478Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:35:07.070577Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:07.070654Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:09.311770Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:09.311845Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:10.255743Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:35:11.573304Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:11.573380Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:12.568327Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:35:12.568568Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:35:12.579623Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-04-04T17:35:12.579720Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 235.000000s, at schemeshard: 72075186224037897 2025-04-04T17:35:12.579960Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 53 ... waiting for TEvSchemeShardStats 2 (done) ... 
waiting for TEvPropagateStatistics 2025-04-04T17:35:12.596120Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-04-04T17:35:13.906910Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:13.906992Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:15.973463Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-04-04T17:35:16.126575Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:16.126643Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-04-04T17:35:17.227723Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-04-04T17:35:17.227830Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:35:17.227869Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-04-04T17:35:17.227914Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:35:18.713249Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:35:18.713487Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... waiting for TEvPropagateStatistics (done) 2025-04-04T17:35:18.713842Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:16063:9744]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-04-04T17:35:18.716900Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-04-04T17:35:18.716957Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [2:16063:9744], StatRequests.size() = 1 >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> 
test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[6] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] >> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[7] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[38] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> 
HttpRequest::Probe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: 2025-04-04T17:28:45.395498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T17:28:45.395922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T17:28:45.396071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/i5tq/000924/r3tmp/tmpurF6wp/pdisk_1.dat 2025-04-04T17:28:45.797257Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5848, node 1 2025-04-04T17:28:46.039340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T17:28:46.039391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T17:28:46.039415Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T17:28:46.039845Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T17:28:46.042356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T17:28:46.134291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:46.134467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:46.148944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21313 2025-04-04T17:28:46.639985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-04-04T17:28:49.731049Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-04-04T17:28:49.766274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:49.766397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:49.794232Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T17:28:49.796026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:50.024018Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.024669Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.025172Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.025368Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.025496Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.025744Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.025829Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.025944Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.026025Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-04-04T17:28:50.203633Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T17:28:50.203714Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T17:28:50.217581Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T17:28:50.347280Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T17:28:50.387111Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-04-04T17:28:50.387238Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-04-04T17:28:50.420618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-04-04T17:28:50.421979Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-04-04T17:28:50.422236Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-04-04T17:28:50.422335Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-04-04T17:28:50.422401Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-04-04T17:28:50.422490Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-04-04T17:28:50.422542Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-04-04T17:28:50.422597Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-04-04T17:28:50.423028Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-04-04T17:28:50.450600Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:50.450720Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2595], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-04-04T17:28:50.456851Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2606] 2025-04-04T17:28:50.462102Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2616] 2025-04-04T17:28:50.462980Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2616], schemeshard id = 72075186224037897 2025-04-04T17:28:50.468706Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-04-04T17:28:50.489225Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-04-04T17:28:50.489310Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-04-04T17:28:50.489381Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-04-04T17:28:50.504640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-04-04T17:28:50.514719Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-04-04T17:28:50.514890Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-04-04T17:28:50.739278Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-04-04T17:28:50.891347Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-04-04T17:28:50.992053Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-04-04T17:28:52.060506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:52.060649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T17:28:52.081402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-04-04T17:28:52.436391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:28:52.436648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T17:28:52.436974Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T17:28:52.437161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T17:28:52.437311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T17:28:52.437438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T17:28:52.437562Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T17:28:52.437688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T17:28:52.437809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T17:28:52.437934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T17:28:52.438076Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T17:28:52.438192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2390:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T17:28:52.496243Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2399:2891];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T17:28:52.496342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2399:2891];tablet_id=72075186224037900;process=T ... t schemeshard: 72057594046644480 2025-04-04T17:35:51.268802Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-04-04T17:35:52.093691Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-04-04T17:35:52.093783Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=߈B?yW!A 2025-04-04T17:35:52.093831Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-04-04T17:35:52.782626Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-04-04T17:35:52.782818Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-04-04T17:35:53.361153Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-04-04T17:35:53.361282Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-04-04T17:35:53.361346Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:35:53.362048Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-04-04T17:35:53.375351Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-04-04T17:35:53.375713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-04-04T17:35:53.375774Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-04-04T17:35:53.376173Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-04-04T17:35:53.389825Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-04-04T17:35:53.390082Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-04-04T17:35:53.391206Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17233:10623], server id = [2:17238:10628], tablet id = 72075186224037899, status = OK 2025-04-04T17:35:53.391326Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17233:10623], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.391504Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17234:10624], server id = [2:17239:10629], tablet id = 72075186224037900, status = OK 2025-04-04T17:35:53.391563Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17234:10624], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.391916Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17235:10625], server id = [2:17240:10630], tablet id = 72075186224037901, status = OK 2025-04-04T17:35:53.392000Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17235:10625], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.393334Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17236:10626], server id = [2:17241:10631], tablet id = 72075186224037902, status = OK 2025-04-04T17:35:53.393402Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17236:10626], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.394644Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17237:10627], server id = [2:17242:10632], tablet id = 72075186224037903, status = OK 2025-04-04T17:35:53.394712Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17237:10627], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.395427Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-04-04T17:35:53.396316Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17233:10623], server id = [2:17238:10628], tablet id = 72075186224037899 2025-04-04T17:35:53.396351Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.396680Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-04-04T17:35:53.396838Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-04-04T17:35:53.397550Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17234:10624], server id = [2:17239:10629], tablet id = 72075186224037900 2025-04-04T17:35:53.397576Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.397610Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17235:10625], server id = [2:17240:10630], tablet id = 72075186224037901 2025-04-04T17:35:53.397625Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.397833Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-04-04T17:35:53.398077Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17247:10637], server id = [2:17250:10640], tablet id = 72075186224037904, status = OK 2025-04-04T17:35:53.398124Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17247:10637], path = { OwnerId: 72075186224037897 LocalId: 4 } 
2025-04-04T17:35:53.398324Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-04-04T17:35:53.398396Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17236:10626], server id = [2:17241:10631], tablet id = 72075186224037902 2025-04-04T17:35:53.398412Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.399030Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17249:10639], server id = [2:17252:10642], tablet id = 72075186224037905, status = OK 2025-04-04T17:35:53.399076Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17249:10639], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.399139Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17251:10641], server id = [2:17254:10644], tablet id = 72075186224037906, status = OK 2025-04-04T17:35:53.399188Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17251:10641], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.400080Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17237:10627], server id = [2:17242:10632], tablet id = 72075186224037903 2025-04-04T17:35:53.400107Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.400321Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17253:10643], server id = [2:17256:10646], tablet id = 72075186224037907, status = OK 2025-04-04T17:35:53.400359Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17253:10643], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.400545Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-04-04T17:35:53.401096Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17255:10645], server id = [2:17258:10648], tablet id = 72075186224037908, status = OK 2025-04-04T17:35:53.401142Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:17255:10645], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-04-04T17:35:53.401248Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17247:10637], server id = [2:17250:10640], tablet id = 72075186224037904 2025-04-04T17:35:53.401267Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.401899Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-04-04T17:35:53.402134Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-04-04T17:35:53.402509Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17251:10641], server id = [2:17254:10644], tablet id = 72075186224037906 2025-04-04T17:35:53.402533Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.402788Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17249:10639], server id = [2:17252:10642], tablet id = 72075186224037905 2025-04-04T17:35:53.402808Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.402881Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-04-04T17:35:53.403014Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-04-04T17:35:53.403048Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-04-04T17:35:53.403192Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
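Aside: the "TEvStatisticsRequest send ... tablet id = ..." lines followed by "Received TEvStatisticsResponse TabletId: ..." are a scatter-gather round: the aggregator asks each column-shard tablet for its local statistics, then merges the responses before sending one aggregate answer. A hypothetical Python sketch of that step (tablet IDs, the fetch function, and the merge-by-sum rule are all illustrative stand-ins; the real flow is actor message passing):

    from concurrent.futures import ThreadPoolExecutor

    def aggregate_statistics(tablet_ids, fetch_stats):
        # Scatter: query every tablet in parallel.
        with ThreadPoolExecutor(max_workers=len(tablet_ids)) as pool:
            responses = list(pool.map(fetch_stats, tablet_ids))
        # Gather: merge per-tablet results into one aggregate.
        merged = {}
        for resp in responses:
            for column, value in resp.items():
                merged[column] = merged.get(column, 0) + value
        return merged

    # Usage sketch with fake data for tablets like those seen in the log.
    tablets = [72075186224037899 + i for i in range(10)]
    stats = aggregate_statistics(tablets, lambda t: {"Value": 1})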
2025-04-04T17:35:53.403354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-04-04T17:35:53.403731Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-04-04T17:35:53.406392Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17253:10643], server id = [2:17256:10646], tablet id = 72075186224037907 2025-04-04T17:35:53.406429Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.407012Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:17255:10645], server id = [2:17258:10648], tablet id = 72075186224037908 2025-04-04T17:35:53.407045Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-04-04T17:35:53.407354Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-04-04T17:35:53.432680Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2I4MzljMmItMWFmNmViNjYtMWM5MTIxMjAtNDRiYTU4MmQ=, TxId: 2025-04-04T17:35:53.432761Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2I4MzljMmItMWFmNmViNjYtMWM5MTIxMjAtNDRiYTU4MmQ=, TxId: 2025-04-04T17:35:53.433387Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-04-04T17:35:53.447076Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-04-04T17:35:53.447160Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=߈B?yW!A, ActorId=[1:4118:3321] 2025-04-04T17:35:53.449931Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:17294:10020]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-04-04T17:35:53.450191Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:35:53.450259Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-04-04T17:35:53.452316Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-04-04T17:35:53.452401Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-04-04T17:35:53.452458Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-04-04T17:35:53.463113Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[39] |99.2%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ...
results_accumulator.log} |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[41] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] >> test_public_api.py::TestBadSession::test_simple >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] >> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[42] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] >> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] 
[GOOD] >> test_public_api.py::TestBadSession::test_simple [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[9] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[10] |99.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[11] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] [GOOD] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> 
test_alloc_default.py::TestAlloc::test_up_down[kikimr0] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[12] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestJsonExample::test_json_unexpected_failure |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[13] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> test_public_api.py::TestJsonExample::test_json_success [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet |99.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[14] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> 
test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_public_api.py::TestRecursiveCreation::test_mkdir |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[16] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.6%| [TM] {RESULT} ydb/tests/olap/ttl_tiering/py3test |99.6%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[17] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_public_api.py::TestAttributes::test_create_table >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> 
test_public_api.py::TestAttributes::test_limits[attributes1] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[18] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TSentinelUnstableTests::BSControllerCantChangeStatus [FAIL] >> test_workload.py::TestYdbKvWorkload::test[row] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] 2025-04-04 17:38:54,441 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-04 17:38:54,767 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
pid      rss   ref    pdirt
827177   805M  778M   492M   ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/i5tq/000fc1/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules
1012538  720M  25.8M  468M   ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/i5tq/000fc1/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-m
1012543  1.5G  1.5G   1.1G   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/i5tq/000fc1/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_allo
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/fq/mem_alloc/test_alloc_default.py", line 396, in test_hard_limit
    status = client.wait_query_status(query_id, [fq.QueryMeta.COMPLETED, fq.QueryMeta.FAILED])
  File "ydb/tests/tools/fq_runner/fq_client.py", line 307, in wait_query_status
    return self.wait_query(query_id, timeout, statuses=statuses).query.meta.status
  File "ydb/tests/tools/fq_runner/fq_client.py", line 302, in wait_query
    time.sleep(plain_or_under_sanitizer(0.5, 2))
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...d '['/home/runner/.ya/build/build_root/i5tq/000fc1/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/i5tq/000fc1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/i5tq/000fc1/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/i5tq/000fc1', '--source-root', '/home/runner/.ya/build/build_root/i5tq/000fc1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/i5tq/000fc1/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...d '['/home/runner/.ya/build/build_root/i5tq/000fc1/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/i5tq/000fc1/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/i5tq/000fc1/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/i5tq/000fc1', '--source-root', '/home/runner/.ya/build/build_root/i5tq/000fc1/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/i5tq/000fc1/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.7%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test >> test_public_api.py::TestDocApiTables::test_create_table >> test_workload.py::TestYdbLogWorkload::test[row] >> test_workload.py::TestYdbWorkload::test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[19]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [FAIL]
Test command err:
2025-04-04T17:29:42.473902Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig
2025-04-04T17:29:42.473968Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater
2025-04-04T17:29:42.474048Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState
2025-04-04T17:29:42.474076Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed
2025-04-04T17:29:42.474128Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0
2025-04-04T17:29:42.474220Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0
2025-04-04T17:29:42.476346Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 }
Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-04T17:29:42.492845Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: 
"/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 
GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1000 } 
GroupId: 8 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: ... skStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825380026 2025-04-04T17:38:59.869854Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825380026 2025-04-04T17:38:59.869992Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825380026 2025-04-04T17:38:59.870104Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825380026 2025-04-04T17:38:59.870226Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825380026 
2025-04-04T17:38:59.870372Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825380026 2025-04-04T17:38:59.870422Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-04T17:38:59.881108Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-04T17:38:59.881178Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T17:38:59.881327Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-04-04T17:38:59.881372Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-04-04T17:38:59.881416Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-04-04T17:38:59.881446Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-04-04T17:38:59.881482Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-04-04T17:38:59.881513Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-04-04T17:38:59.881543Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-04-04T17:38:59.881571Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-04-04T17:38:59.881918Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825420026 2025-04-04T17:38:59.882619Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 
TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825420026 2025-04-04T17:38:59.882885Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825420026 2025-04-04T17:38:59.883059Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825420026 2025-04-04T17:38:59.883199Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825420026 2025-04-04T17:38:59.883328Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825420026 (TWithBackTrace) ydb/library/actors/testlib/test_runtime.cpp:1262: Dispatched 2500000 events, limit reached. 
TWithBackTrace::TWithBackTrace<>()+80 (0xEE27D40)
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+26247 (0x2914E937)
NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x29148191)
NKikimr::NCmsTest::TTestEnv::SetPDiskState(TSet, std::__y1::allocator> const&, NKikimrBlobStorage::TPDiskState_E, NKikimrBlobStorage::EDriveStatus)+901 (0xEE53DB5)
NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TTestCaseBSControllerCantChangeStatus::Execute_(NUnitTest::TTestContext&)+2328 (0xEE52868)
std::__y1::__function::__func, void ()>::operator()()+280 (0xEE56278)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF6EAB46)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF6C42E9)
NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TCurrentTest::Execute()+1094 (0xEE55266)
NUnitTest::TTestFactory::Execute()+2438 (0xF6C5BB6)
NUnitTest::RunMain(int, char**)+5213 (0xF6E547D)
??+0 (0x7F93B1BE3D90)
__libc_start_main+128 (0x7F93B1BE3E40)
_start+41 (0xCD19029)
>> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.8%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> test_encryption.py::TestEncryption::test_simple_encryption >> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[20] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ...
results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
2025-04-04 17:39:19,818 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-04 17:39:20,083 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid     rss   ref   pdirt
836395  892M  867M  768M  ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/i5tq/0006c6/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules
867988  1.8G  1.7G  1.3G  └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_inse
Test command err:
/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_scheme_load.py)
/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_scheme_load.py)
  class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_insert.py)
/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_insert.py)
  class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_alter_compression.py)
/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_alter_compression.py)
  class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_alter_tiering.py)
/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_alter_tiering.py)
  class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/test_alter_tiering.py:37: PytestCollectionWarning: cannot collect test class 'TestLoop' because it has a __init__ constructor (from: test_alter_tiering.py)
/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_simple.py)
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test
    ctx.executable(self, ctx)
  File "ydb/tests/olap/scenario/test_insert.py", line 90, in scenario_read_data_during_bulk_upsert
    thread.join()
  File "/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py", line 13, in join
    super().join(timeout)
  File "contrib/tools/python3/Lib/threading.py", line 1149, in join
    self._wait_for_tstate_lock()
  File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock
    if lock.acquire(block, timeout):
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/i5tq/0006c6/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/i5tq/0006c6', '--source-root', '/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/i5tq/0006c6/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/i5tq/0006c6', '--source-root', '/home/runner/.ya/build/build_root/i5tq/0006c6/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/i5tq/0006c6/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.8%| [TM] {RESULT} ydb/tests/olap/scenario/py3test >> test_workload.py::TestYdbWorkload::test[row] >> test_workload.py::TestYdbWorkload::test >> Backup::UuidValue |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] >> Backup::UuidValue [GOOD] >> S3PathStyleBackup::DisableVirtualAddressing >> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[21] >> S3PathStyleBackup::DisableVirtualAddressing [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD]
Test command err:
Found S3 object: "ProducerUuidValueBackup/data_00.csv"
Found S3 object: "ProducerUuidValueBackup/metadata.json"
Found S3 object: "ProducerUuidValueBackup/scheme.pb"
|99.8%| [TM] {RESULT} ydb/tests/functional/backup/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |99.8%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest >> ConsistentIndexRead::InteractiveTx >> KqpQueryService::ReplyPartLimitProxyNode >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> Transfer::Main_ColumnTable >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> Transfer::Main_ColumnTable [GOOD] >> Transfer::Main_ColumnTable_KeyColumnOrder |99.8%| [TM] {asan,
default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> Transfer::Main_ColumnTable_KeyColumnOrder [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] >> test_workload.py::TestYdbWorkload::test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/transfer/unittest >> Transfer::Main_ColumnTable_KeyColumnOrder [GOOD] Test command err: >>>>> connectionString = localhost:2764/?database=local Attempt=19 count=0 Attempt=18 count=0 Attempt=17 count=0 Attempt=16 count=0 Attempt=15 count=0 Attempt=14 count=0 Attempt=13 count=1 >>>>> connectionString = localhost:2764/?database=local Attempt=19 count=0 Attempt=18 count=0 Attempt=17 count=0 Attempt=16 count=0 Attempt=15 count=0 Attempt=14 count=0 Attempt=13 count=1 |99.9%| [TM] {RESULT} ydb/tests/functional/transfer/unittest >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> Replication::Types >> KqpQuerySession::NoLocalAttach |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> Replication::Types [GOOD] >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::Types [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest |99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpQuerySession::NoLocalAttach [GOOD] >> test_workload.py::TestYdbLogWorkload::test[row] [GOOD] >> test_workload.py::TestYdbLogWorkload::test[column] >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL] |99.9%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[21] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[21] [FAIL] 2025-04-04 17:42:07,963 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-04-04 17:42:09,364 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 919951 674M 660M 569M ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/i5tq/001e4d/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 922360 5.9G 5.9G 5.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/i5tq/001e4d/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/ 923741 4.5G 4.5G 4.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/i5tq/001e4d/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/ 1038520 150M 148M 89.6M └─ ydb -e grpc://localhost:30742 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/i5tq/001e4d/ydb/tests/functional/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/load/lib/tpch.py", line 45, in test_tpch self.run_workload_test(self._get_path(), query_num) File "ydb/tests/olap/load/lib/conftest.py", line 285, in run_workload_test result = YdbCliHelper.workload_run( File "ydb/tests/olap/lib/ydb_cli.py", line 310, in workload_run ).process() File "ydb/tests/olap/lib/ydb_cli.py", line 286, in process self._exec_cli() File "ydb/tests/olap/lib/ydb_cli.py", line 273, in _exec_cli process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False) File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute res.wait(check_exit_code, timeout, on_timeout) File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait _wait() File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) 
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: .../build/build_root/i5tq/001e4d/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/i5tq/001e4d/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/i5tq/001e4d', '--source-root', '/home/runner/.ya/build/build_root/i5tq/001e4d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/i5tq/001e4d/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: ((".../build/build_root/i5tq/001e4d/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/i5tq/001e4d/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/i5tq/001e4d', '--source-root', '/home/runner/.ya/build/build_root/i5tq/001e4d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/i5tq/001e4d/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {})
|99.9%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test
>> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test
>> test_workload.py::TestYdbWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbWorkload::test[column]
>> test_workload.py::TestYdbWorkload::test [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test
>> test_workload.py::TestYdbWorkload::test [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test
>> ConsistentIndexRead::InteractiveTx [GOOD]
>> KqpExtTest::SecondaryIndexSelectUsingScripting
>> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest
>> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest
>> test_workload.py::TestYdbWorkload::test[column] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test
>> test_workload.py::TestYdbWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test
|99.9%| CLEANING BUILD ROOT

Number of suites skipped by size: 92

ydb/tests/fq/mem_alloc [size:medium]
------ sole chunk ran 12 tests (total:617.66s - recipes:7.33s test:600.10s recipes:3.69s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_result_limits.py::TestResultLimits::test_many_rows (good) duration: 236.68s
test_result_limits.py::TestResultLimits::test_large_row (good) duration: 73.87s
test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] (good) duration: 61.22s
test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] (timeout) duration: 30.96s
test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] (good) duration: 30.64s
test_result_limits.py::TestResultLimits::test_quotas[kikimr0] (good) duration: 30.11s
test_alloc_default.py::TestAlloc::test_up_down[kikimr0] (good) duration: 29.94s
test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] (good) duration: 28.43s
test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] (good) duration: 28.12s
test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] (good) duration: 25.99s
2 more tests with 25.42s total duration are not listed.
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.5G (8895228K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss    ref    pdirt
823576  46.7M  45.3M  6.8M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
823785  34.7M  23.4M  10.5M  ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
827177  704M   630M   373M   │   └─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest
944907  661M   24.1M  391M   │       ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
944908  5.5G   5.5G   5.1G   │       └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test
824759  2.0G   1.9G   1.5G   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/ydb_data_i
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/stderr
[timeout] test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [default-linux-x86_64-release-asan] (30.96s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_alloc_default.py.TestAlloc.test_hard_limit.kikimr0.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
------ TIMEOUT: 10 - GOOD, 1 - TIMEOUT, 1 - SKIPPED ydb/tests/fq/mem_alloc

------ [test_discovery.py] chunk ran 3 tests (total:157.12s - setup:0.01s test:157.04s)
Info: Test run has exceeded 10.0G (10485760K) memory limit with 13.7G (14369120K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss    ref    pdirt
923751  46.7M  46.7M  12.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
923762  34.7M  32.5M  15.3M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
923765  849M   852M   749M      └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
956354  1.3G   1.3G   893M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
959261  1.3G   0b     0b           │   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
956355  1.4G   1.4G   981M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
956357  1.4G   1.4G   995M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
956359  1.4G   1.4G   977M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
956360  1.4G   1.4G   988M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
956363  1.4G   1.4G   976M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
956367  1.4G   1.4G   982M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
956368  1.4G   1.4G   977M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
956370  1.4G   1.4G   988M         └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr

ydb/tests/functional/hive [size:medium] nchunks:80
------ [test_drain.py 0/20] chunk ran 1 test (total:68.47s - setup:0.04s test:68.20s)
[fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-release-asan] (60.76s)
ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop
    wait_tablets_are_active(
ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active
    predicate(raise_error=True)
ydb/tests/library/common/delayed.py:141: in predicate
    raise AssertionError(
E   AssertionError:
E   ##############################
E   0 seconds passed, 54 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224037895: None) (72075186224038607: 6) (72075186224038615: 6) (72075186224038627: 6) (72075186224038671: 6) (72075186224038674: 3) (72075186224038682: 4) (72075186224038691: 4) (72075186224038703: 6) (72075186224038722: None). Additional info is empty
E   ##############################
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff
------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive

ydb/tests/functional/postgresql [size:medium]
------ sole chunk ran 14 tests (total:98.27s - test:98.08s)
[fail] test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [default-linux-x86_64-release-asan] (38.48s)
teardown failed:
ydb/tests/functional/postgresql/test_postgres.py:77: in teardown_class
    cls.cluster.stop()
ydb/tests/library/harness/kikimr_runner.py:490: in stop
    raise daemon.SeveralDaemonErrors(saved_exceptions)
E   ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code..
E   Process exit_code = 100.
E   Stdout file name:
E   /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stdout
E   Stderr file name:
E   /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stderr
E   Stderr content:
E
E   GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option
E   Current KQP shutdown state: spent 0 seconds, not started yet
E   warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10
E   warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40
E   warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70
E   warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0
E   warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0
E   warning: address range table at offset 0xac0 has a premature terminator entry at offset 0xad0
E   warning: address range table at offset 0xb20 has a premature terminator entry at offset 0xb30
E   warning: address range table at offset 0xb50 has a premature terminator entry at offset 0xb60
E   warning: address range table at offset 0xb80 has a premature terminator entry at offset 0xb90
E   warning: address range table at offset 0xbb0 has a premature terminator entry at offset 0xbc0
E   warning: address range table at offset 0xbe0 has a premature terminator entry at offset 0xbf0
E   warning: address range table at offset 0xc10 has a premature terminator entry at offset 0xc20
E   warning: address range table at offset 0xc40 has a premature terminator entry at offset 0xc50
E   warning: address range table at offset 0xc70 has a premature terminator ent ..[snippet truncated]..
-S/yt/yql/providers/yt/provider/yql_yt_provider.cpp:345:20
E   #5 0x4848e47a in InitYtProvider /-S/ydb/core/kqp/host/kqp_host.cpp:1861:38
E   #6 0x4848e47a in NKikimr::NKqp::(anonymous namespace)::TKqpHost::Init(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:1935:17
E   #7 0x4848af20 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::SetupSession(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:2016:9
E   #8 0x48488078 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::SetupYqlTransformer(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:2031:9
E   #9 0x484e8880 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::PrepareQueryInternal(NKikimr::NKqp::TKqpQueryRef const&, TIntrusivePtr>, NYql::EKikimrQueryType, NKikimr::NKqp::IKqpHost::TPrepareSettings const&, NYql::TExprContext&) /-S/ydb/core/kqp/host/kqp_host.cpp:1592:9
E   #10 0x48476a86 in operator() /-S/ydb/core/kqp/host/kqp_host.cpp:1235:24
E   #11 0x48476a86 in CheckedProcess /-S/ydb/core/kqp/host/kqp_host.cpp:830:28
E   #12 0x48476a86 in CheckedProcessQuery<(lambda at /-S/ydb/core/kqp/host/kqp_host.cpp:1234:13)> /-S/ydb/core/kqp/host/kqp_host.cpp:853:12
E   #13 0x48476a86 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::PrepareGenericQuery(NKikimr::NKqp::TKqpQueryRef const&, NKikimr::NKqp::IKqpHost::TPrepareSettings const&, TIntrusivePtr>) /-S/ydb/core/kqp/host/kqp_host.cpp:1233:16
E   #14 0x480b9852 in NKikimr::NKqp::TKqpCompileActor::StartCompilation(NActors::TActorContext const&) /-S/ydb/core/kqp/compile_service/kqp_compile_actor.cpp:273:47
E   #15 0x480b7667 in Bootstrap /-S/ydb/core/kqp/compile_service/kqp_compile_actor.cpp:110:17
E   #16 0x480b7667 in NActors::TActorBootstrapped::StateBootstrap(TAutoPtr&) /-S/ydb/library/actors/core/actor_bootstrapped.h:22:22
E   #17 0x1f15c54c in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:553:23
E   #18 0x1f156b84 in NActors::TExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:269:28
E   #19 0x1f15f47e in NActors::TExecutorThread::ProcessExecutorPool()::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/lib...
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.float8.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff
------ FAIL: 13 - GOOD, 1 - FAIL ydb/tests/functional/postgresql

ydb/tests/functional/serverless [size:medium] nchunks:20
------ [test_serverless.py 4/10] chunk ran 2 tests (total:252.44s - test:251.19s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (55.27s)
ydb/tests/functional/serverless/test_serverless.py:442: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:339: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:359: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.Unavailable: message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)

During handling of the above exception, another exception occurred:

ydb/tests/functional/serverless/test_serverless.py:441: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*OUT_OF_SPACE.*'):
E   AssertionError: Regex pattern did not match.
E   Regex: '.*OUT_OF_SPACE.*'
E   Input: 'message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (188.67s)
ydb/tests/functional/serverless/test_serverless.py:442: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:339: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:359: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.Unavailable: message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)

During handling of the above exception, another exception occurred:

ydb/tests/functional/serverless/test_serverless.py:441: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*OUT_OF_SPACE.*'):
E   AssertionError: Regex pattern did not match.
E   Regex: '.*OUT_OF_SPACE.*'
E   Input: 'message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ FAIL: 18 - GOOD, 2 - FAIL ydb/tests/functional/serverless

ydb/tests/functional/sqs/cloud [size:medium] nchunks:40
------ [20/40] chunk ran 2 tests (total:90.14s - test:90.08s)
[fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [default-linux-x86_64-release-asan] (24.48s)
ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:466: in test_dlq_mechanics_in_cloud
    assert_that(len(self._read_single_message_no_wait(q1)), equal_to(0))
E   AssertionError:
E   Expected: <0>
E        but: was <1>
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_dlq_mechanics_in_cloud.tables_format_v1-tables_format_v1-std.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff
------ [34/40] chunk ran 2 tests (total:111.82s - setup:0.01s test:111.76s)
[fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (80.44s)
ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor
    assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines)
E   AssertionError: Got only 0 event lines after all attempts
E   assert 0 >= 2
E    +  where 0 = len([])
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff
------ FAIL: 74 - GOOD, 2 - FAIL ydb/tests/functional/sqs/cloud

------ sole chunk ran 1 test (total:114.77s - setup:0.01s test:114.51s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.5G (15177232K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss    ref    pdirt
850193  46.7M  44.8M  12.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
850335  35.1M  32.5M  15.6M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
850354  1.2G   1.2G   1.1G      └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -
852471  1.9G   2.0G   1.6G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
852477  1.8G   1.7G   1.3G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
852478  1.6G   1.6G   1.1G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
852479  1.5G   1.5G   1.1G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
852489  1.5G   1.6G   1.1G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
852500  1.6G   1.6G   1.2G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
852516  1.5G   1.6G   1.1G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
852525  1.5G   1.6G   1.1G         └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr

ydb/tests/functional/tpc/medium [size:medium] nchunks:2
------ [test_tpch.py] chunk ran 22 tests (total:621.52s - setup:0.03s test:600.14s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch.py::TestTpchS1::test_tpch[21] (fail) duration: 143.03s
test_tpch.py::TestTpchS1::test_tpch[1] (good) duration: 138.96s
test_tpch.py::TestTpchS1::test_tpch[20] (good) duration: 31.95s
test_tpch.py::TestTpchS1::test_tpch[8] (good) duration: 27.02s
test_tpch.py::TestTpchS1::test_tpch[18] (good) duration: 24.88s
test_tpch.py::TestTpchS1::test_tpch[2] (good) duration: 24.72s
test_tpch.py::TestTpchS1::test_tpch[7] (good) duration: 24.16s
test_tpch.py::TestTpchS1::test_tpch[5] (good) duration: 20.73s
test_tpch.py::TestTpchS1::test_tpch[11] (good) duration: 20.02s
test_tpch.py::TestTpchS1::test_tpch[9] (good) duration: 19.93s
11 more tests with 134.69s total duration are not listed.
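The durations above already overrun the chunk budget: the ten listed tests plus the "11 more" account for roughly 610 s against the 600 s limit, which is why the chunk was killed and test_tpch[22] (noted just below) never started. A back-of-the-envelope check in Python, with the numbers copied from this log; budget_report is a made-up helper for illustration, not part of ya:

    # Durations (seconds) of the listed tests, copied from the chunk summary above.
    listed = {
        "test_tpch[21]": 143.03, "test_tpch[1]": 138.96, "test_tpch[20]": 31.95,
        "test_tpch[8]": 27.02, "test_tpch[18]": 24.88, "test_tpch[2]": 24.72,
        "test_tpch[7]": 24.16, "test_tpch[5]": 20.73, "test_tpch[11]": 20.02,
        "test_tpch[9]": 19.93,
    }
    UNLISTED_TOTAL = 134.69  # "11 more tests" per the log
    BUDGET = 600.0           # chunk timeout in seconds

    def budget_report():
        used = sum(listed.values()) + UNLISTED_TOTAL
        print(f"used {used:.2f}s of {BUDGET:.0f}s chunk budget")  # ~610s: over budget
        top2 = sorted(listed.values(), reverse=True)[:2]
        print(f"two slowest queries take {sum(top2) / BUDGET:.0%} of the budget")

    budget_report()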
test_tpch.py::TestTpchS1::test_tpch[22] test was not launched inside chunk.
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr
[fail] test_tpch.py::TestTpchS1::test_tpch[21] [default-linux-x86_64-release-asan] (143.03s)
ydb/tests/olap/load/lib/tpch.py:45: in test_tpch
    self.run_workload_test(self._get_path(), query_num)
ydb/tests/olap/load/lib/conftest.py:297: in run_workload_test
    self.process_query_result(result, query_num, qparams.iterations, True)
ydb/tests/olap/load/lib/conftest.py:245: in process_query_result
    raise exc
ydb/tests/olap/lib/ydb_cli.py:286: in process
    self._exec_cli()
ydb/tests/olap/lib/ydb_cli.py:273: in _exec_cli
    process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:400: in wait
    _wait()
library/python/testing/yatest_common/yatest/common/process.py:335: in _wait
    pid, sts, rusage = os.wait4(self._process.pid, 0)
library/python/pytest/plugins/ya.py:347: in _graceful_shutdown
    _graceful_shutdown_on_log(not capman.is_globally_capturing())
library/python/pytest/plugins/ya.py:321: in _graceful_shutdown_on_log
    pytest.exit("Graceful shutdown requested")
E   Failed: Graceful shutdown requested
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.21.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
------ TIMEOUT: 63 - GOOD, 1 - FAIL, 1 - NOT_LAUNCHED ydb/tests/functional/tpc/medium

ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 8 tests (total:622.97s - test:600.02s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 559.38s
test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 57.01s
6 tests were not launched inside chunk.
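Both chunks above died the same way: a single long test (test_tpch[21] here, read_data_during_bulk_upsert in the olap scenario chunk whose details follow) ran with no per-test deadline, so the 600 s chunk timeout did the killing and the remaining tests were never launched. A sketch of bounding one workload invocation instead, using the yatest API named in the tracebacks; the timeout= and on_timeout= keywords are inferred from the wait(check_exit_code, timeout, on_timeout) call at process.py:656 and should be treated as an assumption, as should QUERY_CMD:

    import yatest.common

    QUERY_CMD = ["ydb", "workload", "tpch", "run"]  # hypothetical command line

    def on_timeout(execution, timeout):
        # Invoked before the child is killed; a natural place to collect logs.
        print(f"workload exceeded {timeout}s; killing child process")

    try:
        res = yatest.common.execute(
            QUERY_CMD,
            check_exit_code=False,  # mirrors ydb_cli.py:273 above
            timeout=120,            # fail this one query, not the whole chunk
            on_timeout=on_timeout,
        )
        print("exit code:", res.exit_code)
    except yatest.common.process.ExecutionTimeoutError:
        # The same exception type this log shows when a deadline fires.
        print("query run timed out; chunk budget preserved for later tests")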
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
[timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (559.38s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - GOOD, 6 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario

ydb/tests/olap/ttl_tiering [size:medium]
------ sole chunk ran 6 tests (total:541.27s - test:516.64s)
[fail] data_correctness.py::TestDataCorrectness::test [default-linux-x86_64-release-asan] (150.22s)
ydb/tests/olap/ttl_tiering/data_correctness.py:191: in test
    self.ydb_client.query(f"delete from `{table_path}`")
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.PreconditionFailed: message: "Write transactions between column and row tables are disabled at current time."
severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_correctness.py.TestDataCorrectness.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[fail] ttl_unavailable_s3.py::TestUnavailableS3::test [default-linux-x86_64-release-asan] (69.38s)
ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py:76: in test
    upsert_chunk(i)
ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py:57: in upsert_chunk
    return self.ydb_client.query(f"""
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 13 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 13 column: 17 } severity: 2 issues { position { row: 13 column: 40 } message: "At function: ToList" end_position { row: 13 column: 40 } severity: 2 issues { position { row: 7 column: 65 } message: "At function: OrderedMap" end_position { row: 7 column: 65 } severity: 2 issues { position { row: 7 column: 69 } message: "At function: AsStructUnordered" end_position { row: 7 column: 69 } severity: 2 issues { position { row: 8 column: 25 } message: "At function: Unwrap" end_position { row: 8 column: 25 } severity: 2 issues { position { row: 8 column: 32 } message: "At function: SafeCast" end_position { row: 8 column: 32 } severity: 2 issues { position { row: 8 column: 62 } message: "At function: +" end_position { row: 8 column: 62 } severity: 2 issues { position { row: 8 column: 45 } message: "At function: +" end_position { row: 8 column: 45 } severity: 2 issues { position { row: 8 column: 56 } message: "At function: *" end_position { row: 8 column: 56 } severity: 2 issues { position { row: 8 column: 56 } message: "Integral type implicit bitcast: Int32 and Uint64" end_position { row: 8 column: 56 } issue_code: 1107 severity: 2 } } } } } } } } } } ,message: "Data manipulation queries do not support column shard tables."
issue_code: 2029 severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_unavailable_s3.py.TestUnavailableS3.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (22.35s)
ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:124: in test_data_unchanged_after_ttl_change
    self.ydb_client.query("""
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 17 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 17 column: 17 } severity: 2 issues { position { row: 17 column: 40 } message: "At function: ToList" end_position { row: 17 column: 40 } severity: 2 issues { position { row: 7 column: 66 } message: "At function: OrderedMap" end_position { row: 7 column: 66 } severity: 2 issues { position { row: 10 column: 28 } message: "At function: AsStructUnordered" end_position { row: 10 column: 28 } severity: 2 issues { position { row: 9 column: 27 } message: "At function: Unwrap" end_position { row: 9 column: 27 } severity: 2 issues { position { row: 9 column: 34 } message: "At function: SafeCast" end_position { row: 9 column: 34 } severity: 2 issues { position { row: 8 column: 65 } message: "At function: +" end_position { row: 8 column: 65 } severity: 2 issues { position { row: 8 column: 27 } message: "At function: SafeCast" end_position { row: 8 column: 27 } severity: 2 issues { position { row: 8 column: 49 } message: "At function: /" end_position { row: 8 column: 49 } severity: 2 issues { position { row: 6 column: 40 } message: "At function: /" end_position { row: 6 column: 40 } severity: 2 issues { position { row: 6 column: 35 } message: "At function: -" end_position { row: 6 column: 35 } severity: 2 issues { position { row: 6 column: 35 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 35 } issue_code: 1107 severity: 2 } } issues { position { row: 6 column: 42 } message: "At function: SafeCast" end_position { row: 6 column: 42 } severity: 2
issues { position { row: 6 column: 51 } message: "At function: -" end_position { row: 6 column: 51 } severity: 2 issues { position { row: 6 column: 51 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 51 } issue_code: 1107 severity: 2 } } } } } } } } } } } } } ,message: "Data manipulation queries do not support column shard tables." issue_code: 2029 severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (69.70s)
ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:277: in test_ttl_delete
    self.ydb_client.query("""
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E   ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 17 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 17 column: 17 } severity: 2 issues { position { row: 17 column: 40 } message: "At function: ToList" end_position { row: 17 column: 40 } severity: 2 issues { position { row: 7 column: 66 } message: "At function: OrderedMap" end_position { row: 7 column: 66 } severity: 2 issues { position { row: 10 column: 28 } message: "At function: AsStructUnordered" end_position { row: 10 column: 28 } severity: 2 issues { position { row: 9 column: 27 } message: "At function: Unwrap" end_position { row: 9 column: 27 } severity: 2 issues { position { row: 9 column: 34 } message: "At function: SafeCast" end_position { row: 9 column: 34 } severity: 2 issues { position { row: 8 column: 65 } message: "At function: +" end_position { row: 8 column: 65 } severity: 2 issues { position { row: 8 column: 27 } message: "At function: SafeCast" end_position { row: 8 column: 27 } severity: 2 issues { position { row: 8 column: 49 } message: "At function: /" end_position { row: 8 column: 49 } severity: 2 issues { position { row: 6 column: 40 } message: "At function: /" end_position { row: 6 column: 40 } severity: 2 issues { position { row: 6 column: 35 }
message: "At function: -" end_position { row: 6 column: 35 } severity: 2 issues { position { row: 6 column: 35 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 35 } issue_code: 1107 severity: 2 } } issues { position { row: 6 column: 42 } message: "At function: SafeCast" end_position { row: 6 column: 42 } severity: 2 issues { position { row: 6 column: 51 } message: "At function: -" end_position { row: 6 column: 51 } severity: 2 issues { position { row: 6 column: 51 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 51 } issue_code: 1107 severity: 2 } } } } } } } } } } } } } ,message: "Execution" issue_code: 1060 severity: 1 issues { message: "(NYql::NDq::TDqOutputChannelChunkSizeLimitExceeded) ydb/library/yql/dq/runtime/dq_output_channel.cpp:317: Row data size is too big: 56000009 bytes, exceeds limit of 50331648 bytes" issue_code: 2029 severity: 1 } (server_code: 400120) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff [fail] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (39.39s) ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py:100: in test self.ydb_client.query( ydb/tests/olap/ttl_tiering/base.py:68: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:350: in lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:172: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 15 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 15 column: 17 } severity: 2 issues { position { row: 15 column: 40 } message: "At function: ToList" end_position { row: 15 column: 40 } severity: 2 issues { position { row: 5 column: 66 } message: "At function: OrderedMap" end_position { row: 5 column: 66 } severity: 2 issues { position { row: 8 column: 28 } message: "At function: AsStructUnordered" end_position { row: 8 column: 28 } severity: 2 issues { position { row: 7 column: 27 } message: "At function: Unwrap" end_position { row: 7 column: 27 } severity: 2 issues { position { row: 7 column: 34 } message: "At function: SafeCast" end_position { 
row: 7 column: 34 } severity: 2 issues { position { row: 6 column: 41 } message: "At function: +" end_position { row: 6 column: 41 } severity: 2 issues { position { row: 6 column: 36 } message: "At function: +" end_position { row: 6 column: 36 } severity: 2 issues { position { row: 6 column: 36 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 36 } issue_code: 1107 severity: 2 } } } issues { position { row: 6 column: 41 } message: "At function: +" end_position { row: 6 column: 41 } severity: 2 issues { position { row: 6 column: 41 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 41 } issue_code: 1107 severity: 2 } } } } } } } } ,message: "Data manipulation queries do not support column shard tables." issue_code: 2029 severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ FAIL: 1 - GOOD, 5 - FAIL ydb/tests/olap/ttl_tiering

ydb/tests/stress/log/tests [size:medium]
------ sole chunk ran 2 tests (total:206.29s - test:206.25s)
[fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (22.65s)
ydb/tests/stress/log/tests/test_workload.py:59: in test
    yatest.common.execute(
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise
    raise ExecutionError(self)
E   yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/apps/ydb/ydb --verbose --endpoint grpc://localhost:10430 --database=/Root workload log init --path column --int-cols 2 --str-cols 5 --key-cols 4 --len 200 --store column --min-partitions 100 --partition-size 10 --auto-partition 0' has failed with code 1.
E   Errors:
E   b"(!) New version of YDB CLI is available. Run 'ydb update' command for update. You can also disable further version checks with 'ydb version --disable-checks' command\nStatus: PRECONDITION_FAILED\nIssues: \n<main>: Error: Execution, code: 1060\n<main>:35:46: Error: Executing CREATE TABLE\n<main>: Error: OLAP schema operations are not supported, code: 2029\n"
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff
------ FAIL: 1 - GOOD, 1 - FAIL ydb/tests/stress/log/tests

------ [test_disk.py 0/10] chunk ran 1 test (total:55.10s - test:55.04s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 12.9G (13528604K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss    ref    pdirt
395478  46.7M  46.7M  12.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
396006  34.5M  32.1M  14.9M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
396054  783M   785M   678M      └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
400709  1.4G   1.3G   982M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400728  1.3G   1.3G   975M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400731  1.4G   1.3G   971M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400732  1.3G   1.3G   967M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400734  1.4G   1.4G   981M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400736  1.3G   1.3G   966M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400746  1.4G   1.3G   970M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400750  1.4G   1.4G   977M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
400751  1.4G   1.3G   976M         └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr

------ [test_tablet.py 0/10] chunk ran 1 test (total:101.78s - test:101.63s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.3G (13911836K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid     rss    ref    pdirt
394675  46.7M  46.7M  12.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
394745  34.9M  32.6M  15.5M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
394750  793M   779M   689M      └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
397225  1.6G   1.6G   1.2G         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
397246  1.6G   1.3G   955M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
397262  1.6G   1.3G   933M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
397263  1.6G   1.3G   928M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
397264  1.6G   1.3G   942M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
397300  1.6G   1.4G   977M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
397315  1.6G   1.4G   987M         ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
397317  1.6G   1.4G   1015M        └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr

------ [1/10] chunk ran 1 test (total:169.79s - test:169.75s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 12.4G (13013368K) used.
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 724382 46.6M 46.6M 6.7M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 724548 34.6M 23.8M 10.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 724554 48.0M 47.7M 23.0M └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testin 725006 6.2G 6.1G 6.1G └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing 765790 6.2G 6.1G 6.1G └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balanc Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff/stderr ydb/core/cms/ut_sentinel_unstable [size:medium] ------ sole chunk ran 1 test (total:578.31s - test:576.28s) [fail] TSentinelUnstableTests::BSControllerCantChangeStatus [default-linux-x86_64-release-asan] (560.15s) (TWithBackTrace) ydb/library/actors/testlib/test_runtime.cpp:1262: Dispatched 2500000 events, limit reached. TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/bt_exception.h:16:5 NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:48 DispatchEvents at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:0 NKikimr::NCmsTest::TTestEnv::SetPDiskState(TSet, std::__y1::allocator> const&, NKikimrBlobStorage::TPDiskState_E, NKikimrBlobStorage::EDriveStatus) at /-S/ydb/core/cms/sentinel_ut_helpers.h:237:9 NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TTestCaseBSControllerCantChangeStatus::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/cms/sentinel_ut_unstable.cpp:0:17 operator() at /-S/ydb/core/cms/sentinel_ut_unstable.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
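All three over-limit chunks above point at the same knob: REQUIREMENTS(ram:X) in the affected suite's ya.make. A minimal sketch of what that could look like, assuming the usual PY3TEST()/END() macro pair around the suite declaration and using 16 as a hypothetical value sized above the observed ~13G peaks (the real ya.make files for these suites may differ):

    PY3TEST()
        # Hypothetical sketch: raise the declared RAM budget above the default
        # 8G ceiling. REQUIREMENTS(ram:X) is the knob named in the log lines above;
        # 16 is an illustrative value, not taken from the actual suite ya.make.
        REQUIREMENTS(ram:16)
    END()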
ydb/core/cms/ut_sentinel_unstable [size:medium]
------ sole chunk ran 1 test (total:578.31s - test:576.28s)
[fail] TSentinelUnstableTests::BSControllerCantChangeStatus [default-linux-x86_64-release-asan] (560.15s)
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.cpp:1262: Dispatched 2500000 events, limit reached.
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/bt_exception.h:16:5
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:48
DispatchEvents at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:0
NKikimr::NCmsTest::TTestEnv::SetPDiskState(TSet, std::__y1::allocator> const&, NKikimrBlobStorage::TPDiskState_E, NKikimrBlobStorage::EDriveStatus) at /-S/ydb/core/cms/sentinel_ut_helpers.h:237:9
NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TTestCaseBSControllerCantChangeStatus::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/cms/sentinel_ut_unstable.cpp:0:17
operator() at /-S/ydb/core/cms/sentinel_ut_unstable.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.out
------ FAIL: 1 - FAIL ydb/core/cms/ut_sentinel_unstable

ydb/core/health_check/ut [size:medium] nchunks:10
------ [3/10] chunk ran 5 tests (total:51.25s - test:51.22s)
[fail] THealthCheckTest::LayoutIncorrect [default-linux-x86_64-release-asan] (5.56s)
assertion failed at ydb/core/health_check/health_check_ut.cpp:2170, virtual void NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext &): (issue_log.message() == "Database has storage issues") failed: ("Database has multiple issues" != Database has storage issues) , with diff: ("|)Database has (mul|s)t(ipl|orag)e issues("|)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/health_check/health_check_ut.cpp:0:17
operator() at /-S/ydb/core/health_check/health_check_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.LayoutIncorrect.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.LayoutIncorrect.out
------ FAIL: 46 - GOOD, 1 - FAIL ydb/core/health_check/ut

ydb/core/keyvalue/ut_trace [size:medium] nchunks:5
------ [0/5] chunk ran 1 test (total:5.06s - test:5.03s)
[fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.08s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out
------ [1/5] chunk ran 1 test (total:5.16s - test:5.13s)
[fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.04s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out
------ [2/5] chunk ran 1 test (total:13.38s - test:13.35s)
[fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.36s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out
------ [3/5] chunk ran 1 test (total:10.78s - test:10.74s)
[fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.07s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out
------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace

ydb/core/kqp/ut/cost [size:medium] nchunks:50
------ [12/50] chunk ran 1 test (total:19.67s - test:19.64s)
[crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: 100)
==765209==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000017d741cf bp 0x7ffe2a6fabc0 sp 0x7ffe2a6faa20 T0)
==765209==The signal is caused by a READ memory access.
==765209==Hint: address points to the zero page.
2025-04-04T17:25:58.054871Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489510513713085031:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T17:25:58.058270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 (the same debug-info warning repeats for offsets 0x30, 0x60, 0x90, 0xc0, and 0xa20 through 0xc90) ..[snippet truncated]..
0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12 #11 0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10 #12 0x186e0e55 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x186b1b78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x17d96975 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x186b3445 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x186db78c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f21125bfd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f21125bfe3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x15ad0028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x15ad0028) (BuildId: b39ae28ee39da90c850ce26824d563677aab4934) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==765209==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 21 - GOOD, 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/pg [size:medium] nchunks:10 ------ [0/10] chunk ran 12 tests (total:157.02s - test:156.84s) [fail] KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [default-linux-x86_64-release-asan] (7.89s) (NYdb::V3::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequenceWithSchemaname.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.AlterColumnSetDefaultFromSequenceWithSchemaname.out ------ [1/10] chunk ran 12 tests (total:130.04s - setup:0.02s test:129.94s) [fail] KqpPg::DropSequence [default-linux-x86_64-release-asan] (7.93s) (NYdb::V3::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.DropSequence.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.DropSequence.out ------ [8/10] chunk ran 11 tests (total:415.97s - setup:0.02s test:415.89s) [fail] KqpPg::TempTablesWithCache [default-linux-x86_64-release-asan] (12.59s) (NYdb::V3::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.out [fail] KqpPg::TempTablesDrop [default-linux-x86_64-release-asan] (9.75s) (NYdb::V3::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.out ------ [9/10] chunk ran 11 tests (total:414.34s - test:414.20s) [fail] PgCatalog::CheckSetConfig [default-linux-x86_64-release-asan] (8.67s) (NYdb::V3::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.out ------ FAIL: 109 - GOOD, 5 - FAIL ydb/core/kqp/ut/pg ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [15/50] chunk ran 4 tests (total:37.39s - test:37.35s) [crashed] KqpLimits::TooBigColumn+useSink [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.out ------ [47/50] chunk ran 3 tests (total:63.34s - test:63.28s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (48.15s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x18B8D15F 1. 
/tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x18273E98 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x18285F67 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x18285F67 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x18285F67 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x18285F67 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x18285F67 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x18BC2E65 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x18BC2E65 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x18BC2E65 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18B93B88 11. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x18285175 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18B95455 13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x18BBD79C 14. ??:0: ?? @ 0x7F939E098D8F 15. ??:0: ?? @ 0x7F939E098E3F 16. ??:0: ?? @ 0x15C21028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ FAIL: 170 - GOOD, 1 - FAIL, 1 - CRASHED ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [21/50] chunk ran 2 tests (total:42.60s - test:42.52s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (15.88s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x17F890FD 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x17F699FA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x17F70037 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x17F70037 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x17F70037 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17F70037 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17F70037 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x17F6F245 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F04332D5D8F 17. ??:0: ?? @ 0x7F04332D5E3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [25/50] chunk ran 2 tests (total:37.44s - test:37.36s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (19.87s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x17FBC427 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x17FA567A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F0C5732BD8F 17. ??:0: ?? @ 0x7F0C5732BE3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [26/50] chunk ran 2 tests (total:22.86s - test:22.81s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (6.80s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x17FA544A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FF06C81BD8F 17. ??:0: ?? @ 0x7FF06C81BE3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (11.54s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x17FA5222 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FF06C81BD8F 17. ??:0: ?? @ 0x7FF06C81BE3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out ------ [27/50] chunk ran 2 tests (total:31.63s - test:31.55s) [fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (10.73s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x17FA4BA2 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FB70BDE0D8F 17. ??:0: ?? @ 0x7FB70BDE0E3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out [fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (15.98s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x17FB5047 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x17FA4FFA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FB70BDE0D8F 17. ??:0: ?? @ 0x7FB70BDE0E3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out ------ [28/50] chunk ran 2 tests (total:33.62s - test:33.56s) [fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (10.61s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x17FA4DCA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FD152838D8F 17. ??:0: ?? @ 0x7FD152838E3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out ------ [29/50] chunk ran 2 tests (total:22.29s - test:22.22s) [fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (7.10s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x17FA5ACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F42CBF9DD8F 17. ??:0: ?? @ 0x7F42CBF9DE3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out [fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (10.67s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x17FA58A2 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F42CBF9DD8F 17. ??:0: ?? @ 0x7F42CBF9DE3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out ------ [30/50] chunk ran 2 tests (total:41.90s - test:41.86s) [fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (18.78s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x17FA4522 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F597D984D8F 17. ??:0: ?? @ 0x7F597D984E3F 18. ??:0: ?? @ 0x15B7B028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out ------ [31/50] chunk ran 2 tests (total:21.30s - test:21.26s) [fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (10.64s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF
1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851
2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x17FA474A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5
12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8
13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5
15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC
16. ??:0: ?? @ 0x7F498BFE3D8F
17. ??:0: ?? @ 0x7F498BFE3E3F
18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out
------ FAIL: 93 - GOOD, 11 - FAIL ydb/core/kqp/ut/tx
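Since the 11 failures in ydb/core/kqp/ut/tx all trip over the same SnapshotRW precondition mismatch, iterating on a single test locally is likely cheaper than retrying the whole suite. A hedged sketch of a narrowed rerun, assuming ya's --test-filter option accepts Suite::TestName patterns and that -A forces test execution as usual (the exact flag spelling may differ between ya versions):

    ./ya make ydb/core/kqp/ut/tx -A \
        --test-filter 'KqpSnapshotIsolation::TSimpleOltp' \
        --build release --sanitize=address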
ydb/core/kqp/ut/view [size:medium]
------ sole chunk ran 23 tests (total:223.46s - test:223.33s)
[fail] TCreateAndDropViewTest::DropViewIfExists [default-linux-x86_64-release-asan] (7.66s)
(NYdb::V3::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.out
[fail] TCreateAndDropViewTest::CallDropViewOnTable [default-linux-x86_64-release-asan] (12.17s)
(NYdb::V3::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.out
------ FAIL: 21 - GOOD, 2 - FAIL ydb/core/kqp/ut/view

ydb/core/sys_view/ut [size:medium] nchunks:10
------ [6/10] chunk ran 6 tests (total:339.34s - test:338.93s)
[fail] SystemView::PartitionStatsFields [default-linux-x86_64-release-asan] (11.37s)
greater-or-equal assertion failed at ydb/core/sys_view/ut_kqp.cpp:228, void NKikimr::NSysView::(anonymous namespace)::TYsonFieldChecker::Uint64GreaterOrEquals(ui64): value.AsUint64() >= expected
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NSysView::NTestSuiteSystemView::TTestCasePartitionStatsFields::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/sys_view/ut_kqp.cpp:823:15
operator() at /-S/ydb/core/sys_view/ut_kqp.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/sys_view/ut/test-results/unittest/testing_out_stuff/SystemView.PartitionStatsFields.out
------ FAIL: 58 - GOOD, 1 - FAIL ydb/core/sys_view/ut

ydb/core/tx/columnshard/ut_rw [size:medium] nchunks:60
------ [28/60] chunk ran 1 test (total:602.39s - test:600.01s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString (timeout) duration: 601.19s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [default-linux-x86_64-release-asan] (601.19s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.out
------ [29/60] chunk ran 1 test (total:602.44s - test:600.07s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 (timeout) duration: 601.24s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [default-linux-x86_64-release-asan] (601.24s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.out
------ [32/60] chunk ran 1 test (total:609.97s - setup:0.02s test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 (timeout) duration: 607.04s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [default-linux-x86_64-release-asan] (607.04s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.out
------ [35/60] chunk ran 1 test (total:603.21s - test:600.10s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch: TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 (timeout) duration: 601.00s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [default-linux-x86_64-release-asan] (601.00s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKUInt64.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKUInt64.out
------ TIMEOUT: 55 - GOOD, 4 - TIMEOUT ydb/core/tx/columnshard/ut_rw
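All four CompactionSplitGranule* chunks above were killed at the 600 s ceiling rather than failing on their own, with observed runtimes up to ~607 s. If the suite legitimately needs more time under ASAN, one option is declaring a bigger budget in its ya.make. A sketch under assumptions: the standard SIZE()/TIMEOUT() test macros apply to this unittest target, and 1200 is a hypothetical value, not taken from the real ya.make:

    UNITTEST_FOR(ydb/core/tx/columnshard)
        # Hypothetical sketch: move the suite out of the 600 s medium-size class
        # and give it explicit headroom above the observed worst case.
        SIZE(LARGE)
        TIMEOUT(1200)
    END()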
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKUInt64.out ------ TIMEOUT: 55 - GOOD, 4 - TIMEOUT ydb/core/tx/columnshard/ut_rw ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4 ------ [0/4] chunk ran 3 tests (total:252.10s - test:251.29s) [fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (158.67s) assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out ------ FAIL: 10 - GOOD, 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup ydb/core/tx/schemeshard/ut_index [size:medium] nchunks:80 ------ [1/80] chunk ran 1 test (total:618.21s - setup:0.01s test:600.11s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] (timeout) duration: 612.10s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/stderr [timeout] TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [default-linux-x86_64-release-asan] (612.10s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/TAsyncIndexTests.CdcAndMergeWithReboots.TabletReboots.err Stdout: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/TAsyncIndexTests.CdcAndMergeWithReboots.TabletReboots.out ------ [9/80] chunk ran 1 test (total:610.50s - setup:0.01s test:600.10s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TAsyncIndexTests::MergeBothWithReboots[TabletReboots] (timeout) duration: 605.40s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/stderr [timeout] TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [default-linux-x86_64-release-asan] (605.40s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/TAsyncIndexTests.MergeBothWithReboots.TabletReboots.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index/test-results/unittest/testing_out_stuff/TAsyncIndexTests.MergeBothWithReboots.TabletReboots.out ------ TIMEOUT: 25 - GOOD, 2 - TIMEOUT ydb/core/tx/schemeshard/ut_index ydb/core/tx/tiering/ut [size:medium] nchunks:60 ------ [3/60] chunk ran 1 test (total:37.70s - test:37.65s) [crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out ------ [4/60] chunk ran 1 test (total:167.37s - test:167.26s) [crashed] ColumnShardTiers::TieringUsage [default-linux-x86_64-release-asan] (144.67s) Test crashed (return code: 100) ==703024==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 6000 byte(s) in 30 object(s) allocated from: #0 0x17ad16dd in operator new[](unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:89:3 #1 0x18c77b90 in TAtomicsArray /-S/library/cpp/monlib/metrics/atomics_array.h:12:23 #2 0x18c77b90 in TExponentialHistogramCollector /-S/library/cpp/monlib/metrics/histogram_collector_exponential.cpp:16:15 #3 0x18c77b90 in MakeHolder /-S/util/generic/ptr.h:370:27 #4 0x18c77b90 in NMonitoring::ExponentialHistogram(unsigned int, double, double) /-S/library/cpp/monlib/metrics/histogram_collector_exponential.cpp:66:16 #5 0x226756ce in NKikimr::NOlap::NBlobOperations::TWriteCounters::TWriteCounters(NKikimr::NOlap::NBlobOperations::TConsumerCounters const&) /-S/ydb/core/tx/columnshard/blobs_action/counters/write.cpp:16:56 #6 0x2266f873 in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:167:24 #7 0x2266f873 in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:319:9 #8 0x2266f873 in __shared_ptr_emplace, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #9 0x2266f873 
..[snippet truncated].. :function&)>, TSet, std::__y1::allocator> const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.cpp:1554:22 #47 0x30334ea3 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:474:13 #48 0x30333fc2 in GrabEdgeEvent /-S/ydb/library/actors/testlib/test_runtime.h:523:20 #49 0x30333fc2 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:529:20 #50 0x3032c222 in NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) /-S/ydb/library/actors/testlib/test_runtime.h:574:24 #51 0x3032bdfa in NActors::TTestActorRuntime::SimulateSleep(TDuration) /-S/ydb/core/testlib/actors/test_runtime.cpp:291:9 #52 0x3038d344 in NKikimr::Tests::NCommon::THelper::StartSchemaRequestTableServiceImpl(TBasicString> const&, bool, bool) const /-S/ydb/core/testlib/common_helper.cpp:165:34 #53 0x179c4712 in NKikimr::NTestSuiteColumnShardTiers::TTestCaseTieringUsage::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/tx/tiering/ut/ut_tiers.cpp:444:17 SUMMARY: AddressSanitizer: 1091869 byte(s) leaked in 22113 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TieringUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TieringUsage.out ------ FAIL: 8 - GOOD, 2 - CRASHED ydb/core/tx/tiering/ut ydb/core/tx/tx_proxy/ut_base_tenant [size:medium] nchunks:10 ------ [0/10] chunk ran 3 tests (total:28.05s - test:27.98s) [fail] TModifyUserTest::ModifyUser [default-linux-x86_64-release-asan] (3.67s) assertion failed at ydb/core/tx/tx_proxy/proxy_ut.cpp:1044, virtual void NTestSuiteTModifyUserTest::TTestCaseModifyUser::Execute_(NUnitTest::TTestContext &): (client.ModifyUser("/dc-1", { .User = "user2", .Password = "pas2user"}, user1Token) == NMsgBusProxy::MSTATUS_OK) failed: (MSTATUS_ERROR != MSTATUS_OK) , with diff: MSTATUS_(ERR|)O(R|K) GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NTestSuiteTModifyUserTest::TTestCaseModifyUser::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/tx/tx_proxy/proxy_ut.cpp:1044:13 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/testing_out_stuff/TModifyUserTest.ModifyUser.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/testing_out_stuff/TModifyUserTest.ModifyUser.out ------ FAIL: 24 - GOOD, 1 - FAIL ydb/core/tx/tx_proxy/ut_base_tenant ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10 ------ [2/10] chunk ran 30 tests (total:317.79s - setup:0.01s test:317.13s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [default-linux-x86_64-release-asan] (10.77s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19476 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [default-linux-x86_64-release-asan] (12.12s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14182 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [default-linux-x86_64-release-asan] (10.08s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15245 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (10.91s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10953 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [default-linux-x86_64-release-asan] (11.80s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23328 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [default-linux-x86_64-release-asan] (12.85s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62431 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [default-linux-x86_64-release-asan] (11.95s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16547 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [default-linux-x86_64-release-asan] (12.41s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23183 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [default-linux-x86_64-release-asan] (13.71s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27271 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.out ------ FAIL: 291 - GOOD, 9 - FAIL ydb/core/tx/tx_proxy/ut_schemereq ydb/core/viewer/ut [size:medium] nchunks:10 ------ [5/10] chunk ran 5 tests (total:606.74s - test:600.09s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: Viewer::JsonStorageListingV1PDiskIdFilter (timeout) duration: 242.30s Viewer::JsonStorageListingV2NodeIdFilter (good) duration: 163.29s Viewer::JsonStorageListingV2GroupIdFilter (good) duration: 117.00s Viewer::JsonStorageListingV2 (good) duration: 72.48s Viewer::JsonStorageListingV2PDiskIdFilter test was not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/stderr [timeout] Viewer::JsonStorageListingV1PDiskIdFilter [default-linux-x86_64-release-asan] (242.30s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.JsonStorageListingV1PDiskIdFilter.out ------ TIMEOUT: 48 - GOOD, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/viewer/ut ydb/public/sdk/cpp/src/client/federated_topic/ut [size:medium] nchunks:10 ------ [0/10] chunk ran 2 tests (total:63.78s - setup:0.03s test:63.36s) [crashed] BasicUsage::BasicWriteSession [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -4) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/testing_out_stuff/BasicUsage.BasicWriteSession.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/testing_out_stuff/BasicUsage.BasicWriteSession.out ------ FAIL: 18 - GOOD, 1 - CRASHED ydb/public/sdk/cpp/src/client/federated_topic/ut ydb/public/sdk/cpp/src/client/topic/ut [size:medium] nchunks:10 ------ [3/10] chunk ran 12 tests (total:188.58s - test:188.50s) [crashed] TxUsage::Sinks_Oltp_WriteToTopicAndTable_4 [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/test-results/unittest/testing_out_stuff/TxUsage.Sinks_Oltp_WriteToTopicAndTable_4.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/test-results/unittest/testing_out_stuff/TxUsage.Sinks_Oltp_WriteToTopicAndTable_4.out ------ FAIL: 122 - GOOD, 1 - CRASHED ydb/public/sdk/cpp/src/client/topic/ut ydb/services/persqueue_v1/ut [size:medium] nchunks:10 ------ [3/10] chunk ran 13 tests (total:283.59s - test:283.45s) [crashed] TPersQueueTest::DisableDeduplication [default-linux-x86_64-release-asan] (15.93s) Test crashed (return code: 100) ==540261==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 6229720 byte(s) in 95 object(s) allocated from: #0 0x181ba3ef in malloc /-S/contrib/libs/clang18-rt/lib/asan/asan_malloc_linux.cpp:68:3 #1 0x19459ef3 in grpc_event_engine::experimental::MemoryAllocator::MakeSlice(grpc_event_engine::experimental::MemoryRequest) /-S/contrib/libs/grpc/src/core/lib/event_engine/memory_allocator.cc:63:13 #2 0x1943481d in maybe_make_read_slices /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1070:57 #3 0x1943481d in tcp_handle_read(void*, y_absl::lts_y_20240722::Status) 
/-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1094:5 #4 0x19438c77 in Run /-S/contrib/libs/grpc/src/core/lib/iomgr/closure.h:303:5 #5 0x19438c77 in tcp_read(grpc_endpoint*, grpc_slice_buffer*, grpc_closure*, bool, int) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1156:5 #6 0x19792698 in continue_read_action_locked /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2592:3 #7 0x19792698 in read_action_locked(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2581:7 #8 0x193294c6 in grpc_combiner_continue_exec_ctx() /-S/contrib/libs/grpc/src/core/lib/iomgr/combiner.cc:231:5 #9 0x193027f4 in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:75:17 #10 0x194480a4 in end_worker /-S/contrib/l ..[snippet truncated].. gr/pollset.cc:48:10 #14 0x19422727 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29 #15 0x19fc609e in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15 #16 0x1f09cb3f in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13 #17 0x1f09cb3f in NYdbGrpc::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:189:18 #18 0x19fdb42e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12 #19 0x19fdb42e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10 #20 0x19fdb42e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #21 0x19fdb97c in Execute /-S/util/thread/factory.h:15:13 #22 0x19fdb97c in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #23 0x18509504 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #24 0x181b7f38 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 7085174 byte(s) leaked in 1615 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DisableDeduplication.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DisableDeduplication.out ------ FAIL: 132 - GOOD, 1 - CRASHED ydb/services/persqueue_v1/ut ydb/services/ydb/ut [size:medium] nchunks:60 ------ [29/60] chunk ran 5 tests (total:51.48s - test:51.43s) [fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (5.75s) assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ [50/60] chunk ran 4 tests (total:603.35s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
YdbYqlClient::SimpleColumnFamilies (good) duration: 579.89s
YdbYqlClient::TableKeyRangesSinglePartition (timeout) duration: 7.81s
YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync (good) duration: 7.16s
YdbYqlClient::SecurityTokenAuthMultiTenantSDK (good) duration: 6.40s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] YdbYqlClient::TableKeyRangesSinglePartition [default-linux-x86_64-release-asan] (7.81s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbYqlClient.TableKeyRangesSinglePartition.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbYqlClient.TableKeyRangesSinglePartition.out
------ TIMEOUT: 285 - GOOD, 1 - FAIL, 1 - TIMEOUT ydb/services/ydb/ut
Total 399 suites:
  370 - GOOD
  22 - FAIL
  7 - TIMEOUT
Total 9272 tests:
  9194 - GOOD
  51 - FAIL
  8 - NOT_LAUNCHED
  10 - TIMEOUT
  2 - SKIPPED
  7 - CRASHED
Cache efficiency ratio is 89.81% (41978 of 46742).
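The efficiency ratio above is simply cached-or-avoided results over total results (41978 / 46742 ≈ 89.81%); the breakdown on the next line shows that nothing came from the local cache, about 11% came from the dist cache, and roughly 79% of nodes were avoided outright. When the dist-cache hit rate degrades, a quick reachability probe against the bazel-remote endpoint from the command line above helps rule out connectivity or auth problems. This is a hypothetical sketch, assuming the standard bazel-remote HTTP layout (/cas/<sha256>); the digest used is the well-known SHA-256 of the empty blob, and the credentials mirror the --bazel-remote-* flags used above:

    # Prints the HTTP status only: 200/404 means the cache answered; anything
    # else (000, 401, 5xx) points at network or auth trouble.
    curl -s -o /dev/null -w '%{http_code}\n' \
        -u "cache_user:$(cat /tmp/tmp.Q8w3Bp39ki)" \
        http://cachesrv.internal:8081/cas/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855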
Local: 0 (0.00%), dist: 5123 (10.96%), by dynamic uids: 0 (0.00%), avoided: 36855 (78.85%)
Dist cache download: count=4738, size=13.16 GiB, speed=144.99 MiB/s
Disk usage for tools/sdk at least 68.55 MiB
Additional disk space consumed for build cache 848.89 GiB
Critical path:
[ 43829 ms] [CC] [GnCJIswIoDYPlSN35nGPTA default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/grpc_services/rpc_read_rows.cpp [started: 0 (1743785425490), finished: 43829 (1743785469319)]
[   154 ms] [AR] [N-bVe30QAa9onD2jDjbYAA default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/grpc_services/libydb-core-grpc_services.a [started: 43868 (1743785469358), finished: 44022 (1743785469512)]
[ 62078 ms] [LD] [ScjsK1OC3n5wJiN716kFiw default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 583345 (1743786008835), finished: 645423 (1743786070913)]
[623416 ms] [TM] [rnd-3vh72ctjxg3gcogt asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 2333990 (1743787759480), finished: 2957406 (1743788382896)]
Time from start: 3412901.8830566406 ms, time elapsed by graph 729477 ms, time diff 2683424.8830566406 ms.
The longest 10 tasks:
[623416 ms] [TM] [rnd-3vh72ctjxg3gcogt asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743787759480, finished: 1743788382896]
[622024 ms] [TM] [rnd-10312701944057063260 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1743787927363, finished: 1743788549387]
[618874 ms] [TM] [rnd-16347515126388397359 asan default-linux-x86_64 release]: ydb/core/tx/schemeshard/ut_index/unittest [started: 1743785713583, finished: 1743786332457]
[618115 ms] [TM] [rnd-m7lshd1ctqrzvzq6 asan default-linux-x86_64 release]: ydb/tests/fq/mem_alloc/py3test [started: 1743787726633, finished: 1743788344748]
[611455 ms] [TM] [rnd-6005154716934970288 asan default-linux-x86_64 release]: ydb/core/tx/schemeshard/ut_index/unittest [started: 1743785720619, finished: 1743786332074]
[610649 ms] [TM] [rnd-4154857074208942550 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743785742549, finished: 1743786353198]
[607372 ms] [TM] [rnd-4560050923635873666 asan default-linux-x86_64 release]: ydb/core/viewer/ut/unittest [started: 1743785876389, finished: 1743786483761]
[603985 ms] [TM] [rnd-7027402737127806848 asan default-linux-x86_64 release]: ydb/services/ydb/ut/unittest [started: 1743786153206, finished: 1743786757191]
[603954 ms] [TM] [rnd-1341003272707727111 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743786149702, finished: 1743786753656]
[602895 ms] [TM] [rnd-16387649877961562718 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743787150286, finished: 1743787753181]
Total time by type:
[137077480 ms] [TM] [count: 3928, ave time 34897.53 msec]
[ 10917895 ms] [prepare:get from dist cache] [count: 5123, ave time 2131.15 msec]
[ 10066595 ms] [LD] [count: 369, ave time 27280.75 msec]
[  1312508 ms] [TS] [count: 225, ave time 5833.37 msec]
[   401223 ms] [prepare:tools] [count: 20, ave time 20061.15 msec]
[   323325 ms] [CC] [count: 12, ave time 26943.75 msec]
[   256716 ms] [TA] [count: 211, ave time 1216.66 msec]
[   205454 ms] [prepare:bazel-store] [count: 3, ave time 68484.67 msec]
[   123516 ms] [prepare:put into local cache, clean build dir] [count: 5137, ave time 24.04 msec]
[   100270 ms] [prepare:AC] [count: 4, ave time 25067.50 msec]
[     7176 ms] [prepare:resources] [count: 1, ave time 7176.00 msec]
[     6593 ms] [prepare:put to dist cache] [count: 331, ave time 19.92 msec]
[     2351 ms] [AR] [count: 11, ave time 213.73 msec]
[      914 ms] [ld] [count: 2, ave time 457.00 msec]
[      643 ms] [CF] [count: 2, ave time 321.50 msec]
[      465 ms] [BI] [count: 1, ave time 465.00 msec]
[      347 ms] [SB] [count: 1, ave time 347.00 msec]
[      236 ms] [UN] [count: 1, ave time 236.00 msec]
[      201 ms] [CP] [count: 1, ave time 201.00 msec]
[       12 ms] [prepare:clean] [count: 3, ave time 4.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 138646704 ms (93.03%)
Total run tasks time - 149041781 ms
Configure time - 34.2 s
Statistics overhead 1986 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.Q8w3Bp39ki --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [7792/7792 modules configured] [3536/4553 modules rendered]
[2 ymakes processing] [7792/7792 modules configured] [4425/4553 modules rendered]
[2 ymakes processing] [7792/7792 modules configured] [4553/4553 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7798/7798 modules configured] [4553/4553 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done.
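The second attempt repeats the try_1 command with -X added and writes its reports under try_2; the [LD] {BAZEL_UPLOAD, SKIPPED} lines that follow suggest artifacts already present in the remote cache are not re-uploaded. For iterating on one of the failures above without re-running the whole graph, a narrower invocation is usually enough. A minimal sketch, reusing -A and the build flags from the command line above; the -F test-name filter is an assumption about this ya version, not something shown in this log:

    # Hypothetical targeted re-run of a single failing test from try_1 (sketch only).
    ./ya make ydb/core/sys_view/ut -A --build release --sanitize=address \
        -F 'SystemView::PartitionStatsFields'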
Preparing for execution | 2.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw | 3.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut | 3.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut | 3.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel | 4.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql | 4.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example | 5.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut | 5.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan | 5.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut | 6.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr | 7.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut | 6.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot | 7.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut | 8.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut | 8.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index | 9.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut | 9.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |10.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |10.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |11.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |13.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |14.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |14.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |13.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |15.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |15.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |15.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |15.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |16.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |16.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |16.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |16.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |16.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |17.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |17.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |17.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |17.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |17.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |18.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |18.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |18.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |18.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |17.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |17.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |17.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |17.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |17.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |18.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |18.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |18.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |18.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |18.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |18.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |19.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |19.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |19.4%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |19.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |19.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |19.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |20.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |20.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |20.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |21.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |22.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |22.2%| CLEANING SYMRES |22.6%| PREPARE $(TEST_TOOL_HOST-sbr:8067063302) |21.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |21.4%| PREPARE $(CLANG_FORMAT-2212207123) |21.6%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |21.7%| PREPARE $(WITH_JDK17-sbr:7832760150) |21.9%| PREPARE $(JDK_DEFAULT-472926544) |22.0%| PREPARE $(WITH_JDK-sbr:7832760150) |22.1%| PREPARE $(JDK17-472926544) |22.2%| PREPARE $(FLAKE8_PY3-715603131) |22.5%| PREPARE $(PYTHON) |22.6%| PREPARE $(CLANG16-1380963495) |22.7%| PREPARE $(CLANG14-1922233694) |22.9%| PREPARE $(LLD_ROOT-3808007503) |23.0%| PREPARE $(YMAKE_PYTHON3-4256832079) |23.2%| PREPARE $(CLANG-1922233694) |23.4%| PREPARE $(GDB) |23.7%| PREPARE $(CLANG18-3363451693) |23.9%| PREPARE $(CLANG-2518231432) |24.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |24.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |24.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |25.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |25.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |26.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |26.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |28.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |28.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |27.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |28.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |28.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |28.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |28.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |29.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |29.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |30.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |30.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |31.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |31.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |30.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
|30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table
|31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut
|31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut
|31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant
|31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat
|31.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut
|31.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|32.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence
|32.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool
|32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut
|33.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer
|32.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl
|32.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp
|33.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|33.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut
|33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|33.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace
|33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots
|33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
|33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
|33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots
|33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots
|34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build
|34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
|34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut
|35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|35.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order
|34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut
|34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut
|34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers
|34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics
|34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
|35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk
|35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
|35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|35.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut
|33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache
|33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication
|33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql
|33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large
|33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut
|34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
|34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut
|34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut
|34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut
|34.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling
|34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
|34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query
|34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview
|34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig
|34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut
|34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator
|35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat
|35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost
|35.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut
|35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut
|35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap
|35.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|34.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut
|34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage
|34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|35.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut
|35.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut
|35.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut
|35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing
|35.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut
|35.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut
|35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut
|36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits
|36.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer
|36.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|35.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|36.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks
|36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker
|36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow
|36.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup
|36.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut
|37.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut
|37.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut
|37.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts
|37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut
|37.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk
|37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut
|37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut
|37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf
|37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service
|38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq
|38.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a
|38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export
|38.4%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a
|38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots
|38.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage
|38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication
|39.0%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
|39.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
|39.5%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a
|39.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut
|39.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun
|39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut
|39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut
|39.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut
|39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots
|39.7%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a
|39.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo
|39.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer
|40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo
|40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut
|40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes
|40.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a
|40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|40.9%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut
|41.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge
|41.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|41.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
|41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut
|41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer
|41.9%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut
|42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
|42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
|41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut
|42.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings
|42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|42.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction
|42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
|42.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut
|42.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader
|42.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large
|42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|42.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large
|43.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure
|43.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction
|43.3%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive
|43.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore
|43.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration
|43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move
|43.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence
|43.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut
|44.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx
|44.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
|44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume
|43.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap
|43.7%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut
|43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut
|43.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk
|44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut
|44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows
|44.3%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|44.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs
|44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots
|44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence
|45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut
|45.1%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage
|45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap
|45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless
|45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk
|45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud
|45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests
|45.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots
|45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut
|45.8%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl
|46.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut
|46.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
|46.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops
|46.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable
|46.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config
|46.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut
|46.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut
|47.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer
|47.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless
|47.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut
|46.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k
|46.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep
|46.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data
|47.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut
|47.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|47.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|47.3%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility
|47.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests
|47.6%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql
|48.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs
|48.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming
|48.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut
|48.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut
|48.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api
|48.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load
|48.6%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun
|48.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer
|48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl
|48.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
|49.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign
|49.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut
|49.3%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans
|49.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup
|48.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots
|48.9%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut
|49.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test
|49.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
|49.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
|49.5%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens
|49.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant
|49.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut
|49.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|50.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction
|50.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat
|50.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
|50.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats
|50.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber
|50.7%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests
|50.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot
|51.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction
|50.4%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc
|50.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|50.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
|50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut
|50.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut
|50.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace
|51.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut
|51.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq
|51.3%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test
|51.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
|51.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal
|51.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|51.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
|51.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut
|52.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans
|52.2%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests
|52.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema
|52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
|52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut
|52.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|52.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|52.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut
|52.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|52.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots
|52.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index
|52.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|52.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
|52.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
|53.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard
|53.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|53.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache
|53.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt
|53.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader
|53.8%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut
|53.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes
|54.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub
|54.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
|54.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut
|54.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id
|54.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|54.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor
|54.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark
|54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
|54.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base
|54.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows
|54.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut
|54.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans
|54.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign
|55.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|55.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut
|55.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut
|55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join
|55.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris
|55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut
|55.7%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut
|55.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|55.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant
|55.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|56.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|56.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|56.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut
|56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris
|56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut
|56.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest
|56.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut
>> BasicUsage::BasicWriteSession
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString
|56.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|56.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|56.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|56.6%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut
|56.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|56.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt
|56.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|56.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector
|57.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut
|58.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|58.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut
|58.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|58.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator
|58.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|58.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|58.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|58.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|58.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|58.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|58.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|59.0%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache
|59.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|59.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|59.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|59.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|59.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
|59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut
|59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|47.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|47.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|48.0%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut
|48.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
|48.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms
|48.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|48.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut
|48.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|48.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|48.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|48.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
|48.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|48.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection
|48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage
|48.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|48.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut
|49.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut
|49.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order
|49.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
|49.2%| COMPACTING CACHE 848.9GiB
|49.2%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut
|49.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots
|49.4%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut
|49.4%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut
|49.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut
|49.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing
|49.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql
|49.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut
|49.7%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut
|49.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3
|49.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence
|49.9%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common
|49.9%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut
|50.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut
|50.0%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut
|50.1%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut
|50.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk
|50.2%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut
|50.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots
|50.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots
|50.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init
|50.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut
|50.5%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut
|50.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader
|50.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap
|50.6%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large
|50.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl
|50.8%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard
|50.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
|50.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector
|50.9%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut
|51.0%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal
|51.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans
|51.1%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire
|51.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub
|51.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction
|51.3%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema
|51.3%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example
|51.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut
|51.4%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage
|51.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut
|51.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join
|51.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots
|51.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut
|51.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut
|51.8%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests
|51.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator
|51.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|51.9%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun
|52.0%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3
|52.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test
|52.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|52.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|52.2%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut
|52.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview
|52.3%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit
|52.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build
|52.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker
|52.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud
|52.5%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut
|52.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication
|52.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index
|52.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql
|52.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage
|52.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view
|52.9%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
|52.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query
|53.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql
|53.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium
|53.1%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut
|53.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut
|53.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests
|53.3%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator
|53.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut
|53.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots
|53.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut
|53.5%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay
|53.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common
|53.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut
|53.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain
|53.7%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests
|53.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors
|53.8%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics
|53.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table
|53.9%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut
|54.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity
|54.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence
|54.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots
|54.2%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename
|54.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut
|54.3%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut
|54.3%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits
|54.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut
|54.4%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap
|54.5%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering
|54.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|54.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow
|54.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view
|54.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication
|54.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks
|54.8%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut
|54.9%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests
|55.0%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut
|55.0%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut
|55.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|55.1%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|55.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore
|55.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots
|55.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer
|55.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut
|55.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk
|55.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence
|55.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots
|55.6%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests
|55.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops
|55.7%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut
|55.7%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut
|55.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber
|55.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
|55.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|56.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk
|56.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots
|56.1%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|56.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large
|56.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure
|56.2%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut
|56.3%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace
|56.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|56.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots
|56.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|56.5%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds
|56.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme
|56.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
|56.7%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical
|56.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart
|56.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup
|56.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std
|56.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys
|57.0%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut
|57.0%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|57.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode
|57.1%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut
|57.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk
|57.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build
|57.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut
|57.4%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli
|57.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut
|57.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots
|57.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain
|57.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats
|57.6%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut
|57.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging
|57.8%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut
|57.8%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig
|57.9%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants
|57.9%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer
|58.0%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut
|58.0%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests
|58.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes
|58.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut
|58.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator
|58.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer
|58.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats
|58.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut
|58.4%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut
|58.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
|58.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx
|58.6%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a
|58.6%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut
|58.7%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation
|58.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup
|58.8%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration
|58.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service
|58.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot
|59.0%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane
|59.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume
|59.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut
|59.1%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut
|59.2%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut
|59.3%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
|59.3%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api
|59.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction
|59.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge
|59.5%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut
|59.5%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe
|59.6%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut
|59.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling
|59.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows
|59.8%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut
|59.8%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut
|59.9%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut
|59.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats
|60.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows
|60.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes
|60.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream
|60.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas
|60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge
|60.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut
|60.3%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut
|60.4%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable
|60.4%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api
|60.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator
|60.5%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut
|60.6%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool
|60.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning
|60.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots
|60.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace
|60.8%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut
|60.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base
|60.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects
|61.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers
|61.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut
|61.1%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|61.2%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd
|61.2%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut
|61.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|61.3%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut
|61.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots
|61.4%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut
|61.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost
|61.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots
|61.6%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe
|61.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|61.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|61.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing
|61.8%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption
|61.9%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut
|61.9%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut
|62.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|62.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|62.1%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt
|62.2%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut
|62.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans
|62.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun
|62.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup
|62.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut
|62.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator
|62.5%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut
|62.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard
|62.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration
|62.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange
|62.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan
|62.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write
|62.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots
|62.9%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut
|63.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr
|63.0%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut
|63.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|63.1%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb
|63.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest
|63.2%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut
|63.3%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test
$(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |63.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |63.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |63.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |63.5%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |63.7%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |63.7%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |63.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |63.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |63.9%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |64.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |64.0%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |64.1%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |64.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |64.2%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |64.2%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |64.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |64.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |64.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |64.5%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |64.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |64.6%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |64.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |64.7%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |64.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |64.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |64.9%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |65.0%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |65.0%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |65.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |65.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |65.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |65.2%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |65.3%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |65.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |65.4%| 
[LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |65.5%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |65.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |65.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |65.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |65.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |65.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |65.8%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |65.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |65.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |66.0%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |66.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |66.1%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |66.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |66.2%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |66.3%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |66.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |66.4%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |66.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |66.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |66.6%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |66.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |66.7%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |66.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |66.8%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |66.9%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |66.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |67.0%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |67.1%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |67.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |67.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |67.3%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |67.3%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> 
TKeyValueTracingTest::ReadHuge |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpPg::TempTablesDrop >> TKeyValueTracingTest::ReadHuge [FAIL] |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable >> YdbYqlClient::TableKeyRangesSinglePartition |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest 
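[Editor's note] The TKeyValueTracingTest::ReadHuge failure flagged above (and detailed in the trace below) surfaces through the library/cpp/testing/unittest registrar: frame names such as NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_ are what the suite/case macros expand to. A minimal sketch of that registration pattern, assuming the standard Y_UNIT_TEST macros; the test body here is hypothetical:

    #include <library/cpp/testing/unittest/registar.h>

    // Y_UNIT_TEST_SUITE(TKeyValueTracingTest) expands to a namespace named
    // NTestSuiteTKeyValueTracingTest, and Y_UNIT_TEST(ReadHuge) to a test case
    // class TTestCaseReadHuge whose Execute_ method is the frame seen in the
    // stack trace below.
    Y_UNIT_TEST_SUITE(TKeyValueTracingTest) {
        Y_UNIT_TEST(ReadHuge) {
            // Hypothetical body: run one huge KeyValue read with tracing
            // enabled; the real check lives in keyvalue_ut_trace.cpp.
        }
    }
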
|69.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpPg::TempTablesDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xF9BEE90) TestOneRead(TBasicString>, TBasicString>)+4580 (0xF151154) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+130 (0xF157AE2) std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846) NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186) NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D) ??+0 (0x7FAA85B96D90) __libc_start_main+128 (0x7FAA85B96E40) _start+41 (0xD0AA029) |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TModifyUserTest::ModifyUser |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::WriteHuge >> PgCatalog::CheckSetConfig >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> KqpPg::TempTablesWithCache |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpPg::DropSequence >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 >> TKeyValueTracingTest::WriteSmall >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesDrop [GOOD] Test command err: Trying to start YDB, gRPC: 6535, MsgBus: 5434 2025-04-04T18:08:23.822052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521464961948656:2199];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:23.824363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000242/r3tmp/tmpaWxqN7/pdisk_1.dat 2025-04-04T18:08:24.136066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:24.188562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:24.189452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:24.191588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6535, node 1 2025-04-04T18:08:24.341469Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:24.341485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:24.341490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:24.341585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5434 TClient is connected to server localhost:5434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:24.900696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:25.764546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521473551883762:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:25.764546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521473551883773:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:25.764639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:25.776382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:25.786895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521473551883776:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:25.861286Z node 1 :TX_PROXY ERROR: Actor# [1:7489521473551883829:2332] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:26.153381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-04-04T18:08:26.657615Z node 1 :TX_PROXY ERROR: Actor# [1:7489521477846851357:2462] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:26.670346Z node 1 :TX_PROXY ERROR: Actor# [1:7489521477846851364:2467] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/YjA0N2E0OGUtNThkMzc3OTEtMTIyNjc4NjMtNzRjOTE2YzY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:26.689043Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T18:08:26.703576Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489521477846851417:2365], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:31: Error: At function: KiReadTable!
<main>:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:26.703912Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjA0N2E0OGUtNThkMzc3OTEtMTIyNjc4NjMtNzRjOTE2YzY=, ActorId: [1:7489521473551883758:2327], ActorState: ExecuteState, TraceId: 01jr0ztt229gqd691a5dvk9nf0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T18:08:26.748831Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489521477846851430:2373], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:31: Error: At function: KiReadTable!
<main>:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:26.749107Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmUzMTBmMDUtMzYwMTlkNzItZWViMGI5ZTAtZmEwNDhiNmE=, ActorId: [1:7489521477846851426:2370], ActorState: ExecuteState, TraceId: 01jr0ztt3f6krmh3x36qczst51, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] >> ColumnShardTiers::TTLUsage |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] >> YdbLogStore::AlterLogTable [FAIL] |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
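[Editor's note] The TKeyValueTracingTest failures in this run (ReadHuge, ReadSmall, WriteHuge, WriteSmall) all trip the same expectation: the test environment installs a fake Wilson trace uploader and requires exactly one uploaded trace per request, but two arrive. A minimal sketch of that check, assuming the UNIT_ASSERT_VALUES_EQUAL macro from library/cpp/testing/unittest; TFakeWilsonUploader and TTestEnv are hypothetical stand-ins for the fixtures in keyvalue_ut_trace.cpp:

    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/string.h>
    #include <util/generic/vector.h>

    // Hypothetical stand-ins: the real environment wires a fake Wilson trace
    // uploader into the actor system and collects the spans each request emits.
    struct TFakeWilsonUploader {
        TVector<TString> Traces; // one entry per uploaded trace
    };

    struct TTestEnv {
        TFakeWilsonUploader Uploader;
        TFakeWilsonUploader* WilsonUploader = &Uploader;
    };

    void TestOneRead(TString key, TString value) {
        TTestEnv env;
        Y_UNUSED(key);   // sketch only: the actual write/read against the
        Y_UNUSED(value); // KeyValue tablet is omitted here
        // The expectation that fails in the log: two traces were uploaded
        // instead of one, which the macro reports as "(2 != 1)".
        UNIT_ASSERT_VALUES_EQUAL(env.WilsonUploader->Traces.size(), 1);
    }
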
|73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TModifyUserTest::ModifyUser [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUser [GOOD] Test command err: 2025-04-04T18:08:28.088703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521485444790623:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.088831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000377/r3tmp/tmpwiofmi/pdisk_1.dat 2025-04-04T18:08:28.462648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:28.506209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.506355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.508706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4756 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T18:08:28.690837Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485444790882:2103] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:28.690894Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521485444791161:2257] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:28.691021Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489521485444790925:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T18:08:28.691061Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7489521485444790925:2123], path# /dc-1, domainOwnerId# 72057594046644480 2025-04-04T18:08:28.691302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-04-04T18:08:28.698205Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790579:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489521485444791167:2258] 2025-04-04T18:08:28.698302Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489521485444790579:2052] Subscribe: subscriber# [1:7489521485444791167:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T18:08:28.698416Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790576:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489521485444791166:2258] 2025-04-04T18:08:28.698433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790582:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7489521485444791168:2258] 2025-04-04T18:08:28.698444Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489521485444790576:2049] Subscribe: subscriber# [1:7489521485444791166:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T18:08:28.698453Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7489521485444790582:2055] Subscribe: subscriber# [1:7489521485444791168:2258], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-04-04T18:08:28.698510Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791167:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489521485444790579:2052] 2025-04-04T18:08:28.698549Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791166:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489521485444790576:2049] 2025-04-04T18:08:28.698550Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790579:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489521485444791167:2258] 2025-04-04T18:08:28.698566Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790576:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489521485444791166:2258] 2025-04-04T18:08:28.698567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791168:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7489521485444790582:2055] 2025-04-04T18:08:28.698580Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790582:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7489521485444791168:2258] 2025-04-04T18:08:28.698625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489521485444791164:2258] 2025-04-04T18:08:28.698650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489521485444791163:2258] 2025-04-04T18:08:28.698699Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7489521485444791162:2258][/dc-1] Set up state: owner# [1:7489521485444790925:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T18:08:28.698809Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7489521485444791165:2258] 2025-04-04T18:08:28.698873Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7489521485444791162:2258][/dc-1] Path was already updated: owner# [1:7489521485444790925:2123], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-04-04T18:08:28.698920Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791166:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791163:2258], cookie# 1 2025-04-04T18:08:28.698937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791167:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791164:2258], cookie# 1 2025-04-04T18:08:28.698952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791168:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791165:2258], cookie# 1 2025-04-04T18:08:28.702542Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790576:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791166:2258], cookie# 1 2025-04-04T18:08:28.702629Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790579:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791167:2258], cookie# 1 2025-04-04T18:08:28.702653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790582:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791168:2258], cookie# 1 2025-04-04T18:08:28.702702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791166:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489521485444790576:2049], cookie# 1 2025-04-04T18:08:28.702718Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7489521485444791167:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489521485444790579:2052], cookie# 1 2025-04-04T18:08:28.702731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791168:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489521485444790582:2055], cookie# 1 2025-04-04T18:08:28.702800Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489521485444791163:2258], cookie# 1 2025-04-04T18:08:28.702851Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T18:08:28.702877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489521485444791164:2258], cookie# 1 2025-04-04T18:08:28.702903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T18:08:28.702962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7489521485444791165:2258], cookie# 1 2025-04-04T18:08:28.702981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Unexpected sync response: sender# [1:7489521485444791165:2258], cookie# 1 2025-04-04T18:08:28.757268Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489521485444790925:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-04-04T18:08:28.757735Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489521485444790925:2123], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... by path# { Subscriber: { Subscriber: [1:7489521485444791162:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 15 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743790108956 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7489521485444791162:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 15 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743790108956 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-04-04T18:08:29.387605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046644480 2025-04-04T18:08:29.397062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710666 2025-04-04T18:08:29.397299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMwOSwiaWF0IjoxNzQzNzkwMTA5LCJzdWIiOiJ1c2VyMiJ9.2huJ5sChreaxzmkN0lVNV8gsEQ6YskXuoYUzgzNu15EuePWPcoYz6NZgUxVYQjQkX6xKD9RLEQ7iNMmzxIdWxB1hSWwAKFDpnxA1cE4PL5ebZoPbl8s4h59DZ_jSwL66uKimuB5ynkHeFVuqezC37UeLU_zERDbxm4Qv-0Ml_9X1hScHXceVxmGdJO0iCNi6DsrODRAYTiIvqrKInMUEAogiMoo3OroJjr-2u1AjFkhYhxzZ5JSWX3ydxVtfLeVDmHoMf-XSIYLjVja9z-SgDbzG3MIY_77HMGZABhZMn6p9SGCghUEL6WY1Rutyjy3KPnTj5uwIfcn7BvM1h2iqAg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMwOSwiaWF0IjoxNzQzNzkwMTA5LCJzdWIiOiJ1c2VyMiJ9.**" IsAdmin: true, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1 2025-04-04T18:08:29.397968Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485444790882:2103] Handle TEvNavigate describe path /dc-1 2025-04-04T18:08:29.398000Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489739758603:2376] HANDLE EvNavigateScheme /dc-1 2025-04-04T18:08:29.398111Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7489521485444790925:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-04-04T18:08:29.398201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7489521485444790925:2123], cookie# 16 2025-04-04T18:08:29.398270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791166:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791163:2258], cookie# 16 2025-04-04T18:08:29.398301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791167:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791164:2258], cookie# 16 2025-04-04T18:08:29.398318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7489521485444791168:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791165:2258], cookie# 16 2025-04-04T18:08:29.398343Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790576:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791166:2258], cookie# 16 2025-04-04T18:08:29.398519Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790579:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791167:2258], cookie# 16 2025-04-04T18:08:29.398559Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7489521485444790582:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7489521485444791168:2258], cookie# 16 2025-04-04T18:08:29.398589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791166:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7489521485444790576:2049], cookie# 16 2025-04-04T18:08:29.398617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791167:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7489521485444790579:2052], cookie# 16 2025-04-04T18:08:29.398651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7489521485444791168:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7489521485444790582:2055], cookie# 16 2025-04-04T18:08:29.398680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7489521485444791163:2258], cookie# 16 2025-04-04T18:08:29.398697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Sync is in progress: cookie# 16, size# 3, half# 1, successes# 1, faulires# 0 2025-04-04T18:08:29.398711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7489521485444791164:2258], cookie# 16 2025-04-04T18:08:29.398731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Sync is done: cookie# 16, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-04-04T18:08:29.398750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 13 Partial: 0 }: sender# [1:7489521485444791165:2258], cookie# 16 2025-04-04T18:08:29.398760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7489521485444791162:2258][/dc-1] Unexpected sync response: sender# [1:7489521485444791165:2258], cookie# 16 2025-04-04T18:08:29.398805Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7489521485444790925:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-04-04T18:08:29.398880Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7489521485444790925:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7489521485444791162:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743790108956 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 
SchemaVersion: 0 }, by pathId# nullptr 2025-04-04T18:08:29.398960Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7489521485444790925:2123], cacheItem# { Subscriber: { Subscriber: [1:7489521485444791162:2258] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743790108956 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 16 IsSync: true Partial: 0 } 2025-04-04T18:08:29.399127Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7489521489739758604:2377], recipient# [1:7489521489739758603:2376], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-04-04T18:08:29.399156Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489739758603:2376] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.399223Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489739758603:2376] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-04-04T18:08:29.400032Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489739758603:2376] Handle TEvDescribeSchemeResult Forward to# [1:7489521489739758602:2375] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 66 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743790108956 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\010\032\005user1 \003" EffectiveACL: "\n\016\010\001\020\200\010\032\005user1 \003" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 6 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4DLQkENolzD24hGxFd6t\nIpnaA6M++nIMGTOoAjskBIk3YVXIMVjRqeDOxf4lAoTsbJYk2lbb/SQXEz8WthD5\n0phEJroSpNjnvWlDv6E13FHvL+DbxFhOJtsA/K0b1T/w7htmeNehmRYahXQ/wZiV\ngNfz4wBxtuaZjJBgE6/Lb7TayFlZvKXvuJpIysQtwC0ELjv+IqF4H84IefQwyqu7\nLOPtHM36cqpPeDwwhkRmL9BchD+97ES4HltzSzBq0yoJB00iNLsbeEoNhZzAdDzu\nU9Pn3gvFr30zJzCjAPYDJJeo1noLy8ahLtjFqloHPqmPfV/3LKm+s5ZhSttUPsg7\nEwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743876509146 } Sids { Name: "user2" Type: USER } Sids { Name: "user1" Type: USER } Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743790108956 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\010\032\005user1 \003" EffectiveACL: "\n\016\010\001\020\200\010\032\005user1 \003" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743790108984 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "roo... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2025-04-04T18:08:26.971887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521476525211722:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:26.973147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/0001d4/r3tmp/tmpKshUiO/pdisk_1.dat 2025-04-04T18:08:27.401252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:27.402064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:27.402446Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30094, node 1 2025-04-04T18:08:27.424791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:27.427738Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:27.427929Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:27.547837Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:27.547864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:27.547872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:27.547985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6762 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:27.965781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:29.949231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> KqpPg::DropSequence [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xF9BEE90) TestOneRead(TBasicString>, TBasicString>)+4580 (0xF151154) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+130 (0xF157812) std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846) NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186) NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D) ??+0 (0x7F4AD9E99D90) __libc_start_main+128 (0x7F4AD9E99E40) _start+41 (0xD0AA029) >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xF9BEE90) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4134 (0xF14B8A6) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xF157558) std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858) 
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846) NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186) NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D) ??+0 (0x7FC69A340D90) __libc_start_main+128 (0x7FC69A340E40) _start+41 (0xD0AA029) >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DropSequence [GOOD] Test command err: Trying to start YDB, gRPC: 4819, MsgBus: 29960 2025-04-04T18:08:28.349170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521485442956224:2263];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.349362Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000233/r3tmp/tmpOeNiWK/pdisk_1.dat 2025-04-04T18:08:28.750795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.750900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.753352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4819, node 1 2025-04-04T18:08:28.787472Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:28.832409Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:28.834658Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:28.905396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:28.905435Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:28.905444Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:28.905551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29960 TClient is connected to server localhost:29960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:29.469061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:31.385447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498327858576:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.385447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498327858581:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.385569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.390367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:31.401600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521498327858590:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:31.482507Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498327858641:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.534384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.661953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.705377Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498327858773:2416] txid# 281474976710662, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-04-04T18:08:31.715722Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjczMDEzZjctMjZmOGRkMDgtZDY5OTdhMGMtNTAzYjJjNzc=, ActorId: [1:7489521498327858555:2328], ActorState: ExecuteState, TraceId: 01jr0ztyyd28y7a41pd19wmp7a, Create QueryResponse for error on request, msg: 2025-04-04T18:08:31.736481Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498327858784:2421] txid# 281474976710664, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> KqpPg::TempTablesWithCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xF9BEE90) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4134 (0xF14B8A6) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xF157248) std::__y1::__function::__func, void ()>::operator()()+280 (0xF168858) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0xF9EC5E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xF9C58B9) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1094 (0xF167846) NUnitTest::TTestFactory::Execute()+2438 (0xF9C7186) NUnitTest::RunMain(int, char**)+5213 (0xF9E6F1D) ??+0 (0x7FD2EAD40D90) __libc_start_main+128 (0x7FD2EAD40E40) _start+41 (0xD0AA029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] Test command err: Starting YDB, grpc: 30228, msgbus: 23766 2025-04-04T18:08:28.674996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521487344212526:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.675048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00021d/r3tmp/tmp7vPkcg/pdisk_1.dat 
2025-04-04T18:08:29.185330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:29.185456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:29.188774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:29.189891Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30228, node 1 2025-04-04T18:08:29.334062Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.334089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.334097Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.334232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23766 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T18:08:29.558519Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.558598Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180556:2440] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.560008Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180556:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.609357Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180556:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.621806Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180556:2440] Handle TEvDescribeSchemeResult Forward to# [1:7489521491639180555:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-04-04T18:08:29.649864Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvProposeTransaction 2025-04-04T18:08:29.649918Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.650047Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521491639180569:2446] 2025-04-04T18:08:29.743249Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.743339Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.743363Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.743451Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.743848Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.744015Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.744091Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.744376Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.745141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.747484Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.747584Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180569:2446] txid# 281474976710657 SEND to# [1:7489521491639180568:2445] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T18:08:29.762634Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvProposeTransaction 2025-04-04T18:08:29.762685Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.762726Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521491639180623:2487] 2025-04-04T18:08:29.765159Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.765254Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.765271Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.765319Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.765656Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.765764Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.765816Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.765954Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.766332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.768745Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.768789Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491639180623:2487] txid# 281474976710658 SEND to# [1:7489521491639180622:2486] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.453167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521500229115297:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.453172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521500229115305:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.453318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.453579Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvProposeTransaction 2025-04-04T18:08:31.453607Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04 ... c-1" 2025-04-04T18:08:31.537219Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.537235Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T18:08:31.537857Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T18:08:31.537964Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.538136Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.538346Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.538401Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:31.538552Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:31.542031Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:31.543413Z node 1 :TX_PROXY ERROR: Actor# [1:7489521500229115384:2570] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.543452Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115384:2570] txid# 281474976710660 SEND to# [1:7489521500229115311:2339] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:31.722408Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvProposeTransaction 2025-04-04T18:08:31.722462Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:31.722503Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521500229115409:2582] 2025-04-04T18:08:31.725175Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35306" 2025-04-04T18:08:31.725250Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.725269Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.725337Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.725628Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.725741Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.725790Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:31.725932Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:31.734133Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:31.734187Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115409:2582] txid# 281474976710661 SEND to# [1:7489521500229115408:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:31.766165Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvProposeTransaction 2025-04-04T18:08:31.766201Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:31.766698Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521500229115431:2596] 2025-04-04T18:08:31.768719Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35320" 2025-04-04T18:08:31.768778Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.768794Z node 1 :TX_PROXY 
DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.768840Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.769099Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.769187Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.769231Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:31.769346Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:31.769835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.777363Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:31.777445Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115431:2596] txid# 281474976710662 SEND to# [1:7489521500229115430:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:31.816140Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvProposeTransaction 2025-04-04T18:08:31.816172Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:31.816223Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521500229115471:2618] 2025-04-04T18:08:31.818720Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115471:2618] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.d34un70Hqt5lDP7HopoTW5WnDUXRzMhT6gnEkQkWjBRseRJ5PRAB_E9pxeSVS6sIwxpN37m_cT-25G_s1t2jUbksA_J_MK7ewG3fb7kQOjH58yPY6o6L9sNrSI5qCU8cDhAhE1buPRh1oLem1IHBO1vvD8h7DrEb7tZeJ-IaPO5TBEW3AEwO9120GVdSGK0b_dW-dA5UNhYefO-JtXP3Bygd7jw26jjKHxdNKHUcF1XQmfXVYuATp7DjwtKgoffnntt9WwLdcClLL8JEvlYnkg08O7jmODoxo7DAdenF_6pdXUL0kRfPeIyHLJjWYnww1bsMt3BjKc73cbC8nY8A7Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35344" 2025-04-04T18:08:31.818784Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115471:2618] txid# 281474976710663 Bootstrap, UserSID: 
ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.818818Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115471:2618] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:31.818876Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115471:2618] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.819287Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115471:2618] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.819328Z node 1 :TX_PROXY ERROR: Actor# [1:7489521500229115471:2618] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-04-04T18:08:31.819445Z node 1 :TX_PROXY ERROR: Actor# [1:7489521500229115471:2618] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T18:08:31.819485Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500229115471:2618] txid# 281474976710663 SEND to# [1:7489521500229115470:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T18:08:31.829583Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjNiNjk3MS01NDNjNzVhYy05ZjA2YTczNy00NzczY2RkMA==, ActorId: [1:7489521500229115456:2352], ActorState: ExecuteState, TraceId: 01jr0ztz1xaebqamhr4q1fszwh, Create QueryResponse for error on request, msg: 2025-04-04T18:08:31.829935Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] Handle TEvExecuteKqpTransaction 2025-04-04T18:08:31.829968Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487344212783:2131] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] Test command err: Starting YDB, grpc: 29773, msgbus: 11407 2025-04-04T18:08:28.393019Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521486952941343:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.393103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000231/r3tmp/tmpGvinFO/pdisk_1.dat 2025-04-04T18:08:28.939937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.940050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.944884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:28.984748Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29773, node 1 2025-04-04T18:08:29.043782Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:29.053733Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:29.073438Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.073474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.073482Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.073605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:11407 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T18:08:29.318283Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.318359Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909384:2441] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.319742Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909384:2441] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.358874Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909384:2441] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.370859Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909384:2441] Handle TEvDescribeSchemeResult Forward to# [1:7489521491247909383:2440] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T18:08:29.397785Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Handle TEvProposeTransaction 2025-04-04T18:08:29.397817Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.397920Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521491247909406:2449] 2025-04-04T18:08:29.499438Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.499524Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.499540Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.499596Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.500032Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.500203Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.500311Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.500512Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.501295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.503615Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.503671Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909406:2449] txid# 281474976710657 SEND to# [1:7489521491247909405:2448] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T18:08:29.519368Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Handle TEvProposeTransaction 2025-04-04T18:08:29.519397Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.519431Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521491247909449:2488] 2025-04-04T18:08:29.521747Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.521820Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.521856Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.521908Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.522191Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.522300Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.522345Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.522474Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.522977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.525425Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.525472Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491247909449:2488] txid# 281474976710658 SEND to# [1:7489521491247909448:2487] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.337738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499837844125:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.337745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499837844133:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.337861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.338182Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] H ... m CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.429474Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T18:08:31.430010Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T18:08:31.430101Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.430320Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.430418Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.430456Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:31.430597Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:31.433430Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:31.435234Z node 1 :TX_PROXY ERROR: Actor# [1:7489521499837844212:2570] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.435288Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844212:2570] txid# 281474976710660 SEND to# [1:7489521499837844139:2339] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:31.648693Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.648743Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:31.648794Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521499837844235:2581] 2025-04-04T18:08:31.651587Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43062" 2025-04-04T18:08:31.651655Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.651672Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.651721Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.652051Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.652162Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.652242Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:31.652445Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:31.659461Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:31.659519Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844235:2581] txid# 281474976710661 SEND to# [1:7489521499837844234:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:31.759333Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.759376Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:31.759448Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521499837844258:2595] 2025-04-04T18:08:31.761848Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43072" 2025-04-04T18:08:31.761916Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.761933Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.762009Z node 1 :TX_PROXY 
DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.762367Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.762520Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.762584Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:31.762809Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:31.763248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.765573Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:31.765645Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844258:2595] txid# 281474976710662 SEND to# [1:7489521499837844257:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:31.813012Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.813051Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:31.813098Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486952941586:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521499837844296:2615] 2025-04-04T18:08:31.815575Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.hNxFiEvOQxIeSqZkpRLpttgwmZhQimX_dA9-qBV9YCwPuEtA1dt_a4fI42obTuWNY7CF-EZ3rAYM7tSdTPhtmXlW42fNPVH6H2hhMFp1QDi0Q-9t4O55Ct23YBKeD8fpkkXsHx936MfyI8_n_SSFmSR3QFzIZjVlJHjzzl0OPDXcKq8iJnke4g3KMzsivOMsSr3JfO2b64l6puXUrOeT3xnKEDunN5okHrYZ1RT0lJDHdOm7VnFbCHZNnBtKTLVu6Ty9V3Fm8LnqLxtxIcqVET3AYxW7owySRaTCH16V059NvcrBAdDN-xXjWvZMMWpxnnpiJ-spuB4rljNz4vAR1w\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:43088" 2025-04-04T18:08:31.815644Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.815673Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 
281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:31.815723Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.816010Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.816085Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.816124Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-04T18:08:31.816293Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 HANDLE EvClientConnected 2025-04-04T18:08:31.836362Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-04T18:08:31.836412Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499837844296:2615] txid# 281474976710663 SEND to# [1:7489521499837844295:2352] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 24805, msgbus: 23525 2025-04-04T18:08:28.605647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521487752414702:2073];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.605719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00020f/r3tmp/tmpCV6iMd/pdisk_1.dat 2025-04-04T18:08:29.146632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:29.185912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:29.186030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24805, node 1 2025-04-04T18:08:29.197949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:29.274814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.274842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.274849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.274959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23525 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T18:08:29.499864Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.499935Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382724:2437] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.501005Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382724:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.544935Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382724:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } TClient::Ls response: 2025-04-04T18:08:29.567316Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382724:2437] Handle TEvDescribeSchemeResult Forward to# [1:7489521492047382723:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T18:08:29.591093Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvProposeTransaction 2025-04-04T18:08:29.591125Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.591230Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521492047382737:2443] 2025-04-04T18:08:29.673876Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.673962Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:29.673978Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.674046Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.674442Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.674574Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.674668Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.674835Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.675731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.678186Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.678265Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382737:2443] txid# 281474976710657 SEND to# [1:7489521492047382736:2442] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T18:08:29.691825Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvProposeTransaction 2025-04-04T18:08:29.691856Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.691896Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521492047382790:2482] 2025-04-04T18:08:29.694448Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.694523Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:29.694540Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.694607Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.694943Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.695079Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.695158Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.695312Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.695840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.698159Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.698207Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492047382790:2482] txid# 281474976710658 SEND to# [1:7489521492047382789:2481] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.402066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521500637317463:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.402071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521500637317471:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.402175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.402461Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvProposeTransaction 2025-04-04T18:08:31.402491Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04 ... txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T18:08:31.518465Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317551:2566] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.518643Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317551:2566] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.518766Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317551:2566] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.518836Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317551:2566] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:31.518975Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317551:2566] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:31.521612Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317551:2566] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:31.522874Z node 1 :TX_PROXY ERROR: Actor# [1:7489521500637317551:2566] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.522904Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317551:2566] txid# 281474976710660 SEND to# [1:7489521500637317477:2339] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:31.798571Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvProposeTransaction 2025-04-04T18:08:31.798612Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:31.798659Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521500637317580:2579] 2025-04-04T18:08:31.801385Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44570" 2025-04-04T18:08:31.801466Z node 1 
:TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:31.801509Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.801565Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.802033Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.802163Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.802257Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:31.802548Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:31.810684Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:31.810727Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317580:2579] txid# 281474976710661 SEND to# [1:7489521500637317579:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:31.928356Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvProposeTransaction 2025-04-04T18:08:31.928388Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:31.928431Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521500637317600:2593] 2025-04-04T18:08:31.930811Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44574" 2025-04-04T18:08:31.930875Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:31.930892Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.930942Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.931249Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2025-04-04T18:08:31.931336Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.931398Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:31.931595Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:31.932022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.934155Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:31.934196Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317600:2593] txid# 281474976710662 SEND to# [1:7489521500637317599:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:31.975898Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvProposeTransaction 2025-04-04T18:08:31.975934Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:31.975988Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521500637317641:2616] 2025-04-04T18:08:31.977653Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.P-Y1QOFDL2XwAiQVElzgB5JapU6-v56fAAp8-fXmFCXXFGGMwy9-ALOWOnh2DLbWqq2Rja0Lm6rhdvQMh4jUwK3wdKxlTdf3cBqw3jTlD556u8oAMrVGJUymQLxs1lCqGFz_0bD6lzaLtVTgW6y7MBIQw8_QeFBE1dxUdZdq-iIH5PU0U-ud5e5s9XWLHfshvEMZ-eJNQ58T-XWULD2Jys9qeNSugrK83pYogOO5mLhO8_4iHL0qJwZNgn0WtfzJTGRKvcEi_2D_J5GoLaK2xtj9i59feqpVRmARyX3TJiH5qxAFJ8rbsDkl-23eOzx_Wpy2M7GKTeZeQEcRXG6Pqg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44594" 2025-04-04T18:08:31.977703Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:31.977716Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:31.977815Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T18:08:31.977843Z node 1 :TX_PROXY 
DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T18:08:31.977875Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.978126Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.978155Z node 1 :TX_PROXY ERROR: Actor# [1:7489521500637317641:2616] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-04-04T18:08:31.978233Z node 1 :TX_PROXY ERROR: Actor# [1:7489521500637317641:2616] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T18:08:31.978279Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521500637317641:2616] txid# 281474976710663 SEND to# [1:7489521500637317640:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T18:08:31.988222Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjU5MDFmMzctNjgzYTlhNTEtODE5Yzk0MTQtMzg0YzIyMTE=, ActorId: [1:7489521500637317626:2352], ActorState: ExecuteState, TraceId: 01jr0ztz6w9nqwya17nf8vcay0, Create QueryResponse for error on request, msg: 2025-04-04T18:08:31.988585Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] Handle TEvExecuteKqpTransaction 2025-04-04T18:08:31.988637Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487752414964:2138] TxId# 281474976710664 ProcessProposeKqpTransaction |75.0%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |75.0%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] Test command err: Starting YDB, grpc: 24680, msgbus: 17243 2025-04-04T18:08:28.539236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521485272879248:2276];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.539733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00023a/r3tmp/tmpDq9uwj/pdisk_1.dat 2025-04-04T18:08:28.941129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:28.957105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.957211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.965085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24680, node 1 2025-04-04T18:08:29.078853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.078877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.078888Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.078999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17243 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T18:08:29.330315Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.330369Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847081:2438] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.331555Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847081:2438] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.374768Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847081:2438] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.385336Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847081:2438] Handle TEvDescribeSchemeResult Forward to# [1:7489521489567847080:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T18:08:29.403397Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvProposeTransaction 2025-04-04T18:08:29.403426Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.403566Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521489567847094:2444] 2025-04-04T18:08:29.478880Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.478999Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.479027Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.479089Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.479568Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.479813Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.479898Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.480107Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.480872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.483133Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.483199Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847094:2444] txid# 281474976710657 SEND to# [1:7489521489567847093:2443] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T18:08:29.496983Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvProposeTransaction 2025-04-04T18:08:29.497014Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.497076Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521489567847136:2482] 2025-04-04T18:08:29.499728Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.499794Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.499812Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.499891Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.500286Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.500436Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.500492Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.500640Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.501144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.503375Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.503425Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489567847136:2482] txid# 281474976710658 SEND to# [1:7489521489567847135:2481] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.557322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498157781825:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.557444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.557547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498157781836:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.557893Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.557914Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04 ... 725Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.641743Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T18:08:31.642612Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T18:08:31.642728Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.642925Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.643082Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.643176Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:31.643319Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:31.646652Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:31.648474Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498157781912:2570] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.648510Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781912:2570] txid# 281474976710660 SEND to# [1:7489521498157781839:2340] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:31.829327Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.829365Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:31.829416Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521498157781938:2581] 2025-04-04T18:08:31.831793Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 Bootstrap EvSchemeRequest 
record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45886" 2025-04-04T18:08:31.831855Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.831873Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.831940Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.832339Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.832486Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.832544Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:31.832783Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:31.839985Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:31.840035Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781938:2581] txid# 281474976710661 SEND to# [1:7489521498157781937:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:31.904759Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.904785Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:31.904824Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521498157781958:2595] 2025-04-04T18:08:31.906965Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45888" 2025-04-04T18:08:31.907025Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.907043Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7489521498157781958:2595] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.907097Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.907375Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.907543Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.907617Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:31.907763Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:31.908171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.910200Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:31.910265Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157781958:2595] txid# 281474976710662 SEND to# [1:7489521498157781957:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:31.951143Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.951171Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:31.951221Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521498157782001:2620] 2025-04-04T18:08:31.953756Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157782001:2620] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.iIr1HjR6QJCtJUjn6ccsKmL0wfDAKc9qQPO7ReqlO3r3mqZ0x07G8eSAoniG7sL0UyxSyacw0l4sv21xhQUhepycDl_nMmZ_kQU0hf0n1k3vIUFTF3PNE0BUSWXXsl6-khV1XJATpDkpqhO_hfHDGVKbhRGJAbLBGFlJTgSUS3YHXEEkRwJFmPDlV2rVHbRx6VBJL2Uoh2FxDMTahTU6z6ynOlzlycasLfHmq28vWecfRVmWdn5g2g1yJNhBRF5gE0n7CEnZa3P94AUtA_i6sVBdTciWdpH345_igXHaGdvqUJOctcP8o7B-102ETF93JZ9Wlri6Bh2ddxwYm6Cvgg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:45918" 2025-04-04T18:08:31.953816Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157782001:2620] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser 
CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.953832Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157782001:2620] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:31.954085Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157782001:2620] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.954670Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157782001:2620] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.954729Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498157782001:2620] txid# 281474976710663, Access denied for ordinaryuser on path /dc-1, with access AlterSchema 2025-04-04T18:08:31.954867Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498157782001:2620] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-04T18:08:31.954933Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498157782001:2620] txid# 281474976710663 SEND to# [1:7489521498157782000:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T18:08:31.963289Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjc3NmQ2MGQtODRkZjUwODktZmQxZGI2NGYtNWRmMDE3ZmI=, ActorId: [1:7489521498157781983:2352], ActorState: ExecuteState, TraceId: 01jr0ztz6303pbbb3jj01kaczc, Create QueryResponse for error on request, msg: 2025-04-04T18:08:31.963828Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] Handle TEvExecuteKqpTransaction 2025-04-04T18:08:31.963859Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485272879283:2115] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] Test command err: Starting YDB, grpc: 26024, msgbus: 6955 2025-04-04T18:08:28.466577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521486996722262:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.496267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00023f/r3tmp/tmpQ4L7Kj/pdisk_1.dat 2025-04-04T18:08:28.967505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.967612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.970953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:28.985449Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26024, node 1 2025-04-04T18:08:29.118826Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:29.118900Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:29.138770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.138790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.138795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.138885Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:6955 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T18:08:29.385962Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.386009Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690285:2440] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.387109Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690285:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.424994Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690285:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.435615Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690285:2440] Handle TEvDescribeSchemeResult Forward to# [1:7489521491291690284:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T18:08:29.456425Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Handle TEvProposeTransaction 2025-04-04T18:08:29.456457Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.456551Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521491291690298:2446] 2025-04-04T18:08:29.544175Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.544311Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.544343Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.544441Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.544825Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.545004Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.545142Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.545337Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.546177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.548738Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.548824Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690298:2446] txid# 281474976710657 SEND to# [1:7489521491291690297:2445] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T18:08:29.564784Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Handle TEvProposeTransaction 2025-04-04T18:08:29.564809Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.564894Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521491291690350:2487] 2025-04-04T18:08:29.567016Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.567069Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.567082Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.567133Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.567466Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.567579Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.567642Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.567806Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.568346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.570845Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.570905Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491291690350:2487] txid# 281474976710658 SEND to# [1:7489521491291690349:2486] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.479497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499881625033:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.479502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499881625026:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.479625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.480109Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Han ... :7489521499881625111:2567] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.597387Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T18:08:31.597983Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T18:08:31.598078Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.598417Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.598609Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.598666Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:31.598904Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:31.602135Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:31.604025Z node 1 :TX_PROXY ERROR: Actor# [1:7489521499881625111:2567] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.604076Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625111:2567] txid# 281474976710660 SEND to# [1:7489521499881625040:2339] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:31.823882Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Handle TEvProposeTransaction 2025-04-04T18:08:31.823915Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:31.823958Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521499881625137:2578] 2025-04-04T18:08:31.825617Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } 
UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:34894" 2025-04-04T18:08:31.825661Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.825676Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.825884Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.826086Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.826161Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.826201Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:31.826368Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:31.833229Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:31.833275Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625137:2578] txid# 281474976710661 SEND to# [1:7489521499881625136:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:31.896090Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Handle TEvProposeTransaction 2025-04-04T18:08:31.896120Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:31.896169Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521499881625157:2592] 2025-04-04T18:08:31.898371Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:34902" 2025-04-04T18:08:31.898419Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.898435Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.898485Z node 1 
:TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.898736Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.898895Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.898961Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:31.899109Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:31.899526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.902353Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:31.902399Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625157:2592] txid# 281474976710662 SEND to# [1:7489521499881625156:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:31.948256Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Handle TEvProposeTransaction 2025-04-04T18:08:31.948299Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:31.948343Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521499881625193:2610] 2025-04-04T18:08:31.950682Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625193:2610] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.eqSiK6VymyYlLn9KiOwd9wEYDuAjNrUgF0TnEVPPkQtky9q_B5wrz_aJi14rAZwvAgHQsLgw2txnl8B4HiAbZJCNqho0FrKaUH3a4AaUOOofAfVdmwkis1Wj7VO16uhLqKz2LdTbRwTU4gwhaJvoc62p6gB2wEVbWx_620EkNiYfdvRMvZyBSNeAFydLesUqh-nIFoPV5MQQU_2FN1ipuvkJZ2bJPS4XJ_HLUChpVXAsHuDfVCh4M7dSIWlK9SoLPUETMygBofkMEvIPjK8hEdqeRy1vBBEgqKB2Jodacf760MQaX_XubqtYc5jUldlPlvL7ezf3ctOGgEIB_mkc1Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMSwiaWF0IjoxNzQzNzkwMTExLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:34928" 2025-04-04T18:08:31.950755Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625193:2610] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-04-04T18:08:31.950772Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625193:2610] txid# 
281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:31.950834Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625193:2610] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.951124Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625193:2610] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.951171Z node 1 :TX_PROXY ERROR: Actor# [1:7489521499881625193:2610] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-04-04T18:08:31.951299Z node 1 :TX_PROXY ERROR: Actor# [1:7489521499881625193:2610] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T18:08:31.951329Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499881625193:2610] txid# 281474976710663 SEND to# [1:7489521499881625192:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T18:08:31.963506Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzVkZTg5NGItYzA0ZjI4ZjUtM2NiMTJmNDgtZTBmMzI4MjM=, ActorId: [1:7489521499881625178:2352], ActorState: ExecuteState, TraceId: 01jr0ztz5z9j0ttrdn09506ah0, Create QueryResponse for error on request, msg: 2025-04-04T18:08:31.963878Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] Handle TEvExecuteKqpTransaction 2025-04-04T18:08:31.963953Z node 1 :TX_PROXY DEBUG: actor# [1:7489521486996722457:2112] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] Test command err: Starting YDB, grpc: 6808, msgbus: 17969 2025-04-04T18:08:28.466564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521485467661462:2212];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.466950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000226/r3tmp/tmpZflmLS/pdisk_1.dat 2025-04-04T18:08:28.890895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.891020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.898786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:28.924930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6808, node 1 2025-04-04T18:08:28.981512Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:28.981540Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:29.040341Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.040374Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.040387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.040643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17969 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T18:08:29.307994Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.308073Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629348:2444] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.309316Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629348:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.352504Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629348:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.366958Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629348:2444] Handle TEvDescribeSchemeResult Forward to# [1:7489521489762629347:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
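The TClient::Ls request/response exchange above is the readiness probe behind "WaitRootIsUp 'dc-1'": the harness repeatedly describes the root path until the schemeshard reports it as created. A minimal sketch of that kind of poll loop, with a stubbed DescribePath standing in for the real describe RPC — the actual test-client API is not visible in this log, so every name below is illustrative:

```cpp
// Sketch only: the poll pattern behind "WaitRootIsUp". DescribePath() is a
// stub for the real scheme-describe request; names are illustrative, not
// YDB's test-client API.
#include <chrono>
#include <optional>
#include <string>
#include <thread>

struct TDescribeResult {
    std::string Name;
    bool CreateFinished = false;   // mirrors CreateFinished in the response
};

// Stub: a real implementation would issue the describe request seen above
// and parse the returned PathDescription.
std::optional<TDescribeResult> DescribePath(const std::string& path) {
    return TDescribeResult{path, true};
}

bool WaitRootIsUp(const std::string& root, int attempts = 120) {
    using namespace std::chrono_literals;
    for (int i = 0; i < attempts; ++i) {
        if (auto d = DescribePath(root); d && d->CreateFinished)
            return true;   // the log then prints: WaitRootIsUp 'dc-1' success.
        std::this_thread::sleep_for(250ms);
    }
    return false;
}
```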
2025-04-04T18:08:29.397676Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Handle TEvProposeTransaction 2025-04-04T18:08:29.397709Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.397906Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521489762629363:2452] 2025-04-04T18:08:29.502822Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.502911Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:29.502934Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.503005Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.503405Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.503604Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.503739Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.503930Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.504705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.506902Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.506993Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629363:2452] txid# 281474976710657 SEND to# [1:7489521489762629362:2451] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
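Every scheme change in these blocks follows the same proxy pipeline: Handle TEvProposeTransaction, allocate a txid, resolve the target schemeshard through the SchemeCache (TEvNavigateKeySet), send TEvModifySchemeTransaction to tablet 72057594046644480, then forward a TEvProposeTransactionStatus to the caller. The numeric statuses visible in the log (53 after StatusAccepted, 48 after StatusSuccess or StatusAlreadyExists, 5 on access denial) suggest schemeshard replies being folded into a coarser caller-facing result; a hedged reconstruction, with the values read off the log rather than taken from the sources:

```cpp
// Sketch of folding schemeshard reply codes into the "Status# NN" values
// seen in TEvProposeTransactionStatus above. The numbers (53 = in progress,
// 48 = done, 5 = error) are read off this log and are illustrative.
#include <cstdio>

enum class ESchemeStatus { Accepted, Success, AlreadyExists, AccessDenied };

enum class EProposeResult { InProgress = 53, Done = 48, Error = 5 };

// StatusAlreadyExists is tolerated for idempotent creates such as
// /dc-1/.metadata/workload_manager/pools/default, where the log says
// "path exist, request accepts it".
EProposeResult FoldStatus(ESchemeStatus s) {
    switch (s) {
        case ESchemeStatus::Accepted:      return EProposeResult::InProgress;
        case ESchemeStatus::Success:
        case ESchemeStatus::AlreadyExists: return EProposeResult::Done;
        case ESchemeStatus::AccessDenied:  return EProposeResult::Error;
    }
    return EProposeResult::Error;
}

int main() {
    std::printf("%d\n", static_cast<int>(FoldStatus(ESchemeStatus::AlreadyExists)));
}
```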
2025-04-04T18:08:29.523461Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Handle TEvProposeTransaction 2025-04-04T18:08:29.523513Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.523553Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521489762629413:2491] 2025-04-04T18:08:29.526125Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.526189Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:29.526204Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.526290Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.526618Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.526736Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.526787Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.526944Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.527390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.529761Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.529809Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489762629413:2491] txid# 281474976710658 SEND to# [1:7489521489762629412:2490] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.548685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498352564091:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.548693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498352564103:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.548800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.550356Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Han ... marker# P6 2025-04-04T18:08:31.644028Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564179:2576] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.644215Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564179:2576] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.644369Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564179:2576] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.644418Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564179:2576] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:31.644591Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564179:2576] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:31.648544Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564179:2576] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:31.650630Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498352564179:2576] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.650729Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564179:2576] txid# 281474976710660 SEND to# [1:7489521498352564105:2340] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:31.822626Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Handle TEvProposeTransaction 2025-04-04T18:08:31.822662Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:31.822725Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521498352564206:2588] 2025-04-04T18:08:31.825197Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58002" 2025-04-04T18:08:31.825266Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:31.825297Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7489521498352564206:2588] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.825350Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.825767Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.825858Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.825924Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:31.826151Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:31.833741Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:31.833792Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521498352564206:2588] txid# 281474976710661 SEND to# [1:7489521498352564205:2333] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:32.094679Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Handle TEvProposeTransaction 2025-04-04T18:08:32.094718Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:32.094783Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521502647531528:2605] 2025-04-04T18:08:32.097140Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58018" 2025-04-04T18:08:32.097225Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:32.097262Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:32.097320Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.097696Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.097800Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] HANDLE EvNavigateKeySetResult, txid# 281474976710662 
shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.097855Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:32.098045Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:32.098541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.101103Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:32.101153Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531528:2605] txid# 281474976710662 SEND to# [1:7489521502647531527:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:32.133931Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Handle TEvProposeTransaction 2025-04-04T18:08:32.133960Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:32.134007Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521502647531568:2627] 2025-04-04T18:08:32.136467Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.OW38fELG-AOHN7vt9RFhIpzpSO1rxUI0zBjOpfRLZ1PKNOdbp60h69f7IGiUppjfKWOMgELllOKlGDcG4xZYhaixAnHa64OcuKU-oNov5V5ztHD152KA2wUAsJLXbxnz1WJY2WUBB5aGGerjSjAojw-XBiTS1Tzre2qT5ybyAxIa3j_jct8jndJ2GfNI5eOJWk8SnfQif-jnwyFJE1-IRQfi3saVf6kFjKUkobgqH89dVwTocmi3zmXr418236kVXOAUtOz1FtZU2YSBnLoRdtx53xGDFtJutdrAGCT8uaASpIbgXR8x7iJdOT3GzvP9q1zh8ZFQ7XKeVElMLkkMLw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58034" 2025-04-04T18:08:32.136550Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:32.136570Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:32.136735Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T18:08:32.136781Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 
CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-04-04T18:08:32.136819Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.137042Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.137084Z node 1 :TX_PROXY ERROR: Actor# [1:7489521502647531568:2627] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-04-04T18:08:32.137196Z node 1 :TX_PROXY ERROR: Actor# [1:7489521502647531568:2627] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-04-04T18:08:32.137235Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502647531568:2627] txid# 281474976710663 SEND to# [1:7489521502647531567:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T18:08:32.144253Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUwZGU5YS0xYWRiY2IxYS1iNDM3MjEyNy00MDc1YzJjZA==, ActorId: [1:7489521502647531553:2352], ActorState: ExecuteState, TraceId: 01jr0ztzbx0dkv95km1dgaz8e6, Create QueryResponse for error on request, msg: 2025-04-04T18:08:32.144548Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] Handle TEvExecuteKqpTransaction 2025-04-04T18:08:32.144579Z node 1 :TX_PROXY DEBUG: actor# [1:7489521485467661543:2114] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] Test command err: Starting YDB, grpc: 16500, msgbus: 8108 2025-04-04T18:08:28.724228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521487011668060:2207];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.724479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000237/r3tmp/tmpEBwOQE/pdisk_1.dat 2025-04-04T18:08:29.231324Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:29.254166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:29.254432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:29.259931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16500, node 1 2025-04-04T18:08:29.374352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.374385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.374400Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.374554Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8108 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T18:08:29.606362Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.606441Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635945:2437] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.607704Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635945:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.660920Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635945:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.672412Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635945:2437] Handle TEvDescribeSchemeResult Forward to# [1:7489521491306635944:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
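The CreateUser-16 block that begins here later transfers ownership of /dc-1 to a dbadmin user, who then creates targetuser successfully (IsDatabaseAdministrator: 1), while ordinaryuser's attempts in the earlier blocks were rejected with "Access denied ..., attempt to manage user". The decision is spelled out by the log fields CheckAdministrator, CheckDatabaseAdministrator, IsClusterAdministrator, IsDatabaseAdministrator and DatabaseOwner; a sketch of the rule those fields imply — the real check lives in the tx proxy sources and is not reproduced in this log:

```cpp
// Sketch of the AlterLogin permission decision visible in these logs: allow
// a cluster administrator, or (when database-admin checks are enabled) the
// database administrator/owner; deny everyone else. Field names follow the
// log lines; the logic is inferred from the observed outcomes only.
#include <string>

struct TLoginRequestCtx {
    std::string UserSID;
    std::string DatabaseOwner;
    bool CheckAdministrator = false;         // feature flags from the log
    bool CheckDatabaseAdministrator = false;
    bool IsClusterAdministrator = false;     // resolved per request
    bool IsDatabaseAdministrator = false;
};

bool MayManageUsers(const TLoginRequestCtx& c) {
    if (!c.CheckAdministrator)
        return true;                         // protection disabled
    if (c.IsClusterAdministrator)
        return true;                         // root@builtin in these tests
    if (c.CheckDatabaseAdministrator &&
        (c.IsDatabaseAdministrator || c.UserSID == c.DatabaseOwner))
        return true;                         // dbadmin owns /dc-1 in this block
    return false;                            // ordinaryuser is rejected
}
```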
2025-04-04T18:08:29.688070Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Handle TEvProposeTransaction 2025-04-04T18:08:29.688118Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.688241Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521491306635958:2443] 2025-04-04T18:08:29.811184Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.811264Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:29.811282Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.811334Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.811683Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.811918Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.812018Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.812247Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.813125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.815487Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.815575Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306635958:2443] txid# 281474976710657 SEND to# [1:7489521491306635957:2442] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
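The ESchemeOpModifyACL records in these blocks carry DiffACL as escaped protobuf bytes (for example the grants to ordinaryuser and, later in this block, NewOwner: "dbadmin"). The exact wire schema cannot be recovered from the log; the following is only a readable stand-in for what such a diff appears to carry — per-SID grant entries with an access mask, plus an optional owner change:

```cpp
// Readable stand-in for the serialized DiffACL payloads in this log; the
// real format is a protobuf whose schema is not shown here, so this model
// is an assumption for illustration only.
#include <cstdint>
#include <string>
#include <utility>
#include <vector>

struct TACLDiffEntry {
    enum class EAction { Grant, Deny } Action = EAction::Grant;
    uint32_t AccessMask = 0;   // the varint-encoded mask bytes in the log
    std::string SID;           // "ordinaryuser", "dbadmin", "root@builtin"
};

struct TDiffACL {
    std::vector<TACLDiffEntry> Entries;
    std::string NewOwner;      // a later ModifyACL sets NewOwner: "dbadmin"

    TDiffACL& Grant(uint32_t mask, std::string sid) {
        Entries.push_back({TACLDiffEntry::EAction::Grant, mask, std::move(sid)});
        return *this;
    }
};
```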
2025-04-04T18:08:29.830540Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Handle TEvProposeTransaction 2025-04-04T18:08:29.830577Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.830611Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521491306636016:2487] 2025-04-04T18:08:29.832871Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.832938Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:29.832953Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.833066Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.833363Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.833490Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.833542Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.833730Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.834139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.836445Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.836488Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521491306636016:2487] txid# 281474976710658 SEND to# [1:7489521491306636015:2486] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.579973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499896570689:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.579977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499896570700:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.580112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.580467Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Handle TEvProposeTransaction 2025-04-04T18:08:31.580495Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T1 ... 4root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56140" 2025-04-04T18:08:31.941078Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:31.941095Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:31.941158Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:31.941469Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:31.941565Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:31.941614Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:31.941809Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:31.949553Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:31.949607Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521499896570802:2580] txid# 281474976710661 SEND to# [1:7489521499896570801:2332] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:32.053120Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Handle TEvProposeTransaction 2025-04-04T18:08:32.053156Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:32.053205Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521504191538121:2597] 2025-04-04T18:08:32.055140Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56150" 2025-04-04T18:08:32.055233Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2025-04-04T18:08:32.055251Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:32.055300Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.055570Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.055653Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.055697Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:32.055841Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:32.056273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.059172Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:32.059261Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538121:2597] txid# 281474976710662 SEND to# [1:7489521504191538120:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:32.067109Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Handle TEvProposeTransaction 2025-04-04T18:08:32.067144Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:32.067192Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521504191538139:2612] 2025-04-04T18:08:32.069459Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56164" 2025-04-04T18:08:32.069526Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:32.069543Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:32.069591Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.069911Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.070006Z 
node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.070056Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-04T18:08:32.070198Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 HANDLE EvClientConnected 2025-04-04T18:08:32.070625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.072611Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-04T18:08:32.072653Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538139:2612] txid# 281474976710663 SEND to# [1:7489521504191538138:2347] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-04-04T18:08:32.111380Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Handle TEvProposeTransaction 2025-04-04T18:08:32.111418Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] TxId# 281474976710664 ProcessProposeTransaction 2025-04-04T18:08:32.111462Z node 1 :TX_PROXY DEBUG: actor# [1:7489521487011668186:2139] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [1:7489521504191538171:2626] 2025-04-04T18:08:32.113946Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJkYmFkbWluIn0.ZF17D3BbmclTsglKJjzKvyzcPc_V3nb4Oera5MBfYkHifwJyhe1RrzQTtBIdA5w2LyaqiM0C3u5XZN3b7J3uu7ADOWwnA6lhXIEl2vv_94sPkxcbURZefczPavFdb9wRYGFZS2f3lyajS5cg9aC9p97mKlf3GquvqeBQ-Ts4UX-J8QaIRo8NL73GdHZjK5cw59EX_WwGZzBqbPU40XI0gHEFW7KtfNzS0ZudukeHj4ZZLp5GMBq-uNRp9tg7ZnWdxG_iSxhWmxWLPbZddloNNvzchlTPKiuiUQ89vtJ2eYf0lluDKjWWJJgU5qQBjQiUHKXsqIL51-HIS3y0jzYUgg\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56194" 2025-04-04T18:08:32.114024Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 Bootstrap, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-04-04T18:08:32.114062Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-04-04T18:08:32.114295Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-04-04T18:08:32.114341Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 
281474976710664 HandleResolveDatabase, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: dbadmin 2025-04-04T18:08:32.114403Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.114695Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.114815Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.114926Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-04-04T18:08:32.115136Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 HANDLE EvClientConnected 2025-04-04T18:08:32.122666Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-04-04T18:08:32.122716Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521504191538171:2626] txid# 281474976710664 SEND to# [1:7489521504191538170:2353] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} >> BasicUsage::BasicWriteSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] Test command err: Starting YDB, grpc: 31684, msgbus: 15581 2025-04-04T18:08:28.699107Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521488012809180:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.699290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00022d/r3tmp/tmp56Pe7g/pdisk_1.dat 2025-04-04T18:08:29.293868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:29.294029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:29.300527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:29.313011Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31684, node 1 2025-04-04T18:08:29.386314Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:29.396017Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-04-04T18:08:29.426287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.426310Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.426319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T18:08:29.426450Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15581 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-04-04T18:08:29.662832Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.662890Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777136:2445] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.663946Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777136:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.699331Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777136:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.711517Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777136:2445] Handle TEvDescribeSchemeResult Forward to# [1:7489521492307777135:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T18:08:29.734854Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Handle TEvProposeTransaction 2025-04-04T18:08:29.734910Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.735041Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521492307777158:2453] 2025-04-04T18:08:29.840258Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.840383Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.840413Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.840488Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.840839Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.841046Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.841159Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.841345Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.842100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.844553Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.844617Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777158:2453] txid# 281474976710657 SEND to# [1:7489521492307777157:2452] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T18:08:29.858619Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Handle TEvProposeTransaction 2025-04-04T18:08:29.858646Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.858691Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521492307777204:2495] 2025-04-04T18:08:29.861187Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.861259Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.861276Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.861348Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.861663Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.861772Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.861836Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.862026Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.862626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.864947Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.865042Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521492307777204:2495] txid# 281474976710658 SEND to# [1:7489521492307777203:2494] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.884700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521500897711879:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.884710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521500897711887:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.884861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.885228Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] H ... 868Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:32.008898Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T18:08:32.009463Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T18:08:32.009541Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.009845Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.009997Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.010092Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:32.010335Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:32.013397Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:32.015156Z node 1 :TX_PROXY ERROR: Actor# [1:7489521505192679260:2574] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:32.015197Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679260:2574] txid# 281474976710660 SEND to# [1:7489521500897711893:2340] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:32.273511Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Handle TEvProposeTransaction 2025-04-04T18:08:32.273553Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:32.273631Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521505192679286:2585] 2025-04-04T18:08:32.275348Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true
IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:59994" 2025-04-04T18:08:32.275422Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:32.275440Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:32.275491Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.275852Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.275966Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.276018Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:32.276228Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:32.284014Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:32.284069Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679286:2585] txid# 281474976710661 SEND to# [1:7489521505192679285:2333] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:32.377422Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Handle TEvProposeTransaction 2025-04-04T18:08:32.377460Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:32.377505Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521505192679309:2599] 2025-04-04T18:08:32.379983Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60006" 2025-04-04T18:08:32.380051Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:32.380068Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:32.380111Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7489521505192679309:2599] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.380442Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.380567Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.380620Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:32.380755Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:32.381190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.383483Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:32.383535Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679309:2599] txid# 281474976710662 SEND to# [1:7489521505192679308:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:32.429833Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Handle TEvProposeTransaction 2025-04-04T18:08:32.429871Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:32.429920Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521505192679347:2619] 2025-04-04T18:08:32.432557Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679347:2619] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.GfUm54N5i-_CgRxnNPptopD3RpCD6NZwhXzspxAmSjhxvGxLEoC3js1heEWzhKP_IntztNdtkinocbwr8UHYt28cKIW7gpsf9l6FvKJOlsbSAOH6vAq9QZcR90Rs0xo9eS4wbFRljEy5xhNH3_csHptq5RBQwEq67u01CUwdiv8DUOd2zVaaumvQkhFPh8YSA_JX7szX-IRf7OBCuiGS9FhM2C6SkA0c8Zpk0M-N6BN28L3ItEoq38LLLZXdkYLISADssOUIUb19YwTp6fyDd3wPbIV9GD3tijY76iBLU38yyE_0v8TvC9PQ9scAdeqKQ_QgDDYNTvdy9yr_IOx81A\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60040" 2025-04-04T18:08:32.432635Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679347:2619] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:32.432653Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679347:2619] txid# 281474976710663 
Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:32.432697Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679347:2619] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.433043Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679347:2619] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.433100Z node 1 :TX_PROXY ERROR: Actor# [1:7489521505192679347:2619] txid# 281474976710663, Access denied for ordinaryuser on path /dc-1, with access AlterSchema 2025-04-04T18:08:32.433218Z node 1 :TX_PROXY ERROR: Actor# [1:7489521505192679347:2619] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser on path /dc-1" issue_code: 200000 severity: 1 } 2025-04-04T18:08:32.433259Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521505192679347:2619] txid# 281474976710663 SEND to# [1:7489521505192679346:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-04-04T18:08:32.444534Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjExZmE1NjItNWI4NDY1ZjYtM2Q2YjQzMWUtOGJjNzJlNmY=, ActorId: [1:7489521505192679332:2352], ActorState: ExecuteState, TraceId: 01jr0ztzn1erz1hxyf0264jg8z, Create QueryResponse for error on request, msg: 2025-04-04T18:08:32.444953Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] Handle TEvExecuteKqpTransaction 2025-04-04T18:08:32.444984Z node 1 :TX_PROXY DEBUG: actor# [1:7489521488012809358:2132] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] Test command err: Starting YDB, grpc: 24604, msgbus: 5326 2025-04-04T18:08:28.855398Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521484995462075:2071];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.855439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000206/r3tmp/tmpEONJcI/pdisk_1.dat 2025-04-04T18:08:29.354895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:29.355013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:29.355528Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:29.361350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24604, node 1 2025-04-04T18:08:29.470921Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:29.470941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:29.470946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:29.471044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5326 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-04-04T18:08:29.720968Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Handle TEvNavigate describe path dc-1 2025-04-04T18:08:29.721027Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430105:2432] HANDLE EvNavigateScheme dc-1 2025-04-04T18:08:29.722473Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430105:2432] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.756995Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430105:2432] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-04-04T18:08:29.768540Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430105:2432] Handle TEvDescribeSchemeResult Forward to# [1:7489521489290430104:2431] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-04-04T18:08:29.793231Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Handle TEvProposeTransaction 2025-04-04T18:08:29.793264Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-04-04T18:08:29.793407Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7489521489290430118:2438] 2025-04-04T18:08:29.876529Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.876619Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.876640Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.876713Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.877068Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.877284Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-04-04T18:08:29.877373Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-04-04T18:08:29.877563Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 HANDLE EvClientConnected 2025-04-04T18:08:29.878362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.880808Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-04-04T18:08:29.880871Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430118:2438] txid# 281474976710657 SEND to# [1:7489521489290430117:2437] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-04-04T18:08:29.895252Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Handle TEvProposeTransaction 2025-04-04T18:08:29.895282Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-04-04T18:08:29.895346Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7489521489290430169:2478] 2025-04-04T18:08:29.897794Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-04-04T18:08:29.897862Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:29.897881Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:29.897933Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:29.898321Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:29.898454Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:29.898515Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-04-04T18:08:29.898670Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 HANDLE EvClientConnected 2025-04-04T18:08:29.899261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:29.901732Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-04-04T18:08:29.901790Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521489290430169:2478] txid# 281474976710658 SEND to# [1:7489521489290430168:2477] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-04-04T18:08:31.940183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521497880364853:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.940207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521497880364845:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.940344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.940639Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Handle TEvProposeTransaction 2025-04-04T18:08:31.940669Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-04-04T1 ... m CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:32.046630Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-04-04T18:08:32.047319Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-04-04T18:08:32.047424Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.047714Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.047917Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.047974Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-04-04T18:08:32.048122Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 HANDLE EvClientConnected 2025-04-04T18:08:32.050888Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-04-04T18:08:32.052128Z node 1 :TX_PROXY ERROR: Actor# [1:7489521502175332235:2567] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:32.052173Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332235:2567] txid# 281474976710660 SEND to# [1:7489521497880364859:2340] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-04-04T18:08:32.234582Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Handle TEvProposeTransaction 2025-04-04T18:08:32.234614Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] TxId# 281474976710661 ProcessProposeTransaction 2025-04-04T18:08:32.234702Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [1:7489521502175332261:2578] 2025-04-04T18:08:32.237134Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser {
User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35592" 2025-04-04T18:08:32.237212Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:32.237236Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:32.237289Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.237695Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.237809Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.237855Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-04-04T18:08:32.237998Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 HANDLE EvClientConnected 2025-04-04T18:08:32.245825Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-04-04T18:08:32.245892Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332261:2578] txid# 281474976710661 SEND to# [1:7489521502175332260:2333] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-04-04T18:08:32.466340Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Handle TEvProposeTransaction 2025-04-04T18:08:32.466388Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] TxId# 281474976710662 ProcessProposeTransaction 2025-04-04T18:08:32.466438Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [1:7489521502175332284:2592] 2025-04-04T18:08:32.468864Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35598" 2025-04-04T18:08:32.468942Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-04-04T18:08:32.468962Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 Bootstrap, 
UserSID: root@builtin IsClusterAdministrator: 1 2025-04-04T18:08:32.469016Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.469455Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.469588Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.469667Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-04-04T18:08:32.469900Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 HANDLE EvClientConnected 2025-04-04T18:08:32.470438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.472881Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-04-04T18:08:32.472938Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332284:2592] txid# 281474976710662 SEND to# [1:7489521502175332283:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-04-04T18:08:32.521961Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Handle TEvProposeTransaction 2025-04-04T18:08:32.521992Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] TxId# 281474976710663 ProcessProposeTransaction 2025-04-04T18:08:32.522029Z node 1 :TX_PROXY DEBUG: actor# [1:7489521484995462311:2115] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [1:7489521502175332324:2614] 2025-04-04T18:08:32.524448Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.Qtr7t--sTvak4izeNXWstlayel-x6RfVDn3SJMjYQ-oPVvqe0wANJIm6s06MccFOLMuBjTNPf7KuJEpApFt-7gjdoChtnF0focHsTwUCqGMZ2ExXJJOuUihObiKKKIfBK4I27pK-Upr8JTBHZyLPkthnB7IyLS1AcT5CVmyvsVJnrHwZE5E01__lOlvZGVSSkoc4McLMSg3vwyN4KhENtYHq2kQoNed9dpf6JVxzGRKKddn0OymC6WEMN-wDbbgGMWYAk5DXZVVIoISFbIl_qIaTN5CEcwPfbva9zzNktLvH1DKze69yNjehI1A42Y-f6VxNdrlQEhVnHetzlTSt4A\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzgzMzMxMiwiaWF0IjoxNzQzNzkwMTEyLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35626" 2025-04-04T18:08:32.524532Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 0 CheckDatabaseAdministrator: 0 
2025-04-04T18:08:32.524550Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-04-04T18:08:32.524594Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-04-04T18:08:32.524984Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-04-04T18:08:32.525157Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-04-04T18:08:32.525242Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-04-04T18:08:32.525485Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 HANDLE EvClientConnected 2025-04-04T18:08:32.533877Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-04-04T18:08:32.533935Z node 1 :TX_PROXY DEBUG: Actor# [1:7489521502175332324:2614] txid# 281474976710663 SEND to# [1:7489521502175332323:2352] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TempTablesWithCache [GOOD] Test command err: Trying to start YDB, gRPC: 12017, MsgBus: 3451 2025-04-04T18:08:28.291066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521485904509134:2203];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.291399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000221/r3tmp/tmppgrVQs/pdisk_1.dat 2025-04-04T18:08:28.763975Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:28.765327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.765436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.772022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12017, node 1 2025-04-04T18:08:28.874840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:28.874861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:28.874868Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:28.874983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3451 TClient is connected to server localhost:3451 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:29.434049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:29.455366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T18:08:31.319419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498789411545:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.319418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498789411553:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.319535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.323808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:31.334853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521498789411559:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:31.412335Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498789411612:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.458435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.632081Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-04-04T18:08:31.654053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.144671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.421787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-04-04T18:08:32.744520Z node 1 :TX_PROXY ERROR: Actor# [1:7489521503084379558:2716] txid# 281474976710677, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:32.757525Z node 1 :TX_PROXY ERROR: Actor# [1:7489521503084379565:2721] txid# 281474976710678, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODY0ZjliODktOTgyMzMwMzEtOWE4N2Q0MmMtNTEzYTNlOWQ=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:32.775045Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-04-04T18:08:32.857096Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-04-04T18:08:32.880783Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489521503084379688:2449], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:31: Error: At function: KiReadTable!
<main>:3:31: Error: Cannot find table 'db.[/Root/PgTemp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:32.881080Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDFjNmNmOTItZDhkNjBkM2UtODA4Zjg0ZTMtOTc2NjM4MDA=, ActorId: [1:7489521503084379686:2448], ActorState: ExecuteState, TraceId: 01jr0zv03367rzkhpw9kss4wb3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: |75.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |75.5%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::BasicWriteSession [GOOD] Test command err: 2025-04-04T18:08:21.755042Z :BasicWriteSession INFO: Random seed for debugging is 1743790101755009 2025-04-04T18:08:21.960401Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521458276419810:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:21.960498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T18:08:21.977929Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489521456600365874:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:21.978081Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000222/r3tmp/tmpV1YXLM/pdisk_1.dat 2025-04-04T18:08:22.100448Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T18:08:22.103254Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T18:08:22.282570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:22.282963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:22.288649Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T18:08:22.289877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:22.293965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:22.294113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:22.304136Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:22.328311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16454, node 1 2025-04-04T18:08:22.362447Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:22.362470Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:22.386305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or
outdated, will use file: /home/runner/.ya/build/build_root/o32v/000222/r3tmp/yandex1v1fNm.tmp 2025-04-04T18:08:22.386331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/o32v/000222/r3tmp/yandex1v1fNm.tmp 2025-04-04T18:08:22.386454Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/o32v/000222/r3tmp/yandex1v1fNm.tmp 2025-04-04T18:08:22.386576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:08:22.431522Z INFO: TTestServer started on Port 14688 GrpcPort 16454 TClient is connected to server localhost:14688 PQClient connected to localhost:16454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:22.704088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-04-04T18:08:24.488813Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489521469485268083:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:24.489048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489521469485268061:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:24.489179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:24.494575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-04-04T18:08:24.506297Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489521469485268089:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-04-04T18:08:24.737932Z node 2 :TX_PROXY ERROR: Actor# [2:7489521469485268117:2128] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:24.757389Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489521469485268132:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:24.757396Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489521471161322778:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:24.757589Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmM4ODUyYmYtOWE2MDIxZWMtY2M4M2UyZWUtMjU2NTBmNDI=, ActorId: [1:7489521471161322737:2335], ActorState: ExecuteState, TraceId: 01jr0ztqys0szw9qchc69kmdg8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T18:08:24.757591Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjNlNGU3ZjktZmU3OGIyZDAtMWJjOTRiMjYtY2MzMGI4NzE=, ActorId: [2:7489521469485268058:2307], ActorState: ExecuteState, TraceId: 01jr0ztqx7ag2vmrtdq2tmzhyz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T18:08:24.758932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:24.759669Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T18:08:24.759880Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T18:08:24.850351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:24.949925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16454", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-04-04T18:08:25.100508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jr0ztrd583586gy41x80kamm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzZkNWJlNzMtY2ZjMDllMzEtODhlZmY4NDgtOTM4ODhjNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489521475456290520:3011] 2025-04-04T18:08:26.960702Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521458276419810:2074];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:26.961140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:26.978336Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489521456600365874:2072];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:26.978473Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T18:08:31.269520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:16454 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-04-04T18:08:31.331837Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to loca ... ULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-04-04T18:08:33.000634Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489521505521062681:2541] (SourceId=src_id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-04-04T18:08:33.000662Z node 1 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [1:7489521505521062681:2541] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-04-04T18:08:33.000699Z node 1 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-04-04T18:08:33.001572Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 1, Generation: 1 2025-04-04T18:08:33.001613Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489521509816030005:2541], now have 1 active actors on pipe 2025-04-04T18:08:33.001687Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T18:08:33.001722Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T18:08:33.001798Z node 1 :PERSQUEUE INFO: new Cookie src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src_id 2025-04-04T18:08:33.001898Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-04-04T18:08:33.001955Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T18:08:33.002088Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T18:08:33.002122Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T18:08:33.002207Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T18:08:33.002389Z node 1 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0 2025-04-04T18:08:33.003569Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T18:08:33.003671Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743790113003 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-04-04T18:08:33.003806Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session established. Init response: session_id: "src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2025-04-04T18:08:33.004105Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 2025-04-04T18:08:33.004579Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: try to update token 2025-04-04T18:08:33.004615Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 1 2025-04-04T18:08:33.005274Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0 grpc read done: success: 1 data: write_request[data omitted] 2025-04-04T18:08:33.005675Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-04-04T18:08:33.005875Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T18:08:33.005910Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T18:08:33.006008Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-04-04T18:08:33.006123Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T18:08:33.006309Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-04-04T18:08:33.006338Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-04-04T18:08:33.006660Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 1 partNo : 0 messageNo: 1 size 96 offset: -1 2025-04-04T18:08:33.006965Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 1 partNo 0 
2025-04-04T18:08:33.008012Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 170 count 1 nextOffset 1 batches 1 2025-04-04T18:08:33.008801Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 158 WTime 1743790113008 2025-04-04T18:08:33.008993Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T18:08:33.009083Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 158 2025-04-04T18:08:33.011494Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 158 actorID [1:7489521501226095039:2490] 2025-04-04T18:08:33.011627Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 103 WriteNewSizeFromSupportivePartitions# 0 2025-04-04T18:08:33.011640Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 size 158 2025-04-04T18:08:33.011695Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-04-04T18:08:33.011756Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-04-04T18:08:33.011799Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-04-04T18:08:33.011907Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-04-04T18:08:33.011995Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-04-04T18:08:33.012037Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-04-04T18:08:33.012196Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-04-04T18:08:33.012237Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-04-04T18:08:33.012291Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-04-04T18:08:33.012319Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T18:08:33.012392Z node 1 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743790113006 queuesize 0 startOffset 0 2025-04-04T18:08:33.012875Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-04-04T18:08:33.013568Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 3000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-04-04T18:08:33.013629Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-04-04T18:08:33.014304Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: acknoledged message 1 Got new write session event: AcksEvent: { seqNo : 1, State : EES_WRITTEN, offset : 0, partitionId : 0 } write stat: Write time 0.003000s minimal time in partition queue 0.001000s maximal time in partition queue 0.001000s partition quoted time 0.000000s topic quoted time 0.000000s 2025-04-04T18:08:33.014766Z :INFO: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-04-04T18:08:33.014802Z :INFO: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session will now close 2025-04-04T18:08:33.014904Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: aborting 2025-04-04T18:08:33.015770Z :INFO: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-04-04T18:08:33.015827Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-04-04T18:08:33.015893Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session is aborting and will not restart 2025-04-04T18:08:33.015961Z :DEBUG: [/Root] TraceId [] SessionId [src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0] MessageGroupId [src_id] Write session: destroy 2025-04-04T18:08:33.016422Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0 grpc read done: success: 0 data: 2025-04-04T18:08:33.016457Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0 grpc read failed 2025-04-04T18:08:33.016514Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0 grpc closed 2025-04-04T18:08:33.016531Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|cc4279df-b8f58cec-ad51e89b-bf3f5995_0 is DEAD 2025-04-04T18:08:33.018053Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T18:08:33.018388Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe 
[1:7489521509816030005:2541] destroyed 2025-04-04T18:08:33.018427Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] Test command err: Trying to start YDB, gRPC: 27448, MsgBus: 11420 2025-04-04T18:08:28.210406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521485477332571:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:28.210482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00022b/r3tmp/tmp3NcGvl/pdisk_1.dat 2025-04-04T18:08:28.665642Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:28.670937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.671042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.675060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27448, node 1 2025-04-04T18:08:28.842753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:28.842778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:28.842785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:28.842926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11420 TClient is connected to server localhost:11420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:29.457659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:31.451061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498362235123:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.451064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521498362235131:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.451231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.455227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:31.465514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521498362235137:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:31.530353Z node 1 :TX_PROXY ERROR: Actor# [1:7489521498362235188:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.576762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.264109Z node 1 :TX_PROXY ERROR: Actor# [1:7489521502657202676:2440] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/seq\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1])" issue_code: 200200 severity: 1 } 2025-04-04T18:08:32.270308Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWUyN2RhODAtYTQ0NjI5ZGQtYTgxOTA4NjQtYWRhMDU1YzA=, ActorId: [1:7489521498362235102:2329], ActorState: ExecuteState, TraceId: 01jr0ztzfs6j6b956sq8wkce2z, Create QueryResponse for error on request, msg: 2025-04-04T18:08:32.288108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.368155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.865709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-04-04T18:08:32.892644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-04-04T18:08:33.210919Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521485477332571:2064];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:33.211044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:33.428532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-04-04T18:08:33.459723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/cloud/py3test |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.2%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap >> KqpSnapshotIsolation::TSimpleOltp >> KqpSnapshotIsolation::TSimpleOltpNoSink |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError >> KqpSnapshotIsolation::TReadOnlyOltpNoSink >> KqpSnapshotIsolation::TReadOnlyOltp |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> KqpSnapshotIsolation::TConflictReadWriteOlap |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> KqpSnapshotIsolation::TConflictWriteOltp >> KqpSnapshotIsolation::TConflictWriteOltpNoSink |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest |80.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4 |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-04-04T18:08:26.965685Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521478877967726:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:26.965956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/0001f5/r3tmp/tmpSCpuLu/pdisk_1.dat 2025-04-04T18:08:27.403014Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24802, node 1 2025-04-04T18:08:27.420494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:27.420677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:27.431382Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:27.431417Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:27.474408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:27.547838Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:27.547860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:27.547867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:27.547986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:27.947606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T18:08:28.145136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:40962" , at schemeshard: 72057594046644480 2025-04-04T18:08:28.145647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-04-04T18:08:28.145698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-04-04T18:08:28.148827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T18:08:28.150165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-04-04T18:08:28.151829Z node 1 :TX_PROXY ERROR: Actor# [1:7489521487467903271:2609] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1BD59E20) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8617 (0x1B3E5E59) std::__y1::__function::__func, void ()>::operator()()+280 (0x1B40C758) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1BD8FB26) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1BD60849) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1094 (0x1B40B966) NUnitTest::TTestFactory::Execute()+2438 (0x1BD62116) NUnitTest::RunMain(int, char**)+5213 (0x1BD8A45D) ??+0 (0x7F78EECCCD90) __libc_start_main+128 (0x7F78EECCCE40) _start+41 (0x18851029) |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> THealthCheckTest::LayoutIncorrect |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.1%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.2%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/query/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn+useSink |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> PgCatalog::CheckSetConfig [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::CheckSetConfig [GOOD] Test command err: Trying to start YDB, gRPC: 21069, MsgBus: 21230 2025-04-04T18:08:28.220435Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521486295041967:2141];send_to=[0:7307199536658146131:7762515]; 
2025-04-04T18:08:28.233320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000239/r3tmp/tmpuvmDsS/pdisk_1.dat 2025-04-04T18:08:28.579091Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21069, node 1 2025-04-04T18:08:28.642191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:28.642327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:28.658791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:28.790768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:28.790796Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:28.790807Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:28.790923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21230 TClient is connected to server localhost:21230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:29.429917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:29.443366Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T18:08:31.258764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499179944442:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.258780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521499179944434:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.258923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:31.263122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:31.273413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521499179944448:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:31.375484Z node 1 :TX_PROXY ERROR: Actor# [1:7489521499179944499:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:31.423643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:31.823072Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489521499179944664:2367], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At function: PgSetItem
:2:31: Error: At function: PgReadTable!
:2:31: Error: Unsupported table: pgtable 2025-04-04T18:08:31.823401Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzMwOGQ4YTMtMWJjZDE2OWYtNDU3YzYxYWQtOGY2ZTgzOGQ=, ActorId: [1:7489521499179944428:2328], ActorState: ExecuteState, TraceId: 01jr0ztz1qan2jr4v79nanfp2m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 2968, MsgBus: 28252 2025-04-04T18:08:32.582392Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489521506305175777:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:32.582477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000239/r3tmp/tmplZBhyJ/pdisk_1.dat 2025-04-04T18:08:32.675933Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2968, node 2 2025-04-04T18:08:32.712822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:32.712985Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:32.715021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:32.743798Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:32.743822Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:32.743828Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:32.743960Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28252 TClient is connected to server localhost:28252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:33.229496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:35.737900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489521519190078324:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:35.737929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489521519190078332:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:35.737953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:35.741164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:35.749649Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489521519190078338:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T18:08:35.803515Z node 2 :TX_PROXY ERROR: Actor# [2:7489521519190078389:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22660, MsgBus: 15291 2025-04-04T18:08:38.371046Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7489521528892715716:2061];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:38.371107Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000239/r3tmp/tmp6hi1AO/pdisk_1.dat 2025-04-04T18:08:38.486913Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:38.514893Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:38.515029Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:38.516901Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22660, node 3 2025-04-04T18:08:38.558130Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:38.558166Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:38.558176Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:38.558322Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15291 TClient is connected to server localhost:15291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:39.034854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-04-04T18:08:39.044311Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-04-04T18:08:41.717753Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489521541777618261:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:41.717834Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7489521541777618269:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:41.717902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:41.721971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:41.734716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7489521541777618275:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T18:08:41.803067Z node 3 :TX_PROXY ERROR: Actor# [3:7489521541777618326:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25378, MsgBus: 21823 2025-04-04T18:08:43.200101Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7489521552117351663:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:43.200174Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000239/r3tmp/tmpzGEeAt/pdisk_1.dat 2025-04-04T18:08:43.379239Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:43.413107Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:43.413211Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:43.416312Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25378, node 4 2025-04-04T18:08:43.532790Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:43.532815Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:43.532823Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:43.532935Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21823 TClient is connected to server localhost:21823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:44.174547Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:47.736398Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489521569297221389:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:47.736828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7489521569297221381:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:47.736892Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:47.741328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:47.752361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7489521569297221395:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:47.847021Z node 4 :TX_PROXY ERROR: Actor# [4:7489521569297221446:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:47.875832Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:48.130377Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7489521573592188880:2359], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At function: PgSetItem
:2:31: Error: At function: PgReadTable!
:2:31: Error: Unsupported table: pgtable 2025-04-04T18:08:48.132427Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OGYwYWE4MGYtZmNjNDU2YTMtMzFmOTU2NjAtZTRiYTk3ZWI=, ActorId: [4:7489521569297221376:2330], ActorState: ExecuteState, TraceId: 01jr0zvezh6nzv9d1xtyqn17f0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-04-04T18:08:48.200773Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7489521552117351663:2191];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:48.200869Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] Test command err: 2025-04-04T18:08:47.843385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:450:2412], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:08:47.843741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:08:47.843857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000184/r3tmp/tmpFhzymF/pdisk_1.dat 2025-04-04T18:08:48.234723Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7180, node 1 TClient is connected to server localhost:1432 2025-04-04T18:08:48.648668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:48.648730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:48.648761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:48.649245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |87.8%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter >> TPersQueueTest::DisableDeduplication |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |88.9%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.1%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4 [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> KqpLimits::TooBigColumn+useSink [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PartitionStatsFields |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27699, MsgBus: 8522 2025-04-04T18:08:48.386927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521571397861560:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:48.387071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00036a/r3tmp/tmpaMGNq6/pdisk_1.dat 2025-04-04T18:08:48.739923Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27699, node 1 2025-04-04T18:08:48.797679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:48.797707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:48.797718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:48.797887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:08:48.800917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:48.801117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:48.804073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8522 TClient is connected to server localhost:8522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-04-04T18:08:49.341961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:49.357451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:49.481824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:49.615429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:49.693264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:51.248301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521584282765225:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:51.248407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:51.489402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:51.516653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T18:08:51.572428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T18:08:51.599953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T18:08:51.631654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T18:08:51.700347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T18:08:51.742088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521584282765742:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:51.742198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:51.742220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521584282765747:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:51.745786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T18:08:51.755286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521584282765749:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T18:08:51.810497Z node 1 :TX_PROXY ERROR: Actor# [1:7489521584282765802:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:52.950838Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911;tx_id=3; 2025-04-04T18:08:52.959363Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 2025-04-04T18:08:52.959767Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489521588577733444:2496], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7489521588577733383:2496]Got BAD REQUEST for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7489521588577733444:2496].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } 2025-04-04T18:08:52.961161Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489521588577733418:2496], SessionActorId: [1:7489521588577733383:2496], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 . sessionActorId=[1:7489521588577733383:2496]. isRollback=0 2025-04-04T18:08:53.030781Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGE0YmU1ODYtZmRjZjExMTgtYTI5YTcxM2UtNjU0YWExODI=, ActorId: [1:7489521588577733383:2496], ActorState: ExecuteState, TraceId: 01jr0zvkb4c5hndmbp1s4be72z, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [1:7489521588577733419:2496] from: [1:7489521588577733418:2496] 2025-04-04T18:08:53.030922Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489521588577733419:2496] TxId: 281474976710671. Ctx: { TraceId: 01jr0zvkb4c5hndmbp1s4be72z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGE0YmU1ODYtZmRjZjExMTgtYTI5YTcxM2UtNjU0YWExODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } } 2025-04-04T18:08:53.031928Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGE0YmU1ODYtZmRjZjExMTgtYTI5YTcxM2UtNjU0YWExODI=, ActorId: [1:7489521588577733383:2496], ActorState: ExecuteState, TraceId: 01jr0zvkb4c5hndmbp1s4be72z, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4 [GOOD] Test command err: 2025-04-04T18:08:41.194674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521544315833655:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:41.195715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/0003d6/r3tmp/tmpPOvNnu/pdisk_1.dat 2025-04-04T18:08:41.441774Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T18:08:41.655031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:41.655743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:41.659638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:41.693899Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7192, node 1 2025-04-04T18:08:41.715393Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:41.715426Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:41.862510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/o32v/0003d6/r3tmp/yandexu6mO80.tmp 2025-04-04T18:08:41.862542Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/o32v/0003d6/r3tmp/yandexu6mO80.tmp 2025-04-04T18:08:41.862771Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/o32v/0003d6/r3tmp/yandexu6mO80.tmp 2025-04-04T18:08:41.862922Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:08:42.081108Z INFO: TTestServer started on Port 63934 GrpcPort 7192 TClient is connected to server localhost:63934 PQClient connected to localhost:7192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
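------- [NOTE] KqpLimits::TooBigColumn+useSink, per-cell size limit
The KqpLimits::TooBigColumn+useSink output above ends with the datashard rejecting a 20971520-byte (20 MiB) cell against the 16777216-byte (16 MiB) threshold; the test is reported [GOOD] because that BAD_REQUEST is the expected outcome. Below is a minimal sketch of how a client hits the same limit, using the public YDB Python SDK; the endpoint, database, and use of a KeyValue (Key Uint64, Value String) table are illustrative assumptions, not taken from this run.

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def upsert_huge_row(session):
    # 20 MiB value, deliberately over the 16 MiB per-cell threshold
    # reported by the datashard in the log above.
    query = session.prepare(
        "DECLARE $v AS String; UPSERT INTO KeyValue (Key, Value) VALUES (1, $v);"
    )
    session.transaction(ydb.SerializableReadWrite()).execute(
        query, {"$v": b"x" * (20 * 1024 * 1024)}, commit_tx=True
    )

try:
    pool.retry_operation_sync(upsert_huge_row)
except ydb.issues.BadRequest as err:
    # BAD_REQUEST is not a retryable status, so it propagates to the caller.
    print("rejected as expected:", err)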
2025-04-04T18:08:42.393368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T18:08:42.436376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T18:08:42.594749Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:42.604211Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-04-04T18:08:44.506914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521557200736204:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.514505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.515002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521557200736217:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.527753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T18:08:44.545233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521557200736219:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T18:08:44.629503Z node 1 :TX_PROXY ERROR: Actor# [1:7489521557200736283:2452] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:45.184633Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489521557200736298:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:45.191597Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzI0MWQzNDItMTM5YWRiYzItY2JmMTZhZDEtZjA5MGE4ZDg=, ActorId: [1:7489521557200736202:2337], ActorState: ExecuteState, TraceId: 01jr0zvbebffev7y9hvztg6q2e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T18:08:45.194771Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T18:08:45.311975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.348201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.502600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7489521561495703889:2638] 2025-04-04T18:08:46.192319Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521544315833655:2206];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:46.192436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-04-04T18:08:51.876707Z :Sinks_Oltp_WriteToTopicAndTable_4 INFO: TTopicSdkTestSetup started 2025-04-04T18:08:51.905411Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-04-04T18:08:51.942586Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7489521587265507998:2830] connected; active server actors: 1 2025-04-04T18:08:51.945371Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-04-04T18:08:51.945993Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-04-04T18:08:51.948659Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-04-04T18:08:51.950363Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-04-04T18:08:51.951595Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-04-04T18:08:51.956415Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-04-04T18:08:51.960427Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-04-04T18:08:51.960868Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-04-04T18:08:51.961504Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-04-04T18:08:51.961542Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-04-04T18:08:51.961572Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-04-04T18:08:51.961674Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-04-04T18:08:51.961731Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-04-04T18:08:51.962016Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-04-04T18:08:51.962063Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489521587265508029:2470], now have 1 active actors on pipe 2025-04-04T18:08:51.985303Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7489521587265507997:2829], now have 1 active actors on pipe 2025-04-04T18:08:52.000584Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489521544315833918 RawX2: 4294969491 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelPr ... 2.978765Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489521591560475804:2522], SessionActorId: [1:7489521591560475617:2522], Got LOCKS BROKEN for table. ShardID=72075186224037896, Sink=[1:7489521591560475804:2522].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-04-04T18:08:52.979236Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7489521591560475804:2522], SessionActorId: [1:7489521591560475617:2522], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table_A`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7489521591560475617:2522]. isRollback=0 2025-04-04T18:08:52.979400Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJiNjI0NWYtZDFkMzE5NTQtYTI5N2Q5NDYtYzM0NTk2NDI=, ActorId: [1:7489521591560475617:2522], ActorState: ExecuteState, TraceId: 01jr0zvkqa5prwnvak01rjvqjg, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7489521591560475805:2522] from: [1:7489521591560475804:2522] 2025-04-04T18:08:52.979495Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7489521591560475805:2522] TxId: 281474976710679. Ctx: { TraceId: 01jr0zvkqa5prwnvak01rjvqjg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJiNjI0NWYtZDFkMzE5NTQtYTI5N2Q5NDYtYzM0NTk2NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table_A`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-04-04T18:08:52.979633Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzJiNjI0NWYtZDFkMzE5NTQtYTI5N2Q5NDYtYzM0NTk2NDI=, ActorId: [1:7489521591560475617:2522], ActorState: ExecuteState, TraceId: 01jr0zvkqa5prwnvak01rjvqjg, Create QueryResponse for error on request, msg: 2025-04-04T18:08:52.980517Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7489521591560475804 RawX2: 4503603922340314 } Data { Operations { PartitionId: 0 Path: "/Root/topic_A" SupportivePartition: 100000 } Immediate: true WriteId { NodeId: 1 KeyId: 281474976710677 } } 2025-04-04T18:08:52.980549Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PartitionId {0, {1, 281474976710677}, 100000} for WriteId {1, 281474976710677} 2025-04-04T18:08:52.980572Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 0 has WriteId {1, 281474976710677} 2025-04-04T18:08:52.980587Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] immediate transaction 2025-04-04T18:08:52.981507Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T18:08:52.981534Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state CALCULATED 2025-04-04T18:08:52.981546Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679, State CALCULATED 2025-04-04T18:08:52.981560Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679 State CALCULATED FrontTxId 281474976710679 2025-04-04T18:08:52.981572Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679, NewState WAIT_RS 2025-04-04T18:08:52.981582Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679 moved from CALCULATED to WAIT_RS 2025-04-04T18:08:52.981601Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
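------- [NOTE] TxUsage::Sinks_Oltp_WriteToTopicAndTable_4, ABORTED with code 2001
The "Transaction locks invalidated. Table: `/Root/table_A`" / "Operation is aborting because locks are not valid" sequence above is YDB's optimistic-lock conflict: a concurrent commit invalidated this transaction's read set, so the whole transaction is aborted and must be retried, apparently by design here since the test still passes. A sketch of the standard client-side handling with the YDB Python SDK follows; the endpoint, database, and column names are illustrative assumptions.

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def read_modify_write(session):
    # One serializable transaction; if another commit invalidates our read
    # locks first, execute()/commit() raises ydb.issues.Aborted (code 2001).
    tx = session.transaction(ydb.SerializableReadWrite())
    tx.execute("UPDATE table_A SET value = value + 1 WHERE id = 1;")
    tx.commit()

# ABORTED is a retryable status, so retry_operation_sync re-runs the callee
# with backoff instead of surfacing the first conflict.
pool.retry_operation_sync(read_modify_write)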
2025-04-04T18:08:52.981613Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] HaveParticipantsDecision 1 2025-04-04T18:08:52.981634Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679, NewState EXECUTING 2025-04-04T18:08:52.981646Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679 moved from WAIT_RS to EXECUTING 2025-04-04T18:08:52.981656Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Received 0, Expected 0 2025-04-04T18:08:52.981687Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId: 281474976710679 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-04-04T18:08:52.981703Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] complete TxId 281474976710679 2025-04-04T18:08:52.981728Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for TxId 281474976710679 2025-04-04T18:08:52.981764Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {1, 281474976710677}, 100000} 2025-04-04T18:08:52.981786Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679, NewState EXECUTED 2025-04-04T18:08:52.981805Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679 moved from EXECUTING to EXECUTED 2025-04-04T18:08:52.981973Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710679] save tx TxId: 281474976710679 State: EXECUTED MinStep: 1743790133000 MaxStep: 1743790163000 PredicatesReceived { TabletId: 72075186224037896 Predicate: false } Operations { PartitionId: 0 Path: "/Root/topic_A" SupportivePartition: 100000 } Step: 1743790133022 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 7489521591560475804 RawX2: 4503603922340314 } WriteId { NodeId: 1 KeyId: 281474976710677 } Partitions { } 2025-04-04T18:08:52.982044Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T18:08:52.982421Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {1, 281474976710677}, 100000}, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoRequest 2025-04-04T18:08:52.982461Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {1, 281474976710677}, 100000}, State: StateIdle] Send TEvPQ::TEvGetWriteInfoError 2025-04-04T18:08:52.982508Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {1, 281474976710677}, 100000}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2025-04-04T18:08:52.982777Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoError Cookie 100000, Message Write info requested while writes are not complete 2025-04-04T18:08:52.983091Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-04-04T18:08:52.983117Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Delete blobs from D0000100000(+) to D0000100001(-) 2025-04-04T18:08:52.983197Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976710677 LockNode: 1 Status: STATUS_NOT_FOUND 2025-04-04T18:08:52.983217Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxWriteInfo: WriteId {1, 281474976710677}, TxId 0, Status STATUS_SUBSCRIBED 2025-04-04T18:08:52.983258Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] there is already a transaction TxId 0 for WriteId {1, 281474976710677} 2025-04-04T18:08:52.983601Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T18:08:52.983623Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Try execute txs with state EXECUTED 2025-04-04T18:08:52.983633Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679, State EXECUTED 2025-04-04T18:08:52.983643Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679 State EXECUTED FrontTxId 281474976710679 2025-04-04T18:08:52.983656Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TPersQueue::SendEvReadSetAckToSenders 2025-04-04T18:08:52.983694Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 1743790133022 txid# 281474976710679 TabletSource# 72075186224037896 TabletDest# 72075186224037894 SetTabletConsumer# 72075186224037894 Flags# 0 Seqno# 1} 2025-04-04T18:08:52.983725Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679, NewState WAIT_RS_ACKS 2025-04-04T18:08:52.983759Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710679 moved from EXECUTED to WAIT_RS_ACKS 2025-04-04T18:08:52.983777Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710679] PredicateAcks: 0/0 2025-04-04T18:08:52.983816Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] WriteId {1, 281474976710677} Partitions.size=1 2025-04-04T18:08:52.983827Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 0 2025-04-04T18:08:52.983836Z node 1 :PERSQUEUE DEBUG: [TxId: 281474976710679] PredicateAcks: 0/0 2025-04-04T18:08:52.983850Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] WriteId {1, 281474976710677} Partitions.size=1 2025-04-04T18:08:52.984315Z node 1 :PERSQUEUE DEBUG: Deleting head blob in L1. Partition 100000 offset 0 count 1 actorID [1:7489521591560475472:2499] 2025-04-04T18:08:52.985153Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 0 partno 0 count 1 parts 0 2025-04-04T18:08:52.986039Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {1, 281474976710677}, 100000} 2025-04-04T18:08:52.986108Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {1, 281474976710677} 2025-04-04T18:08:52.986135Z node 1 :PERSQUEUE WARN: [PQ: 72075186224037894] Unknown transaction 0 2025-04-04T18:08:52.986200Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-04-04T18:08:52.986664Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-04-04T18:08:53.285318Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [1:7489521595855443183:3112], now have 1 active actors on pipe 2025-04-04T18:08:53.287301Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-04-04T18:08:53.287343Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0] PartitionId [0] Generation [1] Write session will now close 2025-04-04T18:08:53.287436Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0] PartitionId [0] Generation [1] Write session: aborting 2025-04-04T18:08:53.287821Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-04-04T18:08:53.287870Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0] PartitionId [0] Generation [1] Write session: destroy 2025-04-04T18:08:53.293769Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0 grpc read done: success: 0 data: 2025-04-04T18:08:53.293824Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0 grpc read failed 2025-04-04T18:08:53.293854Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0 grpc closed 2025-04-04T18:08:53.293887Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|82757bd0-d97660c-69fe5ff8-200f8edd_0 is DEAD 2025-04-04T18:08:53.294817Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T18:08:53.294868Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-04-04T18:08:53.295007Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [1:7489521591560475758:2563] destroyed 2025-04-04T18:08:53.295035Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [1:7489521591560475761:2563] destroyed 2025-04-04T18:08:53.295074Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.2%| [TA] $(B)/ydb/public/sdk/cpp/src/client/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |92.9%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_index/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable >> TSentinelUnstableTests::BSControllerCantChangeStatus >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::CallDropViewOnTable [GOOD] >> TCreateAndDropViewTest::DropViewIfExists |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 29720, MsgBus: 26925 2025-04-04T18:08:39.141520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521535633012780:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:39.143445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000354/r3tmp/tmpzrByf8/pdisk_1.dat 2025-04-04T18:08:39.600650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:39.600828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29720, node 1 2025-04-04T18:08:39.603080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:39.622095Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:39.622212Z node 1 :GRPC_SERVER 
WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:39.652433Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:39.710395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:39.710428Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:39.710439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:39.710569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26925 TClient is connected to server localhost:26925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:40.418449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:40.444315Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T18:08:42.577871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521548517915224:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.577875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521548517915236:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.577977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.582518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:42.594140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521548517915239:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:42.670643Z node 1 :TX_PROXY ERROR: Actor# [1:7489521548517915290:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:42.904380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:43.071801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:44.235539Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521535633012780:2145];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:44.237750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:44.287172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.979521Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjBlYWUxYWEtYTQ4MWFlMTAtZmNjMWNhN2YtODgwYjcwYzI=, ActorId: [1:7489521561402825651:2970], ActorState: ExecuteState, TraceId: 01jr0zvcqjbgv74fags5xgdqn9, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-04T18:08:54.651695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:08:54.651745Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x17FA5ACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F6BE5EDFD8F 17. ??:0: ?? @ 0x7F6BE5EDFE3F 18. ??:0: ?? @ 0x15B7B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] Test command err: Trying to start YDB, gRPC: 63470, MsgBus: 65018 2025-04-04T18:08:39.124053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521534512901220:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:39.124222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00033b/r3tmp/tmp3FaxLz/pdisk_1.dat 2025-04-04T18:08:39.570065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:39.573911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:39.574025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:39.577055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63470, node 1 2025-04-04T18:08:39.711431Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:39.711461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:39.711469Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:39.711569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65018 TClient is connected to server localhost:65018 WaitRootIsUp 'Root'... 
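Every KqpSnapshotIsolation failure in this log terminates in the same check at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: the test expects EStatus::SUCCESS and receives PRECONDITION_FAILED because the session reports "SnapshotRW can only be used with olap tables." A minimal sketch of that assertion pattern follows; only result.GetStatus() and EStatus::SUCCESS are visible in the trace, so the include paths, session type, and query wiring here are assumptions for illustration, not the test's actual code.

    #include <library/cpp/testing/unittest/registar.h>
    #include <ydb/public/sdk/cpp/client/ydb_query/client.h>  // assumed SDK header path

    // Hedged sketch: run one query in its own transaction and require SUCCESS,
    // mirroring (result.GetStatus() == EStatus::SUCCESS) from the trace above.
    void AssertQuerySucceeds(NYdb::NQuery::TSession& session, const TString& query) {
        auto result = session.ExecuteQuery(
            query,
            NYdb::NQuery::TTxControl::BeginTx().CommitTx()).GetValueSync();
        // Against a row (OLTP) table under SnapshotRW isolation this comes back
        // as PRECONDITION_FAILED, producing exactly the diff printed in the log.
        UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::SUCCESS,
                                   result.GetIssues().ToString());
    }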
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:40.422122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:42.273660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547397803644:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.273699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547397803656:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.273776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.280725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:42.291608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521547397803658:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:42.357284Z node 1 :TX_PROXY ERROR: Actor# [1:7489521547397803710:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:42.839789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:42.977336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:44.067544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:44.157757Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521534512901220:2193];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:44.160334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:45.940595Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDE4MTdjN2YtNmZlZTlkMzEtMjM0MTQ0MC1hYWE2YzA1NA==, ActorId: [1:7489521560282714205:2970], ActorState: ExecuteState, TraceId: 01jr0zvcm782yv57czy48qhdfb, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-04T18:08:54.569278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:08:54.569302Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x17FA58A2 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FC1EF73DD8F 17. ??:0: ?? @ 0x7FC1EF73DE3F 18. ??:0: ?? @ 0x15B7B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 13924, MsgBus: 5580 2025-04-04T18:08:40.795330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521538775791554:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:40.795399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000317/r3tmp/tmpo5jW10/pdisk_1.dat 2025-04-04T18:08:41.206708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13924, node 1 2025-04-04T18:08:41.229535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:41.229636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:41.238165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:41.349520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:41.349544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:41.349549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:41.349665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5580 TClient is connected to server localhost:5580 WaitRootIsUp 'Root'... 
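The recurring "Resource pool default not found" warnings and the subsequent TX_PROXY "path exist, request accepts it" error are, as the log itself shows, part of each test's setup: the default workload-manager pool is created on first use, and the scheduled retry simply races with that creation. The real failure is the SnapshotRW restriction, where "olap tables" means YDB column-store tables. A hedged sketch of the distinction (table names and schema are made up for illustration; STORE = COLUMN is the documented column-table switch):

    // Row-oriented (OLTP) table: SnapshotRW transactions are rejected with
    // PRECONDITION_FAILED, as seen throughout this log.
    static const char* kRowTable = R"(
        CREATE TABLE `/Root/KV` (
            Key Uint64, Value String, PRIMARY KEY (Key)
        );)";

    // Column-store (OLAP) table: per the error message, the only kind that
    // SnapshotRW currently accepts. Column tables require NOT NULL key columns.
    static const char* kColumnTable = R"(
        CREATE TABLE `/Root/KV_Columns` (
            Key Uint64 NOT NULL, Value String, PRIMARY KEY (Key)
        ) WITH (STORE = COLUMN);)";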
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:41.919443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:44.173973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521555955661411:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.174076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521555955661395:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.174460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.178690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:44.191678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521555955661415:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:44.286763Z node 1 :TX_PROXY ERROR: Actor# [1:7489521555955661466:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:44.574197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:44.751953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.892079Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521538775791554:2065];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:45.925378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:45.974359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:47.321631Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzdkMTY5ZWItMzI0MjI5MzgtZWQ3MDVkMGMtYTE2NTc4OWI=, ActorId: [1:7489521568840571766:2970], ActorState: ExecuteState, TraceId: 01jr0zve48axznx035jgmbqdba, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-04T18:08:56.206463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:08:56.206506Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x17FA4DCA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F4D73428D8F 17. ??:0: ?? @ 0x7F4D73428E3F 18. ??:0: ?? @ 0x15B7B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 64692, MsgBus: 1259 2025-04-04T18:08:40.730927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521538454418349:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:40.731003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000325/r3tmp/tmpcsjr2u/pdisk_1.dat 2025-04-04T18:08:41.129217Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64692, node 1 2025-04-04T18:08:41.154429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:41.154556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:41.198958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:41.260006Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:41.260061Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:41.260071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:41.260230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1259 TClient is connected to server localhost:1259 WaitRootIsUp 'Root'... 
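The ", with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" tail on each assertion is the unit-test framework's inline diff of the two enum names: each (left|right) group holds the fragments that differ, while shared characters sit outside the groups. Reading one side of every group reconstructs each value:

    // (PRE|SUC) C (ONDITION_FAIL|) E (D|SS)
    //  PRE      C  ONDITION_FAIL   E  D      -> PRECONDITION_FAILED (actual)
    //      SUC  C                  E     SS  -> SUCCESS             (expected)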
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:41.827700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:43.732612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521551339320888:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.732859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.738116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521551339320915:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.754067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:43.766420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521551339320917:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:43.851445Z node 1 :TX_PROXY ERROR: Actor# [1:7489521551339320970:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:44.143035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:44.295210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.479775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.221069Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521538454418349:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:46.285873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:46.869645Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWYwYjFkNDQtODZkMDM4YzEtZGQyNWQ2ZGYtZjEyMWJjM2Y=, ActorId: [1:7489521564224231274:2969], ActorState: ExecuteState, TraceId: 01jr0zvdnhf57xh7wt5h1dskts, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-04T18:08:56.122465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:08:56.122523Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x17FA5222 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F8EB3F5DD8F 17. ??:0: ?? @ 0x7F8EB3F5DE3F 18. ??:0: ?? @ 0x15B7B028 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 1075, MsgBus: 11515 2025-04-04T18:08:39.133268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521534425735375:2202];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:39.133481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00036f/r3tmp/tmphfmX6J/pdisk_1.dat 2025-04-04T18:08:39.575476Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:39.596476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:39.596572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1075, node 1 2025-04-04T18:08:39.599663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:39.714258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:39.714289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:39.714299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-04-04T18:08:39.714442Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11515 TClient is connected to server localhost:11515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:40.470112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:42.283905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547310637790:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.283985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547310637776:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.284211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.289244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:42.301766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521547310637796:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:42.368155Z node 1 :TX_PROXY ERROR: Actor# [1:7489521547310637847:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:42.844900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:42.995052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:42.995233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:42.995240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:42.995299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:42.995514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:42.995520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:42.995620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:42.995641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:42.995953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:42.996095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:42.996136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-04-04T18:08:42.996200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:42.996273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:42.996316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:42.996405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:42.996432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:42.996565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:42.996743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:42.996914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:42.996915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:42.997039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7489521547310638020:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:42.997059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:42.997206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:42.997314Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7489521547310638019:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:43.034943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547310638031:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:43.035041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547310638031:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:43.035307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_ ... -04T18:08:57.718444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7489521555900574823:2728];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037949;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.718709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;self_id=[1:7489521555900574823:2728];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037949;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.821745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7489521568785479749:3175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.822122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7489521568785479749:3175];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.822420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[1:7489521568785479653:3156];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037999;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.822730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[1:7489521568785479653:3156];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037999;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.824809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7489521568785480196:3292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.825077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7489521568785480196:3292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.826887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7489521568785479826:3180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.827152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7489521568785479826:3180];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 
2025-04-04T18:08:57.829408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[1:7489521568785479836:3187];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.829638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[1:7489521568785479836:3187];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.830938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[1:7489521568785480242:3305];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.831162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[1:7489521568785480242:3305];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.833531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7489521568785480189:3287];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.833764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7489521568785480189:3287];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.834525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7489521568785479715:3162];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038000;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.834721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7489521568785479715:3162];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.837713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[1:7489521568785479769:3176];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.837940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[1:7489521568785479769:3176];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.838635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[1:7489521568785480068:3225];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.838818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[1:7489521568785480068:3225];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.840755Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038079;self_id=[1:7489521568785480170:3269];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.840954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7489521568785480170:3269];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.843215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[1:7489521568785479684:3158];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038002;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.843409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[1:7489521568785479684:3158];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.845245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[1:7489521568785479725:3168];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.845411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[1:7489521568785479725:3168];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.847853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[1:7489521568785479730:3173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.848040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[1:7489521568785479730:3173];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.849410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7489521568785479842:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.849601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7489521568785479842:3192];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.851455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7489521568785480203:3299];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.851643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7489521568785480203:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.853941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[1:7489521568785479896:3207];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038096;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.854124Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038096;self_id=[1:7489521568785479896:3207];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017
, with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF
1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x17F890FD
2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x17F699FA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x17F70037
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x17F70037
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x17F70037
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17F70037
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17F70037
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5
12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8
13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x17F6F245
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5
15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC
16. ??:0: ?? @ 0x7F8AD888BD8F
17. ??:0: ?? @ 0x7F8AD888BE3F
18. ??:0: ?? @ 0x15B7B028
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL]
Test command err: Trying to start YDB, gRPC: 10427, MsgBus: 22644
2025-04-04T18:08:40.775131Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521539347264839:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T18:08:40.776020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000327/r3tmp/tmpFBm12Y/pdisk_1.dat
2025-04-04T18:08:41.185312Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T18:08:41.185810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T18:08:41.185929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T18:08:41.215565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10427, node 1
2025-04-04T18:08:41.299564Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T18:08:41.299593Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T18:08:41.299606Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T18:08:41.299788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22644
TClient is connected to server localhost:22644
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:41.859430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:43.968157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521552232167388:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.968338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.968594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521552232167407:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.976551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:43.989290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521552232167409:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-04T18:08:44.054713Z node 1 :TX_PROXY ERROR: Actor# [1:7489521556527134756:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T18:08:44.330690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-04T18:08:44.468464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T18:08:45.595960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T18:08:45.775315Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521539347264839:2063];send_to=[0:7307199536658146131:7762515];
2025-04-04T18:08:45.775513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T18:08:46.978993Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWQyZWVlMjMtZTg4OGJlY2EtNTM2OGYzMjUtNzYxNWY1ZTI=, ActorId: [1:7489521565117077768:2970], ActorState: ExecuteState, TraceId: 01jr0zvdsh302fwe1tp29m5re2, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
2025-04-04T18:08:56.162286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T18:08:56.162334Z node 1 :IMPORT WARN: Table profiles were not loaded
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF
1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0
2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x17FA4BA2
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5
12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8
13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5
15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC
16. ??:0: ?? @ 0x7F0547694D8F
17. ??:0: ?? @ 0x7F0547694E3F
18. ??:0: ?? @ 0x15B7B028
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL]
Test command err: Trying to start YDB, gRPC: 25927, MsgBus: 6454
2025-04-04T18:08:39.102743Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521532926280071:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T18:08:39.103968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000366/r3tmp/tmpjSlhGH/pdisk_1.dat
2025-04-04T18:08:39.550570Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T18:08:39.570636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T18:08:39.570742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T18:08:39.572428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25927, node 1
2025-04-04T18:08:39.710892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T18:08:39.710922Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T18:08:39.710934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T18:08:39.711084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6454
TClient is connected to server localhost:6454
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:40.414966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:40.447250Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T18:08:42.363352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521545811182631:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.363460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521545811182619:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.363593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.368133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:42.377927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521545811182634:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-04-04T18:08:42.434357Z node 1 :TX_PROXY ERROR: Actor# [1:7489521545811182685:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-04-04T18:08:42.839836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-04-04T18:08:42.969379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-04-04T18:08:44.099033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-04-04T18:08:44.107617Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521532926280071:2061];send_to=[0:7307199536658146131:7762515];
2025-04-04T18:08:44.108429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-04-04T18:08:45.934170Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDI0NzY2LTY4ZGUwYTVmLTJlNGE1ZTkzLTI3YzFmNGRm, ActorId: [1:7489521558696093026:2969], ActorState: ExecuteState, TraceId: 01jr0zvcm4d4xty6xwvh3cq7nf, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
2025-04-04T18:08:54.519862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-04-04T18:08:54.519903Z node 1 :IMPORT WARN: Table profiles were not loaded
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF
1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851
2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x17FA474A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5
12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8
13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5
15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC
16. ??:0: ?? @ 0x7F66EE551D8F
17. ??:0: ?? @ 0x7F66EE551E3F
18. ??:0: ?? @ 0x15B7B028
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL]
Test command err: Trying to start YDB, gRPC: 24630, MsgBus: 2568
2025-04-04T18:08:39.050938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521534879149197:2064];send_to=[0:7307199536658146131:7762515];
2025-04-04T18:08:39.051984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00034d/r3tmp/tmpiqItna/pdisk_1.dat
2025-04-04T18:08:39.501787Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T18:08:39.520556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T18:08:39.520667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T18:08:39.532720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 24630, node 1
2025-04-04T18:08:39.710291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T18:08:39.710322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T18:08:39.710332Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T18:08:39.710508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:2568
TClient is connected to server localhost:2568
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:40.415304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:42.288031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547764051725:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.288117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547764051744:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.288214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.293725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:42.306221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521547764051761:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:42.401995Z node 1 :TX_PROXY ERROR: Actor# [1:7489521547764051812:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:42.844862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:43.039978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:43.040202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:43.040712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:43.040884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:43.040991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:43.041121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:43.041244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:43.041371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:43.041502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:43.041665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:43.041790Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:43.041926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7489521547764052007:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:43.042864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:43.042909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:43.043089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:43.043234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:43.043391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:43.043504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:43.043604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:43.043728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:43.043881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:43.043995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:43.044100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:43.044194Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7489521547764052008:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:43.091094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489521547764052018:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:43.091232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489521547764052018:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:43.091514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_i ... 1:7489521569238894431:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.036183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7489521569238894376:3332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.036485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[1:7489521569238894376:3332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.037712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7489521569238894359:3322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038000;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.037957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[1:7489521569238894359:3322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.038133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[1:7489521569238894360:3323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.038315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[1:7489521569238894360:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.039759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[1:7489521569238894034:3256];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.039992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[1:7489521569238894034:3256];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.044508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7489521569238894288:3306];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.044907Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038015;self_id=[1:7489521569238894288:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.045324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7489521569238894362:3324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.045514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7489521569238894362:3324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.046361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7489521569238894296:3310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.046570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7489521569238894296:3310];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.046842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[1:7489521569238894059:3278];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.047005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[1:7489521569238894059:3278];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.055165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[1:7489521569238893969:3214];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.055458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[1:7489521569238893969:3214];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.061282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7489521569238894358:3321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.061584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7489521569238894358:3321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.065372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7489521569238894380:3334];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.065667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7489521569238894380:3334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.067466Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038054;self_id=[1:7489521569238894370:3328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.067781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7489521569238894370:3328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.067997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[1:7489521569238894389:3338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.068236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[1:7489521569238894389:3338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.068380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7489521569238894500:3341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.068561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7489521569238894500:3341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.068904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7489521569238894372:3329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.069087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7489521569238894372:3329];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.072549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7489521569238894302:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.072845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7489521569238894302:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.073517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7489521569238894297:3311];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.073762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7489521569238894297:3311];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.073979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7489521569238894375:3331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:58.074152Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038012;self_id=[1:7489521569238894375:3331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED)
, with diff: (SUCC|ABORT)E(SS|D)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF
1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x17FB5047
2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A
3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x17FA4FFA
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5
11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5
12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8
13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5
14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5
15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC
16. ??:0: ?? @ 0x7F5378E0DD8F
17. ??:0: ?? @ 0x7F5378E0DE3F
18. ??:0: ?? @ 0x15B7B028
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL]
Test command err: Trying to start YDB, gRPC: 13854, MsgBus: 15552
2025-04-04T18:08:39.489240Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521534902995675:2199];send_to=[0:7307199536658146131:7762515];
2025-04-04T18:08:39.489919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000336/r3tmp/tmp1zQWij/pdisk_1.dat
2025-04-04T18:08:39.828884Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-04-04T18:08:39.833067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-04-04T18:08:39.833175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-04-04T18:08:39.836460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13854, node 1
2025-04-04T18:08:39.891229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-04-04T18:08:39.891264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-04-04T18:08:39.891273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-04-04T18:08:39.891455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15552
TClient is connected to server localhost:15552
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-04-04T18:08:40.472888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-04-04T18:08:42.657998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547787898069:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.658106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547787898087:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.658197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.663438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:42.677709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521547787898098:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T18:08:42.758794Z node 1 :TX_PROXY ERROR: Actor# [1:7489521547787898149:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:43.045117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:43.217270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:43.217485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:43.217806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:43.218052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:43.218177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:43.218572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:43.218652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:43.218704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:43.218711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:43.218839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:43.218842Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:43.218965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:43.218976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:43.219103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:43.219104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:43.219207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:43.219238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:43.219310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:43.219358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7489521552082865642:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:43.219422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:43.219509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:43.219622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:43.219739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:43.219851Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7489521552082865639:2349];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:43.271985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489521552082865638:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:43.272062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7489521552082865638:2348];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:43.272325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;sel ... 08:57.439464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[1:7489521573557708091:3365];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.439475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7489521573557708104:3374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.439623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7489521573557708083:3358];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.439778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7489521573557708083:3358];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.439867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7489521573557708232:3397];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.440045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7489521573557708232:3397];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.440046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7489521573557708115:3381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.440180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7489521573557708115:3381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.441502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7489521573557708096:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.441688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7489521573557708096:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 
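The TX_COLUMNSHARD records around this point are boot-time noise: each column shard registers the same normalizer sequence (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on) during TTxInitSchema, so the block repeats once per tablet. When scanning a run like this for real failures, it helps to aggregate the flood by event type first. A minimal triage sketch, assuming the log is replayed on stdin (this helper is illustrative only and not part of the YDB tree):

// Counts TX_COLUMNSHARD-style records by their "event=" value so that
// rare events stand out from per-tablet boilerplate like normalizer_register.
#include <iostream>
#include <map>
#include <string>

int main() {
    std::map<std::string, std::size_t> counts;
    std::string line;
    while (std::getline(std::cin, line)) {
        std::size_t pos = 0;
        while ((pos = line.find("event=", pos)) != std::string::npos) {
            const std::size_t start = pos + 6;
            const std::size_t end = line.find(';', start);
            counts[line.substr(start, end == std::string::npos
                                          ? std::string::npos
                                          : end - start)]++;
            pos = (end == std::string::npos) ? line.size() : end;
        }
    }
    for (const auto& [event, n] : counts) {
        std::cout << n << '\t' << event << '\n';
    }
    return 0;
}

Applied to this fragment, skip_indexation and normalizer_register dominate by orders of magnitude, which makes the single assertion failure further down much easier to spot.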
2025-04-04T18:08:57.441912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[1:7489521573557708089:3364];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.442091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[1:7489521573557708089:3364];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.445081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7489521573557708310:3398];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.445323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7489521573557708310:3398];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.446020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7489521573557708210:3395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.446114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[1:7489521573557708109:3376];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.446211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7489521573557708210:3395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.446358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[1:7489521573557708109:3376];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.447041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7489521573557708098:3370];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.447223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7489521573557708098:3370];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.454305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7489521573557708202:3392];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.454605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7489521573557708202:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.455250Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038005;self_id=[1:7489521573557708321:3399];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.455441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[1:7489521573557708321:3399];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.455761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7489521573557708230:3396];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.455938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7489521573557708230:3396];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.457309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7489521573557708119:3385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.457499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7489521573557708119:3385];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.458836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7489521573557708102:3373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.459012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7489521573557708102:3373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.460381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7489521573557708209:3394];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.460574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7489521573557708209:3394];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.465514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7489521573557708193:3390];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.465555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7489521573557708184:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.465794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7489521573557708184:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; 2025-04-04T18:08:57.465802Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038011;self_id=[1:7489521573557708193:3390];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:762;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x17FBC427 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x17FA567A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F04B8B83D8F 17. ??:0: ?? @ 0x7F04B8B83E3F 18. ??:0: ?? @ 0x15B7B028 |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 5605, MsgBus: 65348 2025-04-04T18:08:39.995804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521533288657433:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:40.001284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000335/r3tmp/tmpdcJ7Lj/pdisk_1.dat 2025-04-04T18:08:40.313264Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5605, node 1 2025-04-04T18:08:40.378390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:40.378531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:40.380269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:40.396603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:40.396630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:40.396638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:40.396770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:65348 TClient is connected to server localhost:65348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:40.932860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:43.127876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521550468527125:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.128090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.134509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521550468527152:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.138470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:43.149764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521550468527154:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:43.211321Z node 1 :TX_PROXY ERROR: Actor# [1:7489521550468527205:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:43.517110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:43.660100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:44.814162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.534471Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521533288657433:2200];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:45.645525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:46.190726Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTNiNjJjZmUtNWIzNmI0ZDMtZGJmNjhkM2EtNzczOTQ5OTg=, ActorId: [1:7489521563353437465:2968], ActorState: ExecuteState, TraceId: 01jr0zvd0t8s33zc46ef5s3gg8, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-04T18:08:55.313279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:08:55.313320Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x17FA544A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F198FE15D8F 17. ??:0: ?? @ 0x7F198FE15E3F 18. ??:0: ?? @ 0x15B7B028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] Test command err: Trying to start YDB, gRPC: 16739, MsgBus: 4649 2025-04-04T18:08:39.057773Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521534365498853:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:39.058066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000350/r3tmp/tmpR6OXMo/pdisk_1.dat 2025-04-04T18:08:39.511881Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:39.522486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:39.522579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:39.536516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16739, node 1 2025-04-04T18:08:39.710861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:39.710893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:39.710906Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:39.711032Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4649 TClient is connected to server localhost:4649 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:40.434067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:40.449553Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T18:08:42.374271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547250401276:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.374376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521547250401268:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.374776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:42.378688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:08:42.389896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521547250401282:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:08:42.481147Z node 1 :TX_PROXY ERROR: Actor# [1:7489521547250401333:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:42.839706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:08:42.981824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:44.058509Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521534365498853:2197];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:44.058581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:44.124199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.933989Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTNjYjRhNTktYWUyODVlYTUtZDQwOGFlNjEtOGE0MGMwZGI=, ActorId: [1:7489521560135311688:2971], ActorState: ExecuteState, TraceId: 01jr0zvckfddeeenpy6tzks7v3, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-04-04T18:08:54.507476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:08:54.507531Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x17FA4522 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F6BB3002D8F 17. ??:0: ?? @ 0x7F6BB3002E3F 18. ??:0: ?? @ 0x15B7B028 >> SystemView::PartitionStatsFields [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 15559, MsgBus: 21204 2025-04-04T18:08:42.690949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521545306939473:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:42.691097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/0003c5/r3tmp/tmpxomu8D/pdisk_1.dat 2025-04-04T18:08:43.075100Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:43.076791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:43.076911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:43.081345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15559, node 1 2025-04-04T18:08:43.254444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:43.254488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:43.254498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:43.254637Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21204 TClient is connected to server localhost:21204 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:43.884443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:43.936185Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-04-04T18:08:43.957676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:44.250554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:44.451010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T18:08:44.550600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.386425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521562486810434:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:46.386535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:46.664066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.697278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.741930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.776175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.806038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.877334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T18:08:46.944550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521562486810951:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:46.944610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:46.944803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521562486810956:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:46.949159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T18:08:46.960568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521562486810958:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T18:08:47.039555Z node 1 :TX_PROXY ERROR: Actor# [1:7489521566781778307:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:47.691262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521545306939473:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:47.691341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:48.050460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-04-04T18:08:48.210270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:48.210483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:48.210793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:48.210833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489521571076746027:2507];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:48.210900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7489521571076746027:2507];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:48.210924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:48.211228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:48.211405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:48.211568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:48.211717Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:48.211829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:48.211935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:48.212050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:48.212175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7489521571076746031:2510];tablet_id=72075186224037922;p ... RN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T18:08:48.413864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T18:08:48.413904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T18:08:48.414093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T18:08:48.414139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T18:08:48.414308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T18:08:48.414346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T18:08:48.443743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.444079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.450499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.450500Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.457271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.457272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.463798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.463799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.470910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.470910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-04-04T18:08:48.635311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T18:08:48.635310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T18:08:48.636015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;self_id=[1:7489521571076746142:2516];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037923;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037928;receive=72075186224037927; 2025-04-04T18:08:48.636479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-04-04T18:08:48.718919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675; query_phases { duration_us: 9167 cpu_time_us: 2327 affected_shards: 1 } query_phases { duration_us: 7059 cpu_time_us: 281 affected_shards: 1 } compilation { duration_us: 55824 cpu_time_us: 53120 } process_cpu_time_us: 678 total_duration_us: 76361 total_cpu_time_us: 56406 AddressSanitizer:DEADLYSIGNAL ================================================================= ==1060653==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000017d741cf bp 0x7ffcae288480 sp 0x7ffcae2882e0 T0) ==1060653==The signal is caused by a READ memory access. ==1060653==Hint: address points to the zero page. 
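A read at 0x000000000008 is the classic shape of dereferencing through a null pointer plus a small field offset; the stack frames below the symbolizer warnings attribute it to RepeatedPtrField Get, reached through the generated table_access accessor in ydb_query_stats.pb.h from kqp_cost_ut.cpp:636. A hedged reconstruction of the defect class, assuming (the actual test code is not shown here) that the test indexes query_phases/table_access without checking sizes first:

// Sketch only: a bounds-checked variant of the access pattern implicated below.
// Ydb::TableStats::QueryStats is generated from ydb/public/api/protos/ydb_query_stats.proto;
// the accessors follow standard protobuf C++ codegen.
#include <cstdint>
#include <ydb/public/api/protos/ydb_query_stats.pb.h>

uint64_t FirstAccessReadRows(const Ydb::TableStats::QueryStats& stats, int phaseIdx) {
    if (phaseIdx >= stats.query_phases_size()) {
        return 0; // out-of-range phase: unchecked query_phases(phaseIdx) is UB in release builds
    }
    const auto& phase = stats.query_phases(phaseIdx);
    if (phase.table_access_size() == 0) {
        return 0; // empty repeated field: table_access(0) can fault exactly like the report above
    }
    return phase.table_access(0).reads().rows();
}

Note that the query_phases dump printed just before the crash contains phases with only duration_us/cpu_time_us/affected_shards and no table_access entries at all, which is consistent with this failure mode.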
warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70 warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0 warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: address range table at offset 0xa50 has a premature terminator entry at offset 0xa60 warning: address range table at offset 0xa80 has a premature terminator entry at offset 0xa90 warning: address range table at offset 0xab0 has a premature terminator entry at offset 0xac0 warning: address range table at offset 0xae0 has a premature terminator entry at offset 0xaf0 warning: address range table at offset 0xb10 has a premature terminator entry at offset 0xb20 warning: address range table at offset 0xb40 has a premature terminator entry at offset 0xb50 warning: address range table at offset 0xb70 has a premature terminator entry at offset 0xb80 warning: address range table at offset 0xba0 has a premature terminator entry at offset 0xbb0 warning: address range table at offset 0xbd0 has a premature terminator entry at offset 0xbe0 warning: address range table at offset 0xc00 has a premature terminator entry at offset 0xc10 warning: address range table at offset 0xc30 has a premature terminator entry at offset 0xc40 warning: address range table at offset 0xc60 has a premature terminator entry at offset 0xc70 warning: address range table at offset 0xc90 has a premature terminator entry at offset 0xca0 warning: address range table at offset 0xcc0 has a premature terminator entry at offset 0xcd0 warning: address range table at offset 0xcf0 has a premature terminator entry at offset 0xd00 warning: address range table at offset 0xd20 has a premature terminator entry at offset 0xd30 warning: address range table at offset 0xd50 has a premature terminator entry at offset 0xd60 warning: address range table at offset 0xd80 has a premature terminator entry at offset 0xd90 warning: address range table at offset 0x1420 has a premature terminator entry at offset 0x1430 warning: address range table at offset 0x14b0 has a premature terminator entry at offset 0x14c0 warning: address range table at offset 0x14e0 has a premature terminator entry at offset 0x14f0 warning: address range table at offset 0x1510 has a premature terminator entry at offset 0x1520 warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550 warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580 warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0 warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0 warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610 2025-04-04T18:08:58.074505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:08:58.074569Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x17d741cf in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x17d741cf in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x17d741cf in _internal_table_access 
/-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x17d741cf in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x17d741cf in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x17d97987 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x17d97987 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x17d97987 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x17d97987 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #9 0x17d97987 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311:10 #10 0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12 #11 0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10 #12 0x186e0e55 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x186b1b78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x17d96975 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x186b3445 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x186db78c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fe48a332d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fe48a332e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x15ad0028 in _start (/home/runner/.ya/build/build_root/o32v/0003c5/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x15ad0028) (BuildId: b39ae28ee39da90c850ce26824d563677aab4934) AddressSanitizer can not provide additional info. SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==1060653==ABORTING |95.9%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: 2025-04-04T18:08:54.381348Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521600082436778:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:54.381457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000340/r3tmp/tmpnEOjKP/pdisk_1.dat 2025-04-04T18:08:54.822534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:54.831015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:54.831756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16614, node 1 2025-04-04T18:08:54.846513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:54.848195Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:54.848219Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:54.987550Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:54.987589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:54.987665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:54.987815Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12195 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:55.569334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:55.606962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:57.082839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521612967339671:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:57.082855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521612967339660:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:57.082955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:57.090508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-04-04T18:08:57.112507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521612967339674:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-04-04T18:08:57.185919Z node 1 :TX_PROXY ERROR: Actor# [1:7489521612967339745:2701] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:58.000876Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jr0zvqqm5qd9d8e4r8vwsdz4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTkzYjgyNjktNGIzODc0MTItNzQ0MzAyYTYtNzg2MDFmMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T18:08:58.179404Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jr0zvrnm39vjj4edm81fgt9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZjZDU3YTYtNWVmODExMTAtZTdlNmZlZWMtZDg2ZDM2M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T18:08:58.188361Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489521617262307121:2360], owner: [1:7489521617262307117:2358], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:08:58.189757Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489521617262307121:2360], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T18:08:58.190855Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489521617262307121:2360], row count: 1, finished: 1 2025-04-04T18:08:58.190911Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489521617262307121:2360], owner: [1:7489521617262307117:2358], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:08:58.197079Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790138175, txId: 281474976710662] shutting down 2025-04-04T18:08:59.276846Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jr0zvst4cya7j363vyfdjj2n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNmNDAzYzUtNmQ0ZjFkNjYtMTBlM2Q3YmUtMTBjNjU4Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T18:08:59.278278Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489521621557274462:2371], owner: [1:7489521621557274458:2369], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:08:59.278846Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489521621557274462:2371], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T18:08:59.279127Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489521621557274462:2371], row count: 1, finished: 1 2025-04-04T18:08:59.279167Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489521621557274462:2371], owner: [1:7489521621557274458:2369], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:08:59.282869Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790139275, txId: 281474976710664] shutting down 2025-04-04T18:08:59.381004Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521600082436778:2076];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:59.381103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:09:00.393792Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0zvtw16nr0nvzdh06m3w19, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU0YjA3NDYtNTdiYjBiNjEtYTBmMDk4MDctNDkzNTk0OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T18:09:00.395235Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489521625852241814:2384], owner: [1:7489521625852241810:2382], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:09:00.395809Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489521625852241814:2384], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T18:09:00.396144Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489521625852241814:2384], row count: 1, finished: 1 2025-04-04T18:09:00.396186Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489521625852241814:2384], owner: [1:7489521625852241810:2382], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:09:00.398717Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790140393, txId: 281474976710666] shutting down 2025-04-04T18:09:01.495303Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jr0zvvz04qxa135zj7mp7ehs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIyMjZhYzctOGRhOTkxODAtN2M2NDQxNGUtZmZhMzNiMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-04-04T18:09:01.497112Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489521630147209163:2399], owner: [1:7489521630147209159:2397], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:09:01.497776Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489521630147209163:2399], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T18:09:01.498189Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489521630147209163:2399], row count: 1, finished: 1 2025-04-04T18:09:01.498233Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489521630147209163:2399], owner: [1:7489521630147209159:2397], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:09:01.501707Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790141494, txId: 281474976710668] shutting down 2025-04-04T18:09:01.658128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jr0zvw251eta3j25b9vgw8cj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmIwODExYjQtMmI0ZDdmMGItNmE0MTI0YWQtODNkNmJkNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-04-04T18:09:01.660063Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7489521630147209195:2408], owner: [1:7489521630147209191:2406], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:09:01.660950Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7489521630147209195:2408], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-04-04T18:09:01.661490Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7489521630147209195:2408], row count: 1, finished: 1 2025-04-04T18:09:01.661539Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7489521630147209195:2408], owner: [1:7489521630147209191:2406], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-04-04T18:09:01.664725Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790141657, txId: 281474976710670] shutting down >> TPersQueueTest::DisableDeduplication [GOOD] |96.2%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_drain.py::TestHive::test_drain_on_stop |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: 2025-04-04T18:08:51.661643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521587399534654:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:51.662762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T18:08:51.706772Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489521586510338696:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:51.711852Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-04-04T18:08:51.861584Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-04-04T18:08:51.869525Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000326/r3tmp/tmpbTi8Bx/pdisk_1.dat 2025-04-04T18:08:52.147219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:52.148420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:52.149267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:52.149350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:52.153203Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-04-04T18:08:52.153405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:52.155275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:52.176023Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30395, node 1 2025-04-04T18:08:52.220079Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:52.220119Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-04-04T18:08:52.357068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/o32v/000326/r3tmp/yandexyhLNAb.tmp 2025-04-04T18:08:52.357110Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/o32v/000326/r3tmp/yandexyhLNAb.tmp 2025-04-04T18:08:52.357464Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/o32v/000326/r3tmp/yandexyhLNAb.tmp 2025-04-04T18:08:52.357643Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:08:52.570453Z INFO: TTestServer started on Port 27057 GrpcPort 30395 TClient is connected to server localhost:27057 PQClient connected to localhost:30395 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:52.856664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-04-04T18:08:52.931048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-04-04T18:08:54.426840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521600284437637:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:54.426840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521600284437629:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:54.426977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:54.432729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-04-04T18:08:54.435937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521600284437677:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:54.436029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:54.449373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521600284437643:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-04-04T18:08:54.519874Z node 1 :TX_PROXY ERROR: Actor# [1:7489521600284437726:2759] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:54.773333Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7489521600284437745:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:54.773342Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7489521599395240914:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-04-04T18:08:54.773690Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjYyMGJlYy0xYTMyNTI2Yy1lMzUzNTkxLTljMGQ3ZWUx, ActorId: [2:7489521599395240875:2311], ActorState: ExecuteState, TraceId: 01jr0zvn7segtyb8y8y9b07frw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T18:08:54.779642Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T18:08:54.780006Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM1ZGUxZmEtZWUzZTVjZGEtZmJkZWQzOTItMmY0NDM2YjI=, ActorId: [1:7489521600284437611:2336], ActorState: ExecuteState, TraceId: 01jr0zvn4d3b93s2hskcpw2j42, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-04-04T18:08:54.780384Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-04-04T18:08:54.848054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T18:08:54.926764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T18:08:55.019142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-04-04T18:08:55.489442Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jr0zvnsnbzj4yfpqajwz6s9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYzNTU1NTktYjg3MTZjZmYtYjQ5MjA3ZWEtYTkxZGViZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7489521604579405490:3102] 2025-04-04T18:08:56.660221Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521587399534654:2078];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:56.660374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:56.706285Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7489521586510338696:2146];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:56.706362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClus ... partition 1(assignId:2), count# 3, size# 530, partitionsAsked# 1, maxTimeLag# 0ms 2025-04-04T18:09:02.911720Z node 2 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T18:09:02.911795Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-04-04T18:09:02.911789Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2)maxCount 3 maxSize 530 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid c57fbd52-bbf38ea3-192ea793-51db6b30 2025-04-04T18:09:02.911864Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 2 messageNo: 0 requestId: cookie: 0 2025-04-04T18:09:02.912132Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-04-04T18:09:02.912157Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-04-04T18:09:02.912232Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 1 user debug offset 0 count 3 size 530 endOffset 3 max time lag 0ms effective offset 0 2025-04-04T18:09:02.912257Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-04-04T18:09:02.912301Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-04-04T18:09:02.912318Z node 2 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-04-04T18:09:02.912397Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 0 2025-04-04T18:09:02.912391Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1743790142776 CreateTimestampMS: 1743790142774 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1743790142820 CreateTimestampMS: 1743790142775 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1743790142824 CreateTimestampMS: 1743790142775 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-04T18:09:02.912392Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1743790142770 CreateTimestampMS: 1743790142766 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1743790142771 CreateTimestampMS: 1743790142766 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1743790142775 CreateTimestampMS: 1743790142766 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-04T18:09:02.913723Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) wait data in partition inited, cookie 1 from offset3 2025-04-04T18:09:02.913724Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-04-04T18:09:02.913773Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid df76cce1-540be49b-77914513-612e72ae has messages 1 2025-04-04T18:09:02.913774Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3) EndOffset 3 ReadOffset 3 ReadGuid 361b3011-de343d32-86a1bfe9-3d5daf62 has messages 1 2025-04-04T18:09:02.913898Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 read done: guid# 361b3011-de343d32-86a1bfe9-3d5daf62, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:3), size# 496 2025-04-04T18:09:02.913954Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 response to read: guid# 361b3011-de343d32-86a1bfe9-3d5daf62 2025-04-04T18:09:02.914088Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1743790142786 CreateTimestampMS: 1743790142783 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 2 WriteTimestampMS: 1743790142787 CreateTimestampMS: 1743790142784 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1743790142791 CreateTimestampMS: 1743790142784 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2025-04-04T18:09:02.914194Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 Process answer. Aval parts: 0 2025-04-04T18:09:02.914196Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) wait data in partition inited, cookie 1 from offset3 2025-04-04T18:09:02.914219Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 3 ReadOffset 3 ReadGuid c57fbd52-bbf38ea3-192ea793-51db6b30 has messages 1 2025-04-04T18:09:02.914326Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 read done: guid# df76cce1-540be49b-77914513-612e72ae, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1), size# 496 2025-04-04T18:09:02.914349Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 response to read: guid# df76cce1-540be49b-77914513-612e72ae 2025-04-04T18:09:02.914467Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 Process answer. Aval parts: 0 2025-04-04T18:09:02.914541Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 read done: guid# c57fbd52-bbf38ea3-192ea793-51db6b30, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 496 2025-04-04T18:09:02.914554Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 response to read: guid# c57fbd52-bbf38ea3-192ea793-51db6b30 2025-04-04T18:09:02.914654Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 Process answer. 
Aval parts: 0 Got data event with total 3 messages, current total messages: 3 2025-04-04T18:09:02.915649Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 grpc read done: success# 1, data# { read_request { bytes_size: 496 } } 2025-04-04T18:09:02.915770Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 got read request: guid# 7f159f7c-79a7d623-cf1b0bd3-a84fe8b1 Got data event with total 3 messages, current total messages: 6 2025-04-04T18:09:02.916209Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 grpc read done: success# 1, data# { read_request { bytes_size: 496 } } 2025-04-04T18:09:02.916277Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 got read request: guid# 24b6592c-74d5618e-87251cbc-9efecc98 Got data event with total 3 messages, current total messages: 9 2025-04-04T18:09:02.917898Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 grpc read done: success# 0, data# { } 2025-04-04T18:09:02.917925Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 grpc read failed 2025-04-04T18:09:02.917954Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 closed 2025-04-04T18:09:02.918393Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_12955563111585884778_v1 is DEAD 2025-04-04T18:09:02.918776Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_12955563111585884778_v1 2025-04-04T18:09:02.918823Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [1:7489521634644177572:2535] disconnected; active server actors: 1 2025-04-04T18:09:02.918848Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489521634644177578:2539] destroyed 2025-04-04T18:09:02.918879Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_12955563111585884778_v1 2025-04-04T18:09:02.918855Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [1:7489521634644177572:2535] client debug disconnected session shared/debug_1_1_12955563111585884778_v1 2025-04-04T18:09:02.918897Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489521634644177579:2540] destroyed 2025-04-04T18:09:02.918896Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_12955563111585884778_v1 2025-04-04T18:09:02.918911Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_12955563111585884778_v1 2025-04-04T18:09:02.918917Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_12955563111585884778_v1 2025-04-04T18:09:02.918925Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7489521634644177577:2538] destroyed 2025-04-04T18:09:02.918932Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_12955563111585884778_v1 |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> 
test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::DropViewIfExists [GOOD] |96.5%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_tpch.py::TestTpchS1::test_tpch[21] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TCreateAndDropViewTest::DropViewIfExists [GOOD] Test command err: Trying to start YDB, gRPC: 15190, MsgBus: 27369 2025-04-04T18:08:58.240328Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521617754030067:2060];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:58.240447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000171/r3tmp/tmpLBfznm/pdisk_1.dat 2025-04-04T18:08:58.565055Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:58.583636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:58.583763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:58.586770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15190, node 1 2025-04-04T18:08:58.642845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:58.642874Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:58.642889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:58.643050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27369 TClient is connected to server localhost:27369 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:08:59.120670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:09:00.737977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521626343965326:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:09:00.737987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521626343965334:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:09:00.738077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:09:00.741508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-04-04T18:09:00.750493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521626343965340:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-04-04T18:09:00.818669Z node 1 :TX_PROXY ERROR: Actor# [1:7489521626343965391:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:09:00.981008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-04-04T18:09:01.108720Z node 1 :TX_PROXY ERROR: Actor# [1:7489521630638932809:2407] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/table\', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:09:01.108802Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710661, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/table', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T18:09:01.109146Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmVhMDcwNWMtZWUwN2E3MzAtYTgzYjY2NDktOTIxOTgyNjY=, ActorId: [1:7489521626343965314:2328], ActorState: ExecuteState, TraceId: 01jr0zvvn6ef3aj5jvnmk263z3, Create QueryResponse for error on request, msg: 2025-04-04T18:09:01.130632Z node 1 :TX_PROXY ERROR: Actor# [1:7489521630638932823:2415] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/table\', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:09:01.130693Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710663, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/table', error: path is not a view (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeTable, state: EPathStateNoChanges) 2025-04-04T18:09:01.130965Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmVhMDcwNWMtZWUwN2E3MzAtYTgzYjY2NDktOTIxOTgyNjY=, ActorId: [1:7489521626343965314:2328], ActorState: ExecuteState, TraceId: 01jr0zvvnxay5eravafn7q1122, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 24565, MsgBus: 24868 2025-04-04T18:09:01.848158Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7489521627150125207:2059];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:09:01.848240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000171/r3tmp/tmpuW7hx9/pdisk_1.dat 2025-04-04T18:09:01.950390Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:09:01.994078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:09:01.994156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:09:01.996017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 24565, node 2 2025-04-04T18:09:02.047737Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:09:02.047764Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:09:02.047773Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:09:02.047897Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24868 TClient is connected to server localhost:24868 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-04-04T18:09:02.478420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:09:04.825771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489521640035027752:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:09:04.825808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7489521640035027760:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:09:04.825830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:09:04.829204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-04-04T18:09:04.838117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7489521640035027766:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T18:09:04.914374Z node 2 :TX_PROXY ERROR: Actor# [2:7489521640035027819:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:09:04.986022Z node 2 :TX_PROXY ERROR: Actor# [2:7489521640035027895:2381] txid# 281474976715662, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } |96.9%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-04-04T18:08:33.022607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:08:33.023107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:08:33.023192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00026d/r3tmp/tmpV6xT3Z/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16858, node 1 TClient is connected to server localhost:14479 2025-04-04T18:08:33.668954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:33.727290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:33.737427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:33.737510Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:33.737573Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:33.738108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:08:33.775185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:33.775415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:33.787482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:33.919448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-04-04T18:08:34.034829Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:687:2579], Recipient [1:740:2624]: NKikimr::TEvTablet::TEvBoot 2025-04-04T18:08:34.036677Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:687:2579], Recipient [1:740:2624]: NKikimr::TEvTablet::TEvRestored 2025-04-04T18:08:34.037099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:740:2624];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T18:08:34.066002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:740:2624];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T18:08:34.066677Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-04-04T18:08:34.077207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:34.077517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:34.077877Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:34.078032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:34.078171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:34.078708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:34.078997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:34.079140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:34.079307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:34.079442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:34.079559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:34.079764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:740:2624];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:34.102146Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:687:2579], Recipient [1:740:2624]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-04-04T18:08:34.104431Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-04-04T18:08:34.105634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T18:08:34.105728Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T18:08:34.105932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:34.106124Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T18:08:34.106220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T18:08:34.106303Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T18:08:34.106411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T18:08:34.106481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T18:08:34.106534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T18:08:34.106565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T18:08:34.106780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:34.106866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T18:08:34.106926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T18:08:34.106967Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T18:08:34.107086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T18:08:34.107159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T18:08:34.107202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T18:08:34.107230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T18:08:34.107330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T18:08:34.107380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T18:08:34.107409Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T18:08:34.107469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T18:08:34.107519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T18:08:34.107555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T18:08:34.108099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=82; 2025-04-04T18:08:34.108207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-04-04T18:08:34.108321Z node 1 :TX_COLUMNSHARD INFO: tablet_id ... ;raw_bytes=96858239;count=2;records=82491} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=18276048;raw_bytes=616499685;count=6;records=517509} inactive {blob_bytes=26334856;raw_bytes=883196628;count=17;records=748294} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut tId=72075186224037888;c=1;:74/0:size=3447;count=18;;1:size=60872;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445376;count=1;;10:size=1445528;count=1;;11:size=4137168;count=5;;12:size=1445744;count=1;;13:size=1445360;count=1;;14:size=1445608;count=1;;15:size=1445920;count=1;;16:size=1445360;count=1;;17:size=808584;count=1;;18:size=2314192;count=2;;19:size=1402896;count=1;;20:size=1403096;count=1;;21:size=1402840;count=1;;22:size=1445400;count=1;;23:size=1987408;count=3;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72075186224037888;c=0;:74/0:size=3516;count=19;;1:size=60872;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445928;count=1;;8:size=1445448;count=1;;9:size=1445376;count=1;;10:size=1445528;count=1;;11:size=4137168;count=5;;12:size=1445744;count=1;;13:size=1445360;count=1;;14:size=1445608;count=1;;15:size=1445920;count=1;;16:size=1445360;count=1;;17:size=808584;count=1;;18:size=2314192;count=2;;19:size=1402896;count=1;;20:size=1403096;count=1;;21:size=1402840;count=1;;22:size=1445400;count=1;;23:size=1987408;count=3;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-04-04T18:09:04.181833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-04T18:09:04.181933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=with_appended.cpp:65;portions=26,27,28,;task_id=e373e59a-117f11f0-9814eff3-7b756ae7; 2025-04-04T18:09:04.182332Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:26;path_id:3;records_count:56040;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1974264;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-04T18:09:04.182593Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/352828.000000s;; 2025-04-04T18:09:04.182729Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:27;path_id:3;records_count:56040;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1974144;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-04T18:09:04.182849Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/408868.000000s;; 2025-04-04T18:09:04.182924Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:28;path_id:3;records_count:28016;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:987568;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-04-04T18:09:04.182999Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/436884.000000s;; 2025-04-04T18:09:04.183073Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::e373e59a-117f11f0-9814eff3-7b756ae7; 2025-04-04T18:09:04.183170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21193880;portions_count:28;); 2025-04-04T18:09:04.183253Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:09:04.183385Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:09:04.183506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=1; 2025-04-04T18:09:04.183597Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-04-04T18:09:04.183655Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:09:04.183722Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:09:04.183781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:09:04.183877Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.600000s; 2025-04-04T18:09:04.183957Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:09:04.184185Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 3 VERIFY failed (2025-04-04T18:09:04.184432Z): tablet_id=72075186224037888;task_id=e373e59a-117f11f0-9814eff3-7b756ae7;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x17DE1949) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 
(0x17DD290B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x190DCC96) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4340 (0x451E2464) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x2B3DC2FD) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+810 (0x1D8D7A8A) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3491 (0x1D8C8943) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3435 (0x1D73903B) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+1257 (0x1D6DD9A9) NActors::IActor::Receive(TAutoPtr&)+253 (0x190BDA0D) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x3017AE85) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12610 (0x30173702) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x3017DA74) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x30334EA4) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x30333FC3) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x3032C223) NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x3032BDFB) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+3620 (0x179CA414) std::__y1::__function::__func, void ()>::operator()()+280 (0x179DACE8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x18291E16) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x18262B19) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1094 (0x179D9CD6) NUnitTest::TTestFactory::Execute()+2438 (0x182643E6) NUnitTest::RunMain(int, char**)+5213 (0x1828C74D) ??+0 (0x7F53210F7D90) __libc_start_main+128 (0x7F53210F7E40) _start+41 (0x15916029) >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_workload.py::TestYdbLogWorkload::test[column] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] |97.4%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] >> test_workload.py::TestYdbLogWorkload::test[column] 
[FAIL] >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] Test command err: 2025-04-04T18:08:58.997723Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-04-04T18:08:58.997820Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-04-04T18:08:58.997914Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-04T18:08:58.997947Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-04-04T18:08:58.998003Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-04-04T18:08:58.998109Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-04-04T18:08:59.001304Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } 
Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-04-04T18:08:59.018360Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 
1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { 
NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: ... 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35420026 2025-04-04T18:09:16.084197Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35420026 2025-04-04T18:09:16.084345Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35420026 
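The StateUpdater responses above each carry one PDiskStateInfo entry per disk, with AvailableSize and TotalSize (here 107374182400 of 214748364800 bytes, i.e. 50% occupancy). A minimal sketch of folding such per-node responses into a single (node, pdisk) view follows; TPDiskInfo, the response list, and the field names are illustrative stand-ins for the real whiteboard protobufs, not the YDB API:

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

// Illustrative aggregation of per-node whiteboard responses into one map
// keyed by (node id, pdisk id); the types are stand-ins for the protobufs.
struct TPDiskInfo {
    uint32_t PDiskId = 0;
    uint64_t AvailableSize = 0;
    uint64_t TotalSize = 0;
    std::string State;
};

int main() {
    std::map<std::pair<uint32_t, uint32_t>, TPDiskInfo> view;
    // One response per node, mirroring the sizes seen in the log above.
    std::vector<std::pair<uint32_t, std::vector<TPDiskInfo>>> responses = {
        {2, {{8, 107374182400ull, 214748364800ull, "Normal"},
             {9, 107374182400ull, 214748364800ull, "Normal"}}},
    };
    for (const auto& [nodeId, disks] : responses)
        for (const auto& d : disks)
            view[{nodeId, d.PDiskId}] = d;
    for (const auto& [key, d] : view) {
        double occupancy = 1.0 - double(d.AvailableSize) / double(d.TotalSize);
        std::cout << "node " << key.first << " pdisk " << key.second
                  << " state=" << d.State << " occupancy=" << occupancy << "\n";
    }
}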
2025-04-04T18:09:16.084405Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-04-04T18:09:16.084815Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 2:11, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-04T18:09:16.084878Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 8:34, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-04T18:09:16.084922Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 5:21, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-04-04T18:09:16.084968Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-04T18:09:16.085215Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 133 2025-04-04T18:09:16.085248Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-04T18:09:16.095543Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-04-04T18:09:16.095621Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-04T18:09:16.095851Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 134 2025-04-04T18:09:16.095881Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-04-04T18:09:16.106285Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-04-04T18:09:16.106363Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-04-04T18:09:16.106479Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2 2025-04-04T18:09:16.106509Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-04-04T18:09:16.106652Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-04-04T18:09:16.106689Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-04-04T18:09:16.106715Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-04-04T18:09:16.106736Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-04-04T18:09:16.106767Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-04-04T18:09:16.106812Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-04-04T18:09:16.106869Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-04-04T18:09:16.106908Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-04-04T18:09:16.107267Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 135 2025-04-04T18:09:16.107309Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:11 
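Two mechanisms are visible in the entry above: a debounce, where a PDisk's status is only changed after the same observed state has repeated for StateLimit consecutive polls (reason# ... StateCounter# 60 StateLimit# 60), and a retry loop that resends the whole status-change batch while the BSC response still contains an empty Status (attempt# 1, attempt# 2, then Success: true, cookie# 135). A sketch of the counter-based debounce under simplified assumptions; TPDiskWatch, Observe, and kStateLimit are illustrative names, not the actual YDB Sentinel API:

#include <cstdint>
#include <iostream>

// Illustrative sketch of Sentinel-style state debouncing (not the real types).
enum class EPDiskState { Normal, Error };

struct TPDiskWatch {
    EPDiskState PrevState = EPDiskState::Normal;
    uint32_t StateCounter = 0;  // consecutive polls with the same observed state
};

constexpr uint32_t kStateLimit = 60;  // threshold seen in the log: StateLimit# 60

// Returns true once the observed state has persisted long enough to act on.
bool Observe(TPDiskWatch& watch, EPDiskState observed) {
    if (observed == watch.PrevState) {
        ++watch.StateCounter;
    } else {
        watch.PrevState = observed;
        watch.StateCounter = 1;  // state flipped, start counting again
    }
    return watch.StateCounter >= kStateLimit;
}

int main() {
    TPDiskWatch watch;
    for (int poll = 0; poll < 60; ++poll) {
        if (Observe(watch, EPDiskState::Normal)) {
            std::cout << "poll " << poll << ": threshold reached, request status change\n";
        }
    }
}

The debounce explains why the same "PrevState# Normal State# Normal" line can justify a status change: it is the persistence counter hitting the limit, not a state transition, that triggers the request.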
2025-04-04T18:09:16.107342Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 5:21 2025-04-04T18:09:16.107364Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 8:34 2025-04-04T18:09:16.107814Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.108223Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.108617Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.108775Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.108922Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.109068Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.109184Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.109327Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 35440026 2025-04-04T18:09:16.109388Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s |97.5%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL] |97.6%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |97.8%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [FAIL] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] >> KqpStats::SysViewClientLost [FAIL] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |97.9%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL] Test command err: Trying to start YDB, gRPC: 21419, MsgBus: 29425 2025-04-04T18:08:49.770512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7489521578829443154:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:49.770759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/00034a/r3tmp/tmpwOdVXh/pdisk_1.dat 2025-04-04T18:08:50.145926Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21419, node 1 2025-04-04T18:08:50.194642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:50.194700Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:50.194709Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:50.194822Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:08:50.196492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:50.196680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:50.199002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29425 TClient is connected to server localhost:29425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
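The test harness above blocks on WaitRootIsUp before issuing queries: it repeats the TClient::Ls("Root") round-trip until the scheme answers with SUCCESS. A minimal sketch of that readiness poll, assuming a simple deadline loop (the real WaitRootIsUp may differ; checkRoot here stands in for the Ls call):

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Sketch of a WaitRootIsUp-style readiness poll: keep asking until the
// scheme reports the root path, or give up at the deadline.
bool WaitRootIsUp(const std::function<bool()>& checkRoot,
                  std::chrono::milliseconds timeout) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (checkRoot()) return true;  // got SchemeStatus SUCCESS
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
    }
    return false;
}

int main() {
    int calls = 0;
    bool up = WaitRootIsUp([&] { return ++calls >= 3; },  // succeed on the 3rd Ls
                           std::chrono::seconds(5));
    std::cout << (up ? "WaitRootIsUp 'Root' success." : "root never came up") << "\n";
}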
2025-04-04T18:08:50.669804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:50.689472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:50.813660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:50.958578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:51.023303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:52.589803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521591714346819:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:52.589911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:52.819821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-04-04T18:08:52.849673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-04-04T18:08:52.909513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-04-04T18:08:52.939390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-04-04T18:08:52.967565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-04-04T18:08:52.998181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-04-04T18:08:53.040619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521596009314626:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:53.040689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:53.040778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7489521596009314631:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:53.044632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-04-04T18:08:53.053357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7489521596009314633:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-04-04T18:08:53.142075Z node 1 :TX_PROXY ERROR: Actor# [1:7489521596009314687:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:53.940126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-04-04T18:08:54.768470Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7489521578829443154:2062];send_to=[0:7307199536658146131:7762515]; 2025-04-04T18:08:54.768543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-04-04T18:08:57.667890Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 70000 DataSize: 11402320 IndexSize: 55010 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 1 2025-04-04T18:09:05.142063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-04-04T18:09:05.142139Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:09:08.067358Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 280000 DataSize: 45769800 IndexSize: 220924 ByKeyFilterSize: 0 RowCountHistogram: 2 DataSizeHistogram: 4 2025-04-04T18:09:19.058274Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 405000 DataSize: 67506860 IndexSize: 326228 ByKeyFilterSize: 0 RowCountHistogram: 4 DataSizeHistogram: 6 2025-04-04T18:09:20.341386Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790160332, txId: 281474976710672] shutting down 2025-04-04T18:09:20.395425Z node 1 :RPC_REQUEST WARN: Client lost 2025-04-04T18:09:21.566786Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790161558, txId: 281474976710674] shutting down 2025-04-04T18:09:22.758141Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790162751, txId: 281474976710676] shutting down 2025-04-04T18:09:23.900486Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790163891, txId: 281474976710678] shutting down 2025-04-04T18:09:25.109759Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790165103, txId: 281474976710680] shutting down 2025-04-04T18:09:26.303726Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790166295, txId: 281474976710682] shutting down 2025-04-04T18:09:27.484210Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790167475, txId: 281474976710684] shutting down 2025-04-04T18:09:28.649184Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790168643, txId: 281474976710686] shutting down 2025-04-04T18:09:29.054109Z node 1 :TABLET_STATS_BUILDER ERROR: Stats at
datashard 72075186224037919, for tableId 16 don't have enough keys: RowCount: 475000 DataSize: 79069700 IndexSize: 382122 ByKeyFilterSize: 0 RowCountHistogram: 4 DataSizeHistogram: 7 2025-04-04T18:09:29.806272Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790169795, txId: 281474976710688] shutting down 2025-04-04T18:09:30.934812Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790170929, txId: 281474976710690] shutting down 2025-04-04T18:09:32.073483Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743790172068, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x18B8D15F 1. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x18273E98 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x18285F67 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x18285F67 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x18285F67 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x18285F67 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x18285F67 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x18BC2E65 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x18BC2E65 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x18BC2E65 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18B93B88 11. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x18285175 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18B95455 13. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x18BBD79C 14. ??:0: ?? @ 0x7F7D2662CD8F 15. ??:0: ?? @ 0x7F7D2662CE3F 16. ??:0: ?? @ 0x15C21028 |98.1%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] |98.3%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] Test command err: 2025-04-04T18:09:32.854788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3099:2432], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.856333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.856847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.858984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3082:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.859187Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3114:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.859688Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3102:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.860363Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3105:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.860457Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.860591Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:3120:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.860771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.860921Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.861395Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.861427Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.861670Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.862019Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.862144Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3108:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.862309Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.862367Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.862526Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3111:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.862610Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.863311Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.863728Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.863757Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.863786Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:32.865331Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:3117:2375], Scheduled retry for error: { <main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:32.866261Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:32.866788Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T18:09:33.190950Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:09:33.349705Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T18:09:33.362495Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T18:09:33.781751Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 24001, node 1 TClient is connected to server localhost:61676 2025-04-04T18:09:33.991552Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:09:33.991604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:09:33.991634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:09:33.992046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
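The BSP01 lines above show PDisk bootstrap probing the device before use: it finds a magic sector left by a previous run and reformats (these tests run against an in-memory SectorMap device). A sketch of such a probe under stated assumptions; the signature value, sector size, kMagic, and HasMagicSector are invented for illustration and are not YDB's actual on-disk layout:

#include <array>
#include <cstring>
#include <iostream>
#include <vector>

// Sketch of a magic-sector probe: read the first sector and compare a
// signature to decide whether the device carries an old format image.
constexpr std::array<char, 8> kMagic = {'P', 'D', 'I', 'S', 'K', 'v', '0', '1'};
constexpr size_t kSectorSize = 4096;

bool HasMagicSector(const std::vector<char>& sector) {
    return sector.size() >= kMagic.size() &&
           std::memcmp(sector.data(), kMagic.data(), kMagic.size()) == 0;
}

int main() {
    std::vector<char> sector(kSectorSize, 0);
    std::memcpy(sector.data(), kMagic.data(), kMagic.size());  // pretend old image
    if (HasMagicSector(sector)) {
        std::cout << "Magic sector is present on disk, now going to format device\n";
        // ... a real implementation would rewrite the format header here ...
    }
}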
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.939800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.939859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.942061Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:2693:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.942173Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:2696:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.942655Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:2687:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.943218Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.943286Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.943900Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:2681:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.944016Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:2684:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.944227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3119:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.944389Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.944481Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.944517Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.944962Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.945011Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.945049Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.945129Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:2690:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.945631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.945683Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.945749Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.945943Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:2699:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:09:31.946272Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.946331Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.946719Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:09:31.946761Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:09:31.947113Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T18:09:32.351851Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:09:32.544575Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T18:09:32.557750Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T18:09:32.896652Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 9327, node 1 TClient is connected to server localhost:13978 2025-04-04T18:09:33.147928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:09:33.147977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:09:33.148005Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:09:33.148422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:10:29.587826Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:3113:2432], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.589662Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.589898Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.591760Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3109:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.591959Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3119:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.592356Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:1269:2179], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.592519Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3116:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.593070Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.593128Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.593216Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.593324Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.593712Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.593749Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.593799Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.594154Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.595410Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:3122:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.595546Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:3125:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.596541Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.596600Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.597255Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.597289Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.597437Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1303:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.598423Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:3128:2375], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:10:29.598485Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.598834Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-04-04T18:10:29.599240Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:10:29.599694Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-04-04T18:10:29.872904Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:10:30.090157Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-04-04T18:10:30.107444Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-04-04T18:10:30.590014Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 2159, node 10 TClient is connected to server localhost:8176 2025-04-04T18:10:30.951216Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:10:30.951300Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:10:30.951354Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:10:30.952066Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration |98.4%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.5%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> data_correctness.py::TestDataCorrectness::test >> ColumnShardTiers::TieringUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2025-04-04T18:08:32.601718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:08:32.602049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:08:32.602101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/000292/r3tmp/tmp4Kx0uf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24385, node 1 TClient is connected to server localhost:65322 2025-04-04T18:08:33.233145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:33.294062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:33.297952Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-04-04T18:08:33.298025Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-04-04T18:08:33.298096Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-04-04T18:08:33.298599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-04-04T18:08:33.335652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:33.336721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:33.348485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-04-04T18:08:43.734480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:679:2569], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.734700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:43.837158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-04-04T18:08:44.244208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:821:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.244424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.245514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:826:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-04-04T18:08:44.250856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-04-04T18:08:44.401624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:828:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-04-04T18:08:44.685906Z node 1 :TX_PROXY ERROR: Actor# [1:923:2733] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-04-04T18:08:45.305625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.818290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-04-04T18:08:46.676559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-04-04T18:08:47.497914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-04-04T18:08:48.006625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-04-04T18:08:49.331772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-04-04T18:08:49.632189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T18:09:05.020973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( 
SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T18:09:17.121578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715715:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-04-04T18:09:19.650033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715732:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715732 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 15 2025-04-04T18:09:19.868570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2917:4253];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T18:09:19.886301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2917:4253];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T18:09:19.886615Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037892 2025-04-04T18:09:19.896249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4253];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:09:19.896513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4253];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:09:19.896849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4253];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:09:19.896980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4253];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:09:19.897093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4253];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:09:19.897203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2917:4253];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:09:19.897340Z node ... 
];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=1; 2025-04-04T18:10:49.966182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:10:49.966229Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1054;background=cleanup;skip_reason=in_progress; 2025-04-04T18:10:49.966282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:10:49.966338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-04T18:10:49.966372Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:10:49.966458Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1735152712000 at tablet 72075186224037895 2025-04-04T18:10:49.966484Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037895;self_id=[1:2923:4257];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=1; 2025-04-04T18:10:49.966509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037895;self_id=[1:2923:4257];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:519;problem=Background activities cannot be started: no index at tablet; 2025-04-04T18:10:49.966649Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1735152712000 at tablet 72075186224037894 2025-04-04T18:10:49.966671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037894;self_id=[1:2928:4259];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=1; 2025-04-04T18:10:49.966690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037894;self_id=[1:2928:4259];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:519;problem=Background activities cannot be started: no index at tablet; 2025-04-04T18:10:50.087205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-04-04T18:10:50.087257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;fline=with_appended.cpp:65;portions=18,;task_id=23671672-118011f0-9bf674c4-5d6d56a8; 2025-04-04T18:10:50.087573Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::23671672-118011f0-9bf674c4-5d6d56a8; 
2025-04-04T18:10:50.087625Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:16;path_id:16;size:110808;portions_count:7;); 2025-04-04T18:10:50.087657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;tablet_id=72075186224037893;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:10:50.087701Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;tablet_id=72075186224037893;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:10:50.087755Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1054;background=cleanup;skip_reason=in_progress; 2025-04-04T18:10:50.087785Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:10:50.087836Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;tablet_id=72075186224037893;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999500s; 2025-04-04T18:10:50.087875Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=23671672-118011f0-9bf674c4-5d6d56a8;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:10:50.088149Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Save Batch GenStep: 1:15 Blob count: 1 2025-04-04T18:10:50.088199Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:10:50.088268Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=17;external_task_id=23671672-118011f0-9bf674c4-5d6d56a8;mem=5382;cpu=0; 2025-04-04T18:10:50.088395Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Save Batch GenStep: 1:16 Blob count: 1 2025-04-04T18:10:50.088616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-04-04T18:10:50.088665Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::239f93f8-118011f0-91cfe925-131bffdd; 2025-04-04T18:10:50.088695Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:10:50.088722Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:10:50.088761Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=1; 2025-04-04T18:10:50.088820Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=0;portions_prepared=2;drop=0;skip=0;portions_counter=2;chunks=18;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:10:50.088874Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::23b2bcee-118011f0-971b41c5-c121e0e7; 2025-04-04T18:10:50.088917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:10:50.088952Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999500s; 2025-04-04T18:10:50.088977Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=239f93f8-118011f0-91cfe925-131bffdd;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:10:50.089068Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Delete Blob DS:2181038080:[72075186224037893:1:12:14:0:1136:0] 2025-04-04T18:10:50.089105Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Delete Blob DS:2181038080:[72075186224037893:1:11:13:0:1520:0] 2025-04-04T18:10:50.089140Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:10:50.089169Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=18;external_task_id=239f93f8-118011f0-91cfe925-131bffdd;mem=504;cpu=0; 2025-04-04T18:10:50.089285Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2920:4255];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=504;external_task_id=23b2bcee-118011f0-971b41c5-c121e0e7;type=CS::TTL;priority=0;; 2025-04-04T18:10:50.089815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=storage.cpp:87;event=granule_compaction_weight;priority=(10,19999998864); 2025-04-04T18:10:50.089877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=optimizer.h:893;stop_instant=NO_VALUE_OPTIONAL;size=2656;next=;count=2;info={bytes=1136;count=1;records=1};event=start_optimization;stop_point=;main_portion=18; 2025-04-04T18:10:50.090016Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2920:4255];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:10;event=lock;process_id=CS::GENERAL::23b2e872-118011f0-89325d50-11063c40; 2025-04-04T18:10:50.090111Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2920:4255];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5382;external_task_id=23b2e872-118011f0-89325d50-11063c40;type=CS::GENERAL;priority=0;; Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 |98.6%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] Test command err: 2025-04-04T18:08:44.268758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-04-04T18:08:44.269360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-04-04T18:08:44.269449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/o32v/0003a4/r3tmp/tmpIZTt4g/pdisk_1.dat 2025-04-04T18:08:45.009489Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:595:2519], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T18:08:45.009627Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T18:08:45.009687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T18:08:45.009946Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:592:2517], Recipient [1:408:2403]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-04-04T18:08:45.009982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-04-04T18:08:45.160425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-04-04T18:08:45.162522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.166130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-04-04T18:08:45.167885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-04-04T18:08:45.168029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.168174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T18:08:45.169316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-04-04T18:08:45.170815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-04-04T18:08:45.170900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T18:08:45.170943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T18:08:45.171227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T18:08:45.171273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T18:08:45.171336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.171390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-04-04T18:08:45.171495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T18:08:45.171530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T18:08:45.173171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T18:08:45.173912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T18:08:45.173969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T18:08:45.174133Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T18:08:45.174179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T18:08:45.174271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.174345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-04-04T18:08:45.174408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T18:08:45.174493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T18:08:45.174923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T18:08:45.174970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-04-04T18:08:45.175087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-04-04T18:08:45.175120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-04-04T18:08:45.175159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.175203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-04-04T18:08:45.175276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-04-04T18:08:45.175310Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-04-04T18:08:45.175362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T18:08:45.184094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T18:08:45.184858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T18:08:45.184918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-04-04T18:08:45.186371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-04-04T18:08:45.187909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:600:2524], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:602:2525] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-04-04T18:08:45.187971Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-04-04T18:08:45.188019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-04-04T18:08:45.188203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-04-04T18:08:45.188688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:604:2527], Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T18:08:45.188738Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T18:08:45.188791Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T18:08:45.188968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:592:2517], Recipient [1:408:2403]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-04-04T18:08:45.189002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-04-04T18:08:45.189081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-04-04T18:08:45.189124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-04-04T18:08:45.189166Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-04-04T18:08:45.258948Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:408:2403]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-04-04T18:08:45.259051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-04-04T18:08:45.259088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-04-04T18:08:45.259149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-04-04T18:08:45.259215Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-04-04T18:08:45.322765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-04-04T18:08:45.326487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-04-04T18:08:45.341109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-04-04T18:08:45.417131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:404:2399], Recipient [1:408:2403]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-04-04T18:08:45.419212Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:635:2543], 
Recipient [1:408:2403]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-04-04T18:08:45.419277Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-04-04T18:08:45.419312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-04-04T18:08:45.419446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:568:2495], Recipient [1:408:2403]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-04-04T18:08:45.419481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-04-04T18:08:45.419572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-04-04T18:08:45.419757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-04-04T18:08:45.419828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationP ... # 2146435090, Sender [0:0:0], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.672921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.672940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T18:10:53.672983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T18:10:53.673005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-04T18:10:53.673065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-04-04T18:10:53.673109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0 2025-04-04T18:10:53.673136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 22: RowCount 0, DataSize 0 2025-04-04T18:10:53.673159Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-04-04T18:10:53.673203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-04T18:10:53.673265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T18:10:53.683516Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.683551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.683570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T18:10:53.756032Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 1 
2025-04-04T18:10:53.756429Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:1558:3198], Recipient [1:408:2403]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 54 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 54 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 16 Memory: 119596 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-04-04T18:10:53.756460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-04-04T18:10:53.756496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0016 2025-04-04T18:10:53.756577Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 54 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 54 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-04-04T18:10:53.756604Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-04-04T18:10:53.809369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.809427Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.809450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-04-04T18:10:53.809493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-04-04T18:10:53.809518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-04-04T18:10:53.809582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-04-04T18:10:53.809628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0 2025-04-04T18:10:53.809659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037895 followerId=0, pathId 24: RowCount 2, DataSize 54 2025-04-04T18:10:53.809680Z 
node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037895, followerId 0 2025-04-04T18:10:53.809727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-04-04T18:10:53.809806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-04-04T18:10:53.820100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435090, Sender [0:0:0], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.820142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-04-04T18:10:53.820163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-04-04T18:10:53.881173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T18:10:53.881228Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T18:10:53.881279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T18:10:53.881297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T18:10:53.902321Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T18:10:53.985123Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-04-04T18:10:54.057516Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T18:10:54.057578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T18:10:54.057633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T18:10:54.057651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-04-04T18:10:54.078526Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T18:10:54.161082Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-04-04T18:10:54.243579Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWE1OTgzOWYtZWE1ZWUxNmUtMWQ0N2FlNi0yZjEwZjhhNg==, ActorId: [1:2001:3527], ActorState: ExecuteState, TraceId: 01jr0zvhz086mzm840gany119x, Create QueryResponse for error on request, msg: 2025-04-04T18:10:54.243744Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-04-04T18:10:54.243774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime
2025-04-04T18:10:54.243870Z node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=OWE1OTgzOWYtZWE1ZWUxNmUtMWQ0N2FlNi0yZjEwZjhhNg==, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b
2025-04-04T18:10:54.244901Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:408:2403], Recipient [1:408:2403]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-04-04T18:10:54.244930Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1867A120)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3512 (0x457E3FD8)
NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+19716 (0x17E13634)
std::__y1::__function::__func, void ()>::operator()()+280 (0x17DD35C8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x186AFE26)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x18680B49)
NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1094 (0x17DD25B6)
NUnitTest::TTestFactory::Execute()+2438 (0x18682416)
NUnitTest::RunMain(int, char**)+5213 (0x186AA75D)
??+0 (0x7F583EDE2D90)
__libc_start_main+128 (0x7F583EDE2E40)
_start+41 (0x15B25029)
|98.8%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [FAIL]
|99.0%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|99.0%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[22]
>> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD]
|99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD]
|99.2%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.2%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ...
results_accumulator.log} >> data_correctness.py::TestDataCorrectness::test [FAIL] >> ttl_unavailable_s3.py::TestUnavailableS3::test >> ttl_unavailable_s3.py::TestUnavailableS3::test [FAIL] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [FAIL] |99.3%| [TM] {RESULT} ydb/tests/olap/ttl_tiering/py3test >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-04-04T18:08:52.515197Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T18:08:52.609649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T18:08:52.629784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T18:08:52.630110Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T18:08:52.638196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:52.638417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:52.638643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:52.638717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:52.638807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:52.638877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:52.638940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:52.639063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:52.639152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:52.639244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:52.639366Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:52.639542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:52.664450Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T18:08:52.664735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T18:08:52.664808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T18:08:52.665020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:52.665208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T18:08:52.665289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T18:08:52.665345Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T18:08:52.665460Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T18:08:52.665534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T18:08:52.665580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T18:08:52.665631Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T18:08:52.665825Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:52.665897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T18:08:52.665940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T18:08:52.665976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T18:08:52.666089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T18:08:52.666186Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T18:08:52.666280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T18:08:52.666317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T18:08:52.666413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T18:08:52.666455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T18:08:52.666489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T18:08:52.666540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T18:08:52.666583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T18:08:52.666637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T18:08:52.667122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=78; 2025-04-04T18:08:52.667233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-04-04T18:08:52.667327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-04-04T18:08:52.667416Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-04-04T18:08:52.667626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T18:08:52.667715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T18:08:52.667762Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T18:08:52.667990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T18:08:52.668047Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T18:08:52.668092Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T18:08:52.668275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T18:08:52.668348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T18:08:52.668389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T18:08:52.668589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T18:08:52.668633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T18:08:52.668664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T18:08:52.668805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T18:08:52.668852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T18:08:52.668927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
O: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=215310; 2025-04-04T18:14:16.902358Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:15760:17711];tablet_id=9437184;parent=[1:15721:17679];fline=manager.cpp:82;event=ask_data;request=request_id=372;1={portions_count=135};; 2025-04-04T18:14:16.904224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15721:17679];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=135;path_id=1; 2025-04-04T18:14:16.912797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15721:17679];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T18:14:17.342828Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:14:17.342919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-04-04T18:14:17.349294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6306; 2025-04-04T18:14:17.355542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5226; 2025-04-04T18:14:17.355633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6266; 2025-04-04T18:14:17.355758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=79; 2025-04-04T18:14:17.355851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=59; 2025-04-04T18:14:17.355975Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=92; 2025-04-04T18:14:17.356079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=73; 2025-04-04T18:14:17.356253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=139; 2025-04-04T18:14:17.356282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=13326; 2025-04-04T18:14:17.362354Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:14:17.362429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-04-04T18:14:17.368111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5618; 2025-04-04T18:14:17.390918Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=22727; 2025-04-04T18:14:17.391035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=35; 2025-04-04T18:14:17.391102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28; 2025-04-04T18:14:17.391140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-04T18:14:17.391174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-04-04T18:14:17.391212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-04T18:14:17.391274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2025-04-04T18:14:17.391313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-04T18:14:17.391395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-04-04T18:14:17.391433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-04-04T18:14:17.391492Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=27; 2025-04-04T18:14:17.391574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2025-04-04T18:14:17.391667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=59; 2025-04-04T18:14:17.391707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29234; 2025-04-04T18:14:17.391871Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T18:14:17.392447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15721:17679];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T18:14:17.392510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15721:17679];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T18:14:17.392572Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15721:17679];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T18:14:17.392615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T18:14:17.392781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:14:17.392850Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:14:17.393030Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=22; 2025-04-04T18:14:17.393090Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:14:17.393131Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:14:17.393172Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:17.393211Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:17.393301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:14:17.396868Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:14:17.400549Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T18:14:17.402269Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T18:14:17.402313Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
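The restart sequence above ends with EnqueueBackgroundActivities running StartCleanup over 22 portions, finding portions_prepared=0 under the current snapshot, and bailing out with background=cleanup;skip_reason=no_changes. A sketch of that gating decision, under the simplifying (hypothetical) assumption that a portion is cleanable only once it was dropped before the oldest snapshot anyone can still read:

    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Hypothetical portion descriptor: PlanStep is the snapshot at which the
    // portion was dropped; 0 means the portion is still live.
    struct TPortion {
        uint64_t PlanStep;
    };

    // Mirrors "StartCleanup;portions_count=N;portions_prepared=M" followed by
    // "background=cleanup;skip_reason=no_changes" when nothing is removable.
    bool TryStartCleanup(const std::vector<TPortion>& portions, uint64_t currentSnapshotTs) {
        std::size_t prepared = 0;
        for (const TPortion& p : portions) {
            if (p.PlanStep != 0 && p.PlanStep < currentSnapshotTs) {
                ++prepared;  // dropped before the oldest readable snapshot
            }
        }
        std::cout << "StartCleanup portions_count=" << portions.size()
                  << " portions_prepared=" << prepared << "\n";
        if (prepared == 0) {
            std::cout << "background=cleanup;skip_reason=no_changes\n";
            return false;  // nothing to do this wakeup; retry on the next one
        }
        return true;
    }

    int main() {
        // 22 live portions and current_snapshot_ts=101, as in the trace above.
        TryStartCleanup(std::vector<TPortion>(22, TPortion{0}), 101);
    }

The cheap "is there anything to do" scan up front is what lets every periodic wakeup in this log finish without scheduling real compaction or cleanup work.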
2025-04-04T18:14:17.402342Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T18:14:17.402389Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:14:17.402454Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:14:17.402676Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=22; 2025-04-04T18:14:17.402741Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:14:17.402787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:14:17.402838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:17.402875Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:17.402948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-04T18:14:17.402999Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15721:17679];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-04-04T18:08:54.284529Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T18:08:54.377101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T18:08:54.403645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T18:08:54.403996Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T18:08:54.413223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:54.413464Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:54.413927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:54.414050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:54.414181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:54.414319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:54.414435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:54.414561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:54.414720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:54.414874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:54.414999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:54.415194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:54.446690Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T18:08:54.447037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T18:08:54.447118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T18:08:54.447301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:54.447472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T18:08:54.447547Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T18:08:54.447592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T18:08:54.447687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T18:08:54.447770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T18:08:54.447815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T18:08:54.447847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T18:08:54.448035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:54.448106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T18:08:54.448149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T18:08:54.448178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T18:08:54.448275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T18:08:54.448379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T18:08:54.448447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T18:08:54.448479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T18:08:54.448553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T18:08:54.448588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T18:08:54.448618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T18:08:54.448670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
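The WARN/NOTICE pairs around this point trace a strictly sequential normalizer chain: each registered normalizer (Granules, Chunks, TablesCleaner, CleanGranuleId, ...) is initialized, run, reported finished, and only then does the tablet switch to the next one; the chain continues below with RestorePortionFromChunks and the rest. A compact sketch of that run loop, with hypothetical names standing in for the real machinery behind TTxUpdateSchema::Execute:

    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    // Hypothetical normalizer: a name plus a task that returns how many
    // on-disk chunks it had to repair.
    struct TNormalizer {
        std::string Name;
        std::function<std::size_t()> Run;
    };

    // Mirrors the normalizer_init / normalizer_finished / normalizer_switched
    // event sequence: strictly one normalizer at a time, in registration order.
    void RunNormalizers(const std::vector<TNormalizer>& chain) {
        for (std::size_t i = 0; i < chain.size(); ++i) {
            std::cout << "normalizer_init;type=" << chain[i].Name << "\n";
            std::size_t fixed = chain[i].Run();
            std::cout << "normalizer=" << chain[i].Name << ";message="
                      << fixed << " chunks found\n";
            std::cout << "normalizer_finished;description=CLASS_NAME="
                      << chain[i].Name << "\n";
            if (i + 1 < chain.size()) {
                std::cout << "normalizer_switched;description=CLASS_NAME="
                          << chain[i + 1].Name << "\n";
            }
        }
    }

    int main() {
        RunNormalizers({
            {"Granules", [] { return std::size_t(0); }},
            {"Chunks", [] { return std::size_t(0); }},
            {"TablesCleaner", [] { return std::size_t(0); }},
        });
    }

In this run every normalizer reports "0 chunks found", so the whole chain is a fast no-op pass before normalization_finished and the schema update completes.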
2025-04-04T18:08:54.448710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T18:08:54.448741Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T18:08:54.449220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-04-04T18:08:54.449348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=63; 2025-04-04T18:08:54.449432Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-04-04T18:08:54.449511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-04-04T18:08:54.449703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T18:08:54.449767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T18:08:54.449815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T18:08:54.450013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T18:08:54.450064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T18:08:54.450104Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T18:08:54.450305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T18:08:54.450368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T18:08:54.450414Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T18:08:54.450617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T18:08:54.450660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T18:08:54.450695Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T18:08:54.450819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T18:08:54.450856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T18:08:54.450948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... O: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=220357; 2025-04-04T18:14:19.960181Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:15753:17704];tablet_id=9437184;parent=[1:15714:17672];fline=manager.cpp:82;event=ask_data;request=request_id=372;1={portions_count=135};; 2025-04-04T18:14:19.961662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15714:17672];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=135;path_id=1; 2025-04-04T18:14:19.969509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15714:17672];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T18:14:20.387212Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:14:20.387288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=9; 2025-04-04T18:14:20.393268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=5913; 2025-04-04T18:14:20.399814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5448; 2025-04-04T18:14:20.399898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6563; 2025-04-04T18:14:20.400021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=73; 2025-04-04T18:14:20.400114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=55; 2025-04-04T18:14:20.400236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=90; 2025-04-04T18:14:20.400355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=77; 2025-04-04T18:14:20.400518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=127; 2025-04-04T18:14:20.400552Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=13225; 2025-04-04T18:14:20.405140Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:14:20.405205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-04-04T18:14:20.410933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5664; 2025-04-04T18:14:20.434026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=23020; 2025-04-04T18:14:20.434133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2025-04-04T18:14:20.434191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-04-04T18:14:20.434228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-04-04T18:14:20.434288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-04-04T18:14:20.434337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-04T18:14:20.434424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2025-04-04T18:14:20.434471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-04-04T18:14:20.434566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2025-04-04T18:14:20.434607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-04T18:14:20.434669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-04-04T18:14:20.434745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=45; 2025-04-04T18:14:20.434824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=50; 2025-04-04T18:14:20.434857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29611; 2025-04-04T18:14:20.434995Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T18:14:20.435501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15714:17672];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T18:14:20.435556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15714:17672];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T18:14:20.435620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T18:14:20.435663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T18:14:20.435798Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:14:20.435852Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:14:20.435999Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=22; 2025-04-04T18:14:20.436055Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:14:20.436091Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:14:20.436130Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:20.436161Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:20.436244Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:14:20.439306Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:14:20.441693Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T18:14:20.443173Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T18:14:20.443205Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
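Each TEvPeriodicWakeup in this log also runs the TTL queue, which checks a per-granule waiting interval ("event=skip_actualization;waiting=...s") before declaring background=ttl;skip_reason=no_changes. A small sketch of that time gate, assuming a hypothetical TTtlActualizer wrapper over a steady clock (the real interval bookkeeping in granule.cpp may differ):

    #include <chrono>
    #include <iostream>

    // Throttle sketch: actualization only starts once the waiting interval
    // since the last run has elapsed; otherwise the wakeup logs a skip.
    class TTtlActualizer {
    public:
        using TClock = std::chrono::steady_clock;

        explicit TTtlActualizer(std::chrono::milliseconds waitInterval)
            : WaitInterval(waitInterval), LastRun(TClock::now()) {}

        // Called from each periodic wakeup (TEvPeriodicWakeup in the trace).
        bool TryActualize() {
            auto elapsed = TClock::now() - LastRun;
            if (elapsed < WaitInterval) {
                auto left = std::chrono::duration_cast<std::chrono::milliseconds>(
                    WaitInterval - elapsed);
                std::cout << "skip_actualization;waiting="
                          << left.count() / 1000.0 << "s\n";
                return false;  // leads to background=ttl;skip_reason=no_changes
            }
            LastRun = TClock::now();
            return true;  // interval elapsed: start the TTL pass
        }

    private:
        std::chrono::milliseconds WaitInterval;
        TClock::time_point LastRun;
    };

    int main() {
        TTtlActualizer ttl(std::chrono::milliseconds(1000));
        ttl.TryActualize();  // right after start: prints a skip_actualization line
    }

This matches the "waiting=0.999000s" and "waiting=1.000000s" values above: the shard has just restarted, so almost the full one-second interval is still left and both wakeups skip the TTL pass.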
2025-04-04T18:14:20.443225Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T18:14:20.443271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:14:20.443324Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:14:20.443371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=22; 2025-04-04T18:14:20.443427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:14:20.443467Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:14:20.443506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:20.443561Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:14:20.443626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-04-04T18:14:20.443669Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15714:17672];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: 
[1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T18:08:57.250348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T18:08:57.250421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T18:08:57.250535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T18:08:57.250583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T18:08:57.251397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T18:08:57.251439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T18:08:57.251514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T18:08:57.251580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T18:08:57.251823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T18:08:57.317739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T18:08:57.317806Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T18:08:57.335664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T18:08:57.336092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T18:08:57.336351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T18:08:57.352409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T18:08:57.352647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T18:08:57.353239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T18:08:57.354607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T18:08:57.367435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T18:08:57.368969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T18:08:57.369059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T18:08:57.369845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T18:08:57.369913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T18:08:57.370047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T18:08:57.370203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T18:08:57.379689Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T18:08:57.512076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T18:08:57.512353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.512616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T18:08:57.512891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T18:08:57.512962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.515920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T18:08:57.516109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T18:08:57.516355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.516432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T18:08:57.516472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T18:08:57.516530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T18:08:57.519065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.519171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T18:08:57.519244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T18:08:57.521548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.521611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.521659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T18:08:57.521741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.525946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T18:08:57.528466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T18:08:57.528685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T18:08:57.529758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T18:08:57.529918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T18:08:57.529976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T18:08:57.530421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T18:08:57.530488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T18:08:57.530680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T18:08:57.530774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T18:08:57.533484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T18:08:57.533547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T18:08:57.533755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T18:08:57.533802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T18:08:57.534345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.534401Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T18:08:57.534529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T18:08:57.534598Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.534652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T18:08:57.534688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.534731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T18:08:57.534775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.534819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... tionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } 
InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T18:14:39.612433Z node 163 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T18:14:39.612589Z node 163 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 172us result status StatusSuccess 2025-04-04T18:14:39.613080Z node 163 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T18:14:39.623758Z node 163 :CHANGE_EXCHANGE DEBUG: 
[TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1179:2962] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T18:14:39.623829Z node 163 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][163:1148:2962] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-04-04T18:14:39.623928Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1179:2962] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743790479592277 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743790479592277 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743790479592277 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-04-04T18:14:39.625309Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1179:2962] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-04-04T18:14:39.625376Z node 163 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][163:1148:2962] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 }
>> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
>> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD]
Test command err:
=========== RUN: Trace ===========
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:130:2058] recipient: [1:107:2139] Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:132:2058] recipient: [1:108:2140] 2025-04-04T18:08:57.645034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-04-04T18:08:57.645120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T18:08:57.645233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-04-04T18:08:57.645279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-04-04T18:08:57.645328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-04-04T18:08:57.645368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-04-04T18:08:57.645438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-04-04T18:08:57.645513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-04-04T18:08:57.645769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-04-04T18:08:57.712999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-04-04T18:08:57.713060Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2152] sender: [1:172:2058] recipient: [1:15:2062] 2025-04-04T18:08:57.727074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-04-04T18:08:57.727549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-04-04T18:08:57.727730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-04-04T18:08:57.735041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-04-04T18:08:57.735252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-04-04T18:08:57.735739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-04-04T18:08:57.736982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T18:08:57.740142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-04-04T18:08:57.741266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T18:08:57.741380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T18:08:57.741568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-04-04T18:08:57.741605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T18:08:57.741640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-04-04T18:08:57.741742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:211:2058] recipient: [1:209:2210] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: 
[1:211:2058] recipient: [1:209:2210] Leader for TabletID 72057594037968897 is [1:215:2214] sender: [1:216:2058] recipient: [1:209:2210] 2025-04-04T18:08:57.748448Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:236:2058] recipient: [1:15:2062] 2025-04-04T18:08:57.848018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-04-04T18:08:57.848236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.848486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-04-04T18:08:57.848712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-04-04T18:08:57.848768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.851221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-04-04T18:08:57.851377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-04-04T18:08:57.851570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.851626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-04-04T18:08:57.851658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-04-04T18:08:57.851705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-04-04T18:08:57.853524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.853577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-04-04T18:08:57.853607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-04-04T18:08:57.855454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.855493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.855527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-04-04T18:08:57.855575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.858743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-04-04T18:08:57.860371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-04-04T18:08:57.860535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:128:2153] sender: [1:251:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-04-04T18:08:57.861381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-04-04T18:08:57.861546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 128 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T18:08:57.861591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T18:08:57.861830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-04-04T18:08:57.861878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-04-04T18:08:57.862030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-04-04T18:08:57.862115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-04-04T18:08:57.864257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-04-04T18:08:57.864334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-04-04T18:08:57.864510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-04-04T18:08:57.864549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-04-04T18:08:57.864948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-04-04T18:08:57.864990Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-04-04T18:08:57.865100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T18:08:57.865153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.865199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-04-04T18:08:57.865227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.865261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-04-04T18:08:57.865300Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-04-04T18:08:57.865334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id ... nSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 
16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T18:15:50.412764Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-04-04T18:15:50.412952Z node 202 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 212us result status StatusSuccess 2025-04-04T18:15:50.413516Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-04-04T18:15:50.424379Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1116:2881] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-04-04T18:15:50.424482Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1062:2881] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-04-04T18:15:50.424613Z node 202 :CHANGE_EXCHANGE DEBUG: 
[TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1116:2881] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743790550404079 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743790550404079 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743790550404079 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-04-04T18:15:50.426577Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1116:2881] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-04-04T18:15:50.426650Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1062:2881] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
|99.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log}
|99.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log}
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD]
Test command err:
2025-04-04T18:08:22.007424Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T18:08:22.068662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T18:08:22.094193Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T18:08:22.094519Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T18:08:22.102967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-04-04T18:08:22.103216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:22.103459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:22.103597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:22.103727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:22.103842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:22.103960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:22.104106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:22.104296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:22.104445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:22.104564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:22.104674Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:22.135058Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T18:08:22.135309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T18:08:22.135372Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T18:08:22.135551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:22.135716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T18:08:22.135793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T18:08:22.135840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T18:08:22.135945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T18:08:22.136021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T18:08:22.136067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T18:08:22.136117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T18:08:22.136330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:22.136406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T18:08:22.136451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T18:08:22.136482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T18:08:22.136564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T18:08:22.136639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T18:08:22.136716Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T18:08:22.136746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T18:08:22.136819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T18:08:22.136854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T18:08:22.136900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T18:08:22.136966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-04-04T18:08:22.137005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T18:08:22.137035Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T18:08:22.137442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-04-04T18:08:22.137556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2025-04-04T18:08:22.137629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-04-04T18:08:22.137722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-04-04T18:08:22.137908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T18:08:22.137975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T18:08:22.138009Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T18:08:22.138205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T18:08:22.138299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T18:08:22.138339Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T18:08:22.138505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T18:08:22.138557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T18:08:22.138592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T18:08:22.138808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T18:08:22.138853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T18:08:22.138887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T18:08:22.139007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T18:08:22.139044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T18:08:22.139118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... 
X_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=296903; 2025-04-04T18:16:48.447609Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13985:15928];tablet_id=9437184;parent=[1:13946:15896];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};; 2025-04-04T18:16:48.449319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13946:15896];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=193;path_id=1; 2025-04-04T18:16:48.458662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13946:15896];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T18:16:48.983203Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:16:48.983277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=9; 2025-04-04T18:16:48.986012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=2673; 2025-04-04T18:16:48.989251Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=2270; 2025-04-04T18:16:48.989319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=3253; 2025-04-04T18:16:48.989431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=70; 2025-04-04T18:16:48.989525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=60; 2025-04-04T18:16:48.989652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=93; 2025-04-04T18:16:48.989751Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=68; 2025-04-04T18:16:48.989889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=106; 2025-04-04T18:16:48.989920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=6606; 2025-04-04T18:16:48.996066Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:16:48.996125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=8; 2025-04-04T18:16:49.023292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=27095; 2025-04-04T18:16:49.069209Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=45831; 2025-04-04T18:16:49.069317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=33; 2025-04-04T18:16:49.069375Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=23; 2025-04-04T18:16:49.069414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-04-04T18:16:49.069452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-04-04T18:16:49.069487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-04-04T18:16:49.069549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=31; 2025-04-04T18:16:49.069587Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-04-04T18:16:49.069664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2025-04-04T18:16:49.069705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-04-04T18:16:49.069757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2025-04-04T18:16:49.069836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=47; 2025-04-04T18:16:49.069902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=37; 2025-04-04T18:16:49.069936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=73770; 2025-04-04T18:16:49.070091Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T18:16:49.070597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13946:15896];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T18:16:49.070657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13946:15896];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T18:16:49.070715Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:13946:15896];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T18:16:49.070756Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T18:16:49.070946Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:16:49.070999Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:16:49.071199Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=21; 2025-04-04T18:16:49.071255Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:16:49.071295Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:16:49.071335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:16:49.071370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:16:49.071458Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:16:49.075066Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:16:49.078349Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T18:16:49.079930Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T18:16:49.079960Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T18:16:49.079982Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T18:16:49.080021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:16:49.080074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:16:49.080283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=21; 2025-04-04T18:16:49.080338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:16:49.080386Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:16:49.080427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:16:49.080464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:16:49.080528Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-04-04T18:16:49.080578Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13946:15896];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2025-04-04T18:08:53.155727Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-04-04T18:08:53.248341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-04-04T18:08:53.274867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:137:2169];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-04-04T18:08:53.275197Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-04-04T18:08:53.284592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2025-04-04T18:08:53.284855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-04-04T18:08:53.285125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-04-04T18:08:53.285268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-04-04T18:08:53.285394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-04-04T18:08:53.285504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-04-04T18:08:53.285614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-04-04T18:08:53.285772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-04-04T18:08:53.286010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-04-04T18:08:53.286170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-04-04T18:08:53.286365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-04-04T18:08:53.286503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:137:2169];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-04-04T18:08:53.317214Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-04-04T18:08:53.317482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-04-04T18:08:53.317555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-04-04T18:08:53.317759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:53.317952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-04-04T18:08:53.318038Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-04-04T18:08:53.318084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-04-04T18:08:53.318188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-04-04T18:08:53.318277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-04-04T18:08:53.318330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-04-04T18:08:53.318363Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-04-04T18:08:53.318568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-04-04T18:08:53.318645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-04-04T18:08:53.318695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-04-04T18:08:53.318727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-04-04T18:08:53.318818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-04-04T18:08:53.318893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-04-04T18:08:53.318965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-04-04T18:08:53.319000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-04-04T18:08:53.319078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-04-04T18:08:53.319132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-04-04T18:08:53.319182Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-04-04T18:08:53.319237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-04-04T18:08:53.319285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-04-04T18:08:53.319312Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-04-04T18:08:53.319697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2025-04-04T18:08:53.319797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-04-04T18:08:53.319871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-04-04T18:08:53.319942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-04-04T18:08:53.320137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-04-04T18:08:53.320222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-04-04T18:08:53.320260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-04-04T18:08:53.320481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-04-04T18:08:53.320552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-04-04T18:08:53.320604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-04-04T18:08:53.320778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-04-04T18:08:53.320828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-04-04T18:08:53.320859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-04-04T18:08:53.321050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-04-04T18:08:53.321100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-04-04T18:08:53.321129Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-04-04T18:08:53.321249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-04-04T18:08:53.321290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-04-04T18:08:53.321355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:152;step=TTxUpdateSchema.Execute_Finish; ... UMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=320809; 2025-04-04T18:17:09.323151Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13986:15929];tablet_id=9437184;parent=[1:13947:15897];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};; 2025-04-04T18:17:09.325293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13947:15897];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1453;size=193;path_id=1; 2025-04-04T18:17:09.336086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13947:15897];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1504;stage=finished; 2025-04-04T18:17:09.915347Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:17:09.915437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-04-04T18:17:09.918644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3128; 2025-04-04T18:17:09.922325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=2559; 2025-04-04T18:17:09.922405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=3693; 2025-04-04T18:17:09.922537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=81; 2025-04-04T18:17:09.922639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=61; 2025-04-04T18:17:09.922782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=106; 2025-04-04T18:17:09.922890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=72; 2025-04-04T18:17:09.923065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=137; 2025-04-04T18:17:09.923100Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=7616; 2025-04-04T18:17:09.929322Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-04-04T18:17:09.929392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-04-04T18:17:09.956475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=26998; 2025-04-04T18:17:10.001473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=44894; 2025-04-04T18:17:10.001599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=35; 2025-04-04T18:17:10.001671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2025-04-04T18:17:10.001719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-04-04T18:17:10.001767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-04-04T18:17:10.001814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-04-04T18:17:10.001889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-04-04T18:17:10.001934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-04-04T18:17:10.002022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=53; 2025-04-04T18:17:10.002068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-04-04T18:17:10.002129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-04-04T18:17:10.002228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=60; 2025-04-04T18:17:10.002334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=51; 2025-04-04T18:17:10.002380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=72938; 2025-04-04T18:17:10.002561Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-04-04T18:17:10.003101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13947:15897];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-04-04T18:17:10.003168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13947:15897];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-04-04T18:17:10.003247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-04-04T18:17:10.003301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];process=SwitchToWork;fline=column_engine_logs.cpp:495;event=OnTieringModified;new_count_tierings=0; 2025-04-04T18:17:10.003572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:17:10.003636Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:17:10.003866Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=21; 2025-04-04T18:17:10.003933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:17:10.003976Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:17:10.004026Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:17:10.004066Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:17:10.004168Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes; 2025-04-04T18:17:10.007518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:17:10.012186Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-04-04T18:17:10.014472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-04-04T18:17:10.014512Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-04-04T18:17:10.014539Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-04-04T18:17:10.014586Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:513;event=EnqueueBackgroundActivities;periodic=0; 2025-04-04T18:17:10.014656Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:783;event=start_indexation_tasks;insert_overload_size=0; 2025-04-04T18:17:10.014930Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:243;event=StartCleanup;portions_count=21; 2025-04-04T18:17:10.015003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:285;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-04-04T18:17:10.015053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:318;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-04-04T18:17:10.015100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1063;background=cleanup;skip_reason=no_changes; 2025-04-04T18:17:10.015143Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1095;background=cleanup;skip_reason=no_changes; 2025-04-04T18:17:10.015221Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s; 2025-04-04T18:17:10.015273Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13947:15897];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1004;background=ttl;skip_reason=no_changes;
|99.8%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}

------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
2025-04-04 18:18:56,900 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-04-04 18:18:57,065 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
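The two WARNING lines above mean the test did not fail on its own: the run_test wrapper hit its 600-second wall-clock budget and terminated the whole py3test chunk from outside. A minimal sketch of that wait-with-deadline pattern, using only the Python standard library; run_with_deadline and RUN_TIMEOUT are hypothetical names and this is an illustration of the reported behavior, not the devtools wrapper code:

    import subprocess

    RUN_TIMEOUT = 600  # seconds; matches the "600 secs timeout" reported above

    def run_with_deadline(cmd):
        """Run cmd and kill it if it overruns the deadline, as the wrapper log describes."""
        proc = subprocess.Popen(cmd)
        try:
            return proc.wait(timeout=RUN_TIMEOUT)  # raises TimeoutExpired on overrun
        except subprocess.TimeoutExpired:
            proc.kill()  # the real wrapper also records diagnostics before terminating
            proc.wait()
            raise

Consistent with this pattern, the wrapper dumps the surviving process tree (shown next) before terminating its children.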
Process tree before termination:
  pid      rss   ref   pdirt
  1063677  1.1G  1.1G  1.0G   ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/o32v/000170/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module
  1084846  1.8G  1.8G  1.3G   └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins
Test command err:
/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_scheme_load.py)
/home/runner/.ya/build/build_root/o32v/000170/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_scheme_load.py)
    class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_alter_compression.py)
/home/runner/.ya/build/build_root/o32v/000170/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_alter_compression.py)
    class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_insert.py)
/home/runner/.ya/build/build_root/o32v/000170/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_insert.py)
    class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_alter_tiering.py)
/home/runner/.ya/build/build_root/o32v/000170/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py:4: PytestCollectionWarning: cannot collect test class 'TestThread' because it has a __init__ constructor (from: test_alter_tiering.py)
    class TestThread(threading.Thread):
/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/test_alter_tiering.py:37: PytestCollectionWarning: cannot collect test class 'TestLoop' because it has a __init__ constructor (from: test_alter_tiering.py)
/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:16: PytestCollectionWarning: cannot collect test class 'TestContext' because it has a __init__ constructor (from: test_simple.py)
File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
File "/home/runner/.ya/build/build_root/o32v/000170/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test
    ctx.executable(self, ctx)
File "ydb/tests/olap/scenario/test_insert.py", line 90, in scenario_read_data_during_bulk_upsert
    thread.join()
File "/home/runner/.ya/build/build_root/o32v/000170/environment/arcadia/ydb/tests/olap/scenario/helpers/thread_helper.py", line 13, in join
    super().join(timeout)
File "contrib/tools/python3/Lib/threading.py", line 1149, in join
    self._wait_for_tstate_lock()
File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock
    if lock.acquire(block, timeout):
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...o32v/000170/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter', 'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...o32v/000170/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/o32v/000170/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8067063302/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter', 'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test
|99.9%| CLEANING BUILD ROOT

ydb/tests/functional/hive [size:medium] nchunks:20
------ [test_drain.py 0/20] chunk ran 1 test (total:34.05s - test:33.95s)
[fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-release-asan] (29.35s)
ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop
    wait_tablets_are_active(
ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active
    predicate(raise_error=True)
ydb/tests/library/common/delayed.py:141: in predicate
    raise AssertionError(
E AssertionError:
E ##############################
E 0 seconds passed, 35 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224038605: 6) (72075186224038624: 4) (72075186224038627: 4) (72075186224038667: 6) (72075186224038696: 4) (72075186224038776: 4) (72075186224038792: 4) (72075186224038797: 6) (72075186224038800: 4) (72075186224038827: 6). Additional info is empty
E ##############################
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/tests/functional/hive

ydb/tests/functional/serverless [size:medium] nchunks:10
------ [test_serverless.py 0/10] chunk ran 1 test (total:122.83s - test:122.47s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [default-linux-x86_64-release-asan] (117.11s)
ydb/tests/functional/serverless/test_serverless.py:442: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:339: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:359: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.Unavailable: message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)

During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:441: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*OUT_OF_SPACE.*'):
E AssertionError: Regex pattern did not match.
E Regex: '.*OUT_OF_SPACE.*'
E Input: 'message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ [test_serverless.py 1/10] chunk ran 1 test (total:122.75s - test:122.37s)
[fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [default-linux-x86_64-release-asan] (116.98s)
ydb/tests/functional/serverless/test_serverless.py:442: in test_database_with_disk_quotas
    IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False))
contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync
    return future_cell[0].result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:339: in wrapped
    res = yield func(*args, **kwargs)
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result
    raise_exc_info(self._exc_info)
<string>:4: in raise_exc_info
    ???
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run
    yielded = self.gen.throw(*exc_info)
ydb/tests/functional/serverless/test_serverless.py:359: in async_write_key
    yield tx.async_execute(
contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run
    value = future.result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result
    return self.__get_result()
contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result
    raise self._exception
contrib/python/ydb/py3/ydb/connection.py:105: in _on_response_callback
    response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args)
contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator
    return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs)
contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id
    issues._process_response(response_pb.operation)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.Unavailable: message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)

During handling of the above exception, another exception occurred:
ydb/tests/functional/serverless/test_serverless.py:441: in test_database_with_disk_quotas
    with pytest.raises(ydb.Unavailable, match=r'.*OUT_OF_SPACE.*'):
E AssertionError: Regex pattern did not match.
E Regex: '.*OUT_OF_SPACE.*'
E Input: 'message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "[DISK_SPACE_EXHAUSTED] Cannot perform writes: database is out of disk space" severity: 1 } (server_code: 400050)'
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff
------ FAIL: 2 - FAIL ydb/tests/functional/serverless

ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 7 tests (total:616.34s - test:600.06s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 359.06s
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 251.97s
5 tests were not launched inside chunk.
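The traceback for the timed-out test shows where the budget went: scenario_read_data_during_bulk_upsert blocks in an unbounded thread.join() (thread_helper.py:13 forwards straight to threading.Thread.join), so one stuck worker holds the chunk until the external 600 s limit fires and the five remaining tests are never launched. A hedged sketch of a deadline-aware variant; the class name mirrors the TestThread helper from the traceback, but the join_with_deadline method is an assumption added for illustration, not the repository code:

    import threading

    class TestThread(threading.Thread):
        """Worker thread as used by the scenario tests (per the traceback above)."""

        def join_with_deadline(self, timeout: float) -> None:
            # Bounded join: surface a hang as an explicit test failure while the
            # chunk still has budget for the other tests, instead of blocking
            # forever and letting the wrapper kill the whole chunk.
            super().join(timeout)
            if self.is_alive():
                raise TimeoutError(f"worker {self.name!r} still running after {timeout} s")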
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (251.97s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - GOOD, 5 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] ------ sole chunk ran 5 tests (total:249.28s - test:237.81s) [fail] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (37.21s) ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py:100: in test self.ydb_client.query( ydb/tests/olap/ttl_tiering/base.py:68: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:350: in lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:172: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 15 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 15 column: 17 } severity: 2 issues { position { row: 15 column: 40 } message: "At function: ToList" end_position { row: 15 column: 40 } severity: 2 issues { position { row: 5 column: 66 } message: "At function: OrderedMap" end_position { row: 5 column: 66 } severity: 2 issues { position { row: 8 column: 28 } message: "At function: AsStructUnordered" end_position { row: 8 column: 28 } severity: 2 issues { position { row: 7 column: 27 } message: "At function: Unwrap" end_position { row: 7 column: 27 } severity: 2 issues { position { row: 7 column: 34 } message: "At function: SafeCast" end_position { row: 7 column: 34 } severity: 2 issues { position { row: 6 column: 41 } message: "At function: +" end_position { row: 6 column: 41 } severity: 2 issues { position { row: 6 column: 36 } message: "At function: +" end_position { row: 6 column: 36 } severity: 2 issues { 
position { row: 6 column: 36 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 36 } issue_code: 1107 severity: 2 } } } issues { position { row: 6 column: 41 } message: "At function: +" end_position { row: 6 column: 41 } severity: 2 issues { position { row: 6 column: 41 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 41 } issue_code: 1107 severity: 2 } } } } } } } } ,message: "Data manipulation queries do not support column shard tables." issue_code: 2029 severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (15.44s)
ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:124: in test_data_unchanged_after_ttl_change
    self.ydb_client.query("""
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 17 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 17 column: 17 } severity: 2 issues { position { row: 17 column: 40 } message: "At function: ToList" end_position { row: 17 column: 40 } severity: 2 issues { position { row: 7 column: 66 } message: "At function: OrderedMap" end_position { row: 7 column: 66 } severity: 2 issues { position { row: 10 column: 28 } message: "At function: AsStructUnordered" end_position { row: 10 column: 28 } severity: 2 issues { position { row: 9 column: 27 } message: "At function: Unwrap" end_position { row: 9 column: 27 } severity: 2 issues { position { row: 9 column: 34 } message: "At function: SafeCast" end_position { row: 9 column: 34 } severity: 2 issues { position { row: 8 column: 65 } message: "At function: +" end_position { row: 8 column: 65 } severity: 2 issues { position { row: 8 column: 27 } message: "At function: SafeCast" end_position { row: 8 column: 27 } severity: 2 issues { position { row: 8 column: 49 } message: "At function: /" end_position {
row: 8 column: 49 } severity: 2 issues { position { row: 6 column: 40 } message: "At function: /" end_position { row: 6 column: 40 } severity: 2 issues { position { row: 6 column: 35 } message: "At function: -" end_position { row: 6 column: 35 } severity: 2 issues { position { row: 6 column: 35 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 35 } issue_code: 1107 severity: 2 } } issues { position { row: 6 column: 42 } message: "At function: SafeCast" end_position { row: 6 column: 42 } severity: 2 issues { position { row: 6 column: 51 } message: "At function: -" end_position { row: 6 column: 51 } severity: 2 issues { position { row: 6 column: 51 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 51 } issue_code: 1107 severity: 2 } } } } } } } } } } } } } ,message: "Data manipulation queries do not support column shard tables." issue_code: 2029 severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (53.47s)
ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:277: in test_ttl_delete
    self.ydb_client.query("""
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 17 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 17 column: 17 } severity: 2 issues { position { row: 17 column: 40 } message: "At function: ToList" end_position { row: 17 column: 40 } severity: 2 issues { position { row: 7 column: 66 } message: "At function: OrderedMap" end_position { row: 7 column: 66 } severity: 2 issues { position { row: 10 column: 28 } message: "At function: AsStructUnordered" end_position { row: 10 column: 28 } severity: 2 issues { position { row: 9 column: 27 } message: "At function: Unwrap" end_position { row: 9 column: 27 } severity: 2 issues { position { row: 9 column: 34 } message: "At function: SafeCast"
end_position { row: 9 column: 34 } severity: 2 issues { position { row: 8 column: 65 } message: "At function: +" end_position { row: 8 column: 65 } severity: 2 issues { position { row: 8 column: 27 } message: "At function: SafeCast" end_position { row: 8 column: 27 } severity: 2 issues { position { row: 8 column: 49 } message: "At function: /" end_position { row: 8 column: 49 } severity: 2 issues { position { row: 6 column: 40 } message: "At function: /" end_position { row: 6 column: 40 } severity: 2 issues { position { row: 6 column: 35 } message: "At function: -" end_position { row: 6 column: 35 } severity: 2 issues { position { row: 6 column: 35 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 35 } issue_code: 1107 severity: 2 } } issues { position { row: 6 column: 42 } message: "At function: SafeCast" end_position { row: 6 column: 42 } severity: 2 issues { position { row: 6 column: 51 } message: "At function: -" end_position { row: 6 column: 51 } severity: 2 issues { position { row: 6 column: 51 } message: "Integral type implicit bitcast: Uint64 and Int32" end_position { row: 6 column: 51 } issue_code: 1107 severity: 2 } } } } } } } } } } } } } ,message: "Execution" issue_code: 1060 severity: 1 issues { message: "(NYql::NDq::TDqOutputChannelChunkSizeLimitExceeded) ydb/library/yql/dq/runtime/dq_output_channel.cpp:317: Row data size is too big: 56000009 bytes, exceeds limit of 50331648 bytes" issue_code: 2029 severity: 1 } (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[fail] data_correctness.py::TestDataCorrectness::test [default-linux-x86_64-release-asan] (87.14s)
ydb/tests/olap/ttl_tiering/data_correctness.py:191: in test
    self.ydb_client.query(f"delete from `{table_path}`")
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.PreconditionFailed: message: "Write transactions between column and row tables are disabled at current time."
severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_correctness.py.TestDataCorrectness.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[fail] ttl_unavailable_s3.py::TestUnavailableS3::test [default-linux-x86_64-release-asan] (36.66s)
ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py:76: in test
    upsert_chunk(i)
ydb/tests/olap/ttl_tiering/ttl_unavailable_s3.py:57: in upsert_chunk
    return self.ydb_client.query(f"""
ydb/tests/olap/ttl_tiering/base.py:68: in query
    return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync
    for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee
    return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__
    return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next
    res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda>
    lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator
    return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response
    issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response
    raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.PreconditionFailed: message: "Type annotation" issue_code: 1030 severity: 2 issues { position { row: 13 column: 17 } message: "At function: RemoveSystemMembers, At function: PersistableRepr, At function: OrderedSqlProject" end_position { row: 13 column: 17 } severity: 2 issues { position { row: 13 column: 40 } message: "At function: ToList" end_position { row: 13 column: 40 } severity: 2 issues { position { row: 7 column: 65 } message: "At function: OrderedMap" end_position { row: 7 column: 65 } severity: 2 issues { position { row: 7 column: 69 } message: "At function: AsStructUnordered" end_position { row: 7 column: 69 } severity: 2 issues { position { row: 8 column: 25 } message: "At function: Unwrap" end_position { row: 8 column: 25 } severity: 2 issues { position { row: 8 column: 32 } message: "At function: SafeCast" end_position { row: 8 column: 32 } severity: 2 issues { position { row: 8 column: 62 } message: "At function: +" end_position { row: 8 column: 62 } severity: 2 issues { position { row: 8 column: 45 } message: "At function: +" end_position { row: 8 column: 45 } severity: 2 issues { position { row: 8 column: 56 } message: "At function: *" end_position { row: 8 column: 56 } severity: 2 issues { position { row: 8 column: 56 } message: "Integral type implicit bitcast: Int32 and Uint64" end_position { row: 8 column: 56 } issue_code: 1107 severity: 2 } } } } } } } } } } ,message: "Data manipulation queries do not support column shard tables."
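------ [editor's note] The five ydb/tests/olap/ttl_tiering failures in this section share one root cause: the suite drives plain DML at column-shard tables, and the server answers with terminal issues ("Data manipulation queries do not support column shard tables.", code 2029, or "Write transactions between column and row tables are disabled at current time."). The call path is identical in every traceback: base.py hands the statement to QuerySessionPool.execute_with_retries, which retries only transient statuses, so ydb.issues.PreconditionFailed surfaces on the first attempt. A sketch of that call shape, assuming the ydb Python SDK v3 query-service API visible in the tracebacks; endpoint, database, and table name are placeholder values:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # placeholders
    driver.wait(timeout=5)

    pool = ydb.QuerySessionPool(driver)
    try:
        # pool.py:202 in the tracebacks: retry_operation_sync re-runs the callee
        # only on retryable statuses; PreconditionFailed is terminal and is
        # re-raised immediately, which is exactly what the failures above show.
        pool.execute_with_retries("DELETE FROM `some_column_table`")
    except ydb.issues.PreconditionFailed as exc:
        print("terminal, not retried:", exc)
------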
issue_code: 2029 severity: 1 (server_code: 400120)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_unavailable_s3.py.TestUnavailableS3.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ FAIL: 5 - FAIL ydb/tests/olap/ttl_tiering

ydb/tests/stress/log/tests [size:medium]
------ sole chunk ran 1 test (total:17.17s - test:17.13s)
[fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (11.80s)
ydb/tests/stress/log/tests/test_workload.py:59: in test
    yatest.common.execute(
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/apps/ydb/ydb --verbose --endpoint grpc://localhost:27651 --database=/Root workload log init --path column --int-cols 2 --str-cols 5 --key-cols 4 --len 200 --store column --min-partitions 100 --partition-size 10 --auto-partition 0' has failed with code 1.
E Errors:
E b"(!) New version of YDB CLI is available. Run 'ydb update' command for update. You can also disable further version checks with 'ydb version --disable-checks' command\nStatus: PRECONDITION_FAILED\nIssues: \n<main>: Error: Execution, code: 1060\n<main>:35:46: Error: Executing CREATE TABLE\n<main>: Error: OLAP schema operations are not supported, code: 2029\n"
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/tests/stress/log/tests

ydb/core/keyvalue/ut_trace [size:medium] nchunks:5
------ [0/5] chunk ran 1 test (total:5.67s - test:5.64s)
[fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.06s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out
------ [1/5] chunk ran 1 test (total:5.79s - test:5.75s)
[fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.08s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ??
at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out
------ [2/5] chunk ran 1 test (total:5.88s - test:5.85s)
[fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.12s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out
------ [3/5] chunk ran 1 test (total:6.48s - test:6.45s)
[fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.13s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ??
at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out
------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace

ydb/core/kqp/ut/cost [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:22.01s - test:21.96s)
[crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: 100)
==1060653==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000017d741cf bp 0x7ffcae288480 sp 0x7ffcae2882e0 T0)
==1060653==The signal is caused by a READ memory access.
==1060653==Hint: address points to the zero page.
warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70 warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0 warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: address range table at offset 0xa50 has a premature terminator entry at offset 0xa60 warning: address range table at offset 0xa80 has a premature terminator entry at offset 0xa90 warning: address range table at offset 0xab0 has a premature terminator entry at offset 0xac0 warning: address range table at offset 0xae0 has a premature terminator entry at offset 0xaf0 warning: address range table at offset 0xb10 has a premature terminator entry at offset 0xb20 warning: address range table at offset 0xb40 has a premature terminator entry at offset 0xb50 warning: address range table at offset 0xb70 has a premature terminator entry at offset 0xb80 warning: address range table at offset 0xba0 has a premature terminator entry at offset 0xbb0 warning: address range table at offset 0xbd0 has a premature terminator entry at offset 0xbe0 warning: address range table at offset 0xc00 has a premature terminator entry at offset 0xc10 warning: address range table at offset 0xc30 has a premature terminator entry at offset 0xc40 warning: address range table at offset 0xc60 has a premature terminator entry at offset 0xc70 warning: address range table at offset 0xc90 has a premature terminator entry at offset 0xca0 warning: address range table at offset 0xcc0 has a premature terminator entry at offset 0xcd0 warning: address range table at offset 0xcf0 has a premature terminator entry at offset 0xd00 warning: address range table at offset 0xd20 has a premature terminator entry at offset 0xd30 warning: address range table at offset 0xd50 has a premature terminator entry at offset 0xd60 warning: address range table at off ..[snippet truncated]..
0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428:12 #11 0x186e0e55 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987:10 #12 0x186e0e55 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x186b1b78 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x17d96975 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x186b3445 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x186db78c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7fe48a332d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7fe48a332e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x15ad0028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x15ad0028) (BuildId: b39ae28ee39da90c850ce26824d563677aab4934)
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==1060653==ABORTING
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out
------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost

ydb/core/kqp/ut/query [size:medium] nchunks:50
------ [1/50] chunk ran 1 test (total:49.46s - test:49.43s)
[fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (45.78s)
assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x18B8D15F 1. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x18273E98 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x18285F67 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x18285F67 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x18285F67 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x18285F67 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x18285F67 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x18BC2E65 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x18BC2E65 9. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x18BC2E65 10. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x18B93B88 11. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x18285175 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x18B95455 13.
/tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x18BBD79C 14. ??:0: ?? @ 0x7F7D2662CD8F 15. ??:0: ?? @ 0x7F7D2662CE3F 16. ??:0: ?? @ 0x15C21028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out
------ FAIL: 1 - GOOD, 1 - FAIL ydb/core/kqp/ut/query

ydb/core/kqp/ut/tx [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:25.32s - test:25.26s)
[fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (20.06s)
assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017
, with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x17F890FD 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x17F699FA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x17F70037 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x17F70037 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x17F70037 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17F70037 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17F70037 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x17F6F245 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F8AD888BD8F 17. ??:0: ?? @ 0x7F8AD888BE3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out
------ [1/50] chunk ran 1 test (total:24.26s - test:24.20s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (19.67s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001
, with diff: (ABORT|SUCC)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x17FBC427 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x17FA567A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F04B8B83D8F 17. ??:0: ?? @ 0x7F04B8B83E3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out
------ [10/50] chunk ran 1 test (total:25.33s - test:25.29s)
[fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (19.51s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x17FA474A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F66EE551D8F 17. ??:0: ?? @ 0x7F66EE551E3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out
------ [2/50] chunk ran 1 test (total:22.94s - test:22.90s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (18.37s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x17FA5222 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F8EB3F5DD8F 17. ??:0: ?? @ 0x7F8EB3F5DE3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out
------ [3/50] chunk ran 1 test (total:24.13s - test:24.08s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (19.58s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x17FB9CA0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x17FA544A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F198FE15D8F 17. ??:0: ?? @ 0x7F198FE15E3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out
------ [4/50] chunk ran 1 test (total:24.75s - test:24.70s)
[fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (20.22s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED)
, with diff: (SUCC|ABORT)E(SS|D)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x17FB5047 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x17FA4FFA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9.
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F5378E0DD8F 17. ??:0: ?? @ 0x7F5378E0DE3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out
------ [5/50] chunk ran 1 test (total:23.03s - test:23.00s)
[fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (18.03s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x17FA4BA2 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F0547694D8F 17. ??:0: ?? @ 0x7F0547694E3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out
------ [6/50] chunk ran 1 test (total:22.90s - test:22.86s)
[fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (18.12s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x17FB28C0 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x17FA4DCA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F4D73428D8F 17. ??:0: ?? @ 0x7F4D73428E3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out
------ [7/50] chunk ran 1 test (total:24.21s - test:24.18s)
[fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (19.47s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x17FA58A2 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7FC1EF73DD8F 17. ??:0: ?? @ 0x7FC1EF73DE3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out
------ [8/50] chunk ran 1 test (total:24.50s - test:24.47s)
[fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (19.66s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x17FC10AE 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x17FA5ACA 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F6BE5EDFD8F 17. ??:0: ?? @ 0x7F6BE5EDFE3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out
------ [9/50] chunk ran 1 test (total:25.14s - test:25.11s)
[fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (20.33s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x188A8EBF 1. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x17FAC851 2. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x17F00B0A 3. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x17FA4522 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x17FAAA97 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x17FAAA97 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x17FAAA97 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:311: operator() @ 0x17FAAA97 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:428: operator() @ 0x188DEBC5 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:987: operator() @ 0x188DEBC5 11. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x188DEBC5 12. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x188AF8E8 13. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x17FA9CA5 14. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x188B11B5 15. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x188D94FC 16. ??:0: ?? @ 0x7F6BB3002D8F 17. ??:0: ?? @ 0x7F6BB3002E3F 18. ??:0: ?? @ 0x15B7B028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out
------ FAIL: 11 - FAIL ydb/core/kqp/ut/tx

ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 1 test (total:145.51s - test:145.44s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (133.89s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT
, with diff: (TIM|SUCC)E(OUT|SS)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int,
char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out ------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup ydb/core/tx/tiering/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:38.07s - test:38.03s) [crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out ------ FAIL: 1 - GOOD, 1 - CRASHED ydb/core/tx/tiering/ut ydb/services/ydb/ut [size:medium] nchunks:60 ------ [0/60] chunk ran 1 test (total:21.56s - test:21.51s) [fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (4.32s) assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:395:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - GOOD, 1 - FAIL ydb/services/ydb/ut
Total 29 suites:
  17 - GOOD
  11 - FAIL
   1 - TIMEOUT
Total 76 tests:
  41 - GOOD
  27 - FAIL
   5 - NOT_LAUNCHED
   1 - TIMEOUT
   2 - CRASHED
Cache efficiency ratio is 98.46% (38958 of 39567). Local: 379 (0.96%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 38579 (97.50%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.65 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[616747 ms] [TM] [rnd-tl1oevwi0wrgzl2l asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 0 (1743790136475), finished: 616747 (1743790753222)]
Time from start: 673675.6682128906 ms, time elapsed by graph 616747 ms, time diff 56928.668212890625 ms.
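Sanity check on the cache figures above, using only numbers from this log: 379 local hits + 38579 avoided = 38958 results served from cache out of 39567 total, and 38958 / 39567 ≈ 0.9846, matching the reported 98.46% efficiency ratio; likewise 38579 / 39567 ≈ 97.50% (avoided) and 379 / 39567 ≈ 0.96% (local).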
The longest 10 tasks:
[616747 ms] [TM] [rnd-tl1oevwi0wrgzl2l asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743790136475, finished: 1743790753222]
[511403 ms] [TM] [rnd-9498859518561415637 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743790100112, finished: 1743790611515]
[501068 ms] [TM] [rnd-9927798878670400197 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743790131438, finished: 1743790632506]
[416526 ms] [TM] [rnd-6133526936425601463 asan default-linux-x86_64 release]: ydb/core/tx/schemeshard/ut_index/unittest [started: 1743790135438, finished: 1743790551964]
[345954 ms] [TM] [rnd-7348018051978424972 asan default-linux-x86_64 release]: ydb/core/tx/schemeshard/ut_index/unittest [started: 1743790135169, finished: 1743790481123]
[329985 ms] [TM] [rnd-2747340465959457725 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743790132682, finished: 1743790462667]
[329121 ms] [TM] [rnd-12862620635877753103 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743790130787, finished: 1743790459908]
[249674 ms] [TM] [rnd-ni85fl3uecbujylu asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743790136570, finished: 1743790386244]
[153685 ms] [TM] [rnd-7319937147357217667 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1743790142058, finished: 1743790295743]
[146027 ms] [TM] [rnd-3061220602666889019 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_incremental_backup/unittest [started: 1743790118801, finished: 1743790264828]
Total time by type:
[6402582 ms] [TM] [count: 587, ave time 10907.29 msec]
[  79320 ms] [prepare:get from local cache] [count: 379, ave time 209.29 msec]
[  26580 ms] [prepare:AC] [count: 2, ave time 13290.00 msec]
[  22655 ms] [prepare:put to dist cache] [count: 375, ave time 60.41 msec]
[  12378 ms] [TA] [count: 22, ave time 562.64 msec]
[   7380 ms] [prepare:bazel-store] [count: 1, ave time 7380.00 msec]
[   6718 ms] [prepare:tools] [count: 17, ave time 395.18 msec]
[    887 ms] [prepare:clean] [count: 3, ave time 295.67 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 6414960 ms (100.00%)
Total run tasks time - 6414960 ms
Configure time - 30.4 s
Statistics overhead 1490 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json
Ok
+ echo 0
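For context on the recurring "(PRECONDITION_FAILED != SUCCESS)" failures in this run (KqpSnapshotIsolation::* and YdbLogStore::AlterLogTable): below is a minimal sketch of the unittest assertion shape that produces this output and its "with diff:" line. It assumes only library/cpp/testing/unittest; RunSnapshotRWQueryStatus is a hypothetical stand-in (the real tests compare NYdb::EStatus values returned by query execution, as visible in the assertion text above), modeled here as a TString so the sketch is self-contained.

#include <library/cpp/testing/unittest/registar.h>
#include <util/generic/string.h>

namespace {
// Hypothetical stand-in for running a DML query in a SnapshotRW transaction
// against a row-oriented (OLTP) table. Per the errors logged above, the
// server rejects this with PRECONDITION_FAILED and the issue
// "SnapshotRW can only be used with olap tables".
TString RunSnapshotRWQueryStatus() {
    return "PRECONDITION_FAILED";
}
} // namespace

Y_UNIT_TEST_SUITE(SnapshotIsolationSketch) {
    Y_UNIT_TEST(SnapshotRWOnOltpTable) {
        const TString status = RunSnapshotRWQueryStatus();
        // On mismatch this raises through NUnitTest::RaiseError
        // (registar.cpp:47, frame 0 in the traces above), printing both
        // values plus a character-level diff summary of the two strings,
        // e.g. (PRE|SUC)C(ONDITION_FAIL|)E(D|SS).
        UNIT_ASSERT_VALUES_EQUAL_C(status, "SUCCESS", "status of SnapshotRW query");
    }
}

The same assertion pattern with a TIMEOUT status, rather than a crash, is what failed IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental: ExecSQL compared the response status against an expected SUCCESS after the 600000 ms request timeout was exceeded.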